From 83d8bbb81c72d882c7ddafd9820d4fffcd794c28 Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Thu, 27 Feb 2020 12:17:08 +0000 Subject: [PATCH 001/363] Initial commit --- .gitignore | 129 +++++++++++++++++++++++++++++++++++++++++++++++++++++ LICENSE | 25 +++++++++++ 2 files changed, 154 insertions(+) create mode 100644 .gitignore create mode 100644 LICENSE diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..b6e4761 --- /dev/null +++ b/.gitignore @@ -0,0 +1,129 @@ +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +pip-wheel-metadata/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +.python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# PEP 582; used by e.g. 
github.com/David-OConnor/pyflow +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..9789a3b --- /dev/null +++ b/LICENSE @@ -0,0 +1,25 @@ +BSD 2-Clause License + +Copyright (c) 2020, European Spallation Source - Data Management and Software Centre +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
From b63d6ce085b72a7e83101f3b12b112897125ff2c Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Thu, 27 Feb 2020 13:24:58 +0100 Subject: [PATCH 002/363] Initial commit --- MANIFEST.in | 0 README.md | 10 + requirements.txt | 3 + setup.py | 20 ++ streaming_data_types/__init__.py | 0 streaming_data_types/fbschemas/__init__.py | 0 streaming_data_types/fbschemas/hs00/Array.py | 11 + .../fbschemas/hs00/ArrayDouble.py | 63 +++++ .../fbschemas/hs00/ArrayFloat.py | 63 +++++ .../fbschemas/hs00/ArrayUInt.py | 63 +++++ .../fbschemas/hs00/ArrayULong.py | 63 +++++ .../fbschemas/hs00/DimensionMetaData.py | 95 +++++++ .../fbschemas/hs00/EventHistogram.py | 237 ++++++++++++++++++ .../fbschemas/hs00/__init__.py | 0 streaming_data_types/hs00.py | 202 +++++++++++++++ streaming_data_types/tests/__init__.py | 0 streaming_data_types/tests/context.py | 5 + streaming_data_types/tests/test_hs00.py | 77 ++++++ 18 files changed, 912 insertions(+) create mode 100644 MANIFEST.in create mode 100644 README.md create mode 100644 requirements.txt create mode 100644 setup.py create mode 100644 streaming_data_types/__init__.py create mode 100644 streaming_data_types/fbschemas/__init__.py create mode 100644 streaming_data_types/fbschemas/hs00/Array.py create mode 100644 streaming_data_types/fbschemas/hs00/ArrayDouble.py create mode 100644 streaming_data_types/fbschemas/hs00/ArrayFloat.py create mode 100644 streaming_data_types/fbschemas/hs00/ArrayUInt.py create mode 100644 streaming_data_types/fbschemas/hs00/ArrayULong.py create mode 100644 streaming_data_types/fbschemas/hs00/DimensionMetaData.py create mode 100644 streaming_data_types/fbschemas/hs00/EventHistogram.py create mode 100644 streaming_data_types/fbschemas/hs00/__init__.py create mode 100644 streaming_data_types/hs00.py create mode 100644 streaming_data_types/tests/__init__.py create mode 100644 streaming_data_types/tests/context.py create mode 100644 streaming_data_types/tests/test_hs00.py diff --git a/MANIFEST.in b/MANIFEST.in new file 
mode 100644 index 0000000..e69de29 diff --git a/README.md b/README.md new file mode 100644 index 0000000..9353160 --- /dev/null +++ b/README.md @@ -0,0 +1,10 @@ +## FlatBuffer Schemas + +|name|description|verifiable| +|----|-----------|----------| +|hs00|Histogram schema|Y| + +## Building package +``` +python setup.py sdist bdist_wheel +``` diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 0000000..8ae0b6d --- /dev/null +++ b/requirements.txt @@ -0,0 +1,3 @@ +flatbuffers +numpy +pytest diff --git a/setup.py b/setup.py new file mode 100644 index 0000000..2820daa --- /dev/null +++ b/setup.py @@ -0,0 +1,20 @@ +from setuptools import setup, find_packages + + +with open('README.md') as f: + readme = f.read() + +with open('LICENSE') as f: + license = f.read() + +setup( + name='streaming_data_types', + version='0.1.0', + description='Python utilities foe handling ESS streamed data', + long_description=readme, + author='ScreamingUdder', + author_email='NoAddress@Nowhere.com', + url='https://github.com/ess-dmsc/python-streaming-data-types', + license=license, + packages=find_packages(exclude='tests') +) diff --git a/streaming_data_types/__init__.py b/streaming_data_types/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/streaming_data_types/fbschemas/__init__.py b/streaming_data_types/fbschemas/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/streaming_data_types/fbschemas/hs00/Array.py b/streaming_data_types/fbschemas/hs00/Array.py new file mode 100644 index 0000000..ed3c3e9 --- /dev/null +++ b/streaming_data_types/fbschemas/hs00/Array.py @@ -0,0 +1,11 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + + +class Array(object): + NONE = 0 + ArrayUInt = 1 + ArrayULong = 2 + ArrayDouble = 3 + ArrayFloat = 4 diff --git a/streaming_data_types/fbschemas/hs00/ArrayDouble.py b/streaming_data_types/fbschemas/hs00/ArrayDouble.py new file mode 100644 index 0000000..7b57488 --- 
/dev/null +++ b/streaming_data_types/fbschemas/hs00/ArrayDouble.py @@ -0,0 +1,63 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers + + +class ArrayDouble(object): + __slots__ = ["_tab"] + + @classmethod + def GetRootAsArrayDouble(cls, buf, offset): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = ArrayDouble() + x.Init(buf, n + offset) + return x + + # ArrayDouble + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # ArrayDouble + def Value(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get( + flatbuffers.number_types.Float64Flags, + a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8), + ) + return 0 + + # ArrayDouble + def ValueAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Float64Flags, o) + return 0 + + # ArrayDouble + def ValueLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + +def ArrayDoubleStart(builder): + builder.StartObject(1) + + +def ArrayDoubleAddValue(builder, value): + builder.PrependUOffsetTRelativeSlot( + 0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0 + ) + + +def ArrayDoubleStartValueVector(builder, numElems): + return builder.StartVector(8, numElems, 8) + + +def ArrayDoubleEnd(builder): + return builder.EndObject() diff --git a/streaming_data_types/fbschemas/hs00/ArrayFloat.py b/streaming_data_types/fbschemas/hs00/ArrayFloat.py new file mode 100644 index 0000000..9b1a84c --- /dev/null +++ b/streaming_data_types/fbschemas/hs00/ArrayFloat.py @@ -0,0 +1,63 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers + + +class ArrayFloat(object): + __slots__ = 
["_tab"] + + @classmethod + def GetRootAsArrayFloat(cls, buf, offset): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = ArrayFloat() + x.Init(buf, n + offset) + return x + + # ArrayFloat + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # ArrayFloat + def Value(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get( + flatbuffers.number_types.Float32Flags, + a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4), + ) + return 0 + + # ArrayFloat + def ValueAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Float32Flags, o) + return 0 + + # ArrayFloat + def ValueLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + +def ArrayFloatStart(builder): + builder.StartObject(1) + + +def ArrayFloatAddValue(builder, value): + builder.PrependUOffsetTRelativeSlot( + 0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0 + ) + + +def ArrayFloatStartValueVector(builder, numElems): + return builder.StartVector(4, numElems, 4) + + +def ArrayFloatEnd(builder): + return builder.EndObject() diff --git a/streaming_data_types/fbschemas/hs00/ArrayUInt.py b/streaming_data_types/fbschemas/hs00/ArrayUInt.py new file mode 100644 index 0000000..615967d --- /dev/null +++ b/streaming_data_types/fbschemas/hs00/ArrayUInt.py @@ -0,0 +1,63 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers + + +class ArrayUInt(object): + __slots__ = ["_tab"] + + @classmethod + def GetRootAsArrayUInt(cls, buf, offset): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = ArrayUInt() + x.Init(buf, n + offset) + return x + + # ArrayUInt + def Init(self, buf, pos): + self._tab = 
flatbuffers.table.Table(buf, pos) + + # ArrayUInt + def Value(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get( + flatbuffers.number_types.Uint32Flags, + a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4), + ) + return 0 + + # ArrayUInt + def ValueAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint32Flags, o) + return 0 + + # ArrayUInt + def ValueLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + +def ArrayUIntStart(builder): + builder.StartObject(1) + + +def ArrayUIntAddValue(builder, value): + builder.PrependUOffsetTRelativeSlot( + 0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0 + ) + + +def ArrayUIntStartValueVector(builder, numElems): + return builder.StartVector(4, numElems, 4) + + +def ArrayUIntEnd(builder): + return builder.EndObject() diff --git a/streaming_data_types/fbschemas/hs00/ArrayULong.py b/streaming_data_types/fbschemas/hs00/ArrayULong.py new file mode 100644 index 0000000..6e29d22 --- /dev/null +++ b/streaming_data_types/fbschemas/hs00/ArrayULong.py @@ -0,0 +1,63 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers + + +class ArrayULong(object): + __slots__ = ["_tab"] + + @classmethod + def GetRootAsArrayULong(cls, buf, offset): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = ArrayULong() + x.Init(buf, n + offset) + return x + + # ArrayULong + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # ArrayULong + def Value(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get( + flatbuffers.number_types.Uint64Flags, + a + 
flatbuffers.number_types.UOffsetTFlags.py_type(j * 8), + ) + return 0 + + # ArrayULong + def ValueAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint64Flags, o) + return 0 + + # ArrayULong + def ValueLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + +def ArrayULongStart(builder): + builder.StartObject(1) + + +def ArrayULongAddValue(builder, value): + builder.PrependUOffsetTRelativeSlot( + 0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0 + ) + + +def ArrayULongStartValueVector(builder, numElems): + return builder.StartVector(8, numElems, 8) + + +def ArrayULongEnd(builder): + return builder.EndObject() diff --git a/streaming_data_types/fbschemas/hs00/DimensionMetaData.py b/streaming_data_types/fbschemas/hs00/DimensionMetaData.py new file mode 100644 index 0000000..a498793 --- /dev/null +++ b/streaming_data_types/fbschemas/hs00/DimensionMetaData.py @@ -0,0 +1,95 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers + + +class DimensionMetaData(object): + __slots__ = ["_tab"] + + @classmethod + def GetRootAsDimensionMetaData(cls, buf, offset): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = DimensionMetaData() + x.Init(buf, n + offset) + return x + + # DimensionMetaData + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # DimensionMetaData + def Length(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.Get( + flatbuffers.number_types.Uint32Flags, o + self._tab.Pos + ) + return 0 + + # DimensionMetaData + def Unit(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + # 
DimensionMetaData + def Label(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + # DimensionMetaData + def BinBoundariesType(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Uint8Flags, o + self._tab.Pos) + return 0 + + # DimensionMetaData + def BinBoundaries(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + if o != 0: + from flatbuffers.table import Table + + obj = Table(bytearray(), 0) + self._tab.Union(obj, o) + return obj + return None + + +def DimensionMetaDataStart(builder): + builder.StartObject(5) + + +def DimensionMetaDataAddLength(builder, length): + builder.PrependUint32Slot(0, length, 0) + + +def DimensionMetaDataAddUnit(builder, unit): + builder.PrependUOffsetTRelativeSlot( + 1, flatbuffers.number_types.UOffsetTFlags.py_type(unit), 0 + ) + + +def DimensionMetaDataAddLabel(builder, label): + builder.PrependUOffsetTRelativeSlot( + 2, flatbuffers.number_types.UOffsetTFlags.py_type(label), 0 + ) + + +def DimensionMetaDataAddBinBoundariesType(builder, binBoundariesType): + builder.PrependUint8Slot(3, binBoundariesType, 0) + + +def DimensionMetaDataAddBinBoundaries(builder, binBoundaries): + builder.PrependUOffsetTRelativeSlot( + 4, flatbuffers.number_types.UOffsetTFlags.py_type(binBoundaries), 0 + ) + + +def DimensionMetaDataEnd(builder): + return builder.EndObject() diff --git a/streaming_data_types/fbschemas/hs00/EventHistogram.py b/streaming_data_types/fbschemas/hs00/EventHistogram.py new file mode 100644 index 0000000..593a31d --- /dev/null +++ b/streaming_data_types/fbschemas/hs00/EventHistogram.py @@ -0,0 +1,237 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers + + +class EventHistogram(object): + __slots__ = ["_tab"] + + @classmethod + def 
GetRootAsEventHistogram(cls, buf, offset): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = EventHistogram() + x.Init(buf, n + offset) + return x + + # EventHistogram + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # EventHistogram + def Source(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + # EventHistogram + def Timestamp(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.Get( + flatbuffers.number_types.Uint64Flags, o + self._tab.Pos + ) + return 0 + + # EventHistogram + def DimMetadata(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + x = self._tab.Vector(o) + x += flatbuffers.number_types.UOffsetTFlags.py_type(j) * 4 + x = self._tab.Indirect(x) + from .DimensionMetaData import DimensionMetaData + + obj = DimensionMetaData() + obj.Init(self._tab.Bytes, x) + return obj + return None + + # EventHistogram + def DimMetadataLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # EventHistogram + def LastMetadataTimestamp(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + if o != 0: + return self._tab.Get( + flatbuffers.number_types.Uint64Flags, o + self._tab.Pos + ) + return 0 + + # EventHistogram + def CurrentShape(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get( + flatbuffers.number_types.Uint32Flags, + a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4), + ) + return 0 + + # EventHistogram + def CurrentShapeAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + if o != 0: + return 
self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint32Flags, o) + return 0 + + # EventHistogram + def CurrentShapeLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # EventHistogram + def Offset(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get( + flatbuffers.number_types.Uint32Flags, + a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4), + ) + return 0 + + # EventHistogram + def OffsetAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint32Flags, o) + return 0 + + # EventHistogram + def OffsetLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # EventHistogram + def DataType(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Uint8Flags, o + self._tab.Pos) + return 0 + + # EventHistogram + def Data(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(18)) + if o != 0: + from flatbuffers.table import Table + + obj = Table(bytearray(), 0) + self._tab.Union(obj, o) + return obj + return None + + # EventHistogram + def ErrorsType(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(20)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Uint8Flags, o + self._tab.Pos) + return 0 + + # EventHistogram + def Errors(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(22)) + if o != 0: + from flatbuffers.table import Table + + obj = Table(bytearray(), 0) + self._tab.Union(obj, o) + return obj + return None + + # EventHistogram + def Info(self): + o = 
flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(24)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + +def EventHistogramStart(builder): + builder.StartObject(11) + + +def EventHistogramAddSource(builder, source): + builder.PrependUOffsetTRelativeSlot( + 0, flatbuffers.number_types.UOffsetTFlags.py_type(source), 0 + ) + + +def EventHistogramAddTimestamp(builder, timestamp): + builder.PrependUint64Slot(1, timestamp, 0) + + +def EventHistogramAddDimMetadata(builder, dimMetadata): + builder.PrependUOffsetTRelativeSlot( + 2, flatbuffers.number_types.UOffsetTFlags.py_type(dimMetadata), 0 + ) + + +def EventHistogramStartDimMetadataVector(builder, numElems): + return builder.StartVector(4, numElems, 4) + + +def EventHistogramAddLastMetadataTimestamp(builder, lastMetadataTimestamp): + builder.PrependUint64Slot(3, lastMetadataTimestamp, 0) + + +def EventHistogramAddCurrentShape(builder, currentShape): + builder.PrependUOffsetTRelativeSlot( + 4, flatbuffers.number_types.UOffsetTFlags.py_type(currentShape), 0 + ) + + +def EventHistogramStartCurrentShapeVector(builder, numElems): + return builder.StartVector(4, numElems, 4) + + +def EventHistogramAddOffset(builder, offset): + builder.PrependUOffsetTRelativeSlot( + 5, flatbuffers.number_types.UOffsetTFlags.py_type(offset), 0 + ) + + +def EventHistogramStartOffsetVector(builder, numElems): + return builder.StartVector(4, numElems, 4) + + +def EventHistogramAddDataType(builder, dataType): + builder.PrependUint8Slot(6, dataType, 0) + + +def EventHistogramAddData(builder, data): + builder.PrependUOffsetTRelativeSlot( + 7, flatbuffers.number_types.UOffsetTFlags.py_type(data), 0 + ) + + +def EventHistogramAddErrorsType(builder, errorsType): + builder.PrependUint8Slot(8, errorsType, 0) + + +def EventHistogramAddErrors(builder, errors): + builder.PrependUOffsetTRelativeSlot( + 9, flatbuffers.number_types.UOffsetTFlags.py_type(errors), 0 + ) + + +def EventHistogramAddInfo(builder, info): + 
builder.PrependUOffsetTRelativeSlot( + 10, flatbuffers.number_types.UOffsetTFlags.py_type(info), 0 + ) + + +def EventHistogramEnd(builder): + return builder.EndObject() diff --git a/streaming_data_types/fbschemas/hs00/__init__.py b/streaming_data_types/fbschemas/hs00/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/streaming_data_types/hs00.py b/streaming_data_types/hs00.py new file mode 100644 index 0000000..026a254 --- /dev/null +++ b/streaming_data_types/hs00.py @@ -0,0 +1,202 @@ +from functools import reduce +import operator +import flatbuffers +import numpy as np +import streaming_data_types.fbschemas.hs00.ArrayDouble as ArrayDouble +import streaming_data_types.fbschemas.hs00.DimensionMetaData as DimensionMetaData +import streaming_data_types.fbschemas.hs00.EventHistogram as EventHistogram +from streaming_data_types.fbschemas.hs00.Array import Array + + +FILE_IDENTIFIER = b"hs00" + + +def get_schema(buf): + """ + Extract the schema code embedded in the buffer + + :param buf: The raw buffer of the FlatBuffers message. + :return: The schema name + """ + return buf[4:8].decode("utf-8") + + +def deserialise_hs00(buf): + """ + Convert flatbuffer into a histogram. 
+ + :param buf: + :return: dict of histogram information + """ + # Check schema is correct + if get_schema(buf) != "hs00": + raise RuntimeError(f"Incorrect schema: expected hs00 but got {get_schema(buf)}") + + event_hist = EventHistogram.EventHistogram.GetRootAsEventHistogram(buf, 0) + + dims = [] + for i in range(event_hist.DimMetadataLength()): + bins_fb = event_hist.DimMetadata(i).BinBoundaries() + + # Get bins + temp = ArrayDouble.ArrayDouble() + temp.Init(bins_fb.Bytes, bins_fb.Pos) + bins = temp.ValueAsNumpy() + + # Get type + if event_hist.DimMetadata(i).BinBoundariesType() == Array.ArrayDouble: + bin_type = np.float64 + else: + raise TypeError("Type of the bin boundaries is incorrect") + + hist_info = { + "length": event_hist.DimMetadata(i).Length(), + "edges": bins.tolist(), + "type": bin_type, + "unit": event_hist.DimMetadata(i).Unit().decode("utf-8"), + "label": event_hist.DimMetadata(i).Label().decode("utf-8"), + } + dims.append(hist_info) + + metadata_timestamp = event_hist.LastMetadataTimestamp() + + # Get the data + if event_hist.DataType() != Array.ArrayDouble: + raise TypeError("Type of the data array is incorrect") + + data_fb = event_hist.Data() + temp = ArrayDouble.ArrayDouble() + temp.Init(data_fb.Bytes, data_fb.Pos) + shape = event_hist.CurrentShapeAsNumpy().tolist() + data = temp.ValueAsNumpy().reshape(shape) + + # Get the errors + errors_fb = event_hist.Errors() + if errors_fb: + temp = ArrayDouble.ArrayDouble() + temp.Init(errors_fb.Bytes, errors_fb.Pos) + errors = temp.ValueAsNumpy().reshape(shape) + else: + errors = [] + + + hist = { + "source": event_hist.Source().decode("utf-8") if event_hist.Source() else "", + "timestamp": event_hist.Timestamp(), + "shape": shape, + "dims": dims, + "data": data, + "errors": errors, + "last_metadata_timestamp": metadata_timestamp, + "info": event_hist.Info().decode("utf-8") if event_hist.Info() else "", + } + return hist + + +def _serialise_metadata(builder, length, edges, unit, label): + unit_encoded 
= builder.CreateString(unit) + label_encoded = builder.CreateString(label) + + ArrayDouble.ArrayDoubleStartValueVector(builder, len(edges)) + # FlatBuffers builds arrays backwards + for x in reversed(edges): + builder.PrependFloat64(x) + bins = builder.EndVector(len(edges)) + # Add the bins + ArrayDouble.ArrayDoubleStart(builder) + ArrayDouble.ArrayDoubleAddValue(builder, bins) + pos_bin = ArrayDouble.ArrayDoubleEnd(builder) + + DimensionMetaData.DimensionMetaDataStart(builder) + DimensionMetaData.DimensionMetaDataAddLength(builder, length) + DimensionMetaData.DimensionMetaDataAddBinBoundaries(builder, pos_bin) + DimensionMetaData.DimensionMetaDataAddBinBoundariesType(builder, Array.ArrayDouble) + DimensionMetaData.DimensionMetaDataAddLabel(builder, label_encoded) + DimensionMetaData.DimensionMetaDataAddUnit(builder, unit_encoded) + return DimensionMetaData.DimensionMetaDataEnd(builder) + + +def serialise_hs00(histogram): + """ + Serialise a histogram as an hs00 FlatBuffers message. + + :param histogram: A dictionary containing the histogram to serialise. 
+ """ + source = None + info = None + + builder = flatbuffers.Builder(1024) + if "source" in histogram: + source = builder.CreateString(histogram["source"]) + if "info" in histogram: + info = builder.CreateString(histogram["info"]) + + # Build shape array + rank = len(histogram["current_shape"]) + EventHistogram.EventHistogramStartCurrentShapeVector(builder, rank) + # FlatBuffers builds arrays backwards + for s in reversed(histogram["current_shape"]): + builder.PrependUint32(s) + shape = builder.EndVector(rank) + + # Build dimensions metadata + metadata = [] + for meta in histogram["dim_metadata"]: + unit = "" if "unit" not in meta else meta["unit"] + label = "" if "label" not in meta else meta["label"] + metadata.append( + _serialise_metadata( + builder, meta["length"], meta["bin_boundaries"], unit, label + ) + ) + + EventHistogram.EventHistogramStartDimMetadataVector(builder, rank) + # FlatBuffers builds arrays backwards + for m in reversed(metadata): + builder.PrependUOffsetTRelative(m) + metadata_vector = builder.EndVector(rank) + + # Build the data + data_len = reduce(operator.mul, histogram["current_shape"], 1) + + ArrayDouble.ArrayDoubleStartValueVector(builder, data_len) + # FlatBuffers builds arrays backwards + for x in reversed(histogram["data"]): + builder.PrependFloat64(x) + data = builder.EndVector(data_len) + ArrayDouble.ArrayDoubleStart(builder) + ArrayDouble.ArrayDoubleAddValue(builder, data) + pos_data = ArrayDouble.ArrayDoubleEnd(builder) + + if "errors" in histogram: + ArrayDouble.ArrayDoubleStartValueVector(builder, data_len) + for x in reversed(histogram["errors"]): + builder.PrependFloat64(x) + errors = builder.EndVector(data_len) + ArrayDouble.ArrayDoubleStart(builder) + ArrayDouble.ArrayDoubleAddValue(builder, errors) + pos_errors = ArrayDouble.ArrayDoubleEnd(builder) + + # Build the actual buffer + EventHistogram.EventHistogramStart(builder) + if info: + EventHistogram.EventHistogramAddInfo(builder, info) + 
EventHistogram.EventHistogramAddData(builder, pos_data) + EventHistogram.EventHistogramAddCurrentShape(builder, shape) + EventHistogram.EventHistogramAddDimMetadata(builder, metadata_vector) + EventHistogram.EventHistogramAddTimestamp(builder, histogram["timestamp"]) + if source: + EventHistogram.EventHistogramAddSource(builder, source) + EventHistogram.EventHistogramAddDataType(builder, Array.ArrayDouble) + if "errors" in histogram: + EventHistogram.EventHistogramAddErrors(builder, pos_errors) + EventHistogram.EventHistogramAddErrorsType(builder, Array.ArrayDouble) + if "last_metadata_timestamp" in histogram: + EventHistogram.EventHistogramAddLastMetadataTimestamp(builder, histogram["last_metadata_timestamp"]) + hist = EventHistogram.EventHistogramEnd(builder) + builder.Finish(hist) + + # Generate the output and replace the file_identifier + buff = builder.Output() + buff[4:8] = FILE_IDENTIFIER + return buff diff --git a/streaming_data_types/tests/__init__.py b/streaming_data_types/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/streaming_data_types/tests/context.py b/streaming_data_types/tests/context.py new file mode 100644 index 0000000..455e331 --- /dev/null +++ b/streaming_data_types/tests/context.py @@ -0,0 +1,5 @@ +import os +import sys +sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))) + +import streaming_data_types diff --git a/streaming_data_types/tests/test_hs00.py b/streaming_data_types/tests/test_hs00.py new file mode 100644 index 0000000..b6359ca --- /dev/null +++ b/streaming_data_types/tests/test_hs00.py @@ -0,0 +1,77 @@ +from .context import streaming_data_types +import numpy as np +import pytest +from streaming_data_types.hs00 import serialise_hs00, deserialise_hs00 + + +class TestSerialisationHs00: + def test_serialises_and_deserialises_hs00_message_correctly_for_full_1d_data(self): + """ + Round-trip to check what we serialise is what we get back. 
+ """ + original_hist = { + "source": "some_source", + "timestamp": 123456, + "current_shape": [5], + "dim_metadata": [ + { + "length": 5, + "unit": "m", + "label": "some_label", + "bin_boundaries": [0, 1, 2, 3, 4, 5], + } + ], + "last_metadata_timestamp": 123456, + "data": [1, 2, 3, 4, 5], + "errors": [5, 4, 3, 2, 1], + "info": "info_string", + } + buf = serialise_hs00(original_hist) + + hist = deserialise_hs00(buf) + assert hist["source"] == original_hist["source"] + assert hist["timestamp"] == original_hist["timestamp"] + assert hist["shape"] == original_hist["current_shape"] + assert hist["dims"][0]["edges"] == original_hist["dim_metadata"][0]["bin_boundaries"] + assert hist["dims"][0]["length"] == original_hist["dim_metadata"][0]["length"] + assert hist["dims"][0]["unit"] == original_hist["dim_metadata"][0]["unit"] + assert hist["dims"][0]["label"] == original_hist["dim_metadata"][0]["label"] + assert np.array_equal(hist["data"], original_hist["data"]) + assert np.array_equal(hist["errors"], original_hist["errors"]) + assert hist["info"] == original_hist["info"] + assert hist["last_metadata_timestamp"] == original_hist["last_metadata_timestamp"] + + def test_serialises_and_deserialises_hs00_message_correctly_for_minimal_1d_data( + self): + """ + Round-trip to check what we serialise is what we get back. 
+ """ + original_hist = { + "timestamp": 123456, + "current_shape": [5], + "dim_metadata": [ + { + "length": 5, + "unit": "m", + "label": "some_label", + "bin_boundaries": [0, 1, 2, 3, 4, 5], + } + ], + "data": [1, 2, 3, 4, 5], + } + buf = serialise_hs00(original_hist) + + hist = deserialise_hs00(buf) + assert hist["source"] == "" + assert hist["timestamp"] == original_hist["timestamp"] + assert hist["shape"] == original_hist["current_shape"] + assert hist["dims"][0]["edges"] == original_hist["dim_metadata"][0][ + "bin_boundaries"] + assert hist["dims"][0]["length"] == original_hist["dim_metadata"][0]["length"] + assert hist["dims"][0]["unit"] == original_hist["dim_metadata"][0]["unit"] + assert hist["dims"][0]["label"] == original_hist["dim_metadata"][0]["label"] + assert np.array_equal(hist["data"], original_hist["data"]) + assert len(hist["errors"]) == 0 + assert hist["info"] == "" + +# TODO: test with non-required fields missing, M-D data From 3d02ed92bf455b2141a682b27e16b6baf514be8f Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Thu, 27 Feb 2020 13:34:20 +0100 Subject: [PATCH 003/363] Fine tuning --- .flake8 | 9 ++++++++ .pre-commit-config.yaml | 10 +++++++++ README.md | 20 ++++++++++++++++- requirements.txt | 2 ++ setup.py | 18 +++++++-------- streaming_data_types/hs00.py | 5 +++-- streaming_data_types/tests/context.py | 5 ----- .../tests => tests}/__init__.py | 0 .../tests => tests}/test_hs00.py | 22 +++++++++++++------ 9 files changed, 67 insertions(+), 24 deletions(-) create mode 100644 .flake8 create mode 100644 .pre-commit-config.yaml delete mode 100644 streaming_data_types/tests/context.py rename {streaming_data_types/tests => tests}/__init__.py (100%) rename {streaming_data_types/tests => tests}/test_hs00.py (86%) diff --git a/.flake8 b/.flake8 new file mode 100644 index 0000000..00da88c --- /dev/null +++ b/.flake8 @@ -0,0 +1,9 @@ +[flake8] +ignore = E501, E203, E402, W503, Q000 +# E501 & E203: Formatting handled by Black +# E402 complains about 
imports not being at the top +# W503 complains about splitting if across lines which conflicts with Black +# Q000 complains about using "" and not '' which conflicts with Black +exclude = + fbschemas + README.md diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 0000000..e97f183 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,10 @@ +repos: +- repo: https://github.com/ambv/black + rev: stable + hooks: + - id: black + language_version: python3.7 +- repo: https://github.com/pre-commit/pre-commit-hooks + rev: v1.2.3 + hooks: + - id: flake8 \ No newline at end of file diff --git a/README.md b/README.md index 9353160..1820034 100644 --- a/README.md +++ b/README.md @@ -4,7 +4,25 @@ |----|-----------|----------| |hs00|Histogram schema|Y| -## Building package +## For developers + +### Building the package ``` python setup.py sdist bdist_wheel ``` + +### Install the commit hooks (important) +There are commit hooks for Black and Flake8. + +The commit hooks are handled using [pre-commit](https://pre-commit.com). + +To install the hooks for this project run: +``` +pre-commit install +``` + +To test the hooks run: +``` +pre-commit run --all-files +``` +This command can also be used to run the hooks manually. 
diff --git a/requirements.txt b/requirements.txt index 8ae0b6d..5f2f5c2 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,3 +1,5 @@ +flake8 flatbuffers numpy +pre-commit pytest diff --git a/setup.py b/setup.py index 2820daa..1872482 100644 --- a/setup.py +++ b/setup.py @@ -1,20 +1,20 @@ from setuptools import setup, find_packages -with open('README.md') as f: +with open("README.md") as f: readme = f.read() -with open('LICENSE') as f: +with open("LICENSE") as f: license = f.read() setup( - name='streaming_data_types', - version='0.1.0', - description='Python utilities foe handling ESS streamed data', + name="streaming_data_types", + version="0.1.0", + description="Python utilities for handling ESS streamed data", long_description=readme, - author='ScreamingUdder', - author_email='NoAddress@Nowhere.com', - url='https://github.com/ess-dmsc/python-streaming-data-types', + author="ScreamingUdder", + author_email="NoAddress@Nowhere.com", + url="https://github.com/ess-dmsc/python-streaming-data-types", license=license, - packages=find_packages(exclude='tests') + packages=find_packages(exclude="tests"), ) diff --git a/streaming_data_types/hs00.py b/streaming_data_types/hs00.py index 026a254..5ed0e84 100644 --- a/streaming_data_types/hs00.py +++ b/streaming_data_types/hs00.py @@ -79,7 +79,6 @@ def deserialise_hs00(buf): else: errors = [] - hist = { "source": event_hist.Source().decode("utf-8") if event_hist.Source() else "", "timestamp": event_hist.Timestamp(), @@ -192,7 +191,9 @@ def serialise_hs00(histogram): EventHistogram.EventHistogramAddErrors(builder, pos_errors) EventHistogram.EventHistogramAddErrorsType(builder, Array.ArrayDouble) if "last_metadata_timestamp" in histogram: - EventHistogram.EventHistogramAddLastMetadataTimestamp(builder, histogram["last_metadata_timestamp"]) + EventHistogram.EventHistogramAddLastMetadataTimestamp( + builder, histogram["last_metadata_timestamp"] + ) hist = EventHistogram.EventHistogramEnd(builder) builder.Finish(hist) diff 
--git a/streaming_data_types/tests/context.py b/streaming_data_types/tests/context.py deleted file mode 100644 index 455e331..0000000 --- a/streaming_data_types/tests/context.py +++ /dev/null @@ -1,5 +0,0 @@ -import os -import sys -sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))) - -import streaming_data_types diff --git a/streaming_data_types/tests/__init__.py b/tests/__init__.py similarity index 100% rename from streaming_data_types/tests/__init__.py rename to tests/__init__.py diff --git a/streaming_data_types/tests/test_hs00.py b/tests/test_hs00.py similarity index 86% rename from streaming_data_types/tests/test_hs00.py rename to tests/test_hs00.py index b6359ca..744af3f 100644 --- a/streaming_data_types/tests/test_hs00.py +++ b/tests/test_hs00.py @@ -1,6 +1,5 @@ -from .context import streaming_data_types +# from .context import streaming_data_types # NOQA import numpy as np -import pytest from streaming_data_types.hs00 import serialise_hs00, deserialise_hs00 @@ -32,17 +31,23 @@ def test_serialises_and_deserialises_hs00_message_correctly_for_full_1d_data(sel assert hist["source"] == original_hist["source"] assert hist["timestamp"] == original_hist["timestamp"] assert hist["shape"] == original_hist["current_shape"] - assert hist["dims"][0]["edges"] == original_hist["dim_metadata"][0]["bin_boundaries"] + assert ( + hist["dims"][0]["edges"] + == original_hist["dim_metadata"][0]["bin_boundaries"] + ) assert hist["dims"][0]["length"] == original_hist["dim_metadata"][0]["length"] assert hist["dims"][0]["unit"] == original_hist["dim_metadata"][0]["unit"] assert hist["dims"][0]["label"] == original_hist["dim_metadata"][0]["label"] assert np.array_equal(hist["data"], original_hist["data"]) assert np.array_equal(hist["errors"], original_hist["errors"]) assert hist["info"] == original_hist["info"] - assert hist["last_metadata_timestamp"] == original_hist["last_metadata_timestamp"] + assert ( + hist["last_metadata_timestamp"] == 
original_hist["last_metadata_timestamp"] + ) def test_serialises_and_deserialises_hs00_message_correctly_for_minimal_1d_data( - self): + self + ): """ Round-trip to check what we serialise is what we get back. """ @@ -65,8 +70,10 @@ def test_serialises_and_deserialises_hs00_message_correctly_for_minimal_1d_data( assert hist["source"] == "" assert hist["timestamp"] == original_hist["timestamp"] assert hist["shape"] == original_hist["current_shape"] - assert hist["dims"][0]["edges"] == original_hist["dim_metadata"][0][ - "bin_boundaries"] + assert ( + hist["dims"][0]["edges"] + == original_hist["dim_metadata"][0]["bin_boundaries"] + ) assert hist["dims"][0]["length"] == original_hist["dim_metadata"][0]["length"] assert hist["dims"][0]["unit"] == original_hist["dim_metadata"][0]["unit"] assert hist["dims"][0]["label"] == original_hist["dim_metadata"][0]["label"] @@ -74,4 +81,5 @@ def test_serialises_and_deserialises_hs00_message_correctly_for_minimal_1d_data( assert len(hist["errors"]) == 0 assert hist["info"] == "" + # TODO: test with non-required fields missing, M-D data From c65914eb85e3de9b72d391bfbf5d715119f2590a Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Thu, 27 Feb 2020 13:42:06 +0100 Subject: [PATCH 004/363] Added tox --- tox.ini | 12 ++++++++++++ 1 file changed, 12 insertions(+) create mode 100644 tox.ini diff --git a/tox.ini b/tox.ini new file mode 100644 index 0000000..e448780 --- /dev/null +++ b/tox.ini @@ -0,0 +1,12 @@ +[tox] +envlist = py36, py37, py38 +requires = tox-conda +isolated_build = true +skipsdist=true + +[testenv] +deps = + pytest + -r requirements.txt +commands = + pytest From 01313b3a1ee15a286f6a09a5758303737a47d08f Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Thu, 27 Feb 2020 14:17:38 +0100 Subject: [PATCH 005/363] Works for hs00 2-D --- README.md | 9 +++++ streaming_data_types/hs00.py | 4 +- tests/test_hs00.py | 74 +++++++++++++++++++++++++++--------- 3 files changed, 68 insertions(+), 19 deletions(-) diff --git a/README.md 
b/README.md index 1820034..3b33afa 100644 --- a/README.md +++ b/README.md @@ -26,3 +26,12 @@ To test the hooks run: pre-commit run --all-files ``` This command can also be used to run the hooks manually. + +### Tox +Tox allows the unit tests to be run against multiple versions of Python. +See the tox.ini file for which versions are supported. +From the top directory: +``` +tox +``` + diff --git a/streaming_data_types/hs00.py b/streaming_data_types/hs00.py index 5ed0e84..32a77e6 100644 --- a/streaming_data_types/hs00.py +++ b/streaming_data_types/hs00.py @@ -160,7 +160,7 @@ def serialise_hs00(histogram): ArrayDouble.ArrayDoubleStartValueVector(builder, data_len) # FlatBuffers builds arrays backwards - for x in reversed(histogram["data"]): + for x in reversed(histogram["data"].flatten()): builder.PrependFloat64(x) data = builder.EndVector(data_len) ArrayDouble.ArrayDoubleStart(builder) @@ -169,7 +169,7 @@ def serialise_hs00(histogram): if "errors" in histogram: ArrayDouble.ArrayDoubleStartValueVector(builder, data_len) - for x in reversed(histogram["errors"]): + for x in reversed(histogram["errors"].flatten()): builder.PrependFloat64(x) errors = builder.EndVector(data_len) ArrayDouble.ArrayDoubleStart(builder) diff --git a/tests/test_hs00.py b/tests/test_hs00.py index 744af3f..9c64549 100644 --- a/tests/test_hs00.py +++ b/tests/test_hs00.py @@ -1,9 +1,14 @@ -# from .context import streaming_data_types # NOQA import numpy as np from streaming_data_types.hs00 import serialise_hs00, deserialise_hs00 class TestSerialisationHs00: + def _check_metadata_for_one_dimension(self, data, original_data): + assert data["edges"] == original_data["bin_boundaries"] + assert data["length"] == original_data["length"] + assert data["unit"] == original_data["unit"] + assert data["label"] == original_data["label"] + def test_serialises_and_deserialises_hs00_message_correctly_for_full_1d_data(self): """ Round-trip to check what we serialise is what we get back. 
@@ -21,8 +26,8 @@ def test_serialises_and_deserialises_hs00_message_correctly_for_full_1d_data(sel } ], "last_metadata_timestamp": 123456, - "data": [1, 2, 3, 4, 5], - "errors": [5, 4, 3, 2, 1], + "data": np.array([1, 2, 3, 4, 5]), + "errors": np.array([5, 4, 3, 2, 1]), "info": "info_string", } buf = serialise_hs00(original_hist) @@ -31,13 +36,9 @@ def test_serialises_and_deserialises_hs00_message_correctly_for_full_1d_data(sel assert hist["source"] == original_hist["source"] assert hist["timestamp"] == original_hist["timestamp"] assert hist["shape"] == original_hist["current_shape"] - assert ( - hist["dims"][0]["edges"] - == original_hist["dim_metadata"][0]["bin_boundaries"] + self._check_metadata_for_one_dimension( + hist["dims"][0], original_hist["dim_metadata"][0] ) - assert hist["dims"][0]["length"] == original_hist["dim_metadata"][0]["length"] - assert hist["dims"][0]["unit"] == original_hist["dim_metadata"][0]["unit"] - assert hist["dims"][0]["label"] == original_hist["dim_metadata"][0]["label"] assert np.array_equal(hist["data"], original_hist["data"]) assert np.array_equal(hist["errors"], original_hist["errors"]) assert hist["info"] == original_hist["info"] @@ -62,7 +63,7 @@ def test_serialises_and_deserialises_hs00_message_correctly_for_minimal_1d_data( "bin_boundaries": [0, 1, 2, 3, 4, 5], } ], - "data": [1, 2, 3, 4, 5], + "data": np.array([1, 2, 3, 4, 5]), } buf = serialise_hs00(original_hist) @@ -70,16 +71,55 @@ def test_serialises_and_deserialises_hs00_message_correctly_for_minimal_1d_data( assert hist["source"] == "" assert hist["timestamp"] == original_hist["timestamp"] assert hist["shape"] == original_hist["current_shape"] - assert ( - hist["dims"][0]["edges"] - == original_hist["dim_metadata"][0]["bin_boundaries"] + self._check_metadata_for_one_dimension( + hist["dims"][0], original_hist["dim_metadata"][0] ) - assert hist["dims"][0]["length"] == original_hist["dim_metadata"][0]["length"] - assert hist["dims"][0]["unit"] == 
original_hist["dim_metadata"][0]["unit"] - assert hist["dims"][0]["label"] == original_hist["dim_metadata"][0]["label"] assert np.array_equal(hist["data"], original_hist["data"]) assert len(hist["errors"]) == 0 assert hist["info"] == "" + def test_serialises_and_deserialises_hs00_message_correctly_for_full_2d_data(self): + """ + Round-trip to check what we serialise is what we get back. + """ + original_hist = { + "source": "some_source", + "timestamp": 123456, + "current_shape": [2, 5], + "dim_metadata": [ + { + "length": 2, + "unit": "b", + "label": "y", + "bin_boundaries": [10, 11, 12], + }, + { + "length": 5, + "unit": "m", + "label": "x", + "bin_boundaries": [0, 1, 2, 3, 4, 5], + }, + ], + "last_metadata_timestamp": 123456, + "data": np.array([[1, 2, 3, 4, 5], [6, 7, 8, 9, 10]]), + "errors": np.array([[5, 4, 3, 2, 1], [10, 9, 8, 7, 6]]), + "info": "info_string", + } + buf = serialise_hs00(original_hist) -# TODO: test with non-required fields missing, M-D data + hist = deserialise_hs00(buf) + assert hist["source"] == original_hist["source"] + assert hist["timestamp"] == original_hist["timestamp"] + assert hist["shape"] == original_hist["current_shape"] + self._check_metadata_for_one_dimension( + hist["dims"][0], original_hist["dim_metadata"][0] + ) + self._check_metadata_for_one_dimension( + hist["dims"][1], original_hist["dim_metadata"][1] + ) + assert np.array_equal(hist["data"], original_hist["data"]) + assert np.array_equal(hist["errors"], original_hist["errors"]) + assert hist["info"] == original_hist["info"] + assert ( + hist["last_metadata_timestamp"] == original_hist["last_metadata_timestamp"] + ) From 3cae077699bf68c68a80ea872c0a1bd2e447ea26 Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Thu, 27 Feb 2020 15:15:54 +0100 Subject: [PATCH 006/363] Added ns10 --- README.md | 1 + .../fbschemas/ns10/CacheEntry.py | 94 +++++++++++++++++++ .../fbschemas/ns10/__init__.py | 0 streaming_data_types/hs00.py | 17 +--- streaming_data_types/ns10.py | 58 ++++++++++++ 
streaming_data_types/utils.py | 8 ++ tests/test_hs00.py | 3 +- tests/test_ns10.py | 24 +++++ 8 files changed, 192 insertions(+), 13 deletions(-) create mode 100644 streaming_data_types/fbschemas/ns10/CacheEntry.py create mode 100644 streaming_data_types/fbschemas/ns10/__init__.py create mode 100644 streaming_data_types/ns10.py create mode 100644 streaming_data_types/utils.py create mode 100644 tests/test_ns10.py diff --git a/README.md b/README.md index 3b33afa..7b6e3e5 100644 --- a/README.md +++ b/README.md @@ -3,6 +3,7 @@ |name|description|verifiable| |----|-----------|----------| |hs00|Histogram schema|Y| +|ns10|NICOS cache entry schema|Y| ## For developers diff --git a/streaming_data_types/fbschemas/ns10/CacheEntry.py b/streaming_data_types/fbschemas/ns10/CacheEntry.py new file mode 100644 index 0000000..36d2ba9 --- /dev/null +++ b/streaming_data_types/fbschemas/ns10/CacheEntry.py @@ -0,0 +1,94 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +from __future__ import absolute_import, division, print_function + +import flatbuffers + + +# /// pylint: skip-file +class CacheEntry(object): + __slots__ = ["_tab"] + + @classmethod + def GetRootAsCacheEntry(cls, buf, offset): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = CacheEntry() + x.Init(buf, n + offset) + return x + + # CacheEntry + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # CacheEntry + def Key(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return "" + + # CacheEntry + def Time(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.Get( + flatbuffers.number_types.Float64Flags, o + self._tab.Pos + ) + return 0.0 + + # CacheEntry + def Ttl(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return self._tab.Get( + 
flatbuffers.number_types.Float64Flags, o + self._tab.Pos + ) + return 0.0 + + # CacheEntry + def Expired(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos) + return 0 + + # CacheEntry + def Value(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return "" + + +def CacheEntryStart(builder): + builder.StartObject(5) + + +def CacheEntryAddKey(builder, key): + builder.PrependUOffsetTRelativeSlot( + 0, flatbuffers.number_types.UOffsetTFlags.py_type(key), 0 + ) + + +def CacheEntryAddTime(builder, time): + builder.PrependFloat64Slot(1, time, 0.0) + + +def CacheEntryAddTtl(builder, ttl): + builder.PrependFloat64Slot(2, ttl, 0.0) + + +def CacheEntryAddExpired(builder, expired): + builder.PrependBoolSlot(3, expired, 0) + + +def CacheEntryAddValue(builder, value): + builder.PrependUOffsetTRelativeSlot( + 4, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0 + ) + + +def CacheEntryEnd(builder): + return builder.EndObject() diff --git a/streaming_data_types/fbschemas/ns10/__init__.py b/streaming_data_types/fbschemas/ns10/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/streaming_data_types/hs00.py b/streaming_data_types/hs00.py index 32a77e6..e07990c 100644 --- a/streaming_data_types/hs00.py +++ b/streaming_data_types/hs00.py @@ -6,21 +6,12 @@ import streaming_data_types.fbschemas.hs00.DimensionMetaData as DimensionMetaData import streaming_data_types.fbschemas.hs00.EventHistogram as EventHistogram from streaming_data_types.fbschemas.hs00.Array import Array +from streaming_data_types.utils import get_schema FILE_IDENTIFIER = b"hs00" -def get_schema(buf): - """ - Extract the schema code embedded in the buffer - - :param buf: The raw buffer of the FlatBuffers message. 
- :return: The schema name - """ - return buf[4:8].decode("utf-8") - - def deserialise_hs00(buf): """ Convert flatbuffer into a histogram. @@ -29,8 +20,10 @@ def deserialise_hs00(buf): :return: dict of histogram information """ # Check schema is correct - if get_schema(buf) != "hs00": - raise RuntimeError(f"Incorrect schema: expected hs00 but got {get_schema(buf)}") + if get_schema(buf) != FILE_IDENTIFIER.decode(): + raise RuntimeError( + f"Incorrect schema: expected {FILE_IDENTIFIER} but got {get_schema(buf)}" + ) event_hist = EventHistogram.EventHistogram.GetRootAsEventHistogram(buf, 0) diff --git a/streaming_data_types/ns10.py b/streaming_data_types/ns10.py new file mode 100644 index 0000000..ad6047a --- /dev/null +++ b/streaming_data_types/ns10.py @@ -0,0 +1,58 @@ +import flatbuffers +from streaming_data_types.fbschemas.ns10 import CacheEntry +from streaming_data_types.utils import get_schema + + +FILE_IDENTIFIER = b"ns10" + + +def serialise_ns10(cache_entry): + builder = flatbuffers.Builder(128) + + value = builder.CreateString(cache_entry["value"]) + key = builder.CreateString(cache_entry["key"]) + + ttl = cache_entry["ttl"] if "ttl" in cache_entry else False + time_stamp = cache_entry["time"] if "time" in cache_entry else 0 + expired = cache_entry["expired"] if "expired" in cache_entry else False + + CacheEntry.CacheEntryStart(builder) + CacheEntry.CacheEntryAddValue(builder, value) + CacheEntry.CacheEntryAddExpired(builder, expired) + CacheEntry.CacheEntryAddTtl(builder, ttl) + CacheEntry.CacheEntryAddTime(builder, time_stamp) + CacheEntry.CacheEntryAddKey(builder, key) + entry = CacheEntry.CacheEntryEnd(builder) + builder.Finish(entry) + + # Generate the output and replace the file_identifier + buff = builder.Output() + buff[4:8] = b"ns10" + + return buff + + +def deserialise_ns10(buf): + # Check schema is correct + if get_schema(buf) != FILE_IDENTIFIER.decode(): + raise RuntimeError( + f"Incorrect schema: expected {FILE_IDENTIFIER} but got 
{get_schema(buf)}" + ) + + entry = CacheEntry.CacheEntry.GetRootAsCacheEntry(buf, 0) + + key = entry.Key() if entry.Key() else "" + time_stamp = entry.Time() + ttl = entry.Ttl() if entry.Ttl() else 0 + expired = entry.Expired() if entry.Expired() else False + value = entry.Value() if entry.Value() else "" + + cache_entry = { + "key": key.decode("utf-8"), + "time": time_stamp, + "ttl": ttl, + "expired": expired, + "value": value.decode("utf-8"), + } + + return cache_entry diff --git a/streaming_data_types/utils.py b/streaming_data_types/utils.py new file mode 100644 index 0000000..b56a23d --- /dev/null +++ b/streaming_data_types/utils.py @@ -0,0 +1,8 @@ +def get_schema(buf): + """ + Extract the schema code embedded in the buffer + + :param buf: The raw buffer of the FlatBuffers message. + :return: The schema name + """ + return buf[4:8].decode("utf-8") diff --git a/tests/test_hs00.py b/tests/test_hs00.py index 9c64549..51086a2 100644 --- a/tests/test_hs00.py +++ b/tests/test_hs00.py @@ -30,9 +30,10 @@ def test_serialises_and_deserialises_hs00_message_correctly_for_full_1d_data(sel "errors": np.array([5, 4, 3, 2, 1]), "info": "info_string", } - buf = serialise_hs00(original_hist) + buf = serialise_hs00(original_hist) hist = deserialise_hs00(buf) + assert hist["source"] == original_hist["source"] assert hist["timestamp"] == original_hist["timestamp"] assert hist["shape"] == original_hist["current_shape"] diff --git a/tests/test_ns10.py b/tests/test_ns10.py new file mode 100644 index 0000000..25456d3 --- /dev/null +++ b/tests/test_ns10.py @@ -0,0 +1,24 @@ +from streaming_data_types.ns10 import serialise_ns10, deserialise_ns10 + + +class TestSerialisationNs10: + def test_serialises_and_deserialises_ns10_message_correctly(self): + """ + Round-trip to check what we serialise is what we get back. 
+ """ + original_entry = { + "key": "some_key", + "time": 123456, + "ttl": 567890, + "expired": True, + "value": "some_value", + } + + buf = serialise_ns10(original_entry) + entry = deserialise_ns10(buf) + + assert entry["key"] == original_entry["key"] + assert entry["time"] == original_entry["time"] + assert entry["ttl"] == original_entry["ttl"] + assert entry["expired"] == original_entry["expired"] + assert entry["value"] == original_entry["value"] From f1d8170112348555dddea39be931423bedeef65d Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Thu, 27 Feb 2020 15:15:54 +0100 Subject: [PATCH 007/363] Added ns10 --- README.md | 1 + .../fbschemas/ns10/CacheEntry.py | 94 +++++++++++++++++++ .../fbschemas/ns10/__init__.py | 0 streaming_data_types/hs00.py | 33 +++---- streaming_data_types/ns10.py | 58 ++++++++++++ streaming_data_types/utils.py | 8 ++ tests/test_hs00.py | 19 ++-- tests/test_ns10.py | 24 +++++ 8 files changed, 206 insertions(+), 31 deletions(-) create mode 100644 streaming_data_types/fbschemas/ns10/CacheEntry.py create mode 100644 streaming_data_types/fbschemas/ns10/__init__.py create mode 100644 streaming_data_types/ns10.py create mode 100644 streaming_data_types/utils.py create mode 100644 tests/test_ns10.py diff --git a/README.md b/README.md index 3b33afa..7b6e3e5 100644 --- a/README.md +++ b/README.md @@ -3,6 +3,7 @@ |name|description|verifiable| |----|-----------|----------| |hs00|Histogram schema|Y| +|ns10|NICOS cache entry schema|Y| ## For developers diff --git a/streaming_data_types/fbschemas/ns10/CacheEntry.py b/streaming_data_types/fbschemas/ns10/CacheEntry.py new file mode 100644 index 0000000..36d2ba9 --- /dev/null +++ b/streaming_data_types/fbschemas/ns10/CacheEntry.py @@ -0,0 +1,94 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +from __future__ import absolute_import, division, print_function + +import flatbuffers + + +# /// pylint: skip-file +class CacheEntry(object): + __slots__ = ["_tab"] + + 
@classmethod + def GetRootAsCacheEntry(cls, buf, offset): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = CacheEntry() + x.Init(buf, n + offset) + return x + + # CacheEntry + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # CacheEntry + def Key(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return "" + + # CacheEntry + def Time(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.Get( + flatbuffers.number_types.Float64Flags, o + self._tab.Pos + ) + return 0.0 + + # CacheEntry + def Ttl(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return self._tab.Get( + flatbuffers.number_types.Float64Flags, o + self._tab.Pos + ) + return 0.0 + + # CacheEntry + def Expired(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos) + return 0 + + # CacheEntry + def Value(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return "" + + +def CacheEntryStart(builder): + builder.StartObject(5) + + +def CacheEntryAddKey(builder, key): + builder.PrependUOffsetTRelativeSlot( + 0, flatbuffers.number_types.UOffsetTFlags.py_type(key), 0 + ) + + +def CacheEntryAddTime(builder, time): + builder.PrependFloat64Slot(1, time, 0.0) + + +def CacheEntryAddTtl(builder, ttl): + builder.PrependFloat64Slot(2, ttl, 0.0) + + +def CacheEntryAddExpired(builder, expired): + builder.PrependBoolSlot(3, expired, 0) + + +def CacheEntryAddValue(builder, value): + builder.PrependUOffsetTRelativeSlot( + 4, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0 + ) + + +def CacheEntryEnd(builder): + return builder.EndObject() diff --git 
a/streaming_data_types/fbschemas/ns10/__init__.py b/streaming_data_types/fbschemas/ns10/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/streaming_data_types/hs00.py b/streaming_data_types/hs00.py index 32a77e6..20db815 100644 --- a/streaming_data_types/hs00.py +++ b/streaming_data_types/hs00.py @@ -1,26 +1,16 @@ from functools import reduce import operator import flatbuffers -import numpy as np import streaming_data_types.fbschemas.hs00.ArrayDouble as ArrayDouble import streaming_data_types.fbschemas.hs00.DimensionMetaData as DimensionMetaData import streaming_data_types.fbschemas.hs00.EventHistogram as EventHistogram from streaming_data_types.fbschemas.hs00.Array import Array +from streaming_data_types.utils import get_schema FILE_IDENTIFIER = b"hs00" -def get_schema(buf): - """ - Extract the schema code embedded in the buffer - - :param buf: The raw buffer of the FlatBuffers message. - :return: The schema name - """ - return buf[4:8].decode("utf-8") - - def deserialise_hs00(buf): """ Convert flatbuffer into a histogram. 
@@ -29,8 +19,10 @@ def deserialise_hs00(buf): :return: dict of histogram information """ # Check schema is correct - if get_schema(buf) != "hs00": - raise RuntimeError(f"Incorrect schema: expected hs00 but got {get_schema(buf)}") + if get_schema(buf) != FILE_IDENTIFIER.decode(): + raise RuntimeError( + f"Incorrect schema: expected {FILE_IDENTIFIER} but got {get_schema(buf)}" + ) event_hist = EventHistogram.EventHistogram.GetRootAsEventHistogram(buf, 0) @@ -43,16 +35,13 @@ def deserialise_hs00(buf): temp.Init(bins_fb.Bytes, bins_fb.Pos) bins = temp.ValueAsNumpy() - # Get type - if event_hist.DimMetadata(i).BinBoundariesType() == Array.ArrayDouble: - bin_type = np.float64 - else: - raise TypeError("Type of the bin boundaries is incorrect") + # Check type + if event_hist.DimMetadata(i).BinBoundariesType() != Array.ArrayDouble: + raise TypeError("Type of the bin boundaries is incorrect, should be double") hist_info = { "length": event_hist.DimMetadata(i).Length(), - "edges": bins.tolist(), - "type": bin_type, + "bin_boundaries": bins.tolist(), "unit": event_hist.DimMetadata(i).Unit().decode("utf-8"), "label": event_hist.DimMetadata(i).Label().decode("utf-8"), } @@ -82,8 +71,8 @@ def deserialise_hs00(buf): hist = { "source": event_hist.Source().decode("utf-8") if event_hist.Source() else "", "timestamp": event_hist.Timestamp(), - "shape": shape, - "dims": dims, + "current_shape": shape, + "dim_metadata": dims, "data": data, "errors": errors, "last_metadata_timestamp": metadata_timestamp, diff --git a/streaming_data_types/ns10.py b/streaming_data_types/ns10.py new file mode 100644 index 0000000..ad6047a --- /dev/null +++ b/streaming_data_types/ns10.py @@ -0,0 +1,58 @@ +import flatbuffers +from streaming_data_types.fbschemas.ns10 import CacheEntry +from streaming_data_types.utils import get_schema + + +FILE_IDENTIFIER = b"ns10" + + +def serialise_ns10(cache_entry): + builder = flatbuffers.Builder(128) + + value = builder.CreateString(cache_entry["value"]) + key = 
builder.CreateString(cache_entry["key"]) + + ttl = cache_entry["ttl"] if "ttl" in cache_entry else False + time_stamp = cache_entry["time"] if "time" in cache_entry else 0 + expired = cache_entry["expired"] if "expired" in cache_entry else False + + CacheEntry.CacheEntryStart(builder) + CacheEntry.CacheEntryAddValue(builder, value) + CacheEntry.CacheEntryAddExpired(builder, expired) + CacheEntry.CacheEntryAddTtl(builder, ttl) + CacheEntry.CacheEntryAddTime(builder, time_stamp) + CacheEntry.CacheEntryAddKey(builder, key) + entry = CacheEntry.CacheEntryEnd(builder) + builder.Finish(entry) + + # Generate the output and replace the file_identifier + buff = builder.Output() + buff[4:8] = b"ns10" + + return buff + + +def deserialise_ns10(buf): + # Check schema is correct + if get_schema(buf) != FILE_IDENTIFIER.decode(): + raise RuntimeError( + f"Incorrect schema: expected {FILE_IDENTIFIER} but got {get_schema(buf)}" + ) + + entry = CacheEntry.CacheEntry.GetRootAsCacheEntry(buf, 0) + + key = entry.Key() if entry.Key() else "" + time_stamp = entry.Time() + ttl = entry.Ttl() if entry.Ttl() else 0 + expired = entry.Expired() if entry.Expired() else False + value = entry.Value() if entry.Value() else "" + + cache_entry = { + "key": key.decode("utf-8"), + "time": time_stamp, + "ttl": ttl, + "expired": expired, + "value": value.decode("utf-8"), + } + + return cache_entry diff --git a/streaming_data_types/utils.py b/streaming_data_types/utils.py new file mode 100644 index 0000000..b56a23d --- /dev/null +++ b/streaming_data_types/utils.py @@ -0,0 +1,8 @@ +def get_schema(buf): + """ + Extract the schema code embedded in the buffer + + :param buf: The raw buffer of the FlatBuffers message. 
+ :return: The schema name + """ + return buf[4:8].decode("utf-8") diff --git a/tests/test_hs00.py b/tests/test_hs00.py index 9c64549..88225ed 100644 --- a/tests/test_hs00.py +++ b/tests/test_hs00.py @@ -4,7 +4,7 @@ class TestSerialisationHs00: def _check_metadata_for_one_dimension(self, data, original_data): - assert data["edges"] == original_data["bin_boundaries"] + assert data["bin_boundaries"] == original_data["bin_boundaries"] assert data["length"] == original_data["length"] assert data["unit"] == original_data["unit"] assert data["label"] == original_data["label"] @@ -30,14 +30,15 @@ def test_serialises_and_deserialises_hs00_message_correctly_for_full_1d_data(sel "errors": np.array([5, 4, 3, 2, 1]), "info": "info_string", } - buf = serialise_hs00(original_hist) + buf = serialise_hs00(original_hist) hist = deserialise_hs00(buf) + assert hist["source"] == original_hist["source"] assert hist["timestamp"] == original_hist["timestamp"] - assert hist["shape"] == original_hist["current_shape"] + assert hist["current_shape"] == original_hist["current_shape"] self._check_metadata_for_one_dimension( - hist["dims"][0], original_hist["dim_metadata"][0] + hist["dim_metadata"][0], original_hist["dim_metadata"][0] ) assert np.array_equal(hist["data"], original_hist["data"]) assert np.array_equal(hist["errors"], original_hist["errors"]) @@ -70,9 +71,9 @@ def test_serialises_and_deserialises_hs00_message_correctly_for_minimal_1d_data( hist = deserialise_hs00(buf) assert hist["source"] == "" assert hist["timestamp"] == original_hist["timestamp"] - assert hist["shape"] == original_hist["current_shape"] + assert hist["current_shape"] == original_hist["current_shape"] self._check_metadata_for_one_dimension( - hist["dims"][0], original_hist["dim_metadata"][0] + hist["dim_metadata"][0], original_hist["dim_metadata"][0] ) assert np.array_equal(hist["data"], original_hist["data"]) assert len(hist["errors"]) == 0 @@ -110,12 +111,12 @@ def 
test_serialises_and_deserialises_hs00_message_correctly_for_full_2d_data(sel hist = deserialise_hs00(buf) assert hist["source"] == original_hist["source"] assert hist["timestamp"] == original_hist["timestamp"] - assert hist["shape"] == original_hist["current_shape"] + assert hist["current_shape"] == original_hist["current_shape"] self._check_metadata_for_one_dimension( - hist["dims"][0], original_hist["dim_metadata"][0] + hist["dim_metadata"][0], original_hist["dim_metadata"][0] ) self._check_metadata_for_one_dimension( - hist["dims"][1], original_hist["dim_metadata"][1] + hist["dim_metadata"][1], original_hist["dim_metadata"][1] ) assert np.array_equal(hist["data"], original_hist["data"]) assert np.array_equal(hist["errors"], original_hist["errors"]) diff --git a/tests/test_ns10.py b/tests/test_ns10.py new file mode 100644 index 0000000..25456d3 --- /dev/null +++ b/tests/test_ns10.py @@ -0,0 +1,24 @@ +from streaming_data_types.ns10 import serialise_ns10, deserialise_ns10 + + +class TestSerialisationNs10: + def test_serialises_and_deserialises_ns10_message_correctly(self): + """ + Round-trip to check what we serialise is what we get back. 
+ """ + original_entry = { + "key": "some_key", + "time": 123456, + "ttl": 567890, + "expired": True, + "value": "some_value", + } + + buf = serialise_ns10(original_entry) + entry = deserialise_ns10(buf) + + assert entry["key"] == original_entry["key"] + assert entry["time"] == original_entry["time"] + assert entry["ttl"] == original_entry["ttl"] + assert entry["expired"] == original_entry["expired"] + assert entry["value"] == original_entry["value"] From 1e307e2dd08b46bdd2cd36f2f430776eeaaa0dc1 Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Thu, 27 Feb 2020 15:32:14 +0100 Subject: [PATCH 008/363] Test for wrong ID --- tests/test_hs00.py | 23 +++++++++++++++++++++++ tests/test_ns10.py | 17 +++++++++++++++++ 2 files changed, 40 insertions(+) diff --git a/tests/test_hs00.py b/tests/test_hs00.py index 88225ed..cc603d3 100644 --- a/tests/test_hs00.py +++ b/tests/test_hs00.py @@ -1,4 +1,5 @@ import numpy as np +import pytest from streaming_data_types.hs00 import serialise_hs00, deserialise_hs00 @@ -124,3 +125,25 @@ def test_serialises_and_deserialises_hs00_message_correctly_for_full_2d_data(sel assert ( hist["last_metadata_timestamp"] == original_hist["last_metadata_timestamp"] ) + + def test_if_buffer_has_wrong_id_then_throws(self): + original_hist = { + "timestamp": 123456, + "current_shape": [5], + "dim_metadata": [ + { + "length": 5, + "unit": "m", + "label": "some_label", + "bin_boundaries": [0, 1, 2, 3, 4, 5], + } + ], + "data": np.array([1, 2, 3, 4, 5]), + } + buf = serialise_hs00(original_hist) + + # Manually hack the id + buf[4:8] = b"1234" + + with pytest.raises(RuntimeError): + deserialise_hs00(buf) diff --git a/tests/test_ns10.py b/tests/test_ns10.py index 25456d3..7ee8267 100644 --- a/tests/test_ns10.py +++ b/tests/test_ns10.py @@ -1,3 +1,4 @@ +import pytest from streaming_data_types.ns10 import serialise_ns10, deserialise_ns10 @@ -22,3 +23,19 @@ def test_serialises_and_deserialises_ns10_message_correctly(self): assert entry["ttl"] == 
original_entry["ttl"] assert entry["expired"] == original_entry["expired"] assert entry["value"] == original_entry["value"] + + def test_if_buffer_has_wrong_id_then_throws(self): + original_entry = { + "key": "some_key", + "time": 123456, + "ttl": 567890, + "expired": True, + "value": "some_value", + } + buf = serialise_ns10(original_entry) + + # Manually hack the id + buf[4:8] = b"1234" + + with pytest.raises(RuntimeError): + deserialise_ns10(buf) From 68017a43791f8177e5eb6b7023592d391aa26a84 Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Fri, 28 Feb 2020 07:36:12 +0100 Subject: [PATCH 009/363] Add licence to manifest --- MANIFEST.in | 1 + 1 file changed, 1 insertion(+) diff --git a/MANIFEST.in b/MANIFEST.in index e69de29..1aba38f 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -0,0 +1 @@ +include LICENSE From 6b3ac9694dbf130fea74fe8edb8cb17d481074f1 Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Fri, 28 Feb 2020 07:47:19 +0100 Subject: [PATCH 010/363] Added makefile --- Makefile | 5 +++++ 1 file changed, 5 insertions(+) create mode 100644 Makefile diff --git a/Makefile b/Makefile new file mode 100644 index 0000000..ab4c425 --- /dev/null +++ b/Makefile @@ -0,0 +1,5 @@ +init: + pip install -r requirements.txt + +test: + pytest tests From f868742d3915894fbb08fdd33962c57e952998f6 Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Fri, 28 Feb 2020 07:59:31 +0100 Subject: [PATCH 011/363] Include requirements and makefile in manifest --- MANIFEST.in | 2 ++ setup.py | 12 +++--------- 2 files changed, 5 insertions(+), 9 deletions(-) diff --git a/MANIFEST.in b/MANIFEST.in index 1aba38f..a9c4405 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1 +1,3 @@ include LICENSE +include requirements.txt +include Makefile diff --git a/setup.py b/setup.py index 1872482..b682d4c 100644 --- a/setup.py +++ b/setup.py @@ -1,20 +1,14 @@ from setuptools import setup, find_packages -with open("README.md") as f: - readme = f.read() - -with open("LICENSE") as f: - license = f.read() - setup( 
name="streaming_data_types", version="0.1.0", - description="Python utilities foe handling ESS streamed data", - long_description=readme, + description="Python utilities for handling ESS streamed data", + long_description="Python utilities for serialising and deserialising data via FlatBuffers for the European Spallation Source ERIC", author="ScreamingUdder", author_email="NoAddress@Nowhere.com", url="https://github.com/ess-dmsc/python-streaming-data-types", - license=license, + license="BSD 2-Clause License", packages=find_packages(exclude="tests"), ) From 0b92e412f24106e0e58278f98da54167aa0f2238 Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Fri, 28 Feb 2020 09:07:39 +0100 Subject: [PATCH 012/363] More docs, simplified ns10, changed setup.py --- MANIFEST.in | 2 +- README.md | 43 ++++++++++++++++++++++++++++++++++++ requirements-dev.txt | 4 ++++ requirements.txt | 3 --- setup.py | 2 ++ streaming_data_types/hs00.py | 2 +- streaming_data_types/ns10.py | 14 +++++------- tests/test_hs00.py | 4 ++-- tests/test_ns10.py | 10 ++++----- tox.ini | 6 ++++- 10 files changed, 69 insertions(+), 21 deletions(-) create mode 100644 requirements-dev.txt diff --git a/MANIFEST.in b/MANIFEST.in index a9c4405..408c775 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1,3 +1,3 @@ include LICENSE -include requirements.txt +include requirements*.* include Makefile diff --git a/README.md b/README.md index 7b6e3e5..e1b7b5f 100644 --- a/README.md +++ b/README.md @@ -1,3 +1,9 @@ +# Python Streaming Data Types +Utilities for working with the FlatBuffers schemas used at the European +Spallation Source ERIC for data transport. + +https://github.com/ess-dmsc/streaming-data-types + ## FlatBuffer Schemas |name|description|verifiable| @@ -5,6 +11,43 @@ |hs00|Histogram schema|Y| |ns10|NICOS cache entry schema|Y| +### hs00 +Schema for histogram data. It is one of the more complicated to use schemas. +It takes a Python dictionary as its input; this dictionary needs to have correctly +named fields. 
+ +The input histogram data for serialistation and the output deserialisation data +have the same dictionary "layout". +Example for a 2-D histogram: +```json +hist = { + "source": "some_source", + "timestamp": 123456, + "current_shape": [2, 5], + "dim_metadata": [ + { + "length": 2, + "unit": "a", + "label": "x", + "bin_boundaries": np.array([10, 11, 12]), + }, + { + "length": 5, + "unit": "b", + "label": "y", + "bin_boundaries": np.array([0, 1, 2, 3, 4, 5]), + }, + ], + "last_metadata_timestamp": 123456, + "data": np.array([[1, 2, 3, 4, 5], [6, 7, 8, 9, 10]]), + "errors": np.array([[5, 4, 3, 2, 1], [10, 9, 8, 7, 6]]), + "info": "info_string", +} +``` +The arrays passed in for `data` and `errors` must be NumPy arrays. `bin_boundaries` +can be passed in as a Python list or a NumPy array, but on deserialisation it will be a +NumPy array. + ## For developers ### Building the package diff --git a/requirements-dev.txt b/requirements-dev.txt new file mode 100644 index 0000000..96815bf --- /dev/null +++ b/requirements-dev.txt @@ -0,0 +1,4 @@ +flake8 +pre-commit +pytest +tox diff --git a/requirements.txt b/requirements.txt index 5f2f5c2..15438bd 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,5 +1,2 @@ -flake8 flatbuffers numpy -pre-commit -pytest diff --git a/setup.py b/setup.py index b682d4c..d01ac4c 100644 --- a/setup.py +++ b/setup.py @@ -11,4 +11,6 @@ url="https://github.com/ess-dmsc/python-streaming-data-types", license="BSD 2-Clause License", packages=find_packages(exclude="tests"), + install_requires=["flatbuffers", "numpy"], + extras_requires={"dev": ["flake8", "pre-commit", "pytest", "tox"]}, ) diff --git a/streaming_data_types/hs00.py b/streaming_data_types/hs00.py index 20db815..f4ee3f5 100644 --- a/streaming_data_types/hs00.py +++ b/streaming_data_types/hs00.py @@ -41,7 +41,7 @@ def deserialise_hs00(buf): hist_info = { "length": event_hist.DimMetadata(i).Length(), - "bin_boundaries": bins.tolist(), + "bin_boundaries": bins, "unit": 
event_hist.DimMetadata(i).Unit().decode("utf-8"), "label": event_hist.DimMetadata(i).Label().decode("utf-8"), } diff --git a/streaming_data_types/ns10.py b/streaming_data_types/ns10.py index ad6047a..c381ff6 100644 --- a/streaming_data_types/ns10.py +++ b/streaming_data_types/ns10.py @@ -6,15 +6,13 @@ FILE_IDENTIFIER = b"ns10" -def serialise_ns10(cache_entry): +def serialise_ns10( + key: str, value: str, time_stamp: float = 0, ttl: float = 0, expired: bool = False +): builder = flatbuffers.Builder(128) - value = builder.CreateString(cache_entry["value"]) - key = builder.CreateString(cache_entry["key"]) - - ttl = cache_entry["ttl"] if "ttl" in cache_entry else False - time_stamp = cache_entry["time"] if "time" in cache_entry else 0 - expired = cache_entry["expired"] if "expired" in cache_entry else False + value = builder.CreateString(value) + key = builder.CreateString(key) CacheEntry.CacheEntryStart(builder) CacheEntry.CacheEntryAddValue(builder, value) @@ -49,7 +47,7 @@ def deserialise_ns10(buf): cache_entry = { "key": key.decode("utf-8"), - "time": time_stamp, + "time_stamp": time_stamp, "ttl": ttl, "expired": expired, "value": value.decode("utf-8"), diff --git a/tests/test_hs00.py b/tests/test_hs00.py index cc603d3..04f528d 100644 --- a/tests/test_hs00.py +++ b/tests/test_hs00.py @@ -5,7 +5,7 @@ class TestSerialisationHs00: def _check_metadata_for_one_dimension(self, data, original_data): - assert data["bin_boundaries"] == original_data["bin_boundaries"] + assert np.array_equal(data["bin_boundaries"], original_data["bin_boundaries"]) assert data["length"] == original_data["length"] assert data["unit"] == original_data["unit"] assert data["label"] == original_data["label"] @@ -93,7 +93,7 @@ def test_serialises_and_deserialises_hs00_message_correctly_for_full_2d_data(sel "length": 2, "unit": "b", "label": "y", - "bin_boundaries": [10, 11, 12], + "bin_boundaries": np.array([10, 11, 12]), }, { "length": 5, diff --git a/tests/test_ns10.py b/tests/test_ns10.py index 
7ee8267..4df4bff 100644 --- a/tests/test_ns10.py +++ b/tests/test_ns10.py @@ -9,17 +9,17 @@ def test_serialises_and_deserialises_ns10_message_correctly(self): """ original_entry = { "key": "some_key", - "time": 123456, + "time_stamp": 123456, "ttl": 567890, "expired": True, "value": "some_value", } - buf = serialise_ns10(original_entry) + buf = serialise_ns10(**original_entry) entry = deserialise_ns10(buf) assert entry["key"] == original_entry["key"] - assert entry["time"] == original_entry["time"] + assert entry["time_stamp"] == original_entry["time_stamp"] assert entry["ttl"] == original_entry["ttl"] assert entry["expired"] == original_entry["expired"] assert entry["value"] == original_entry["value"] @@ -27,12 +27,12 @@ def test_serialises_and_deserialises_ns10_message_correctly(self): def test_if_buffer_has_wrong_id_then_throws(self): original_entry = { "key": "some_key", - "time": 123456, + "time_stamp": 123456, "ttl": 567890, "expired": True, "value": "some_value", } - buf = serialise_ns10(original_entry) + buf = serialise_ns10(**original_entry) # Manually hack the id buf[4:8] = b"1234" diff --git a/tox.ini b/tox.ini index e448780..45b357f 100644 --- a/tox.ini +++ b/tox.ini @@ -1,5 +1,5 @@ [tox] -envlist = py36, py37, py38 +envlist = py36, py37, py38, flake8 requires = tox-conda isolated_build = true skipsdist=true @@ -10,3 +10,7 @@ deps = -r requirements.txt commands = pytest + +[testenv:flake8] +commands = + flake8 test src From 000e3d97846e52f8df93b18d15a85118e7925778 Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Fri, 28 Feb 2020 09:13:58 +0100 Subject: [PATCH 013/363] Added Jenkinsfile --- Jenkinsfile | 73 +++++++++++++++++++++++++++++++++++++++++++++++++++++ tox.ini | 2 +- 2 files changed, 74 insertions(+), 1 deletion(-) create mode 100644 Jenkinsfile diff --git a/Jenkinsfile b/Jenkinsfile new file mode 100644 index 0000000..c538b07 --- /dev/null +++ b/Jenkinsfile @@ -0,0 +1,73 @@ +@Library('ecdc-pipeline') +import ecdcpipeline.ContainerBuildNode 
+import ecdcpipeline.PipelineBuilder + +project = "python-streaming-data-types" + +python = "python3.6" + +container_build_nodes = [ + 'centos7-release': ContainerBuildNode.getDefaultContainerBuildNode('centos7'), +] + +// Define number of old builds to keep. +num_artifacts_to_keep = '1' + +// Set number of old builds to keep. +properties([[ + $class: 'BuildDiscarderProperty', + strategy: [ + $class: 'LogRotator', + artifactDaysToKeepStr: '', + artifactNumToKeepStr: num_artifacts_to_keep, + daysToKeepStr: '', + numToKeepStr: num_artifacts_to_keep + ] +]]); + + +pipeline_builder = new PipelineBuilder(this, container_build_nodes) +pipeline_builder.activateEmailFailureNotifications() + +builders = pipeline_builder.createBuilders { container -> + pipeline_builder.stage("${container.key}: Checkout") { + dir(pipeline_builder.project) { + scm_vars = checkout scm + } + container.copyTo(pipeline_builder.project, pipeline_builder.project) + } // stage + + pipeline_builder.stage("${container.key}: Dependencies") { + def conan_remote = "ess-dmsc-local" + container.sh """ + pip install --user -r ${project}/requirements.txt + pip install --user -r ${project}/requirements-dev.txt + """ + } // stage + + pipeline_builder.stage("${container.key}: Test") { + def test_output = "TestResults.xml" + container.sh """ + ${python} --version + cd ${project} + ${python} -m pytest --junitxml=${test_output} + """ + container.copyFrom("${project}/${test_output}", ".") + xunit thresholds: [failed(unstableThreshold: '0')], tools: [JUnit(deleteOutputFiles: true, pattern: '*.xml', skipNoTestFiles: false, stopProcessingIfError: true)] + } // stage +} // createBuilders + +node { + dir("${project}") { + scm_vars = checkout scm + } + + try { + parallel builders + } catch (e) { + throw e + } + + // Delete workspace when build is done + cleanWs() +} diff --git a/tox.ini b/tox.ini index 45b357f..1768c17 100644 --- a/tox.ini +++ b/tox.ini @@ -6,8 +6,8 @@ skipsdist=true [testenv] deps = - pytest -r 
requirements.txt + -r requirements-dev.txt commands = pytest From 557ffccbc57b97b4041c9dd1651e52e5492f7495 Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Fri, 28 Feb 2020 09:46:48 +0100 Subject: [PATCH 014/363] Pytest ini file --- pytest.ini | 3 +++ 1 file changed, 3 insertions(+) create mode 100644 pytest.ini diff --git a/pytest.ini b/pytest.ini new file mode 100644 index 0000000..dbdfd63 --- /dev/null +++ b/pytest.ini @@ -0,0 +1,3 @@ +[pytest] +junit_family=xunit2 +testpaths = ./tests From 92cf7db2cdf476949584073fd414a9321118b4e0 Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Fri, 28 Feb 2020 10:02:14 +0100 Subject: [PATCH 015/363] Try running tox on Jenkins --- Jenkinsfile | 3 +-- tox.ini | 2 +- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/Jenkinsfile b/Jenkinsfile index c538b07..df54e0d 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -48,9 +48,8 @@ builders = pipeline_builder.createBuilders { container -> pipeline_builder.stage("${container.key}: Test") { def test_output = "TestResults.xml" container.sh """ - ${python} --version cd ${project} - ${python} -m pytest --junitxml=${test_output} + tox -- --junitxml=${test_output} """ container.copyFrom("${project}/${test_output}", ".") xunit thresholds: [failed(unstableThreshold: '0')], tools: [JUnit(deleteOutputFiles: true, pattern: '*.xml', skipNoTestFiles: false, stopProcessingIfError: true)] diff --git a/tox.ini b/tox.ini index 1768c17..4e658a3 100644 --- a/tox.ini +++ b/tox.ini @@ -9,7 +9,7 @@ deps = -r requirements.txt -r requirements-dev.txt commands = - pytest + pytest {posargs} [testenv:flake8] commands = From 2c6884e4298b1afd1aa4b552c8f5326acf47cfac Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Fri, 28 Feb 2020 10:48:35 +0100 Subject: [PATCH 016/363] Try this --- Jenkinsfile | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/Jenkinsfile b/Jenkinsfile index df54e0d..6256899 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -38,7 +38,6 @@ builders = 
pipeline_builder.createBuilders { container -> } // stage pipeline_builder.stage("${container.key}: Dependencies") { - def conan_remote = "ess-dmsc-local" container.sh """ pip install --user -r ${project}/requirements.txt pip install --user -r ${project}/requirements-dev.txt @@ -49,7 +48,7 @@ builders = pipeline_builder.createBuilders { container -> def test_output = "TestResults.xml" container.sh """ cd ${project} - tox -- --junitxml=${test_output} + ${python} -m tox -- --junitxml=${test_output} """ container.copyFrom("${project}/${test_output}", ".") xunit thresholds: [failed(unstableThreshold: '0')], tools: [JUnit(deleteOutputFiles: true, pattern: '*.xml', skipNoTestFiles: false, stopProcessingIfError: true)] From e8b66a434fb7be6aa48a4276d71c6e16c37ae2b8 Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Fri, 28 Feb 2020 10:58:47 +0100 Subject: [PATCH 017/363] Remove conda dependency --- tox.ini | 1 - 1 file changed, 1 deletion(-) diff --git a/tox.ini b/tox.ini index 4e658a3..2e5de0b 100644 --- a/tox.ini +++ b/tox.ini @@ -1,6 +1,5 @@ [tox] envlist = py36, py37, py38, flake8 -requires = tox-conda isolated_build = true skipsdist=true From 7124141e6b1a949e7f364f6005bb592e8675d9af Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Fri, 28 Feb 2020 11:19:52 +0100 Subject: [PATCH 018/363] Try installing conda --- Jenkinsfile | 13 +++++++++---- tox.ini | 1 + 2 files changed, 10 insertions(+), 4 deletions(-) diff --git a/Jenkinsfile b/Jenkinsfile index 6256899..123850d 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -4,7 +4,7 @@ import ecdcpipeline.PipelineBuilder project = "python-streaming-data-types" -python = "python3.6" +python_version = "3.7" container_build_nodes = [ 'centos7-release': ContainerBuildNode.getDefaultContainerBuildNode('centos7'), @@ -39,8 +39,13 @@ builders = pipeline_builder.createBuilders { container -> pipeline_builder.stage("${container.key}: Dependencies") { container.sh """ - pip install --user -r ${project}/requirements.txt - pip install --user -r 
${project}/requirements-dev.txt + wget -O miniconda.sh https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh + sh miniconda.sh -b -p miniconda + miniconda/bin/conda create -n env python=${python_version} + miniconda/bin/conda activate env + python --version + python -m pip install --user -r ${project}/requirements.txt + python -m pip install --user -r ${project}/requirements-dev.txt """ } // stage @@ -48,7 +53,7 @@ builders = pipeline_builder.createBuilders { container -> def test_output = "TestResults.xml" container.sh """ cd ${project} - ${python} -m tox -- --junitxml=${test_output} + python -m tox -- --junitxml=${test_output} """ container.copyFrom("${project}/${test_output}", ".") xunit thresholds: [failed(unstableThreshold: '0')], tools: [JUnit(deleteOutputFiles: true, pattern: '*.xml', skipNoTestFiles: false, stopProcessingIfError: true)] diff --git a/tox.ini b/tox.ini index 2e5de0b..4e658a3 100644 --- a/tox.ini +++ b/tox.ini @@ -1,5 +1,6 @@ [tox] envlist = py36, py37, py38, flake8 +requires = tox-conda isolated_build = true skipsdist=true From 5259c9bbc22edf817f02a7c10540c02373e56a9a Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Fri, 28 Feb 2020 11:22:06 +0100 Subject: [PATCH 019/363] Can we yum --- Jenkinsfile | 1 + 1 file changed, 1 insertion(+) diff --git a/Jenkinsfile b/Jenkinsfile index 123850d..d3d9fe4 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -39,6 +39,7 @@ builders = pipeline_builder.createBuilders { container -> pipeline_builder.stage("${container.key}: Dependencies") { container.sh """ + yum install -yq wget wget -O miniconda.sh https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh sh miniconda.sh -b -p miniconda miniconda/bin/conda create -n env python=${python_version} From 05c11af7e06911f12dd671fa09816a4693405976 Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Fri, 28 Feb 2020 11:25:04 +0100 Subject: [PATCH 020/363] Try curl instead --- Jenkinsfile | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) 
diff --git a/Jenkinsfile b/Jenkinsfile index d3d9fe4..2efa625 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -39,8 +39,7 @@ builders = pipeline_builder.createBuilders { container -> pipeline_builder.stage("${container.key}: Dependencies") { container.sh """ - yum install -yq wget - wget -O miniconda.sh https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh + curl https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh --output miniconda.sh sh miniconda.sh -b -p miniconda miniconda/bin/conda create -n env python=${python_version} miniconda/bin/conda activate env From 92ce34b9662fadf7bc23434848e0585ee2f2fb7e Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Fri, 28 Feb 2020 11:29:02 +0100 Subject: [PATCH 021/363] Conda init --- Jenkinsfile | 1 + 1 file changed, 1 insertion(+) diff --git a/Jenkinsfile b/Jenkinsfile index 2efa625..55dd980 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -41,6 +41,7 @@ builders = pipeline_builder.createBuilders { container -> container.sh """ curl https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh --output miniconda.sh sh miniconda.sh -b -p miniconda + miniconda/bin/conda init bash miniconda/bin/conda create -n env python=${python_version} miniconda/bin/conda activate env python --version From 396ff6a299407cc35024a3aeaca372d024dc6085 Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Fri, 28 Feb 2020 11:34:34 +0100 Subject: [PATCH 022/363] Separate stage for installing conda --- Jenkinsfile | 15 ++++++++++----- 1 file changed, 10 insertions(+), 5 deletions(-) diff --git a/Jenkinsfile b/Jenkinsfile index 55dd980..6f42bb7 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -37,13 +37,18 @@ builders = pipeline_builder.createBuilders { container -> container.copyTo(pipeline_builder.project, pipeline_builder.project) } // stage - pipeline_builder.stage("${container.key}: Dependencies") { + pipeline_builder.stage("${container.key}: Conda") { container.sh """ curl 
https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh --output miniconda.sh - sh miniconda.sh -b -p miniconda - miniconda/bin/conda init bash - miniconda/bin/conda create -n env python=${python_version} - miniconda/bin/conda activate env + sh miniconda.sh -b -p /home/jenkins/miniconda + /home/jenkins/miniconda/bin/conda init bash + /home/jenkins/miniconda/bin/conda create -n env python=${python_version} + echo "/home/nicos/miniconda/bin/conda activate env" >> ~/.bashrc + """ + } // stage + + pipeline_builder.stage("${container.key}: Dependencies") { + container.sh """ python --version python -m pip install --user -r ${project}/requirements.txt python -m pip install --user -r ${project}/requirements-dev.txt From 703f6f1c197d554e966665fee8a22c82834235e4 Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Fri, 28 Feb 2020 11:37:23 +0100 Subject: [PATCH 023/363] Paths --- Jenkinsfile | 2 ++ 1 file changed, 2 insertions(+) diff --git a/Jenkinsfile b/Jenkinsfile index 6f42bb7..915cd71 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -44,11 +44,13 @@ builders = pipeline_builder.createBuilders { container -> /home/jenkins/miniconda/bin/conda init bash /home/jenkins/miniconda/bin/conda create -n env python=${python_version} echo "/home/nicos/miniconda/bin/conda activate env" >> ~/.bashrc + export PYTHONPATH= """ } // stage pipeline_builder.stage("${container.key}: Dependencies") { container.sh """ + export PATH=/home/jenkins/miniconda/envs/env/bin:$PATH python --version python -m pip install --user -r ${project}/requirements.txt python -m pip install --user -r ${project}/requirements-dev.txt From 026e2113f6322f4b1e80fadcd5338f19542ccf03 Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Fri, 28 Feb 2020 11:40:16 +0100 Subject: [PATCH 024/363] More paths --- Jenkinsfile | 1 + 1 file changed, 1 insertion(+) diff --git a/Jenkinsfile b/Jenkinsfile index 915cd71..140c58f 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -60,6 +60,7 @@ builders = 
pipeline_builder.createBuilders { container -> pipeline_builder.stage("${container.key}: Test") { def test_output = "TestResults.xml" container.sh """ + export PATH=/home/jenkins/miniconda/envs/env/bin:$PATH cd ${project} python -m tox -- --junitxml=${test_output} """ From 9948e13589a99a149f1355922668f1406300d054 Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Fri, 28 Feb 2020 11:47:42 +0100 Subject: [PATCH 025/363] No venv --- Jenkinsfile | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/Jenkinsfile b/Jenkinsfile index 140c58f..4fa2308 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -42,15 +42,14 @@ builders = pipeline_builder.createBuilders { container -> curl https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh --output miniconda.sh sh miniconda.sh -b -p /home/jenkins/miniconda /home/jenkins/miniconda/bin/conda init bash - /home/jenkins/miniconda/bin/conda create -n env python=${python_version} - echo "/home/nicos/miniconda/bin/conda activate env" >> ~/.bashrc export PYTHONPATH= """ } // stage pipeline_builder.stage("${container.key}: Dependencies") { container.sh """ - export PATH=/home/jenkins/miniconda/envs/env/bin:$PATH + export PYTHONPATH= + export PATH=/home/jenkins/miniconda/bin:$PATH python --version python -m pip install --user -r ${project}/requirements.txt python -m pip install --user -r ${project}/requirements-dev.txt @@ -60,7 +59,8 @@ builders = pipeline_builder.createBuilders { container -> pipeline_builder.stage("${container.key}: Test") { def test_output = "TestResults.xml" container.sh """ - export PATH=/home/jenkins/miniconda/envs/env/bin:$PATH + export PYTHONPATH= + export PATH=/home/jenkins/miniconda/bin:$PATH cd ${project} python -m tox -- --junitxml=${test_output} """ From b0798a4d9637daee6f14c8e99e60c0952a5d564d Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Fri, 28 Feb 2020 11:56:12 +0100 Subject: [PATCH 026/363] Typo in tox.ini --- Jenkinsfile | 1 + tox.ini | 2 +- 2 files changed, 2 
insertions(+), 1 deletion(-) diff --git a/Jenkinsfile b/Jenkinsfile index 4fa2308..aa4f739 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -41,6 +41,7 @@ builders = pipeline_builder.createBuilders { container -> container.sh """ curl https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh --output miniconda.sh sh miniconda.sh -b -p /home/jenkins/miniconda + /home/jenkins/miniconda/bin/conda update -n base -c defaults conda -y /home/jenkins/miniconda/bin/conda init bash export PYTHONPATH= """ diff --git a/tox.ini b/tox.ini index 4e658a3..ea65914 100644 --- a/tox.ini +++ b/tox.ini @@ -13,4 +13,4 @@ commands = [testenv:flake8] commands = - flake8 test src + flake8 tests src From cf5029d812150a747f4ef669b493f62b34c81094 Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Fri, 28 Feb 2020 12:02:59 +0100 Subject: [PATCH 027/363] Clutching at straws --- Jenkinsfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Jenkinsfile b/Jenkinsfile index aa4f739..de920b5 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -4,7 +4,7 @@ import ecdcpipeline.PipelineBuilder project = "python-streaming-data-types" -python_version = "3.7" +python_version = "3.8" container_build_nodes = [ 'centos7-release': ContainerBuildNode.getDefaultContainerBuildNode('centos7'), From e8e1553fa70530c8845b552262580734be55dd50 Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Fri, 28 Feb 2020 12:04:07 +0100 Subject: [PATCH 028/363] Use -m syntax --- tox.ini | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tox.ini b/tox.ini index ea65914..8ea8384 100644 --- a/tox.ini +++ b/tox.ini @@ -9,8 +9,8 @@ deps = -r requirements.txt -r requirements-dev.txt commands = - pytest {posargs} + python -m pytest {posargs} [testenv:flake8] commands = - flake8 tests src + python -m flake8 tests src From 0221d74cde79abcd69ec6968b3c4cdf3639869cb Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Mon, 2 Mar 2020 11:31:37 +0100 Subject: [PATCH 029/363] Added run start and stop --- .../{hs00 
=> histogram_hs00}/Array.py | 0 .../{hs00 => histogram_hs00}/ArrayDouble.py | 0 .../{hs00 => histogram_hs00}/ArrayFloat.py | 0 .../{hs00 => histogram_hs00}/ArrayUInt.py | 0 .../{hs00 => histogram_hs00}/ArrayULong.py | 0 .../DimensionMetaData.py | 0 .../EventHistogram.py | 0 .../{hs00 => histogram_hs00}/__init__.py | 0 .../{ns10 => nicos_cache_ns10}/CacheEntry.py | 0 .../{ns10 => nicos_cache_ns10}/__init__.py | 0 .../fbschemas/run_start_pl72/RunStart.py | 176 ++++++++++++++++++ .../fbschemas/run_start_pl72/__init__.py | 0 .../fbschemas/run_stop_6s4t/RunStop.py | 80 ++++++++ .../fbschemas/run_stop_6s4t/__init__.py | 0 .../{hs00.py => histogram_hs00.py} | 10 +- .../{ns10.py => nicos_cache_ns10.py} | 4 +- streaming_data_types/run_start_pl72.py | 54 ++++++ streaming_data_types/run_stop_6s4t.py | 36 ++++ tests/test_hs00.py | 2 +- tests/test_ns10.py | 2 +- 20 files changed, 355 insertions(+), 9 deletions(-) rename streaming_data_types/fbschemas/{hs00 => histogram_hs00}/Array.py (100%) rename streaming_data_types/fbschemas/{hs00 => histogram_hs00}/ArrayDouble.py (100%) rename streaming_data_types/fbschemas/{hs00 => histogram_hs00}/ArrayFloat.py (100%) rename streaming_data_types/fbschemas/{hs00 => histogram_hs00}/ArrayUInt.py (100%) rename streaming_data_types/fbschemas/{hs00 => histogram_hs00}/ArrayULong.py (100%) rename streaming_data_types/fbschemas/{hs00 => histogram_hs00}/DimensionMetaData.py (100%) rename streaming_data_types/fbschemas/{hs00 => histogram_hs00}/EventHistogram.py (100%) rename streaming_data_types/fbschemas/{hs00 => histogram_hs00}/__init__.py (100%) rename streaming_data_types/fbschemas/{ns10 => nicos_cache_ns10}/CacheEntry.py (100%) rename streaming_data_types/fbschemas/{ns10 => nicos_cache_ns10}/__init__.py (100%) create mode 100644 streaming_data_types/fbschemas/run_start_pl72/RunStart.py create mode 100644 streaming_data_types/fbschemas/run_start_pl72/__init__.py create mode 100644 streaming_data_types/fbschemas/run_stop_6s4t/RunStop.py create 
mode 100644 streaming_data_types/fbschemas/run_stop_6s4t/__init__.py rename streaming_data_types/{hs00.py => histogram_hs00.py} (94%) rename streaming_data_types/{ns10.py => nicos_cache_ns10.py} (93%) create mode 100644 streaming_data_types/run_start_pl72.py create mode 100644 streaming_data_types/run_stop_6s4t.py diff --git a/streaming_data_types/fbschemas/hs00/Array.py b/streaming_data_types/fbschemas/histogram_hs00/Array.py similarity index 100% rename from streaming_data_types/fbschemas/hs00/Array.py rename to streaming_data_types/fbschemas/histogram_hs00/Array.py diff --git a/streaming_data_types/fbschemas/hs00/ArrayDouble.py b/streaming_data_types/fbschemas/histogram_hs00/ArrayDouble.py similarity index 100% rename from streaming_data_types/fbschemas/hs00/ArrayDouble.py rename to streaming_data_types/fbschemas/histogram_hs00/ArrayDouble.py diff --git a/streaming_data_types/fbschemas/hs00/ArrayFloat.py b/streaming_data_types/fbschemas/histogram_hs00/ArrayFloat.py similarity index 100% rename from streaming_data_types/fbschemas/hs00/ArrayFloat.py rename to streaming_data_types/fbschemas/histogram_hs00/ArrayFloat.py diff --git a/streaming_data_types/fbschemas/hs00/ArrayUInt.py b/streaming_data_types/fbschemas/histogram_hs00/ArrayUInt.py similarity index 100% rename from streaming_data_types/fbschemas/hs00/ArrayUInt.py rename to streaming_data_types/fbschemas/histogram_hs00/ArrayUInt.py diff --git a/streaming_data_types/fbschemas/hs00/ArrayULong.py b/streaming_data_types/fbschemas/histogram_hs00/ArrayULong.py similarity index 100% rename from streaming_data_types/fbschemas/hs00/ArrayULong.py rename to streaming_data_types/fbschemas/histogram_hs00/ArrayULong.py diff --git a/streaming_data_types/fbschemas/hs00/DimensionMetaData.py b/streaming_data_types/fbschemas/histogram_hs00/DimensionMetaData.py similarity index 100% rename from streaming_data_types/fbschemas/hs00/DimensionMetaData.py rename to streaming_data_types/fbschemas/histogram_hs00/DimensionMetaData.py 
diff --git a/streaming_data_types/fbschemas/hs00/EventHistogram.py b/streaming_data_types/fbschemas/histogram_hs00/EventHistogram.py similarity index 100% rename from streaming_data_types/fbschemas/hs00/EventHistogram.py rename to streaming_data_types/fbschemas/histogram_hs00/EventHistogram.py diff --git a/streaming_data_types/fbschemas/hs00/__init__.py b/streaming_data_types/fbschemas/histogram_hs00/__init__.py similarity index 100% rename from streaming_data_types/fbschemas/hs00/__init__.py rename to streaming_data_types/fbschemas/histogram_hs00/__init__.py diff --git a/streaming_data_types/fbschemas/ns10/CacheEntry.py b/streaming_data_types/fbschemas/nicos_cache_ns10/CacheEntry.py similarity index 100% rename from streaming_data_types/fbschemas/ns10/CacheEntry.py rename to streaming_data_types/fbschemas/nicos_cache_ns10/CacheEntry.py diff --git a/streaming_data_types/fbschemas/ns10/__init__.py b/streaming_data_types/fbschemas/nicos_cache_ns10/__init__.py similarity index 100% rename from streaming_data_types/fbschemas/ns10/__init__.py rename to streaming_data_types/fbschemas/nicos_cache_ns10/__init__.py diff --git a/streaming_data_types/fbschemas/run_start_pl72/RunStart.py b/streaming_data_types/fbschemas/run_start_pl72/RunStart.py new file mode 100644 index 0000000..6a5192e --- /dev/null +++ b/streaming_data_types/fbschemas/run_start_pl72/RunStart.py @@ -0,0 +1,176 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers + + +class RunStart(object): + __slots__ = ["_tab"] + + @classmethod + def GetRootAsRunStart(cls, buf, offset): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = RunStart() + x.Init(buf, n + offset) + return x + + # RunStart + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # RunStart + def StartTime(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.Get( + 
flatbuffers.number_types.Uint64Flags, o + self._tab.Pos + ) + return 0 + + # RunStart + def StopTime(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.Get( + flatbuffers.number_types.Uint64Flags, o + self._tab.Pos + ) + return 0 + + # RunStart + def RunName(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + # RunStart + def InstrumentName(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + # RunStart + def NexusStructure(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + # RunStart + def JobId(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + # RunStart + def Broker(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + # RunStart + def ServiceId(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(18)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + # RunStart + def Filename(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(20)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + # RunStart + def NPeriods(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(22)) + if o != 0: + return self._tab.Get( + flatbuffers.number_types.Uint32Flags, o + self._tab.Pos + ) + return 1 + + # RunStart + def DetectorSpectrumMap(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(24)) + if o != 0: + x = self._tab.Indirect(o + self._tab.Pos) + from 
.SpectraDetectorMapping import SpectraDetectorMapping + + obj = SpectraDetectorMapping() + obj.Init(self._tab.Bytes, x) + return obj + return None + + +def RunStartStart(builder): + builder.StartObject(11) + + +def RunStartAddStartTime(builder, startTime): + builder.PrependUint64Slot(0, startTime, 0) + + +def RunStartAddStopTime(builder, stopTime): + builder.PrependUint64Slot(1, stopTime, 0) + + +def RunStartAddRunName(builder, runName): + builder.PrependUOffsetTRelativeSlot( + 2, flatbuffers.number_types.UOffsetTFlags.py_type(runName), 0 + ) + + +def RunStartAddInstrumentName(builder, instrumentName): + builder.PrependUOffsetTRelativeSlot( + 3, flatbuffers.number_types.UOffsetTFlags.py_type(instrumentName), 0 + ) + + +def RunStartAddNexusStructure(builder, nexusStructure): + builder.PrependUOffsetTRelativeSlot( + 4, flatbuffers.number_types.UOffsetTFlags.py_type(nexusStructure), 0 + ) + + +def RunStartAddJobId(builder, jobId): + builder.PrependUOffsetTRelativeSlot( + 5, flatbuffers.number_types.UOffsetTFlags.py_type(jobId), 0 + ) + + +def RunStartAddBroker(builder, broker): + builder.PrependUOffsetTRelativeSlot( + 6, flatbuffers.number_types.UOffsetTFlags.py_type(broker), 0 + ) + + +def RunStartAddServiceId(builder, serviceId): + builder.PrependUOffsetTRelativeSlot( + 7, flatbuffers.number_types.UOffsetTFlags.py_type(serviceId), 0 + ) + + +def RunStartAddFilename(builder, filename): + builder.PrependUOffsetTRelativeSlot( + 8, flatbuffers.number_types.UOffsetTFlags.py_type(filename), 0 + ) + + +def RunStartAddNPeriods(builder, nPeriods): + builder.PrependUint32Slot(9, nPeriods, 1) + + +def RunStartAddDetectorSpectrumMap(builder, detectorSpectrumMap): + builder.PrependUOffsetTRelativeSlot( + 10, flatbuffers.number_types.UOffsetTFlags.py_type(detectorSpectrumMap), 0 + ) + + +def RunStartEnd(builder): + return builder.EndObject() diff --git a/streaming_data_types/fbschemas/run_start_pl72/__init__.py b/streaming_data_types/fbschemas/run_start_pl72/__init__.py new file 
mode 100644 index 0000000..e69de29 diff --git a/streaming_data_types/fbschemas/run_stop_6s4t/RunStop.py b/streaming_data_types/fbschemas/run_stop_6s4t/RunStop.py new file mode 100644 index 0000000..d30d41d --- /dev/null +++ b/streaming_data_types/fbschemas/run_stop_6s4t/RunStop.py @@ -0,0 +1,80 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers + + +class RunStop(object): + __slots__ = ["_tab"] + + @classmethod + def GetRootAsRunStop(cls, buf, offset): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = RunStop() + x.Init(buf, n + offset) + return x + + # RunStop + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # RunStop + def StopTime(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.Get( + flatbuffers.number_types.Uint64Flags, o + self._tab.Pos + ) + return 0 + + # RunStop + def RunName(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + # RunStop + def JobId(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + # RunStop + def ServiceId(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + +def RunStopStart(builder): + builder.StartObject(4) + + +def RunStopAddStopTime(builder, stopTime): + builder.PrependUint64Slot(0, stopTime, 0) + + +def RunStopAddRunName(builder, runName): + builder.PrependUOffsetTRelativeSlot( + 1, flatbuffers.number_types.UOffsetTFlags.py_type(runName), 0 + ) + + +def RunStopAddJobId(builder, jobId): + builder.PrependUOffsetTRelativeSlot( + 2, flatbuffers.number_types.UOffsetTFlags.py_type(jobId), 0 + ) + + +def RunStopAddServiceId(builder, serviceId): + 
builder.PrependUOffsetTRelativeSlot( + 3, flatbuffers.number_types.UOffsetTFlags.py_type(serviceId), 0 + ) + + +def RunStopEnd(builder): + return builder.EndObject() diff --git a/streaming_data_types/fbschemas/run_stop_6s4t/__init__.py b/streaming_data_types/fbschemas/run_stop_6s4t/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/streaming_data_types/hs00.py b/streaming_data_types/histogram_hs00.py similarity index 94% rename from streaming_data_types/hs00.py rename to streaming_data_types/histogram_hs00.py index f4ee3f5..deac68c 100644 --- a/streaming_data_types/hs00.py +++ b/streaming_data_types/histogram_hs00.py @@ -1,10 +1,10 @@ from functools import reduce import operator import flatbuffers -import streaming_data_types.fbschemas.hs00.ArrayDouble as ArrayDouble -import streaming_data_types.fbschemas.hs00.DimensionMetaData as DimensionMetaData -import streaming_data_types.fbschemas.hs00.EventHistogram as EventHistogram -from streaming_data_types.fbschemas.hs00.Array import Array +import streaming_data_types.fbschemas.histogram_hs00.ArrayDouble as ArrayDouble +import streaming_data_types.fbschemas.histogram_hs00.DimensionMetaData as DimensionMetaData +import streaming_data_types.fbschemas.histogram_hs00.EventHistogram as EventHistogram +from streaming_data_types.fbschemas.histogram_hs00.Array import Array from streaming_data_types.utils import get_schema @@ -13,7 +13,7 @@ def deserialise_hs00(buf): """ - Convert flatbuffer into a histogram. + Deserialise flatbuffer hs10 into a histogram. 
:param buf: :return: dict of histogram information diff --git a/streaming_data_types/ns10.py b/streaming_data_types/nicos_cache_ns10.py similarity index 93% rename from streaming_data_types/ns10.py rename to streaming_data_types/nicos_cache_ns10.py index c381ff6..ecfc7b8 100644 --- a/streaming_data_types/ns10.py +++ b/streaming_data_types/nicos_cache_ns10.py @@ -1,5 +1,5 @@ import flatbuffers -from streaming_data_types.fbschemas.ns10 import CacheEntry +from streaming_data_types.fbschemas.nicos_cache_ns10 import CacheEntry from streaming_data_types.utils import get_schema @@ -25,7 +25,7 @@ def serialise_ns10( # Generate the output and replace the file_identifier buff = builder.Output() - buff[4:8] = b"ns10" + buff[4:8] = FILE_IDENTIFIER return buff diff --git a/streaming_data_types/run_start_pl72.py b/streaming_data_types/run_start_pl72.py new file mode 100644 index 0000000..61ad452 --- /dev/null +++ b/streaming_data_types/run_start_pl72.py @@ -0,0 +1,54 @@ +import time +from typing import Optional +import flatbuffers +from streaming_data_types.fbschemas.run_start_pl72 import RunStart + + +def serialise_pl72( + job_id: str, + filename: str, + start_time: Optional[int] = None, + stop_time: Optional[int] = None, + run_name: str = "test_run", + nexus_structure: str = "{}", + service_id: str = "", + instrument_name: str = "TEST", + broker: str = "localhost:9092", +) -> bytes: + builder = flatbuffers.Builder(136) + + if start_time is None: + start_time = int(time.time() * 1000) + if service_id is None: + service_id = "" + if stop_time is None: + stop_time = 0 + + service_id_offset = builder.CreateString(service_id) + broker_offset = builder.CreateString(broker) + job_id_offset = builder.CreateString(job_id) + nexus_structure_offset = builder.CreateString(nexus_structure) + instrument_name_offset = builder.CreateString(instrument_name) + run_name_offset = builder.CreateString(run_name) + filename_offset = builder.CreateString(filename) + + # Build the actual buffer + 
RunStart.RunStartStart(builder) + RunStart.RunStartAddServiceId(builder, service_id_offset) + RunStart.RunStartAddBroker(builder, broker_offset) + RunStart.RunStartAddJobId(builder, job_id_offset) + RunStart.RunStartAddNexusStructure(builder, nexus_structure_offset) + RunStart.RunStartAddInstrumentName(builder, instrument_name_offset) + RunStart.RunStartAddRunName(builder, run_name_offset) + RunStart.RunStartAddStopTime(builder, stop_time) + RunStart.RunStartAddStartTime(builder, start_time) + RunStart.RunStartAddFilename(builder, filename_offset) + RunStart.RunStartAddNPeriods(builder, 1) + + run_start_message = RunStart.RunStartEnd(builder) + builder.Finish(run_start_message) + + # Generate the output and replace the file_identifier + buff = builder.Output() + buff[4:8] = b"run_start_pl72" + return bytes(buff) diff --git a/streaming_data_types/run_stop_6s4t.py b/streaming_data_types/run_stop_6s4t.py new file mode 100644 index 0000000..d9db01d --- /dev/null +++ b/streaming_data_types/run_stop_6s4t.py @@ -0,0 +1,36 @@ +from typing import Optional +import flatbuffers +from streaming_data_types.fbschemas.run_stop_6s4t import RunStop + + +def serialise_6s4t( + job_id: str, + run_name: str = "test_run", + service_id: str = "", + stop_time: Optional[int] = None, +) -> bytes: + builder = flatbuffers.Builder(136) + + if service_id is None: + service_id = "" + if stop_time is None: + stop_time = 0 + + service_id_offset = builder.CreateString(service_id) + job_id_offset = builder.CreateString(job_id) + run_name_offset = builder.CreateString(run_name) + + # Build the actual buffer + RunStop.RunStopStart(builder) + RunStop.RunStopAddServiceId(builder, service_id_offset) + RunStop.RunStopAddJobId(builder, job_id_offset) + RunStop.RunStopAddRunName(builder, run_name_offset) + RunStop.RunStopAddStopTime(builder, stop_time) + + run_stop_message = RunStop.RunStopEnd(builder) + builder.Finish(run_stop_message) + + # Generate the output and replace the file_identifier + buff = 
builder.Output() + buff[4:8] = b"6s4t" + return bytes(buff) diff --git a/tests/test_hs00.py b/tests/test_hs00.py index 04f528d..25853bb 100644 --- a/tests/test_hs00.py +++ b/tests/test_hs00.py @@ -1,6 +1,6 @@ import numpy as np import pytest -from streaming_data_types.hs00 import serialise_hs00, deserialise_hs00 +from streaming_data_types.histogram_hs00 import serialise_hs00, deserialise_hs00 class TestSerialisationHs00: diff --git a/tests/test_ns10.py b/tests/test_ns10.py index 4df4bff..9025bb0 100644 --- a/tests/test_ns10.py +++ b/tests/test_ns10.py @@ -1,5 +1,5 @@ import pytest -from streaming_data_types.ns10 import serialise_ns10, deserialise_ns10 +from streaming_data_types.nicos_cache_ns10 import serialise_ns10, deserialise_ns10 class TestSerialisationNs10: From b04e67ae1f62f628de66f128d56d22f4b86acee4 Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Mon, 2 Mar 2020 11:42:13 +0100 Subject: [PATCH 030/363] oops! --- streaming_data_types/run_start_pl72.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/streaming_data_types/run_start_pl72.py b/streaming_data_types/run_start_pl72.py index 61ad452..2115ce5 100644 --- a/streaming_data_types/run_start_pl72.py +++ b/streaming_data_types/run_start_pl72.py @@ -50,5 +50,5 @@ def serialise_pl72( # Generate the output and replace the file_identifier buff = builder.Output() - buff[4:8] = b"run_start_pl72" + buff[4:8] = b"pl72" return bytes(buff) From 9acd8c11b5d3d3a0e16464e7554e64f1c0bb7ec8 Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Mon, 2 Mar 2020 14:21:10 +0100 Subject: [PATCH 031/363] WIP --- streaming_data_types/nicos_cache_ns10.py | 15 +++++--------- streaming_data_types/run_start_pl72.py | 26 +++++++++++++++++++++--- tests/test_ns10.py | 10 ++++----- tests/test_pl72.py | 7 +++++++ 4 files changed, 40 insertions(+), 18 deletions(-) create mode 100644 tests/test_pl72.py diff --git a/streaming_data_types/nicos_cache_ns10.py b/streaming_data_types/nicos_cache_ns10.py index ecfc7b8..f85b2c1 100644 
--- a/streaming_data_types/nicos_cache_ns10.py +++ b/streaming_data_types/nicos_cache_ns10.py @@ -1,3 +1,4 @@ +from collections import namedtuple import flatbuffers from streaming_data_types.fbschemas.nicos_cache_ns10 import CacheEntry from streaming_data_types.utils import get_schema @@ -39,18 +40,12 @@ def deserialise_ns10(buf): entry = CacheEntry.CacheEntry.GetRootAsCacheEntry(buf, 0) - key = entry.Key() if entry.Key() else "" + key = entry.Key() if entry.Key() else b"" time_stamp = entry.Time() ttl = entry.Ttl() if entry.Ttl() else 0 expired = entry.Expired() if entry.Expired() else False - value = entry.Value() if entry.Value() else "" + value = entry.Value() if entry.Value() else b"" - cache_entry = { - "key": key.decode("utf-8"), - "time_stamp": time_stamp, - "ttl": ttl, - "expired": expired, - "value": value.decode("utf-8"), - } + Entry = namedtuple("Entry", "key time_stamp ttl expired value") - return cache_entry + return Entry(key.decode(), time_stamp, ttl, expired, value.decode()) diff --git a/streaming_data_types/run_start_pl72.py b/streaming_data_types/run_start_pl72.py index 2115ce5..d42cc00 100644 --- a/streaming_data_types/run_start_pl72.py +++ b/streaming_data_types/run_start_pl72.py @@ -2,6 +2,10 @@ from typing import Optional import flatbuffers from streaming_data_types.fbschemas.run_start_pl72 import RunStart +from streaming_data_types.utils import get_schema + + +FILE_IDENTIFIER = b"pl72" def serialise_pl72( @@ -49,6 +53,22 @@ def serialise_pl72( builder.Finish(run_start_message) # Generate the output and replace the file_identifier - buff = builder.Output() - buff[4:8] = b"pl72" - return bytes(buff) + buffer = builder.Output() + buffer[4:8] = FILE_IDENTIFIER + return bytes(buffer) + + +def deserialise_pl72(buffer): + # Check schema is correct + if get_schema(buffer) != FILE_IDENTIFIER.decode(): + raise RuntimeError( + f"Incorrect schema: expected {FILE_IDENTIFIER} but got " + f"{get_schema(buffer)}" + ) + + # run_start = 
RunStart.RunStart.GetRootAsRunStart(buffer, 0) + # service_id = run_start.ServiceId() if run_start.ServiceId() else b"" + # broker = run_start.Broker() if run_start.Broker() else b"" + # job_id = + + # TODO: return a namedTuple like ns10? diff --git a/tests/test_ns10.py b/tests/test_ns10.py index 9025bb0..7892096 100644 --- a/tests/test_ns10.py +++ b/tests/test_ns10.py @@ -18,11 +18,11 @@ def test_serialises_and_deserialises_ns10_message_correctly(self): buf = serialise_ns10(**original_entry) entry = deserialise_ns10(buf) - assert entry["key"] == original_entry["key"] - assert entry["time_stamp"] == original_entry["time_stamp"] - assert entry["ttl"] == original_entry["ttl"] - assert entry["expired"] == original_entry["expired"] - assert entry["value"] == original_entry["value"] + assert entry.key == original_entry["key"] + assert entry.time_stamp == original_entry["time_stamp"] + assert entry.ttl == original_entry["ttl"] + assert entry.expired == original_entry["expired"] + assert entry.value == original_entry["value"] def test_if_buffer_has_wrong_id_then_throws(self): original_entry = { diff --git a/tests/test_pl72.py b/tests/test_pl72.py new file mode 100644 index 0000000..aadc3c4 --- /dev/null +++ b/tests/test_pl72.py @@ -0,0 +1,7 @@ +# import pytest +# from streaming_data_types.run_start_pl72 import serialise_pl72 + + +class TestSerialisationPl72: + def test_serialises_and_deserialises_pl72_message_correctly(self): + pass From 4813aa4a5f7e7cd3ff5928dd7a4b2ac9312bff42 Mon Sep 17 00:00:00 2001 From: Matthew D Jones Date: Mon, 2 Mar 2020 13:32:11 +0000 Subject: [PATCH 032/363] Refactor: extract method for checking schema id --- streaming_data_types/histogram_hs00.py | 8 ++------ streaming_data_types/nicos_cache_ns10.py | 8 ++------ streaming_data_types/run_start_pl72.py | 9 ++------- streaming_data_types/utils.py | 21 +++++++++++++++++---- 4 files changed, 23 insertions(+), 23 deletions(-) diff --git a/streaming_data_types/histogram_hs00.py 
b/streaming_data_types/histogram_hs00.py index deac68c..1af8b8c 100644 --- a/streaming_data_types/histogram_hs00.py +++ b/streaming_data_types/histogram_hs00.py @@ -5,7 +5,7 @@ import streaming_data_types.fbschemas.histogram_hs00.DimensionMetaData as DimensionMetaData import streaming_data_types.fbschemas.histogram_hs00.EventHistogram as EventHistogram from streaming_data_types.fbschemas.histogram_hs00.Array import Array -from streaming_data_types.utils import get_schema +from streaming_data_types.utils import check_schema_identifier FILE_IDENTIFIER = b"hs00" @@ -18,11 +18,7 @@ def deserialise_hs00(buf): :param buf: :return: dict of histogram information """ - # Check schema is correct - if get_schema(buf) != FILE_IDENTIFIER.decode(): - raise RuntimeError( - f"Incorrect schema: expected {FILE_IDENTIFIER} but got {get_schema(buf)}" - ) + check_schema_identifier(buf, FILE_IDENTIFIER) event_hist = EventHistogram.EventHistogram.GetRootAsEventHistogram(buf, 0) diff --git a/streaming_data_types/nicos_cache_ns10.py b/streaming_data_types/nicos_cache_ns10.py index f85b2c1..72866cb 100644 --- a/streaming_data_types/nicos_cache_ns10.py +++ b/streaming_data_types/nicos_cache_ns10.py @@ -1,7 +1,7 @@ from collections import namedtuple import flatbuffers from streaming_data_types.fbschemas.nicos_cache_ns10 import CacheEntry -from streaming_data_types.utils import get_schema +from streaming_data_types.utils import check_schema_identifier FILE_IDENTIFIER = b"ns10" @@ -32,11 +32,7 @@ def serialise_ns10( def deserialise_ns10(buf): - # Check schema is correct - if get_schema(buf) != FILE_IDENTIFIER.decode(): - raise RuntimeError( - f"Incorrect schema: expected {FILE_IDENTIFIER} but got {get_schema(buf)}" - ) + check_schema_identifier(buf, FILE_IDENTIFIER) entry = CacheEntry.CacheEntry.GetRootAsCacheEntry(buf, 0) diff --git a/streaming_data_types/run_start_pl72.py b/streaming_data_types/run_start_pl72.py index d42cc00..a40c516 100644 --- a/streaming_data_types/run_start_pl72.py +++ 
b/streaming_data_types/run_start_pl72.py @@ -2,7 +2,7 @@ from typing import Optional import flatbuffers from streaming_data_types.fbschemas.run_start_pl72 import RunStart -from streaming_data_types.utils import get_schema +from streaming_data_types.utils import check_schema_identifier FILE_IDENTIFIER = b"pl72" @@ -59,12 +59,7 @@ def serialise_pl72( def deserialise_pl72(buffer): - # Check schema is correct - if get_schema(buffer) != FILE_IDENTIFIER.decode(): - raise RuntimeError( - f"Incorrect schema: expected {FILE_IDENTIFIER} but got " - f"{get_schema(buffer)}" - ) + check_schema_identifier(buffer, FILE_IDENTIFIER) # run_start = RunStart.RunStart.GetRootAsRunStart(buffer, 0) # service_id = run_start.ServiceId() if run_start.ServiceId() else b"" diff --git a/streaming_data_types/utils.py b/streaming_data_types/utils.py index b56a23d..c56d312 100644 --- a/streaming_data_types/utils.py +++ b/streaming_data_types/utils.py @@ -1,8 +1,21 @@ -def get_schema(buf): +def _get_schema(buffer) -> str: """ Extract the schema code embedded in the buffer - :param buf: The raw buffer of the FlatBuffers message. - :return: The schema name + :param buffer: The raw buffer of the FlatBuffers message. 
+ :return: The schema identifier """ - return buf[4:8].decode("utf-8") + return buffer[4:8].decode("utf-8") + + +def check_schema_identifier(buffer, expected_identifer: bytes): + """ + Check the schema code embedded in the buffer matches an expected identifier + + :param buffer: The raw buffer of the FlatBuffers message + :param expected_identifer: The expected flatbuffer identifier + """ + if _get_schema(buffer) != expected_identifer.decode(): + raise RuntimeError( + f"Incorrect schema: expected {expected_identifer} but got {_get_schema(buffer)}" + ) From e7fc3d6886a7b09ac9425490ef138419fb733dd8 Mon Sep 17 00:00:00 2001 From: Matthew D Jones Date: Mon, 2 Mar 2020 14:01:34 +0000 Subject: [PATCH 033/363] Add deserialisation for run stop --- streaming_data_types/run_stop_6s4t.py | 19 ++++++++++++++++++- 1 file changed, 18 insertions(+), 1 deletion(-) diff --git a/streaming_data_types/run_stop_6s4t.py b/streaming_data_types/run_stop_6s4t.py index d9db01d..9baeb1b 100644 --- a/streaming_data_types/run_stop_6s4t.py +++ b/streaming_data_types/run_stop_6s4t.py @@ -1,6 +1,10 @@ from typing import Optional import flatbuffers from streaming_data_types.fbschemas.run_stop_6s4t import RunStop +from streaming_data_types.utils import check_schema_identifier +from collections import namedtuple + +FILE_IDENTIFIER = b"6s4t" def serialise_6s4t( @@ -32,5 +36,18 @@ def serialise_6s4t( # Generate the output and replace the file_identifier buff = builder.Output() - buff[4:8] = b"6s4t" + buff[4:8] = FILE_IDENTIFIER return bytes(buff) + + +def deserialise_6s4t(buffer): + check_schema_identifier(buffer, FILE_IDENTIFIER) + + run_stop = RunStop.RunStop.GetRootAsRunStop(buffer, 0) + service_id = run_stop.ServiceId() if run_stop.ServiceId() else b"" + job_id = run_stop.JobId() if run_stop.JobId() else b"" + run_name = run_stop.RunName() if run_stop.RunName() else b"" + stop_time = run_stop.StopTime() + + RunStopInfo = namedtuple("RunStopInfo", "stop_time run_name job_id service_id") + return 
RunStopInfo(stop_time, run_name, job_id, service_id) From 95e6420a11b0422c1f8e5261aa2756d6a4187d54 Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Tue, 3 Mar 2020 09:54:51 +0000 Subject: [PATCH 034/363] Removed unused variable --- Jenkinsfile | 2 -- 1 file changed, 2 deletions(-) diff --git a/Jenkinsfile b/Jenkinsfile index de920b5..65b4923 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -4,8 +4,6 @@ import ecdcpipeline.PipelineBuilder project = "python-streaming-data-types" -python_version = "3.8" - container_build_nodes = [ 'centos7-release': ContainerBuildNode.getDefaultContainerBuildNode('centos7'), ] From 75b9f9a3306f5885236f12df185fb126e28f1d5e Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Tue, 3 Mar 2020 13:05:32 +0000 Subject: [PATCH 035/363] Use conda in image --- Jenkinsfile | 12 +----------- 1 file changed, 1 insertion(+), 11 deletions(-) diff --git a/Jenkinsfile b/Jenkinsfile index 65b4923..b700a86 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -35,20 +35,10 @@ builders = pipeline_builder.createBuilders { container -> container.copyTo(pipeline_builder.project, pipeline_builder.project) } // stage - pipeline_builder.stage("${container.key}: Conda") { - container.sh """ - curl https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh --output miniconda.sh - sh miniconda.sh -b -p /home/jenkins/miniconda - /home/jenkins/miniconda/bin/conda update -n base -c defaults conda -y - /home/jenkins/miniconda/bin/conda init bash - export PYTHONPATH= - """ - } // stage - pipeline_builder.stage("${container.key}: Dependencies") { container.sh """ export PYTHONPATH= - export PATH=/home/jenkins/miniconda/bin:$PATH + export PATH=/opt/miniconda/bin:$PATH python --version python -m pip install --user -r ${project}/requirements.txt python -m pip install --user -r ${project}/requirements-dev.txt From 54f68cffb126c03a3447e4b684616c8ad618c364 Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Tue, 3 Mar 2020 13:09:54 +0000 Subject: [PATCH 036/363] 2nd attempt --- 
Jenkinsfile | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/Jenkinsfile b/Jenkinsfile index b700a86..d5fa2c3 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -39,9 +39,9 @@ builders = pipeline_builder.createBuilders { container -> container.sh """ export PYTHONPATH= export PATH=/opt/miniconda/bin:$PATH - python --version - python -m pip install --user -r ${project}/requirements.txt - python -m pip install --user -r ${project}/requirements-dev.txt + /opt/miniconda/bin/python --version + /opt/miniconda/bin/python -m pip install --user -r ${project}/requirements.txt + /opt/miniconda/bin/python -m pip install --user -r ${project}/requirements-dev.txt """ } // stage @@ -51,7 +51,7 @@ builders = pipeline_builder.createBuilders { container -> export PYTHONPATH= export PATH=/home/jenkins/miniconda/bin:$PATH cd ${project} - python -m tox -- --junitxml=${test_output} + /opt/miniconda/bin/python -m tox -- --junitxml=${test_output} """ container.copyFrom("${project}/${test_output}", ".") xunit thresholds: [failed(unstableThreshold: '0')], tools: [JUnit(deleteOutputFiles: true, pattern: '*.xml', skipNoTestFiles: false, stopProcessingIfError: true)] From 9031a83cf5beb5f3efb4042403b5ff965722bf04 Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Tue, 3 Mar 2020 13:12:47 +0000 Subject: [PATCH 037/363] Update Jenkinsfile --- Jenkinsfile | 1 + 1 file changed, 1 insertion(+) diff --git a/Jenkinsfile b/Jenkinsfile index d5fa2c3..8a0b77e 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -37,6 +37,7 @@ builders = pipeline_builder.createBuilders { container -> pipeline_builder.stage("${container.key}: Dependencies") { container.sh """ + /opt/miniconda/bin/conda init bash export PYTHONPATH= export PATH=/opt/miniconda/bin:$PATH /opt/miniconda/bin/python --version From 31f1566ba3e980be91a4afb4d48f84e877ba61f3 Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Tue, 3 Mar 2020 13:38:04 +0000 Subject: [PATCH 038/363] Changed image name --- Jenkinsfile | 2 +- 1 file changed, 1 
insertion(+), 1 deletion(-) diff --git a/Jenkinsfile b/Jenkinsfile index 8a0b77e..f5cda27 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -5,7 +5,7 @@ import ecdcpipeline.PipelineBuilder project = "python-streaming-data-types" container_build_nodes = [ - 'centos7-release': ContainerBuildNode.getDefaultContainerBuildNode('centos7'), + 'centos7-release': ContainerBuildNode.getDefaultContainerBuildNode('centos7-gcc8'), ] // Define number of old builds to keep. From 74eeb10f4f89df8888d856d8a8593224e5414ce6 Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Tue, 3 Mar 2020 13:46:50 +0000 Subject: [PATCH 039/363] Update Jenkinsfile --- Jenkinsfile | 1 - 1 file changed, 1 deletion(-) diff --git a/Jenkinsfile b/Jenkinsfile index f5cda27..4777df3 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -23,7 +23,6 @@ properties([[ ] ]]); - pipeline_builder = new PipelineBuilder(this, container_build_nodes) pipeline_builder.activateEmailFailureNotifications() From 36b3277aedab4dc3b2a4ed711540c0e68683763a Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Tue, 3 Mar 2020 13:54:13 +0000 Subject: [PATCH 040/363] Update Jenkinsfile --- Jenkinsfile | 1 + 1 file changed, 1 insertion(+) diff --git a/Jenkinsfile b/Jenkinsfile index 4777df3..e1a5e9b 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -48,6 +48,7 @@ builders = pipeline_builder.createBuilders { container -> pipeline_builder.stage("${container.key}: Test") { def test_output = "TestResults.xml" container.sh """ + /opt/miniconda/bin/conda init bash export PYTHONPATH= export PATH=/home/jenkins/miniconda/bin:$PATH cd ${project} From 36199c49e3658e9821cac57aa5438e9fd71a1a43 Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Tue, 3 Mar 2020 13:55:49 +0000 Subject: [PATCH 041/363] D'oh! 
--- Jenkinsfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Jenkinsfile b/Jenkinsfile index e1a5e9b..4cbc42b 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -50,7 +50,7 @@ builders = pipeline_builder.createBuilders { container -> container.sh """ /opt/miniconda/bin/conda init bash export PYTHONPATH= - export PATH=/home/jenkins/miniconda/bin:$PATH + export PATH=/opt/miniconda/bin:$PATH cd ${project} /opt/miniconda/bin/python -m tox -- --junitxml=${test_output} """ From 26cba89c29ea99387ee3296651976de7c4642a04 Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Tue, 3 Mar 2020 13:59:04 +0000 Subject: [PATCH 042/363] test --- Jenkinsfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Jenkinsfile b/Jenkinsfile index 4cbc42b..fb49c03 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -39,7 +39,7 @@ builders = pipeline_builder.createBuilders { container -> /opt/miniconda/bin/conda init bash export PYTHONPATH= export PATH=/opt/miniconda/bin:$PATH - /opt/miniconda/bin/python --version + bin/python --version /opt/miniconda/bin/python -m pip install --user -r ${project}/requirements.txt /opt/miniconda/bin/python -m pip install --user -r ${project}/requirements-dev.txt """ From 3bb4b336755980c5fe0aea9247aef49845bcb46e Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Tue, 3 Mar 2020 14:02:19 +0000 Subject: [PATCH 043/363] Update Jenkinsfile --- Jenkinsfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Jenkinsfile b/Jenkinsfile index fb49c03..d5c8fda 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -39,7 +39,7 @@ builders = pipeline_builder.createBuilders { container -> /opt/miniconda/bin/conda init bash export PYTHONPATH= export PATH=/opt/miniconda/bin:$PATH - bin/python --version + python --version /opt/miniconda/bin/python -m pip install --user -r ${project}/requirements.txt /opt/miniconda/bin/python -m pip install --user -r ${project}/requirements-dev.txt """ From 01b45d564ab1fed4a5d8d4cf5d97416f59255bc2 Mon Sep 17 00:00:00 
2001 From: Matt Clarke Date: Tue, 3 Mar 2020 14:08:29 +0000 Subject: [PATCH 044/363] Can we get away with just this? --- Jenkinsfile | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/Jenkinsfile b/Jenkinsfile index d5c8fda..50ce765 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -40,19 +40,16 @@ builders = pipeline_builder.createBuilders { container -> export PYTHONPATH= export PATH=/opt/miniconda/bin:$PATH python --version - /opt/miniconda/bin/python -m pip install --user -r ${project}/requirements.txt - /opt/miniconda/bin/python -m pip install --user -r ${project}/requirements-dev.txt + python -m pip install --user -r ${project}/requirements.txt + python -m pip install --user -r ${project}/requirements-dev.txt """ } // stage pipeline_builder.stage("${container.key}: Test") { def test_output = "TestResults.xml" container.sh """ - /opt/miniconda/bin/conda init bash - export PYTHONPATH= - export PATH=/opt/miniconda/bin:$PATH cd ${project} - /opt/miniconda/bin/python -m tox -- --junitxml=${test_output} + python -m tox -- --junitxml=${test_output} """ container.copyFrom("${project}/${test_output}", ".") xunit thresholds: [failed(unstableThreshold: '0')], tools: [JUnit(deleteOutputFiles: true, pattern: '*.xml', skipNoTestFiles: false, stopProcessingIfError: true)] From b31c70ff85080b54121bfe8de6b62f5f419b0a0b Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Tue, 3 Mar 2020 14:12:51 +0000 Subject: [PATCH 045/363] revert --- Jenkinsfile | 1 + 1 file changed, 1 insertion(+) diff --git a/Jenkinsfile b/Jenkinsfile index 50ce765..42be46e 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -48,6 +48,7 @@ builders = pipeline_builder.createBuilders { container -> pipeline_builder.stage("${container.key}: Test") { def test_output = "TestResults.xml" container.sh """ + export PATH=/opt/miniconda/bin:$PATH cd ${project} python -m tox -- --junitxml=${test_output} """ From 5ce98141f5ecab3fc567a048d669f2d2f6a25b6e Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Tue, 
3 Mar 2020 14:17:53 +0000 Subject: [PATCH 046/363] Update Jenkinsfile --- Jenkinsfile | 1 - 1 file changed, 1 deletion(-) diff --git a/Jenkinsfile b/Jenkinsfile index 42be46e..e1b5721 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -37,7 +37,6 @@ builders = pipeline_builder.createBuilders { container -> pipeline_builder.stage("${container.key}: Dependencies") { container.sh """ /opt/miniconda/bin/conda init bash - export PYTHONPATH= export PATH=/opt/miniconda/bin:$PATH python --version python -m pip install --user -r ${project}/requirements.txt From eec38c95941a795c2b27bb4170a78d730e6caf0a Mon Sep 17 00:00:00 2001 From: Matthew D Jones Date: Thu, 5 Mar 2020 12:26:20 +0000 Subject: [PATCH 047/363] Add deserialisation for run start --- streaming_data_types/run_start_pl72.py | 34 ++++++++++++++++++++------ streaming_data_types/run_stop_6s4t.py | 4 +-- 2 files changed, 28 insertions(+), 10 deletions(-) diff --git a/streaming_data_types/run_start_pl72.py b/streaming_data_types/run_start_pl72.py index a40c516..62d6ae3 100644 --- a/streaming_data_types/run_start_pl72.py +++ b/streaming_data_types/run_start_pl72.py @@ -1,9 +1,9 @@ import time -from typing import Optional +from typing import Optional, NamedTuple import flatbuffers from streaming_data_types.fbschemas.run_start_pl72 import RunStart from streaming_data_types.utils import check_schema_identifier - +from collections import namedtuple FILE_IDENTIFIER = b"pl72" @@ -58,12 +58,30 @@ def serialise_pl72( return bytes(buffer) -def deserialise_pl72(buffer): +def deserialise_pl72(buffer: bytes) -> NamedTuple: check_schema_identifier(buffer, FILE_IDENTIFIER) - # run_start = RunStart.RunStart.GetRootAsRunStart(buffer, 0) - # service_id = run_start.ServiceId() if run_start.ServiceId() else b"" - # broker = run_start.Broker() if run_start.Broker() else b"" - # job_id = + run_start = RunStart.RunStart.GetRootAsRunStart(buffer, 0) + service_id = run_start.ServiceId() if run_start.ServiceId() else b"" + broker = 
run_start.Broker() if run_start.Broker() else b"" + job_id = run_start.JobId() if run_start.JobId() else b"" + filename = run_start.Filename() if run_start.Filename() else b"" + nexus_structure = run_start.NexusStructure() if run_start.NexusStructure() else b"" + instrument_name = run_start.InstrumentName() if run_start.InstrumentName() else b"" + run_name = run_start.RunName() if run_start.RunName() else b"" - # TODO: return a namedTuple like ns10? + RunStartInfo = namedtuple( + "RunStartInfo", + "job_id filename start_time stop_time run_name nexus_structure service_id instrument_name broker", + ) + return RunStartInfo( + job_id, + filename, + run_start.StartTime(), + run_start.StopTime(), + run_name, + nexus_structure, + service_id, + instrument_name, + broker, + ) diff --git a/streaming_data_types/run_stop_6s4t.py b/streaming_data_types/run_stop_6s4t.py index 9baeb1b..e4d6055 100644 --- a/streaming_data_types/run_stop_6s4t.py +++ b/streaming_data_types/run_stop_6s4t.py @@ -1,4 +1,4 @@ -from typing import Optional +from typing import Optional, NamedTuple import flatbuffers from streaming_data_types.fbschemas.run_stop_6s4t import RunStop from streaming_data_types.utils import check_schema_identifier @@ -40,7 +40,7 @@ def serialise_6s4t( return bytes(buff) -def deserialise_6s4t(buffer): +def deserialise_6s4t(buffer: bytes) -> NamedTuple: check_schema_identifier(buffer, FILE_IDENTIFIER) run_stop = RunStop.RunStop.GetRootAsRunStop(buffer, 0) From ec5a9374d1314a717ba04db81725c37ebf914faa Mon Sep 17 00:00:00 2001 From: Matthew D Jones Date: Thu, 5 Mar 2020 13:40:47 +0000 Subject: [PATCH 048/363] Add tests for pl72 --- streaming_data_types/run_start_pl72.py | 20 ++++++------ streaming_data_types/run_stop_6s4t.py | 10 +++--- tests/test_pl72.py | 42 ++++++++++++++++++++++++-- 3 files changed, 55 insertions(+), 17 deletions(-) diff --git a/streaming_data_types/run_start_pl72.py b/streaming_data_types/run_start_pl72.py index 62d6ae3..732512f 100644 --- 
a/streaming_data_types/run_start_pl72.py +++ b/streaming_data_types/run_start_pl72.py @@ -18,7 +18,7 @@ def serialise_pl72( service_id: str = "", instrument_name: str = "TEST", broker: str = "localhost:9092", -) -> bytes: +) -> bytearray: builder = flatbuffers.Builder(136) if start_time is None: @@ -55,10 +55,10 @@ def serialise_pl72( # Generate the output and replace the file_identifier buffer = builder.Output() buffer[4:8] = FILE_IDENTIFIER - return bytes(buffer) + return buffer -def deserialise_pl72(buffer: bytes) -> NamedTuple: +def deserialise_pl72(buffer: bytearray) -> NamedTuple: check_schema_identifier(buffer, FILE_IDENTIFIER) run_start = RunStart.RunStart.GetRootAsRunStart(buffer, 0) @@ -75,13 +75,13 @@ def deserialise_pl72(buffer: bytes) -> NamedTuple: "job_id filename start_time stop_time run_name nexus_structure service_id instrument_name broker", ) return RunStartInfo( - job_id, - filename, + job_id.decode(), + filename.decode(), run_start.StartTime(), run_start.StopTime(), - run_name, - nexus_structure, - service_id, - instrument_name, - broker, + run_name.decode(), + nexus_structure.decode(), + service_id.decode(), + instrument_name.decode(), + broker.decode(), ) diff --git a/streaming_data_types/run_stop_6s4t.py b/streaming_data_types/run_stop_6s4t.py index e4d6055..f26da3e 100644 --- a/streaming_data_types/run_stop_6s4t.py +++ b/streaming_data_types/run_stop_6s4t.py @@ -12,7 +12,7 @@ def serialise_6s4t( run_name: str = "test_run", service_id: str = "", stop_time: Optional[int] = None, -) -> bytes: +) -> bytearray: builder = flatbuffers.Builder(136) if service_id is None: @@ -37,10 +37,10 @@ def serialise_6s4t( # Generate the output and replace the file_identifier buff = builder.Output() buff[4:8] = FILE_IDENTIFIER - return bytes(buff) + return buff -def deserialise_6s4t(buffer: bytes) -> NamedTuple: +def deserialise_6s4t(buffer: bytearray) -> NamedTuple: check_schema_identifier(buffer, FILE_IDENTIFIER) run_stop = 
RunStop.RunStop.GetRootAsRunStop(buffer, 0) @@ -50,4 +50,6 @@ def deserialise_6s4t(buffer: bytes) -> NamedTuple: stop_time = run_stop.StopTime() RunStopInfo = namedtuple("RunStopInfo", "stop_time run_name job_id service_id") - return RunStopInfo(stop_time, run_name, job_id, service_id) + return RunStopInfo( + stop_time, run_name.decode(), job_id.decode(), service_id.decode() + ) diff --git a/tests/test_pl72.py b/tests/test_pl72.py index aadc3c4..f139eea 100644 --- a/tests/test_pl72.py +++ b/tests/test_pl72.py @@ -1,7 +1,43 @@ -# import pytest -# from streaming_data_types.run_start_pl72 import serialise_pl72 +import pytest +from streaming_data_types.run_start_pl72 import serialise_pl72, deserialise_pl72 class TestSerialisationPl72: + original_entry = { + "job_id": "some_key", + "filename": "test_file.nxs", + "start_time": 567890, + "stop_time": 578214, + "run_name": "test_run", + "nexus_structure": "{}", + "service_id": "filewriter1", + "instrument_name": "LOKI", + "broker": "localhost:9092", + } + def test_serialises_and_deserialises_pl72_message_correctly(self): - pass + buf = serialise_pl72(**self.original_entry) + deserialised_tuple = deserialise_pl72(buf) + + assert deserialised_tuple.job_id == self.original_entry["job_id"] + assert deserialised_tuple.filename == self.original_entry["filename"] + assert deserialised_tuple.start_time == self.original_entry["start_time"] + assert deserialised_tuple.stop_time == self.original_entry["stop_time"] + assert deserialised_tuple.run_name == self.original_entry["run_name"] + assert ( + deserialised_tuple.nexus_structure == self.original_entry["nexus_structure"] + ) + assert deserialised_tuple.service_id == self.original_entry["service_id"] + assert ( + deserialised_tuple.instrument_name == self.original_entry["instrument_name"] + ) + assert deserialised_tuple.broker == self.original_entry["broker"] + + def test_if_buffer_has_wrong_id_then_throws(self): + buf = serialise_pl72(**self.original_entry) + + # Manually hack the 
id + buf[4:8] = b"1234" + + with pytest.raises(RuntimeError): + deserialise_pl72(buf) From 4efed019142e0b7396cd55e5f439d3f101a17a6c Mon Sep 17 00:00:00 2001 From: Matthew D Jones Date: Thu, 5 Mar 2020 13:44:06 +0000 Subject: [PATCH 049/363] Add tests for 6s4t --- tests/test_6s4t.py | 29 +++++++++++++++++++++++++++++ 1 file changed, 29 insertions(+) create mode 100644 tests/test_6s4t.py diff --git a/tests/test_6s4t.py b/tests/test_6s4t.py new file mode 100644 index 0000000..4e68c5b --- /dev/null +++ b/tests/test_6s4t.py @@ -0,0 +1,29 @@ +import pytest +from streaming_data_types.run_stop_6s4t import serialise_6s4t, deserialise_6s4t + + +class TestSerialisation6s4t: + original_entry = { + "job_id": "some_key", + "stop_time": 578214, + "run_name": "test_run", + "service_id": "filewriter1", + } + + def test_serialises_and_deserialises_6s4t_message_correctly(self): + buf = serialise_6s4t(**self.original_entry) + deserialised_tuple = deserialise_6s4t(buf) + + assert deserialised_tuple.job_id == self.original_entry["job_id"] + assert deserialised_tuple.stop_time == self.original_entry["stop_time"] + assert deserialised_tuple.run_name == self.original_entry["run_name"] + assert deserialised_tuple.service_id == self.original_entry["service_id"] + + def test_if_buffer_has_wrong_id_then_throws(self): + buf = serialise_6s4t(**self.original_entry) + + # Manually hack the id + buf[4:8] = b"1234" + + with pytest.raises(RuntimeError): + deserialise_6s4t(buf) From 160cdf6380cda7ea511433b8f18fa058de082567 Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Fri, 6 Mar 2020 07:59:49 +0100 Subject: [PATCH 050/363] Copied the naming conventions from MDJ's code --- streaming_data_types/histogram_hs00.py | 98 ++++++++++++------------ streaming_data_types/nicos_cache_ns10.py | 24 +++--- streaming_data_types/run_stop_6s4t.py | 6 +- 3 files changed, 64 insertions(+), 64 deletions(-) diff --git a/streaming_data_types/histogram_hs00.py b/streaming_data_types/histogram_hs00.py index 
1af8b8c..d058f18 100644 --- a/streaming_data_types/histogram_hs00.py +++ b/streaming_data_types/histogram_hs00.py @@ -11,25 +11,24 @@ FILE_IDENTIFIER = b"hs00" -def deserialise_hs00(buf): +def deserialise_hs00(buffer): """ Deserialise flatbuffer hs10 into a histogram. - :param buf: + :param buffer: :return: dict of histogram information """ - check_schema_identifier(buf, FILE_IDENTIFIER) - - event_hist = EventHistogram.EventHistogram.GetRootAsEventHistogram(buf, 0) + check_schema_identifier(buffer, FILE_IDENTIFIER) + event_hist = EventHistogram.EventHistogram.GetRootAsEventHistogram(buffer, 0) dims = [] for i in range(event_hist.DimMetadataLength()): - bins_fb = event_hist.DimMetadata(i).BinBoundaries() + bins_offset = event_hist.DimMetadata(i).BinBoundaries() # Get bins - temp = ArrayDouble.ArrayDouble() - temp.Init(bins_fb.Bytes, bins_fb.Pos) - bins = temp.ValueAsNumpy() + bins_fb = ArrayDouble.ArrayDouble() + bins_fb.Init(bins_offset.Bytes, bins_offset.Pos) + bin_boundaries = bins_fb.ValueAsNumpy() # Check type if event_hist.DimMetadata(i).BinBoundariesType() != Array.ArrayDouble: @@ -37,7 +36,7 @@ def deserialise_hs00(buf): hist_info = { "length": event_hist.DimMetadata(i).Length(), - "bin_boundaries": bins, + "bin_boundaries": bin_boundaries, "unit": event_hist.DimMetadata(i).Unit().decode("utf-8"), "label": event_hist.DimMetadata(i).Label().decode("utf-8"), } @@ -49,18 +48,18 @@ def deserialise_hs00(buf): if event_hist.DataType() != Array.ArrayDouble: raise TypeError("Type of the data array is incorrect") - data_fb = event_hist.Data() - temp = ArrayDouble.ArrayDouble() - temp.Init(data_fb.Bytes, data_fb.Pos) + data_offset = event_hist.Data() + data_fb = ArrayDouble.ArrayDouble() + data_fb.Init(data_offset.Bytes, data_offset.Pos) shape = event_hist.CurrentShapeAsNumpy().tolist() - data = temp.ValueAsNumpy().reshape(shape) + data = data_fb.ValueAsNumpy().reshape(shape) # Get the errors - errors_fb = event_hist.Errors() - if errors_fb: - temp = 
ArrayDouble.ArrayDouble() - temp.Init(errors_fb.Bytes, errors_fb.Pos) - errors = temp.ValueAsNumpy().reshape(shape) + errors_offset = event_hist.Errors() + if errors_offset: + errors_fb = ArrayDouble.ArrayDouble() + errors_fb.Init(errors_offset.Bytes, errors_offset.Pos) + errors = errors_fb.ValueAsNumpy().reshape(shape) else: errors = [] @@ -78,25 +77,25 @@ def deserialise_hs00(buf): def _serialise_metadata(builder, length, edges, unit, label): - unit_encoded = builder.CreateString(unit) - label_encoded = builder.CreateString(label) + unit_offset = builder.CreateString(unit) + label_offset = builder.CreateString(label) ArrayDouble.ArrayDoubleStartValueVector(builder, len(edges)) # FlatBuffers builds arrays backwards for x in reversed(edges): builder.PrependFloat64(x) - bins = builder.EndVector(len(edges)) + bins_vector = builder.EndVector(len(edges)) # Add the bins ArrayDouble.ArrayDoubleStart(builder) - ArrayDouble.ArrayDoubleAddValue(builder, bins) - pos_bin = ArrayDouble.ArrayDoubleEnd(builder) + ArrayDouble.ArrayDoubleAddValue(builder, bins_vector) + bins_offset = ArrayDouble.ArrayDoubleEnd(builder) DimensionMetaData.DimensionMetaDataStart(builder) DimensionMetaData.DimensionMetaDataAddLength(builder, length) - DimensionMetaData.DimensionMetaDataAddBinBoundaries(builder, pos_bin) + DimensionMetaData.DimensionMetaDataAddBinBoundaries(builder, bins_offset) DimensionMetaData.DimensionMetaDataAddBinBoundariesType(builder, Array.ArrayDouble) - DimensionMetaData.DimensionMetaDataAddLabel(builder, label_encoded) - DimensionMetaData.DimensionMetaDataAddUnit(builder, unit_encoded) + DimensionMetaData.DimensionMetaDataAddLabel(builder, label_offset) + DimensionMetaData.DimensionMetaDataAddUnit(builder, unit_offset) return DimensionMetaData.DimensionMetaDataEnd(builder) @@ -106,14 +105,14 @@ def serialise_hs00(histogram): :param histogram: A dictionary containing the histogram to serialise. 
""" - source = None - info = None + source_offset = None + info_offset = None builder = flatbuffers.Builder(1024) if "source" in histogram: - source = builder.CreateString(histogram["source"]) + source_offset = builder.CreateString(histogram["source"]) if "info" in histogram: - info = builder.CreateString(histogram["info"]) + info_offset = builder.CreateString(histogram["info"]) # Build shape array rank = len(histogram["current_shape"]) @@ -121,7 +120,7 @@ def serialise_hs00(histogram): # FlatBuffers builds arrays backwards for s in reversed(histogram["current_shape"]): builder.PrependUint32(s) - shape = builder.EndVector(rank) + shape_offset = builder.EndVector(rank) # Build dimensions metadata metadata = [] @@ -147,11 +146,12 @@ def serialise_hs00(histogram): # FlatBuffers builds arrays backwards for x in reversed(histogram["data"].flatten()): builder.PrependFloat64(x) - data = builder.EndVector(data_len) + data_vector = builder.EndVector(data_len) ArrayDouble.ArrayDoubleStart(builder) - ArrayDouble.ArrayDoubleAddValue(builder, data) - pos_data = ArrayDouble.ArrayDoubleEnd(builder) + ArrayDouble.ArrayDoubleAddValue(builder, data_vector) + data_offset = ArrayDouble.ArrayDoubleEnd(builder) + errors_offset = None if "errors" in histogram: ArrayDouble.ArrayDoubleStartValueVector(builder, data_len) for x in reversed(histogram["errors"].flatten()): @@ -159,30 +159,30 @@ def serialise_hs00(histogram): errors = builder.EndVector(data_len) ArrayDouble.ArrayDoubleStart(builder) ArrayDouble.ArrayDoubleAddValue(builder, errors) - pos_errors = ArrayDouble.ArrayDoubleEnd(builder) + errors_offset = ArrayDouble.ArrayDoubleEnd(builder) # Build the actual buffer EventHistogram.EventHistogramStart(builder) - if info: - EventHistogram.EventHistogramAddInfo(builder, info) - EventHistogram.EventHistogramAddData(builder, pos_data) - EventHistogram.EventHistogramAddCurrentShape(builder, shape) + if info_offset: + EventHistogram.EventHistogramAddInfo(builder, info_offset) + 
EventHistogram.EventHistogramAddData(builder, data_offset) + EventHistogram.EventHistogramAddCurrentShape(builder, shape_offset) EventHistogram.EventHistogramAddDimMetadata(builder, metadata_vector) EventHistogram.EventHistogramAddTimestamp(builder, histogram["timestamp"]) - if source: - EventHistogram.EventHistogramAddSource(builder, source) + if source_offset: + EventHistogram.EventHistogramAddSource(builder, source_offset) EventHistogram.EventHistogramAddDataType(builder, Array.ArrayDouble) - if "errors" in histogram: - EventHistogram.EventHistogramAddErrors(builder, pos_errors) + if errors_offset: + EventHistogram.EventHistogramAddErrors(builder, errors_offset) EventHistogram.EventHistogramAddErrorsType(builder, Array.ArrayDouble) if "last_metadata_timestamp" in histogram: EventHistogram.EventHistogramAddLastMetadataTimestamp( builder, histogram["last_metadata_timestamp"] ) - hist = EventHistogram.EventHistogramEnd(builder) - builder.Finish(hist) + hist_message = EventHistogram.EventHistogramEnd(builder) + builder.Finish(hist_message) # Generate the output and replace the file_identifier - buff = builder.Output() - buff[4:8] = FILE_IDENTIFIER - return buff + buffer = builder.Output() + buffer[4:8] = FILE_IDENTIFIER + return buffer diff --git a/streaming_data_types/nicos_cache_ns10.py b/streaming_data_types/nicos_cache_ns10.py index 72866cb..219da2d 100644 --- a/streaming_data_types/nicos_cache_ns10.py +++ b/streaming_data_types/nicos_cache_ns10.py @@ -12,29 +12,29 @@ def serialise_ns10( ): builder = flatbuffers.Builder(128) - value = builder.CreateString(value) - key = builder.CreateString(key) + value_offset = builder.CreateString(value) + key_offset = builder.CreateString(key) CacheEntry.CacheEntryStart(builder) - CacheEntry.CacheEntryAddValue(builder, value) + CacheEntry.CacheEntryAddValue(builder, value_offset) CacheEntry.CacheEntryAddExpired(builder, expired) CacheEntry.CacheEntryAddTtl(builder, ttl) CacheEntry.CacheEntryAddTime(builder, time_stamp) - 
CacheEntry.CacheEntryAddKey(builder, key) - entry = CacheEntry.CacheEntryEnd(builder) - builder.Finish(entry) + CacheEntry.CacheEntryAddKey(builder, key_offset) + cache_entry_message = CacheEntry.CacheEntryEnd(builder) + builder.Finish(cache_entry_message) # Generate the output and replace the file_identifier - buff = builder.Output() - buff[4:8] = FILE_IDENTIFIER + buffer = builder.Output() + buffer[4:8] = FILE_IDENTIFIER - return buff + return buffer -def deserialise_ns10(buf): - check_schema_identifier(buf, FILE_IDENTIFIER) +def deserialise_ns10(buffer): + check_schema_identifier(buffer, FILE_IDENTIFIER) - entry = CacheEntry.CacheEntry.GetRootAsCacheEntry(buf, 0) + entry = CacheEntry.CacheEntry.GetRootAsCacheEntry(buffer, 0) key = entry.Key() if entry.Key() else b"" time_stamp = entry.Time() diff --git a/streaming_data_types/run_stop_6s4t.py b/streaming_data_types/run_stop_6s4t.py index f26da3e..c7d945f 100644 --- a/streaming_data_types/run_stop_6s4t.py +++ b/streaming_data_types/run_stop_6s4t.py @@ -35,9 +35,9 @@ def serialise_6s4t( builder.Finish(run_stop_message) # Generate the output and replace the file_identifier - buff = builder.Output() - buff[4:8] = FILE_IDENTIFIER - return buff + buffer = builder.Output() + buffer[4:8] = FILE_IDENTIFIER + return buffer def deserialise_6s4t(buffer: bytearray) -> NamedTuple: From 2122269a7d13277bdd361f81bc86939e2ca37661 Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Fri, 6 Mar 2020 08:56:18 +0100 Subject: [PATCH 051/363] Bumped version number for release --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index d01ac4c..6345adc 100644 --- a/setup.py +++ b/setup.py @@ -3,7 +3,7 @@ setup( name="streaming_data_types", - version="0.1.0", + version="0.2.0", description="Python utilities for handling ESS streamed data", long_description="Python utilities for serialising and deserialising data via FlatBuffers for the European Spallation Source ERIC", author="ScreamingUdder", From 
19e266a55acd3993224f846cd36025d67a4fa86b Mon Sep 17 00:00:00 2001 From: Matthew D Jones Date: Fri, 6 Mar 2020 09:16:44 +0000 Subject: [PATCH 052/363] Add new schemas to table in readme --- README.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/README.md b/README.md index e1b7b5f..8ce511e 100644 --- a/README.md +++ b/README.md @@ -10,6 +10,8 @@ https://github.com/ess-dmsc/streaming-data-types |----|-----------|----------| |hs00|Histogram schema|Y| |ns10|NICOS cache entry schema|Y| +|pl72|Run start|N| +|6s4t|Run stop|N| ### hs00 Schema for histogram data. It is one of the more complicated to use schemas. From 16ea0675bb94103e835132d1ac9fcdb55a0bf8d6 Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Fri, 20 Mar 2020 11:18:36 +0100 Subject: [PATCH 053/363] Make sure the ns10 key in not empty --- streaming_data_types/nicos_cache_ns10.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/streaming_data_types/nicos_cache_ns10.py b/streaming_data_types/nicos_cache_ns10.py index 219da2d..be76fb3 100644 --- a/streaming_data_types/nicos_cache_ns10.py +++ b/streaming_data_types/nicos_cache_ns10.py @@ -44,4 +44,4 @@ def deserialise_ns10(buffer): Entry = namedtuple("Entry", "key time_stamp ttl expired value") - return Entry(key.decode(), time_stamp, ttl, expired, value.decode()) + return Entry(key.decode().strip(), time_stamp, ttl, expired, value.decode()) From f09d115b651c93c813feb13f707e4b75dfc16035 Mon Sep 17 00:00:00 2001 From: Matthew D Jones Date: Fri, 27 Mar 2020 17:27:53 +0000 Subject: [PATCH 054/363] Initial implementation of f142 serialisation, scalars only --- .../fbschemas/logdata_f142/AlarmSeverity.py | 11 + .../fbschemas/logdata_f142/AlarmStatus.py | 29 +++ .../fbschemas/logdata_f142/ArrayByte.py | 63 ++++++ .../fbschemas/logdata_f142/ArrayDouble.py | 63 ++++++ .../fbschemas/logdata_f142/ArrayFloat.py | 63 ++++++ .../fbschemas/logdata_f142/ArrayInt.py | 63 ++++++ .../fbschemas/logdata_f142/ArrayLong.py | 63 ++++++ 
.../fbschemas/logdata_f142/ArrayShort.py | 63 ++++++ .../fbschemas/logdata_f142/ArrayString.py | 55 +++++ .../fbschemas/logdata_f142/ArrayUByte.py | 63 ++++++ .../fbschemas/logdata_f142/ArrayUInt.py | 63 ++++++ .../fbschemas/logdata_f142/ArrayULong.py | 63 ++++++ .../fbschemas/logdata_f142/ArrayUShort.py | 63 ++++++ .../fbschemas/logdata_f142/Byte.py | 39 ++++ .../fbschemas/logdata_f142/Double.py | 41 ++++ .../fbschemas/logdata_f142/Float.py | 41 ++++ .../fbschemas/logdata_f142/Int.py | 39 ++++ .../fbschemas/logdata_f142/LogData.py | 108 ++++++++++ .../fbschemas/logdata_f142/Long.py | 39 ++++ .../fbschemas/logdata_f142/Short.py | 39 ++++ .../fbschemas/logdata_f142/String.py | 41 ++++ .../fbschemas/logdata_f142/UByte.py | 39 ++++ .../fbschemas/logdata_f142/UInt.py | 41 ++++ .../fbschemas/logdata_f142/ULong.py | 41 ++++ .../fbschemas/logdata_f142/UShort.py | 41 ++++ .../fbschemas/logdata_f142/Value.py | 29 +++ .../fbschemas/logdata_f142/__init__.py | 0 streaming_data_types/logdata_f142.py | 196 ++++++++++++++++++ 28 files changed, 1499 insertions(+) create mode 100644 streaming_data_types/fbschemas/logdata_f142/AlarmSeverity.py create mode 100644 streaming_data_types/fbschemas/logdata_f142/AlarmStatus.py create mode 100644 streaming_data_types/fbschemas/logdata_f142/ArrayByte.py create mode 100644 streaming_data_types/fbschemas/logdata_f142/ArrayDouble.py create mode 100644 streaming_data_types/fbschemas/logdata_f142/ArrayFloat.py create mode 100644 streaming_data_types/fbschemas/logdata_f142/ArrayInt.py create mode 100644 streaming_data_types/fbschemas/logdata_f142/ArrayLong.py create mode 100644 streaming_data_types/fbschemas/logdata_f142/ArrayShort.py create mode 100644 streaming_data_types/fbschemas/logdata_f142/ArrayString.py create mode 100644 streaming_data_types/fbschemas/logdata_f142/ArrayUByte.py create mode 100644 streaming_data_types/fbschemas/logdata_f142/ArrayUInt.py create mode 100644 streaming_data_types/fbschemas/logdata_f142/ArrayULong.py create 
mode 100644 streaming_data_types/fbschemas/logdata_f142/ArrayUShort.py create mode 100644 streaming_data_types/fbschemas/logdata_f142/Byte.py create mode 100644 streaming_data_types/fbschemas/logdata_f142/Double.py create mode 100644 streaming_data_types/fbschemas/logdata_f142/Float.py create mode 100644 streaming_data_types/fbschemas/logdata_f142/Int.py create mode 100644 streaming_data_types/fbschemas/logdata_f142/LogData.py create mode 100644 streaming_data_types/fbschemas/logdata_f142/Long.py create mode 100644 streaming_data_types/fbschemas/logdata_f142/Short.py create mode 100644 streaming_data_types/fbschemas/logdata_f142/String.py create mode 100644 streaming_data_types/fbschemas/logdata_f142/UByte.py create mode 100644 streaming_data_types/fbschemas/logdata_f142/UInt.py create mode 100644 streaming_data_types/fbschemas/logdata_f142/ULong.py create mode 100644 streaming_data_types/fbschemas/logdata_f142/UShort.py create mode 100644 streaming_data_types/fbschemas/logdata_f142/Value.py create mode 100644 streaming_data_types/fbschemas/logdata_f142/__init__.py create mode 100644 streaming_data_types/logdata_f142.py diff --git a/streaming_data_types/fbschemas/logdata_f142/AlarmSeverity.py b/streaming_data_types/fbschemas/logdata_f142/AlarmSeverity.py new file mode 100644 index 0000000..8ab40d7 --- /dev/null +++ b/streaming_data_types/fbschemas/logdata_f142/AlarmSeverity.py @@ -0,0 +1,11 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + + +class AlarmSeverity(object): + MINOR = 0 + MAJOR = 1 + NO_ALARM = 2 + INVALID = 3 + NO_CHANGE = 4 diff --git a/streaming_data_types/fbschemas/logdata_f142/AlarmStatus.py b/streaming_data_types/fbschemas/logdata_f142/AlarmStatus.py new file mode 100644 index 0000000..959a28a --- /dev/null +++ b/streaming_data_types/fbschemas/logdata_f142/AlarmStatus.py @@ -0,0 +1,29 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + + +class AlarmStatus(object): + 
NO_ALARM = 0 + READ = 1 + WRITE = 2 + HIHI = 3 + HIGH = 4 + LOLO = 5 + LOW = 6 + STATE = 7 + COS = 8 + COMM = 9 + TIMED = 10 + HWLIMIT = 11 + CALC = 12 + SCAN = 13 + LINK = 14 + SOFT = 15 + BAD_SUB = 16 + UDF = 17 + DISABLE = 18 + SIMM = 19 + READ_ACCESS = 20 + WRITE_ACCESS = 21 + NO_CHANGE = 22 diff --git a/streaming_data_types/fbschemas/logdata_f142/ArrayByte.py b/streaming_data_types/fbschemas/logdata_f142/ArrayByte.py new file mode 100644 index 0000000..8307e0d --- /dev/null +++ b/streaming_data_types/fbschemas/logdata_f142/ArrayByte.py @@ -0,0 +1,63 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers + + +class ArrayByte(object): + __slots__ = ["_tab"] + + @classmethod + def GetRootAsArrayByte(cls, buf, offset): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = ArrayByte() + x.Init(buf, n + offset) + return x + + # ArrayByte + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # ArrayByte + def Value(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get( + flatbuffers.number_types.Int8Flags, + a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1), + ) + return 0 + + # ArrayByte + def ValueAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int8Flags, o) + return 0 + + # ArrayByte + def ValueLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + +def ArrayByteStart(builder): + builder.StartObject(1) + + +def ArrayByteAddValue(builder, value): + builder.PrependUOffsetTRelativeSlot( + 0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0 + ) + + +def ArrayByteStartValueVector(builder, numElems): + return builder.StartVector(1, numElems, 1) + + +def 
ArrayByteEnd(builder): + return builder.EndObject() diff --git a/streaming_data_types/fbschemas/logdata_f142/ArrayDouble.py b/streaming_data_types/fbschemas/logdata_f142/ArrayDouble.py new file mode 100644 index 0000000..7b57488 --- /dev/null +++ b/streaming_data_types/fbschemas/logdata_f142/ArrayDouble.py @@ -0,0 +1,63 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers + + +class ArrayDouble(object): + __slots__ = ["_tab"] + + @classmethod + def GetRootAsArrayDouble(cls, buf, offset): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = ArrayDouble() + x.Init(buf, n + offset) + return x + + # ArrayDouble + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # ArrayDouble + def Value(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get( + flatbuffers.number_types.Float64Flags, + a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8), + ) + return 0 + + # ArrayDouble + def ValueAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Float64Flags, o) + return 0 + + # ArrayDouble + def ValueLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + +def ArrayDoubleStart(builder): + builder.StartObject(1) + + +def ArrayDoubleAddValue(builder, value): + builder.PrependUOffsetTRelativeSlot( + 0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0 + ) + + +def ArrayDoubleStartValueVector(builder, numElems): + return builder.StartVector(8, numElems, 8) + + +def ArrayDoubleEnd(builder): + return builder.EndObject() diff --git a/streaming_data_types/fbschemas/logdata_f142/ArrayFloat.py b/streaming_data_types/fbschemas/logdata_f142/ArrayFloat.py new file mode 100644 index 
0000000..9b1a84c --- /dev/null +++ b/streaming_data_types/fbschemas/logdata_f142/ArrayFloat.py @@ -0,0 +1,63 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers + + +class ArrayFloat(object): + __slots__ = ["_tab"] + + @classmethod + def GetRootAsArrayFloat(cls, buf, offset): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = ArrayFloat() + x.Init(buf, n + offset) + return x + + # ArrayFloat + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # ArrayFloat + def Value(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get( + flatbuffers.number_types.Float32Flags, + a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4), + ) + return 0 + + # ArrayFloat + def ValueAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Float32Flags, o) + return 0 + + # ArrayFloat + def ValueLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + +def ArrayFloatStart(builder): + builder.StartObject(1) + + +def ArrayFloatAddValue(builder, value): + builder.PrependUOffsetTRelativeSlot( + 0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0 + ) + + +def ArrayFloatStartValueVector(builder, numElems): + return builder.StartVector(4, numElems, 4) + + +def ArrayFloatEnd(builder): + return builder.EndObject() diff --git a/streaming_data_types/fbschemas/logdata_f142/ArrayInt.py b/streaming_data_types/fbschemas/logdata_f142/ArrayInt.py new file mode 100644 index 0000000..841c0e2 --- /dev/null +++ b/streaming_data_types/fbschemas/logdata_f142/ArrayInt.py @@ -0,0 +1,63 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers + + +class 
ArrayInt(object): + __slots__ = ["_tab"] + + @classmethod + def GetRootAsArrayInt(cls, buf, offset): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = ArrayInt() + x.Init(buf, n + offset) + return x + + # ArrayInt + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # ArrayInt + def Value(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get( + flatbuffers.number_types.Int32Flags, + a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4), + ) + return 0 + + # ArrayInt + def ValueAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int32Flags, o) + return 0 + + # ArrayInt + def ValueLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + +def ArrayIntStart(builder): + builder.StartObject(1) + + +def ArrayIntAddValue(builder, value): + builder.PrependUOffsetTRelativeSlot( + 0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0 + ) + + +def ArrayIntStartValueVector(builder, numElems): + return builder.StartVector(4, numElems, 4) + + +def ArrayIntEnd(builder): + return builder.EndObject() diff --git a/streaming_data_types/fbschemas/logdata_f142/ArrayLong.py b/streaming_data_types/fbschemas/logdata_f142/ArrayLong.py new file mode 100644 index 0000000..21277cc --- /dev/null +++ b/streaming_data_types/fbschemas/logdata_f142/ArrayLong.py @@ -0,0 +1,63 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers + + +class ArrayLong(object): + __slots__ = ["_tab"] + + @classmethod + def GetRootAsArrayLong(cls, buf, offset): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = ArrayLong() + x.Init(buf, n + offset) + return x + + # ArrayLong + def 
Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # ArrayLong + def Value(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get( + flatbuffers.number_types.Int64Flags, + a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8), + ) + return 0 + + # ArrayLong + def ValueAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int64Flags, o) + return 0 + + # ArrayLong + def ValueLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + +def ArrayLongStart(builder): + builder.StartObject(1) + + +def ArrayLongAddValue(builder, value): + builder.PrependUOffsetTRelativeSlot( + 0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0 + ) + + +def ArrayLongStartValueVector(builder, numElems): + return builder.StartVector(8, numElems, 8) + + +def ArrayLongEnd(builder): + return builder.EndObject() diff --git a/streaming_data_types/fbschemas/logdata_f142/ArrayShort.py b/streaming_data_types/fbschemas/logdata_f142/ArrayShort.py new file mode 100644 index 0000000..edea083 --- /dev/null +++ b/streaming_data_types/fbschemas/logdata_f142/ArrayShort.py @@ -0,0 +1,63 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers + + +class ArrayShort(object): + __slots__ = ["_tab"] + + @classmethod + def GetRootAsArrayShort(cls, buf, offset): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = ArrayShort() + x.Init(buf, n + offset) + return x + + # ArrayShort + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # ArrayShort + def Value(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + a = self._tab.Vector(o) + return 
self._tab.Get( + flatbuffers.number_types.Int16Flags, + a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 2), + ) + return 0 + + # ArrayShort + def ValueAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int16Flags, o) + return 0 + + # ArrayShort + def ValueLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + +def ArrayShortStart(builder): + builder.StartObject(1) + + +def ArrayShortAddValue(builder, value): + builder.PrependUOffsetTRelativeSlot( + 0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0 + ) + + +def ArrayShortStartValueVector(builder, numElems): + return builder.StartVector(2, numElems, 2) + + +def ArrayShortEnd(builder): + return builder.EndObject() diff --git a/streaming_data_types/fbschemas/logdata_f142/ArrayString.py b/streaming_data_types/fbschemas/logdata_f142/ArrayString.py new file mode 100644 index 0000000..0594d66 --- /dev/null +++ b/streaming_data_types/fbschemas/logdata_f142/ArrayString.py @@ -0,0 +1,55 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers + + +class ArrayString(object): + __slots__ = ["_tab"] + + @classmethod + def GetRootAsArrayString(cls, buf, offset): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = ArrayString() + x.Init(buf, n + offset) + return x + + # ArrayString + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # ArrayString + def Value(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.String( + a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4) + ) + return "" + + # ArrayString + def ValueLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + 
return self._tab.VectorLen(o) + return 0 + + +def ArrayStringStart(builder): + builder.StartObject(1) + + +def ArrayStringAddValue(builder, value): + builder.PrependUOffsetTRelativeSlot( + 0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0 + ) + + +def ArrayStringStartValueVector(builder, numElems): + return builder.StartVector(4, numElems, 4) + + +def ArrayStringEnd(builder): + return builder.EndObject() diff --git a/streaming_data_types/fbschemas/logdata_f142/ArrayUByte.py b/streaming_data_types/fbschemas/logdata_f142/ArrayUByte.py new file mode 100644 index 0000000..82c7772 --- /dev/null +++ b/streaming_data_types/fbschemas/logdata_f142/ArrayUByte.py @@ -0,0 +1,63 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers + + +class ArrayUByte(object): + __slots__ = ["_tab"] + + @classmethod + def GetRootAsArrayUByte(cls, buf, offset): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = ArrayUByte() + x.Init(buf, n + offset) + return x + + # ArrayUByte + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # ArrayUByte + def Value(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get( + flatbuffers.number_types.Uint8Flags, + a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1), + ) + return 0 + + # ArrayUByte + def ValueAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint8Flags, o) + return 0 + + # ArrayUByte + def ValueLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + +def ArrayUByteStart(builder): + builder.StartObject(1) + + +def ArrayUByteAddValue(builder, value): + builder.PrependUOffsetTRelativeSlot( + 0, 
flatbuffers.number_types.UOffsetTFlags.py_type(value), 0 + ) + + +def ArrayUByteStartValueVector(builder, numElems): + return builder.StartVector(1, numElems, 1) + + +def ArrayUByteEnd(builder): + return builder.EndObject() diff --git a/streaming_data_types/fbschemas/logdata_f142/ArrayUInt.py b/streaming_data_types/fbschemas/logdata_f142/ArrayUInt.py new file mode 100644 index 0000000..615967d --- /dev/null +++ b/streaming_data_types/fbschemas/logdata_f142/ArrayUInt.py @@ -0,0 +1,63 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers + + +class ArrayUInt(object): + __slots__ = ["_tab"] + + @classmethod + def GetRootAsArrayUInt(cls, buf, offset): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = ArrayUInt() + x.Init(buf, n + offset) + return x + + # ArrayUInt + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # ArrayUInt + def Value(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get( + flatbuffers.number_types.Uint32Flags, + a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4), + ) + return 0 + + # ArrayUInt + def ValueAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint32Flags, o) + return 0 + + # ArrayUInt + def ValueLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + +def ArrayUIntStart(builder): + builder.StartObject(1) + + +def ArrayUIntAddValue(builder, value): + builder.PrependUOffsetTRelativeSlot( + 0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0 + ) + + +def ArrayUIntStartValueVector(builder, numElems): + return builder.StartVector(4, numElems, 4) + + +def ArrayUIntEnd(builder): + return builder.EndObject() diff --git 
a/streaming_data_types/fbschemas/logdata_f142/ArrayULong.py b/streaming_data_types/fbschemas/logdata_f142/ArrayULong.py new file mode 100644 index 0000000..6e29d22 --- /dev/null +++ b/streaming_data_types/fbschemas/logdata_f142/ArrayULong.py @@ -0,0 +1,63 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers + + +class ArrayULong(object): + __slots__ = ["_tab"] + + @classmethod + def GetRootAsArrayULong(cls, buf, offset): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = ArrayULong() + x.Init(buf, n + offset) + return x + + # ArrayULong + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # ArrayULong + def Value(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get( + flatbuffers.number_types.Uint64Flags, + a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8), + ) + return 0 + + # ArrayULong + def ValueAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint64Flags, o) + return 0 + + # ArrayULong + def ValueLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + +def ArrayULongStart(builder): + builder.StartObject(1) + + +def ArrayULongAddValue(builder, value): + builder.PrependUOffsetTRelativeSlot( + 0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0 + ) + + +def ArrayULongStartValueVector(builder, numElems): + return builder.StartVector(8, numElems, 8) + + +def ArrayULongEnd(builder): + return builder.EndObject() diff --git a/streaming_data_types/fbschemas/logdata_f142/ArrayUShort.py b/streaming_data_types/fbschemas/logdata_f142/ArrayUShort.py new file mode 100644 index 0000000..7e182c0 --- /dev/null +++ 
b/streaming_data_types/fbschemas/logdata_f142/ArrayUShort.py @@ -0,0 +1,63 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers + + +class ArrayUShort(object): + __slots__ = ["_tab"] + + @classmethod + def GetRootAsArrayUShort(cls, buf, offset): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = ArrayUShort() + x.Init(buf, n + offset) + return x + + # ArrayUShort + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # ArrayUShort + def Value(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get( + flatbuffers.number_types.Uint16Flags, + a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 2), + ) + return 0 + + # ArrayUShort + def ValueAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint16Flags, o) + return 0 + + # ArrayUShort + def ValueLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + +def ArrayUShortStart(builder): + builder.StartObject(1) + + +def ArrayUShortAddValue(builder, value): + builder.PrependUOffsetTRelativeSlot( + 0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0 + ) + + +def ArrayUShortStartValueVector(builder, numElems): + return builder.StartVector(2, numElems, 2) + + +def ArrayUShortEnd(builder): + return builder.EndObject() diff --git a/streaming_data_types/fbschemas/logdata_f142/Byte.py b/streaming_data_types/fbschemas/logdata_f142/Byte.py new file mode 100644 index 0000000..85d5095 --- /dev/null +++ b/streaming_data_types/fbschemas/logdata_f142/Byte.py @@ -0,0 +1,39 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers + + +class Byte(object): + __slots__ = ["_tab"] + + 
@classmethod + def GetRootAsByte(cls, buf, offset): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = Byte() + x.Init(buf, n + offset) + return x + + # Byte + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # Byte + def Value(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) + return 0 + + +def ByteStart(builder): + builder.StartObject(1) + + +def ByteAddValue(builder, value): + builder.PrependInt8Slot(0, value, 0) + + +def ByteEnd(builder): + return builder.EndObject() diff --git a/streaming_data_types/fbschemas/logdata_f142/Double.py b/streaming_data_types/fbschemas/logdata_f142/Double.py new file mode 100644 index 0000000..3c99568 --- /dev/null +++ b/streaming_data_types/fbschemas/logdata_f142/Double.py @@ -0,0 +1,41 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers + + +class Double(object): + __slots__ = ["_tab"] + + @classmethod + def GetRootAsDouble(cls, buf, offset): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = Double() + x.Init(buf, n + offset) + return x + + # Double + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # Double + def Value(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.Get( + flatbuffers.number_types.Float64Flags, o + self._tab.Pos + ) + return 0.0 + + +def DoubleStart(builder): + builder.StartObject(1) + + +def DoubleAddValue(builder, value): + builder.PrependFloat64Slot(0, value, 0.0) + + +def DoubleEnd(builder): + return builder.EndObject() diff --git a/streaming_data_types/fbschemas/logdata_f142/Float.py b/streaming_data_types/fbschemas/logdata_f142/Float.py new file mode 100644 index 0000000..a83926f --- /dev/null +++ b/streaming_data_types/fbschemas/logdata_f142/Float.py @@ 
-0,0 +1,41 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers + + +class Float(object): + __slots__ = ["_tab"] + + @classmethod + def GetRootAsFloat(cls, buf, offset): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = Float() + x.Init(buf, n + offset) + return x + + # Float + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # Float + def Value(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.Get( + flatbuffers.number_types.Float32Flags, o + self._tab.Pos + ) + return 0.0 + + +def FloatStart(builder): + builder.StartObject(1) + + +def FloatAddValue(builder, value): + builder.PrependFloat32Slot(0, value, 0.0) + + +def FloatEnd(builder): + return builder.EndObject() diff --git a/streaming_data_types/fbschemas/logdata_f142/Int.py b/streaming_data_types/fbschemas/logdata_f142/Int.py new file mode 100644 index 0000000..b2ca0b3 --- /dev/null +++ b/streaming_data_types/fbschemas/logdata_f142/Int.py @@ -0,0 +1,39 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers + + +class Int(object): + __slots__ = ["_tab"] + + @classmethod + def GetRootAsInt(cls, buf, offset): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = Int() + x.Init(buf, n + offset) + return x + + # Int + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # Int + def Value(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) + return 0 + + +def IntStart(builder): + builder.StartObject(1) + + +def IntAddValue(builder, value): + builder.PrependInt32Slot(0, value, 0) + + +def IntEnd(builder): + return builder.EndObject() diff --git a/streaming_data_types/fbschemas/logdata_f142/LogData.py 
b/streaming_data_types/fbschemas/logdata_f142/LogData.py new file mode 100644 index 0000000..8c21ed4 --- /dev/null +++ b/streaming_data_types/fbschemas/logdata_f142/LogData.py @@ -0,0 +1,108 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers + + +class LogData(object): + __slots__ = ["_tab"] + + @classmethod + def GetRootAsLogData(cls, buf, offset): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = LogData() + x.Init(buf, n + offset) + return x + + # LogData + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # LogData + def SourceName(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + # LogData + def ValueType(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Uint8Flags, o + self._tab.Pos) + return 0 + + # LogData + def Value(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + from flatbuffers.table import Table + + obj = Table(bytearray(), 0) + self._tab.Union(obj, o) + return obj + return None + + # LogData + def Timestamp(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + if o != 0: + return self._tab.Get( + flatbuffers.number_types.Uint64Flags, o + self._tab.Pos + ) + return 0 + + # LogData + def Status(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + if o != 0: + return self._tab.Get( + flatbuffers.number_types.Uint16Flags, o + self._tab.Pos + ) + return 22 + + # LogData + def Severity(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) + if o != 0: + return self._tab.Get( + flatbuffers.number_types.Uint16Flags, o + self._tab.Pos + ) + return 4 + + +def LogDataStart(builder): + builder.StartObject(6) + + +def 
LogDataAddSourceName(builder, sourceName): + builder.PrependUOffsetTRelativeSlot( + 0, flatbuffers.number_types.UOffsetTFlags.py_type(sourceName), 0 + ) + + +def LogDataAddValueType(builder, valueType): + builder.PrependUint8Slot(1, valueType, 0) + + +def LogDataAddValue(builder, value): + builder.PrependUOffsetTRelativeSlot( + 2, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0 + ) + + +def LogDataAddTimestamp(builder, timestamp): + builder.PrependUint64Slot(3, timestamp, 0) + + +def LogDataAddStatus(builder, status): + builder.PrependUint16Slot(4, status, 22) + + +def LogDataAddSeverity(builder, severity): + builder.PrependUint16Slot(5, severity, 4) + + +def LogDataEnd(builder): + return builder.EndObject() diff --git a/streaming_data_types/fbschemas/logdata_f142/Long.py b/streaming_data_types/fbschemas/logdata_f142/Long.py new file mode 100644 index 0000000..25d945d --- /dev/null +++ b/streaming_data_types/fbschemas/logdata_f142/Long.py @@ -0,0 +1,39 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers + + +class Long(object): + __slots__ = ["_tab"] + + @classmethod + def GetRootAsLong(cls, buf, offset): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = Long() + x.Init(buf, n + offset) + return x + + # Long + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # Long + def Value(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int64Flags, o + self._tab.Pos) + return 0 + + +def LongStart(builder): + builder.StartObject(1) + + +def LongAddValue(builder, value): + builder.PrependInt64Slot(0, value, 0) + + +def LongEnd(builder): + return builder.EndObject() diff --git a/streaming_data_types/fbschemas/logdata_f142/Short.py b/streaming_data_types/fbschemas/logdata_f142/Short.py new file mode 100644 index 0000000..abf883a --- /dev/null +++ 
b/streaming_data_types/fbschemas/logdata_f142/Short.py @@ -0,0 +1,39 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers + + +class Short(object): + __slots__ = ["_tab"] + + @classmethod + def GetRootAsShort(cls, buf, offset): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = Short() + x.Init(buf, n + offset) + return x + + # Short + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # Short + def Value(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int16Flags, o + self._tab.Pos) + return 0 + + +def ShortStart(builder): + builder.StartObject(1) + + +def ShortAddValue(builder, value): + builder.PrependInt16Slot(0, value, 0) + + +def ShortEnd(builder): + return builder.EndObject() diff --git a/streaming_data_types/fbschemas/logdata_f142/String.py b/streaming_data_types/fbschemas/logdata_f142/String.py new file mode 100644 index 0000000..8e3751e --- /dev/null +++ b/streaming_data_types/fbschemas/logdata_f142/String.py @@ -0,0 +1,41 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers + + +class String(object): + __slots__ = ["_tab"] + + @classmethod + def GetRootAsString(cls, buf, offset): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = String() + x.Init(buf, n + offset) + return x + + # String + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # String + def Value(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + +def StringStart(builder): + builder.StartObject(1) + + +def StringAddValue(builder, value): + builder.PrependUOffsetTRelativeSlot( + 0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0 + ) + + +def StringEnd(builder): + return 
builder.EndObject() diff --git a/streaming_data_types/fbschemas/logdata_f142/UByte.py b/streaming_data_types/fbschemas/logdata_f142/UByte.py new file mode 100644 index 0000000..09e92b2 --- /dev/null +++ b/streaming_data_types/fbschemas/logdata_f142/UByte.py @@ -0,0 +1,39 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers + + +class UByte(object): + __slots__ = ["_tab"] + + @classmethod + def GetRootAsUByte(cls, buf, offset): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = UByte() + x.Init(buf, n + offset) + return x + + # UByte + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # UByte + def Value(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Uint8Flags, o + self._tab.Pos) + return 0 + + +def UByteStart(builder): + builder.StartObject(1) + + +def UByteAddValue(builder, value): + builder.PrependUint8Slot(0, value, 0) + + +def UByteEnd(builder): + return builder.EndObject() diff --git a/streaming_data_types/fbschemas/logdata_f142/UInt.py b/streaming_data_types/fbschemas/logdata_f142/UInt.py new file mode 100644 index 0000000..e905742 --- /dev/null +++ b/streaming_data_types/fbschemas/logdata_f142/UInt.py @@ -0,0 +1,41 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers + + +class UInt(object): + __slots__ = ["_tab"] + + @classmethod + def GetRootAsUInt(cls, buf, offset): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = UInt() + x.Init(buf, n + offset) + return x + + # UInt + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # UInt + def Value(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.Get( + flatbuffers.number_types.Uint32Flags, o + self._tab.Pos + ) + return 0 + + +def 
UIntStart(builder): + builder.StartObject(1) + + +def UIntAddValue(builder, value): + builder.PrependUint32Slot(0, value, 0) + + +def UIntEnd(builder): + return builder.EndObject() diff --git a/streaming_data_types/fbschemas/logdata_f142/ULong.py b/streaming_data_types/fbschemas/logdata_f142/ULong.py new file mode 100644 index 0000000..295d1e2 --- /dev/null +++ b/streaming_data_types/fbschemas/logdata_f142/ULong.py @@ -0,0 +1,41 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers + + +class ULong(object): + __slots__ = ["_tab"] + + @classmethod + def GetRootAsULong(cls, buf, offset): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = ULong() + x.Init(buf, n + offset) + return x + + # ULong + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # ULong + def Value(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.Get( + flatbuffers.number_types.Uint64Flags, o + self._tab.Pos + ) + return 0 + + +def ULongStart(builder): + builder.StartObject(1) + + +def ULongAddValue(builder, value): + builder.PrependUint64Slot(0, value, 0) + + +def ULongEnd(builder): + return builder.EndObject() diff --git a/streaming_data_types/fbschemas/logdata_f142/UShort.py b/streaming_data_types/fbschemas/logdata_f142/UShort.py new file mode 100644 index 0000000..a74e208 --- /dev/null +++ b/streaming_data_types/fbschemas/logdata_f142/UShort.py @@ -0,0 +1,41 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers + + +class UShort(object): + __slots__ = ["_tab"] + + @classmethod + def GetRootAsUShort(cls, buf, offset): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = UShort() + x.Init(buf, n + offset) + return x + + # UShort + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # UShort + def Value(self): + o = 
flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.Get( + flatbuffers.number_types.Uint16Flags, o + self._tab.Pos + ) + return 0 + + +def UShortStart(builder): + builder.StartObject(1) + + +def UShortAddValue(builder, value): + builder.PrependUint16Slot(0, value, 0) + + +def UShortEnd(builder): + return builder.EndObject() diff --git a/streaming_data_types/fbschemas/logdata_f142/Value.py b/streaming_data_types/fbschemas/logdata_f142/Value.py new file mode 100644 index 0000000..9cefe8e --- /dev/null +++ b/streaming_data_types/fbschemas/logdata_f142/Value.py @@ -0,0 +1,29 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + + +class Value(object): + NONE = 0 + Byte = 1 + UByte = 2 + Short = 3 + UShort = 4 + Int = 5 + UInt = 6 + Long = 7 + ULong = 8 + Float = 9 + Double = 10 + ArrayByte = 11 + ArrayUByte = 12 + ArrayShort = 13 + ArrayUShort = 14 + ArrayInt = 15 + ArrayUInt = 16 + ArrayLong = 17 + ArrayULong = 18 + ArrayFloat = 19 + ArrayDouble = 20 + String = 21 + ArrayString = 22 diff --git a/streaming_data_types/fbschemas/logdata_f142/__init__.py b/streaming_data_types/fbschemas/logdata_f142/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/streaming_data_types/logdata_f142.py b/streaming_data_types/logdata_f142.py new file mode 100644 index 0000000..b0a0169 --- /dev/null +++ b/streaming_data_types/logdata_f142.py @@ -0,0 +1,196 @@ +import flatbuffers +from streaming_data_types.fbschemas.logdata_f142 import LogData +from streaming_data_types.fbschemas.logdata_f142.Value import Value +from streaming_data_types.fbschemas.logdata_f142.UByte import ( + UByteStart, + UByteAddValue, + UByteEnd, +) +from streaming_data_types.fbschemas.logdata_f142.Byte import ( + ByteStart, + ByteAddValue, + ByteEnd, +) +from streaming_data_types.fbschemas.logdata_f142.Short import ( + ShortStart, + ShortAddValue, + ShortEnd, +) +from streaming_data_types.fbschemas.logdata_f142.Int 
import ( + IntStart, + IntAddValue, + IntEnd, +) +from streaming_data_types.fbschemas.logdata_f142.Long import ( + LongStart, + LongAddValue, + LongEnd, +) +from streaming_data_types.fbschemas.logdata_f142.Float import ( + FloatStart, + FloatAddValue, + FloatEnd, +) +from streaming_data_types.fbschemas.logdata_f142.Double import ( + DoubleStart, + DoubleAddValue, + DoubleEnd, +) +from streaming_data_types.fbschemas.logdata_f142.String import ( + StringStart, + StringAddValue, + StringEnd, +) +import numpy as np +from typing import Any + + +def _complete_buffer(builder, timestamp_unix_ns: int) -> bytearray: + LogData.LogDataAddTimestamp(builder, timestamp_unix_ns) + log_msg = LogData.LogDataEnd(builder) + builder.Finish(log_msg) + buff = builder.Output() + file_identifier = b"f142" + buff[4:8] = file_identifier + return buff + + +def _setup_builder(): + builder = flatbuffers.Builder(1024) + source = builder.CreateString("Forwarder-Python") + return builder, source + + +def _serialise_byte(builder, data, source): + ByteStart(builder) + ByteAddValue(builder, data.astype(np.byte)[0]) + value_position = ByteEnd(builder) + LogData.LogDataStart(builder) + LogData.LogDataAddSourceName(builder, source) + LogData.LogDataAddValue(builder, value_position) + LogData.LogDataAddValueType(builder, Value.Byte) + + +def _serialise_ubyte(builder, data, source): + UByteStart(builder) + UByteAddValue(builder, data.astype(np.ubyte)[0]) + value_position = UByteEnd(builder) + LogData.LogDataStart(builder) + LogData.LogDataAddSourceName(builder, source) + LogData.LogDataAddValue(builder, value_position) + LogData.LogDataAddValueType(builder, Value.UByte) + + +def _serialise_short(builder, data, source): + ShortStart(builder) + ShortAddValue(builder, data.astype(np.int16)[0]) + value_position = ShortEnd(builder) + LogData.LogDataStart(builder) + LogData.LogDataAddSourceName(builder, source) + LogData.LogDataAddValue(builder, value_position) + LogData.LogDataAddValueType(builder, 
Value.Short) + + +def _serialise_int(builder, data, source): + IntStart(builder) + IntAddValue(builder, data.astype(np.int32)[0]) + value_position = IntEnd(builder) + LogData.LogDataStart(builder) + LogData.LogDataAddSourceName(builder, source) + LogData.LogDataAddValue(builder, value_position) + LogData.LogDataAddValueType(builder, Value.Int) + + +def _serialise_long(builder, data, source): + LongStart(builder) + LongAddValue(builder, data.astype(np.int64)[0]) + value_position = LongEnd(builder) + LogData.LogDataStart(builder) + LogData.LogDataAddSourceName(builder, source) + LogData.LogDataAddValue(builder, value_position) + LogData.LogDataAddValueType(builder, Value.Long) + + +def _serialise_float(builder, data, source): + FloatStart(builder) + FloatAddValue(builder, data.astype(np.float64)[0]) + value_position = FloatEnd(builder) + LogData.LogDataStart(builder) + LogData.LogDataAddSourceName(builder, source) + LogData.LogDataAddValue(builder, value_position) + LogData.LogDataAddValueType(builder, Value.Float) + + +def _serialise_double(builder, data, source): + DoubleStart(builder) + DoubleAddValue(builder, data.astype(np.float64)[0]) + value_position = DoubleEnd(builder) + LogData.LogDataStart(builder) + LogData.LogDataAddSourceName(builder, source) + LogData.LogDataAddValue(builder, value_position) + LogData.LogDataAddValueType(builder, Value.Double) + + +def _serialise_string(builder, data, source): + StringStart(builder) + StringAddValue(builder, data.astype(np.unicode_)[0]) + value_position = StringEnd(builder) + LogData.LogDataStart(builder) + LogData.LogDataAddSourceName(builder, source) + LogData.LogDataAddValue(builder, value_position) + LogData.LogDataAddValueType(builder, Value.String) + + +map_scalar_type_to_serialiser = { + np.byte: _serialise_byte, + np.ubyte: _serialise_ubyte, + np.int8: _serialise_short, + np.int16: _serialise_short, + np.int32: _serialise_int, + np.int64: _serialise_long, + np.uint8: "", + np.uint16: "", + np.uint32: "", + 
np.uint64: "", + np.float32: _serialise_float, + np.float64: _serialise_double, +} + + +def _ensure_data_is_numpy_type(data: Any) -> np.ndarray: + if not isinstance(data, np.ndarray): + return np.array(data) + return data + + +def serialise_f142(data: Any, timestamp_unix_ns: int = 0) -> bytearray: + """ + Serialise data and corresponding timestamp as an f142 Flatbuffer message. + Should automagically use a sensible type for data in the message, but if + in doubt pass data in as a numpy ndarray of a carefully chosen dtype. + + :param data: only scalar value currently supported; if ndarray then ndim must be 0 + :param timestamp_unix_ns: timestamp corresponding to data, e.g. when data was measured, in nanoseconds + """ + builder, source = _setup_builder() + + data = _ensure_data_is_numpy_type(data) + + if data.ndim != 0: + raise NotImplementedError("serialise_f142 does not yet support array types") + + # We can use a dictionary to map most numpy types to one of the types defined in the flatbuffer schema + # but we have to handle strings separately as there are many subtypes + if np.issubdtype(data.dtype, np.unicode_): + _serialise_string(builder, data, source) + else: + try: + map_scalar_type_to_serialiser[data.dtype](builder, data, source) + except KeyError: + # There are a few numpy types we don't try to handle, for example complex numbers + raise Exception( + f"Cannot serialise data of type {data.dtype}, must use one of " + f"{list(map_scalar_type_to_serialiser.keys()).append(np.unicode_)}" + ) + + return _complete_buffer(builder, timestamp_unix_ns) From 58faacf1eb722e449b96596f54fb6afa28a5f4f1 Mon Sep 17 00:00:00 2001 From: Matthew D Jones Date: Fri, 27 Mar 2020 17:32:28 +0000 Subject: [PATCH 055/363] Initial implementation of f142 serialisation, unsigned types --- streaming_data_types/logdata_f142.py | 53 +++++++++++++++++++++++++--- 1 file changed, 49 insertions(+), 4 deletions(-) diff --git a/streaming_data_types/logdata_f142.py 
b/streaming_data_types/logdata_f142.py index b0a0169..2f42a5d 100644 --- a/streaming_data_types/logdata_f142.py +++ b/streaming_data_types/logdata_f142.py @@ -11,16 +11,31 @@ ByteAddValue, ByteEnd, ) +from streaming_data_types.fbschemas.logdata_f142.UShort import ( + UShortStart, + UShortAddValue, + UShortEnd, +) from streaming_data_types.fbschemas.logdata_f142.Short import ( ShortStart, ShortAddValue, ShortEnd, ) +from streaming_data_types.fbschemas.logdata_f142.UInt import ( + UIntStart, + UIntAddValue, + UIntEnd, +) from streaming_data_types.fbschemas.logdata_f142.Int import ( IntStart, IntAddValue, IntEnd, ) +from streaming_data_types.fbschemas.logdata_f142.ULong import ( + ULongStart, + ULongAddValue, + ULongEnd, +) from streaming_data_types.fbschemas.logdata_f142.Long import ( LongStart, LongAddValue, @@ -91,6 +106,16 @@ def _serialise_short(builder, data, source): LogData.LogDataAddValueType(builder, Value.Short) +def _serialise_ushort(builder, data, source): + UShortStart(builder) + UShortAddValue(builder, data.astype(np.uint16)[0]) + value_position = UShortEnd(builder) + LogData.LogDataStart(builder) + LogData.LogDataAddSourceName(builder, source) + LogData.LogDataAddValue(builder, value_position) + LogData.LogDataAddValueType(builder, Value.UShort) + + def _serialise_int(builder, data, source): IntStart(builder) IntAddValue(builder, data.astype(np.int32)[0]) @@ -101,6 +126,16 @@ def _serialise_int(builder, data, source): LogData.LogDataAddValueType(builder, Value.Int) +def _serialise_uint(builder, data, source): + UIntStart(builder) + UIntAddValue(builder, data.astype(np.uint32)[0]) + value_position = UIntEnd(builder) + LogData.LogDataStart(builder) + LogData.LogDataAddSourceName(builder, source) + LogData.LogDataAddValue(builder, value_position) + LogData.LogDataAddValueType(builder, Value.UInt) + + def _serialise_long(builder, data, source): LongStart(builder) LongAddValue(builder, data.astype(np.int64)[0]) @@ -111,6 +146,16 @@ def 
_serialise_long(builder, data, source): LogData.LogDataAddValueType(builder, Value.Long) +def _serialise_ulong(builder, data, source): + ULongStart(builder) + ULongAddValue(builder, data.astype(np.uint64)[0]) + value_position = ULongEnd(builder) + LogData.LogDataStart(builder) + LogData.LogDataAddSourceName(builder, source) + LogData.LogDataAddValue(builder, value_position) + LogData.LogDataAddValueType(builder, Value.ULong) + + def _serialise_float(builder, data, source): FloatStart(builder) FloatAddValue(builder, data.astype(np.float64)[0]) @@ -148,10 +193,10 @@ def _serialise_string(builder, data, source): np.int16: _serialise_short, np.int32: _serialise_int, np.int64: _serialise_long, - np.uint8: "", - np.uint16: "", - np.uint32: "", - np.uint64: "", + np.uint8: _serialise_ushort, + np.uint16: _serialise_ushort, + np.uint32: _serialise_uint, + np.uint64: _serialise_ulong, np.float32: _serialise_float, np.float64: _serialise_double, } From 2c227ce36b9ceaea0d4a5d4bc1a3957bb695f58b Mon Sep 17 00:00:00 2001 From: Matthew D Jones Date: Fri, 27 Mar 2020 17:41:36 +0000 Subject: [PATCH 056/363] Make type mapping a private variable --- streaming_data_types/logdata_f142.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/streaming_data_types/logdata_f142.py b/streaming_data_types/logdata_f142.py index 2f42a5d..06c0442 100644 --- a/streaming_data_types/logdata_f142.py +++ b/streaming_data_types/logdata_f142.py @@ -186,7 +186,7 @@ def _serialise_string(builder, data, source): LogData.LogDataAddValueType(builder, Value.String) -map_scalar_type_to_serialiser = { +_map_scalar_type_to_serialiser = { np.byte: _serialise_byte, np.ubyte: _serialise_ubyte, np.int8: _serialise_short, @@ -230,12 +230,12 @@ def serialise_f142(data: Any, timestamp_unix_ns: int = 0) -> bytearray: _serialise_string(builder, data, source) else: try: - map_scalar_type_to_serialiser[data.dtype](builder, data, source) + _map_scalar_type_to_serialiser[data.dtype](builder, data, source) 
except KeyError: # There are a few numpy types we don't try to handle, for example complex numbers raise Exception( f"Cannot serialise data of type {data.dtype}, must use one of " - f"{list(map_scalar_type_to_serialiser.keys()).append(np.unicode_)}" + f"{list(_map_scalar_type_to_serialiser.keys()).append(np.unicode_)}" ) return _complete_buffer(builder, timestamp_unix_ns) From 16f6c16231b4f38774cf0d151855f47c307c1fcf Mon Sep 17 00:00:00 2001 From: Matthew D Jones Date: Fri, 27 Mar 2020 17:55:34 +0000 Subject: [PATCH 057/363] More type hints --- streaming_data_types/logdata_f142.py | 26 +++++++++++++------------- 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/streaming_data_types/logdata_f142.py b/streaming_data_types/logdata_f142.py index 06c0442..15a9d82 100644 --- a/streaming_data_types/logdata_f142.py +++ b/streaming_data_types/logdata_f142.py @@ -57,7 +57,7 @@ StringEnd, ) import numpy as np -from typing import Any +from typing import Any, Tuple def _complete_buffer(builder, timestamp_unix_ns: int) -> bytearray: @@ -70,13 +70,13 @@ def _complete_buffer(builder, timestamp_unix_ns: int) -> bytearray: return buff -def _setup_builder(): +def _setup_builder() -> Tuple[flatbuffers.Builder, int]: builder = flatbuffers.Builder(1024) source = builder.CreateString("Forwarder-Python") return builder, source -def _serialise_byte(builder, data, source): +def _serialise_byte(builder: flatbuffers.Builder, data: np.ndarray, source: int): ByteStart(builder) ByteAddValue(builder, data.astype(np.byte)[0]) value_position = ByteEnd(builder) @@ -86,7 +86,7 @@ def _serialise_byte(builder, data, source): LogData.LogDataAddValueType(builder, Value.Byte) -def _serialise_ubyte(builder, data, source): +def _serialise_ubyte(builder: flatbuffers.Builder, data: np.ndarray, source: int): UByteStart(builder) UByteAddValue(builder, data.astype(np.ubyte)[0]) value_position = UByteEnd(builder) @@ -96,7 +96,7 @@ def _serialise_ubyte(builder, data, source): 
LogData.LogDataAddValueType(builder, Value.UByte) -def _serialise_short(builder, data, source): +def _serialise_short(builder: flatbuffers.Builder, data: np.ndarray, source: int): ShortStart(builder) ShortAddValue(builder, data.astype(np.int16)[0]) value_position = ShortEnd(builder) @@ -106,7 +106,7 @@ def _serialise_short(builder, data, source): LogData.LogDataAddValueType(builder, Value.Short) -def _serialise_ushort(builder, data, source): +def _serialise_ushort(builder: flatbuffers.Builder, data: np.ndarray, source: int): UShortStart(builder) UShortAddValue(builder, data.astype(np.uint16)[0]) value_position = UShortEnd(builder) @@ -116,7 +116,7 @@ def _serialise_ushort(builder, data, source): LogData.LogDataAddValueType(builder, Value.UShort) -def _serialise_int(builder, data, source): +def _serialise_int(builder: flatbuffers.Builder, data: np.ndarray, source: int): IntStart(builder) IntAddValue(builder, data.astype(np.int32)[0]) value_position = IntEnd(builder) @@ -126,7 +126,7 @@ def _serialise_int(builder, data, source): LogData.LogDataAddValueType(builder, Value.Int) -def _serialise_uint(builder, data, source): +def _serialise_uint(builder: flatbuffers.Builder, data: np.ndarray, source: int): UIntStart(builder) UIntAddValue(builder, data.astype(np.uint32)[0]) value_position = UIntEnd(builder) @@ -136,7 +136,7 @@ def _serialise_uint(builder, data, source): LogData.LogDataAddValueType(builder, Value.UInt) -def _serialise_long(builder, data, source): +def _serialise_long(builder: flatbuffers.Builder, data: np.ndarray, source: int): LongStart(builder) LongAddValue(builder, data.astype(np.int64)[0]) value_position = LongEnd(builder) @@ -146,7 +146,7 @@ def _serialise_long(builder, data, source): LogData.LogDataAddValueType(builder, Value.Long) -def _serialise_ulong(builder, data, source): +def _serialise_ulong(builder: flatbuffers.Builder, data: np.ndarray, source: int): ULongStart(builder) ULongAddValue(builder, data.astype(np.uint64)[0]) value_position = 
ULongEnd(builder) @@ -156,7 +156,7 @@ def _serialise_ulong(builder, data, source): LogData.LogDataAddValueType(builder, Value.ULong) -def _serialise_float(builder, data, source): +def _serialise_float(builder: flatbuffers.Builder, data: np.ndarray, source: int): FloatStart(builder) FloatAddValue(builder, data.astype(np.float64)[0]) value_position = FloatEnd(builder) @@ -166,7 +166,7 @@ def _serialise_float(builder, data, source): LogData.LogDataAddValueType(builder, Value.Float) -def _serialise_double(builder, data, source): +def _serialise_double(builder: flatbuffers.Builder, data: np.ndarray, source: int): DoubleStart(builder) DoubleAddValue(builder, data.astype(np.float64)[0]) value_position = DoubleEnd(builder) @@ -176,7 +176,7 @@ def _serialise_double(builder, data, source): LogData.LogDataAddValueType(builder, Value.Double) -def _serialise_string(builder, data, source): +def _serialise_string(builder: flatbuffers.Builder, data: np.ndarray, source: int): StringStart(builder) StringAddValue(builder, data.astype(np.unicode_)[0]) value_position = StringEnd(builder) From fc076a34b8bb010f79556b026a4eb6685c5047a6 Mon Sep 17 00:00:00 2001 From: Matthew D Jones Date: Fri, 27 Mar 2020 18:03:36 +0000 Subject: [PATCH 058/363] Support setting source field --- streaming_data_types/logdata_f142.py | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/streaming_data_types/logdata_f142.py b/streaming_data_types/logdata_f142.py index 15a9d82..27204c7 100644 --- a/streaming_data_types/logdata_f142.py +++ b/streaming_data_types/logdata_f142.py @@ -70,9 +70,9 @@ def _complete_buffer(builder, timestamp_unix_ns: int) -> bytearray: return buff -def _setup_builder() -> Tuple[flatbuffers.Builder, int]: +def _setup_builder(source_name: str) -> Tuple[flatbuffers.Builder, int]: builder = flatbuffers.Builder(1024) - source = builder.CreateString("Forwarder-Python") + source = builder.CreateString(source_name) return builder, source @@ -208,16 +208,19 @@ def 
_ensure_data_is_numpy_type(data: Any) -> np.ndarray: return data -def serialise_f142(data: Any, timestamp_unix_ns: int = 0) -> bytearray: +def serialise_f142( + data: Any, source_name: str, timestamp_unix_ns: int = 0 +) -> bytearray: """ Serialise data and corresponding timestamp as an f142 Flatbuffer message. Should automagically use a sensible type for data in the message, but if in doubt pass data in as a numpy ndarray of a carefully chosen dtype. :param data: only scalar value currently supported; if ndarray then ndim must be 0 + :param source_name: name of the data source :param timestamp_unix_ns: timestamp corresponding to data, e.g. when data was measured, in nanoseconds """ - builder, source = _setup_builder() + builder, source = _setup_builder(source_name) data = _ensure_data_is_numpy_type(data) From 185a9902b3d54ac482624c05c1e98f4ac7b8e6c6 Mon Sep 17 00:00:00 2001 From: Matthew D Jones Date: Fri, 27 Mar 2020 18:08:41 +0000 Subject: [PATCH 059/363] Add tests for f142 --- tests/test_f142.py | 30 ++++++++++++++++++++++++++++++ 1 file changed, 30 insertions(+) create mode 100644 tests/test_f142.py diff --git a/tests/test_f142.py b/tests/test_f142.py new file mode 100644 index 0000000..7756e8d --- /dev/null +++ b/tests/test_f142.py @@ -0,0 +1,30 @@ +import pytest +from streaming_data_types.logdata_f142 import serialise_f142, deserialise_f142 + + +class TestSerialisationf142: + original_entry = { + "source_name": "some_source", + "data": 578214, + "timestamp_unix_ns": 1585332414000000000, + } + + def test_serialises_and_deserialises_f142_message_correctly(self): + buf = serialise_f142(**self.original_entry) + deserialised_tuple = deserialise_f142(buf) + + assert deserialised_tuple.source_name == self.original_entry["source_name"] + assert deserialised_tuple.data == self.original_entry["data"] + assert ( + deserialised_tuple.timestamp_unix_ns + == self.original_entry["timestamp_unix_ns"] + ) + + def test_if_buffer_has_wrong_id_then_throws(self): + buf = 
serialise_f142(**self.original_entry) + + # Manually hack the id + buf[4:8] = b"1234" + + with pytest.raises(RuntimeError): + deserialise_f142(buf) From 48770640a1efb85630abdcbcb297bc740f8a6b92 Mon Sep 17 00:00:00 2001 From: Matthew D Jones Date: Fri, 27 Mar 2020 19:25:39 +0000 Subject: [PATCH 060/363] Fixes to scalar type handling --- streaming_data_types/logdata_f142.py | 98 +++++++++++++++------------- tests/test_f142.py | 2 +- 2 files changed, 55 insertions(+), 45 deletions(-) diff --git a/streaming_data_types/logdata_f142.py b/streaming_data_types/logdata_f142.py index 27204c7..15fb3fa 100644 --- a/streaming_data_types/logdata_f142.py +++ b/streaming_data_types/logdata_f142.py @@ -56,8 +56,13 @@ StringAddValue, StringEnd, ) +from streaming_data_types.utils import check_schema_identifier import numpy as np -from typing import Any, Tuple +from typing import Any, Tuple, NamedTuple +from collections import namedtuple + + +FILE_IDENTIFIER = b"f142" def _complete_buffer(builder, timestamp_unix_ns: int) -> bytearray: @@ -65,8 +70,7 @@ def _complete_buffer(builder, timestamp_unix_ns: int) -> bytearray: log_msg = LogData.LogDataEnd(builder) builder.Finish(log_msg) buff = builder.Output() - file_identifier = b"f142" - buff[4:8] = file_identifier + buff[4:8] = FILE_IDENTIFIER return buff @@ -78,7 +82,7 @@ def _setup_builder(source_name: str) -> Tuple[flatbuffers.Builder, int]: def _serialise_byte(builder: flatbuffers.Builder, data: np.ndarray, source: int): ByteStart(builder) - ByteAddValue(builder, data.astype(np.byte)[0]) + ByteAddValue(builder, data.item()) value_position = ByteEnd(builder) LogData.LogDataStart(builder) LogData.LogDataAddSourceName(builder, source) @@ -88,7 +92,7 @@ def _serialise_byte(builder: flatbuffers.Builder, data: np.ndarray, source: int) def _serialise_ubyte(builder: flatbuffers.Builder, data: np.ndarray, source: int): UByteStart(builder) - UByteAddValue(builder, data.astype(np.ubyte)[0]) + UByteAddValue(builder, data.item()) value_position = 
UByteEnd(builder) LogData.LogDataStart(builder) LogData.LogDataAddSourceName(builder, source) @@ -98,7 +102,7 @@ def _serialise_ubyte(builder: flatbuffers.Builder, data: np.ndarray, source: int def _serialise_short(builder: flatbuffers.Builder, data: np.ndarray, source: int): ShortStart(builder) - ShortAddValue(builder, data.astype(np.int16)[0]) + ShortAddValue(builder, data.item()) value_position = ShortEnd(builder) LogData.LogDataStart(builder) LogData.LogDataAddSourceName(builder, source) @@ -108,7 +112,7 @@ def _serialise_short(builder: flatbuffers.Builder, data: np.ndarray, source: int def _serialise_ushort(builder: flatbuffers.Builder, data: np.ndarray, source: int): UShortStart(builder) - UShortAddValue(builder, data.astype(np.uint16)[0]) + UShortAddValue(builder, data.item()) value_position = UShortEnd(builder) LogData.LogDataStart(builder) LogData.LogDataAddSourceName(builder, source) @@ -118,7 +122,7 @@ def _serialise_ushort(builder: flatbuffers.Builder, data: np.ndarray, source: in def _serialise_int(builder: flatbuffers.Builder, data: np.ndarray, source: int): IntStart(builder) - IntAddValue(builder, data.astype(np.int32)[0]) + IntAddValue(builder, data.item()) value_position = IntEnd(builder) LogData.LogDataStart(builder) LogData.LogDataAddSourceName(builder, source) @@ -128,7 +132,7 @@ def _serialise_int(builder: flatbuffers.Builder, data: np.ndarray, source: int): def _serialise_uint(builder: flatbuffers.Builder, data: np.ndarray, source: int): UIntStart(builder) - UIntAddValue(builder, data.astype(np.uint32)[0]) + UIntAddValue(builder, data.item()) value_position = UIntEnd(builder) LogData.LogDataStart(builder) LogData.LogDataAddSourceName(builder, source) @@ -138,7 +142,7 @@ def _serialise_uint(builder: flatbuffers.Builder, data: np.ndarray, source: int) def _serialise_long(builder: flatbuffers.Builder, data: np.ndarray, source: int): LongStart(builder) - LongAddValue(builder, data.astype(np.int64)[0]) + LongAddValue(builder, data.item()) 
value_position = LongEnd(builder) LogData.LogDataStart(builder) LogData.LogDataAddSourceName(builder, source) @@ -148,7 +152,7 @@ def _serialise_long(builder: flatbuffers.Builder, data: np.ndarray, source: int) def _serialise_ulong(builder: flatbuffers.Builder, data: np.ndarray, source: int): ULongStart(builder) - ULongAddValue(builder, data.astype(np.uint64)[0]) + ULongAddValue(builder, data.item()) value_position = ULongEnd(builder) LogData.LogDataStart(builder) LogData.LogDataAddSourceName(builder, source) @@ -158,7 +162,7 @@ def _serialise_ulong(builder: flatbuffers.Builder, data: np.ndarray, source: int def _serialise_float(builder: flatbuffers.Builder, data: np.ndarray, source: int): FloatStart(builder) - FloatAddValue(builder, data.astype(np.float64)[0]) + FloatAddValue(builder, data.item()) value_position = FloatEnd(builder) LogData.LogDataStart(builder) LogData.LogDataAddSourceName(builder, source) @@ -168,7 +172,7 @@ def _serialise_float(builder: flatbuffers.Builder, data: np.ndarray, source: int def _serialise_double(builder: flatbuffers.Builder, data: np.ndarray, source: int): DoubleStart(builder) - DoubleAddValue(builder, data.astype(np.float64)[0]) + DoubleAddValue(builder, data.item()) value_position = DoubleEnd(builder) LogData.LogDataStart(builder) LogData.LogDataAddSourceName(builder, source) @@ -178,7 +182,7 @@ def _serialise_double(builder: flatbuffers.Builder, data: np.ndarray, source: in def _serialise_string(builder: flatbuffers.Builder, data: np.ndarray, source: int): StringStart(builder) - StringAddValue(builder, data.astype(np.unicode_)[0]) + StringAddValue(builder, data.item()) value_position = StringEnd(builder) LogData.LogDataStart(builder) LogData.LogDataAddSourceName(builder, source) @@ -187,58 +191,64 @@ def _serialise_string(builder: flatbuffers.Builder, data: np.ndarray, source: in _map_scalar_type_to_serialiser = { - np.byte: _serialise_byte, - np.ubyte: _serialise_ubyte, - np.int8: _serialise_short, - np.int16: _serialise_short, 
- np.int32: _serialise_int, - np.int64: _serialise_long, - np.uint8: _serialise_ushort, - np.uint16: _serialise_ushort, - np.uint32: _serialise_uint, - np.uint64: _serialise_ulong, - np.float32: _serialise_float, - np.float64: _serialise_double, + np.dtype("byte"): _serialise_byte, + np.dtype("ubyte"): _serialise_ubyte, + np.dtype("int8"): _serialise_short, + np.dtype("int16"): _serialise_short, + np.dtype("int32"): _serialise_int, + np.dtype("int64"): _serialise_long, + np.dtype("uint8"): _serialise_ushort, + np.dtype("uint16"): _serialise_ushort, + np.dtype("uint32"): _serialise_uint, + np.dtype("uint64"): _serialise_ulong, + np.dtype("float32"): _serialise_float, + np.dtype("float64"): _serialise_double, } -def _ensure_data_is_numpy_type(data: Any) -> np.ndarray: - if not isinstance(data, np.ndarray): - return np.array(data) - return data - - def serialise_f142( - data: Any, source_name: str, timestamp_unix_ns: int = 0 + value: Any, source_name: str, timestamp_unix_ns: int = 0 ) -> bytearray: """ - Serialise data and corresponding timestamp as an f142 Flatbuffer message. - Should automagically use a sensible type for data in the message, but if - in doubt pass data in as a numpy ndarray of a carefully chosen dtype. + Serialise value and corresponding timestamp as an f142 Flatbuffer message. + Should automagically use a sensible type for value in the message, but if + in doubt pass value in as a numpy ndarray of a carefully chosen dtype. - :param data: only scalar value currently supported; if ndarray then ndim must be 0 + :param value: only scalar value currently supported; if ndarray then ndim must be 0 :param source_name: name of the data source - :param timestamp_unix_ns: timestamp corresponding to data, e.g. when data was measured, in nanoseconds + :param timestamp_unix_ns: timestamp corresponding to value, e.g. 
when value was measured, in nanoseconds """ builder, source = _setup_builder(source_name) - data = _ensure_data_is_numpy_type(data) + value = np.array(value) - if data.ndim != 0: + if value.ndim != 0: raise NotImplementedError("serialise_f142 does not yet support array types") # We can use a dictionary to map most numpy types to one of the types defined in the flatbuffer schema # but we have to handle strings separately as there are many subtypes - if np.issubdtype(data.dtype, np.unicode_): - _serialise_string(builder, data, source) + if np.issubdtype(value.dtype, np.unicode_): + _serialise_string(builder, value, source) else: try: - _map_scalar_type_to_serialiser[data.dtype](builder, data, source) + _map_scalar_type_to_serialiser[value.dtype](builder, value, source) except KeyError: # There are a few numpy types we don't try to handle, for example complex numbers raise Exception( - f"Cannot serialise data of type {data.dtype}, must use one of " + f"Cannot serialise data of type {value.dtype}, must use one of " f"{list(_map_scalar_type_to_serialiser.keys()).append(np.unicode_)}" ) return _complete_buffer(builder, timestamp_unix_ns) + + +def deserialise_f142(buffer: bytearray) -> NamedTuple: + check_schema_identifier(buffer, FILE_IDENTIFIER) + + log_data = LogData.LogData.GetRootAsLogData(buffer, 0) + source_name = log_data.SourceName() if log_data.SourceName() else b"" + value = log_data.Value().GetVectorAsNumpy() + timestamp = log_data.Timestamp() + + LogDataInfo = namedtuple("LogDataInfo", "value source_name timestamp") + return LogDataInfo(value, source_name.decode(), timestamp) diff --git a/tests/test_f142.py b/tests/test_f142.py index 7756e8d..b1b94ae 100644 --- a/tests/test_f142.py +++ b/tests/test_f142.py @@ -5,7 +5,7 @@ class TestSerialisationf142: original_entry = { "source_name": "some_source", - "data": 578214, + "value": 578214, "timestamp_unix_ns": 1585332414000000000, } From 10b8ad7fb857dbed0ab64b0c4d1430eb01c93626 Mon Sep 17 00:00:00 2001 From: 
Matthew D Jones Date: Fri, 27 Mar 2020 19:45:06 +0000 Subject: [PATCH 061/363] Fixes to scalar type deserialisation --- streaming_data_types/logdata_f142.py | 35 ++++++++++++++++++++++++++-- tests/test_f142.py | 2 +- 2 files changed, 34 insertions(+), 3 deletions(-) diff --git a/streaming_data_types/logdata_f142.py b/streaming_data_types/logdata_f142.py index 15fb3fa..c3245fd 100644 --- a/streaming_data_types/logdata_f142.py +++ b/streaming_data_types/logdata_f142.py @@ -2,56 +2,67 @@ from streaming_data_types.fbschemas.logdata_f142 import LogData from streaming_data_types.fbschemas.logdata_f142.Value import Value from streaming_data_types.fbschemas.logdata_f142.UByte import ( + UByte, UByteStart, UByteAddValue, UByteEnd, ) from streaming_data_types.fbschemas.logdata_f142.Byte import ( + Byte, ByteStart, ByteAddValue, ByteEnd, ) from streaming_data_types.fbschemas.logdata_f142.UShort import ( + UShort, UShortStart, UShortAddValue, UShortEnd, ) from streaming_data_types.fbschemas.logdata_f142.Short import ( + Short, ShortStart, ShortAddValue, ShortEnd, ) from streaming_data_types.fbschemas.logdata_f142.UInt import ( + UInt, UIntStart, UIntAddValue, UIntEnd, ) from streaming_data_types.fbschemas.logdata_f142.Int import ( + Int, IntStart, IntAddValue, IntEnd, ) from streaming_data_types.fbschemas.logdata_f142.ULong import ( + ULong, ULongStart, ULongAddValue, ULongEnd, ) from streaming_data_types.fbschemas.logdata_f142.Long import ( + Long, LongStart, LongAddValue, LongEnd, ) from streaming_data_types.fbschemas.logdata_f142.Float import ( + Float, FloatStart, FloatAddValue, FloatEnd, ) from streaming_data_types.fbschemas.logdata_f142.Double import ( + Double, DoubleStart, DoubleAddValue, DoubleEnd, ) from streaming_data_types.fbschemas.logdata_f142.String import ( + String, StringStart, StringAddValue, StringEnd, @@ -242,13 +253,33 @@ def serialise_f142( return _complete_buffer(builder, timestamp_unix_ns) +map_fb_enum_to_type = { + Value.Byte: Byte, + Value.UByte: 
UByte, + Value.Short: Short, + Value.UShort: UShort, + Value.Int: Int, + Value.UInt: UInt, + Value.Long: Long, + Value.ULong: ULong, + Value.Float: Float, + Value.Double: Double, + Value.String: String, +} + + def deserialise_f142(buffer: bytearray) -> NamedTuple: check_schema_identifier(buffer, FILE_IDENTIFIER) log_data = LogData.LogData.GetRootAsLogData(buffer, 0) source_name = log_data.SourceName() if log_data.SourceName() else b"" - value = log_data.Value().GetVectorAsNumpy() + + value_offset = log_data.Value() + value_fb = map_fb_enum_to_type[log_data.ValueType()]() + value_fb.Init(value_offset.Bytes, value_offset.Pos) + value = np.array(value_fb.Value()) + timestamp = log_data.Timestamp() - LogDataInfo = namedtuple("LogDataInfo", "value source_name timestamp") + LogDataInfo = namedtuple("LogDataInfo", "value source_name timestamp_unix_ns") return LogDataInfo(value, source_name.decode(), timestamp) diff --git a/tests/test_f142.py b/tests/test_f142.py index b1b94ae..020a77b 100644 --- a/tests/test_f142.py +++ b/tests/test_f142.py @@ -14,7 +14,7 @@ def test_serialises_and_deserialises_f142_message_correctly(self): deserialised_tuple = deserialise_f142(buf) assert deserialised_tuple.source_name == self.original_entry["source_name"] - assert deserialised_tuple.data == self.original_entry["data"] + assert deserialised_tuple.value == self.original_entry["value"] assert ( deserialised_tuple.timestamp_unix_ns == self.original_entry["timestamp_unix_ns"] From 236919a099121a2057cbf97775ebce2c1b725ab1 Mon Sep 17 00:00:00 2001 From: Matthew D Jones Date: Fri, 27 Mar 2020 19:51:05 +0000 Subject: [PATCH 062/363] Deserialisation of array types --- streaming_data_types/logdata_f142.py | 27 ++++++++++++++++++++++++++- 1 file changed, 26 insertions(+), 1 deletion(-) diff --git a/streaming_data_types/logdata_f142.py b/streaming_data_types/logdata_f142.py index c3245fd..d1097db 100644 --- a/streaming_data_types/logdata_f142.py +++ b/streaming_data_types/logdata_f142.py @@ -67,6 
+67,17 @@ StringAddValue, StringEnd, ) +from streaming_data_types.fbschemas.logdata_f142.ArrayByte import ArrayByte +from streaming_data_types.fbschemas.logdata_f142.ArrayUByte import ArrayUByte +from streaming_data_types.fbschemas.logdata_f142.ArrayShort import ArrayShort +from streaming_data_types.fbschemas.logdata_f142.ArrayUShort import ArrayUShort +from streaming_data_types.fbschemas.logdata_f142.ArrayInt import ArrayInt +from streaming_data_types.fbschemas.logdata_f142.ArrayUInt import ArrayUInt +from streaming_data_types.fbschemas.logdata_f142.ArrayLong import ArrayLong +from streaming_data_types.fbschemas.logdata_f142.ArrayULong import ArrayULong +from streaming_data_types.fbschemas.logdata_f142.ArrayFloat import ArrayFloat +from streaming_data_types.fbschemas.logdata_f142.ArrayDouble import ArrayDouble +from streaming_data_types.fbschemas.logdata_f142.ArrayString import ArrayString from streaming_data_types.utils import check_schema_identifier import numpy as np from typing import Any, Tuple, NamedTuple @@ -265,6 +276,17 @@ def serialise_f142( Value.Float: Float, Value.Double: Double, Value.String: String, + Value.ArrayByte: ArrayByte, + Value.ArrayUByte: ArrayUByte, + Value.ArrayShort: ArrayShort, + Value.ArrayUShort: ArrayUShort, + Value.ArrayInt: ArrayInt, + Value.ArrayUInt: ArrayUInt, + Value.ArrayLong: ArrayLong, + Value.ArrayULong: ArrayULong, + Value.ArrayFloat: ArrayFloat, + Value.ArrayDouble: ArrayDouble, + Value.ArrayString: ArrayString, } @@ -277,7 +299,10 @@ def deserialise_f142(buffer: bytearray) -> NamedTuple: value_offset = log_data.Value() value_fb = map_fb_enum_to_type[log_data.ValueType()]() value_fb.Init(value_offset.Bytes, value_offset.Pos) - value = np.array(value_fb.Value()) + try: + value = value_fb.ValueAsNumpy() + except AttributeError: + value = np.array(value_fb.Value()) timestamp = log_data.Timestamp() From 3c80b30de2c69c6573f3dcf5cc7ac1a6054e24e3 Mon Sep 17 00:00:00 2001 From: Matthew D Jones Date: Fri, 27 Mar 2020 20:00:40 
+0000 Subject: [PATCH 063/363] Test and fixes for f142 string values --- streaming_data_types/logdata_f142.py | 12 ++++++++++-- tests/test_f142.py | 13 +++++++++++++ 2 files changed, 23 insertions(+), 2 deletions(-) diff --git a/streaming_data_types/logdata_f142.py b/streaming_data_types/logdata_f142.py index d1097db..5783d46 100644 --- a/streaming_data_types/logdata_f142.py +++ b/streaming_data_types/logdata_f142.py @@ -203,8 +203,9 @@ def _serialise_double(builder: flatbuffers.Builder, data: np.ndarray, source: in def _serialise_string(builder: flatbuffers.Builder, data: np.ndarray, source: int): + string_offset = builder.CreateString(data.item()) StringStart(builder) - StringAddValue(builder, data.item()) + StringAddValue(builder, string_offset) value_position = StringEnd(builder) LogData.LogDataStart(builder) LogData.LogDataAddSourceName(builder, source) @@ -249,7 +250,9 @@ def serialise_f142( # We can use a dictionary to map most numpy types to one of the types defined in the flatbuffer schema # but we have to handle strings separately as there are many subtypes - if np.issubdtype(value.dtype, np.unicode_): + if np.issubdtype(value.dtype, np.unicode_) or np.issubdtype( + value.dtype, np.string_ + ): _serialise_string(builder, value, source) else: try: @@ -304,6 +307,11 @@ def deserialise_f142(buffer: bytearray) -> NamedTuple: except AttributeError: value = np.array(value_fb.Value()) + if np.issubdtype(value.dtype, np.unicode_) or np.issubdtype( + value.dtype, np.string_ + ): + value = value.item().decode() + timestamp = log_data.Timestamp() LogDataInfo = namedtuple("LogDataInfo", "value source_name timestamp_unix_ns") diff --git a/tests/test_f142.py b/tests/test_f142.py index 020a77b..334de27 100644 --- a/tests/test_f142.py +++ b/tests/test_f142.py @@ -20,6 +20,19 @@ def test_serialises_and_deserialises_f142_message_correctly(self): == self.original_entry["timestamp_unix_ns"] ) + def test_serialises_and_deserialises_string_f142_message_correctly(self): + 
string_log = { + "source_name": "some_source", + "value": "some_string", + "timestamp_unix_ns": 1585332414000000000, + } + buf = serialise_f142(**string_log) + deserialised_tuple = deserialise_f142(buf) + + assert deserialised_tuple.source_name == string_log["source_name"] + assert deserialised_tuple.value == string_log["value"] + assert deserialised_tuple.timestamp_unix_ns == string_log["timestamp_unix_ns"] + def test_if_buffer_has_wrong_id_then_throws(self): buf = serialise_f142(**self.original_entry) From e53f33c88d7480c1b33488d74247e9a2e0f6d278 Mon Sep 17 00:00:00 2001 From: Matthew D Jones Date: Fri, 27 Mar 2020 20:07:26 +0000 Subject: [PATCH 064/363] Add tests for other value types --- tests/test_f142.py | 40 +++++++++++++++++++++++++++++++++++++++- 1 file changed, 39 insertions(+), 1 deletion(-) diff --git a/tests/test_f142.py b/tests/test_f142.py index 334de27..95e13ee 100644 --- a/tests/test_f142.py +++ b/tests/test_f142.py @@ -1,4 +1,5 @@ import pytest +import numpy as np from streaming_data_types.logdata_f142 import serialise_f142, deserialise_f142 @@ -9,7 +10,7 @@ class TestSerialisationf142: "timestamp_unix_ns": 1585332414000000000, } - def test_serialises_and_deserialises_f142_message_correctly(self): + def test_serialises_and_deserialises_integer_f142_message_correctly(self): buf = serialise_f142(**self.original_entry) deserialised_tuple = deserialise_f142(buf) @@ -20,6 +21,32 @@ def test_serialises_and_deserialises_f142_message_correctly(self): == self.original_entry["timestamp_unix_ns"] ) + def test_serialises_and_deserialises_float_f142_message_correctly(self): + float_log = { + "source_name": "some_source", + "value": 1.234, + "timestamp_unix_ns": 1585332414000000000, + } + buf = serialise_f142(**float_log) + deserialised_tuple = deserialise_f142(buf) + + assert deserialised_tuple.source_name == float_log["source_name"] + assert deserialised_tuple.value == float_log["value"] + assert deserialised_tuple.timestamp_unix_ns == 
float_log["timestamp_unix_ns"] + + def test_serialises_and_deserialises_scalar_ndarray_f142_message_correctly(self): + numpy_log = { + "source_name": "some_source", + "value": np.array(42), + "timestamp_unix_ns": 1585332414000000000, + } + buf = serialise_f142(**numpy_log) + deserialised_tuple = deserialise_f142(buf) + + assert deserialised_tuple.source_name == numpy_log["source_name"] + assert deserialised_tuple.value == np.array(numpy_log["value"]) + assert deserialised_tuple.timestamp_unix_ns == numpy_log["timestamp_unix_ns"] + def test_serialises_and_deserialises_string_f142_message_correctly(self): string_log = { "source_name": "some_source", @@ -33,6 +60,17 @@ def test_serialises_and_deserialises_string_f142_message_correctly(self): assert deserialised_tuple.value == string_log["value"] assert deserialised_tuple.timestamp_unix_ns == string_log["timestamp_unix_ns"] + def test_raises_not_implemented_error_when_trying_to_serialise_array(self): + array_log = { + "source_name": "some_source", + "value": [1, 2, 3], + "timestamp_unix_ns": 1585332414000000000, + } + try: + serialise_f142(**array_log) + except NotImplementedError: + pass + def test_if_buffer_has_wrong_id_then_throws(self): buf = serialise_f142(**self.original_entry) From 1a8d12e4ebef9ce676fa7389de209e42e3f93c55 Mon Sep 17 00:00:00 2001 From: Matthew D Jones Date: Fri, 27 Mar 2020 20:17:58 +0000 Subject: [PATCH 065/363] Add test for trying to serialise complex number --- streaming_data_types/logdata_f142.py | 2 +- tests/test_f142.py | 13 +++++++++++++ 2 files changed, 14 insertions(+), 1 deletion(-) diff --git a/streaming_data_types/logdata_f142.py b/streaming_data_types/logdata_f142.py index 5783d46..9bea17a 100644 --- a/streaming_data_types/logdata_f142.py +++ b/streaming_data_types/logdata_f142.py @@ -259,7 +259,7 @@ def serialise_f142( _map_scalar_type_to_serialiser[value.dtype](builder, value, source) except KeyError: # There are a few numpy types we don't try to handle, for example complex 
numbers - raise Exception( + raise NotImplementedError( f"Cannot serialise data of type {value.dtype}, must use one of " f"{list(_map_scalar_type_to_serialiser.keys()).append(np.unicode_)}" ) diff --git a/tests/test_f142.py b/tests/test_f142.py index 95e13ee..37846b5 100644 --- a/tests/test_f142.py +++ b/tests/test_f142.py @@ -71,6 +71,19 @@ def test_raises_not_implemented_error_when_trying_to_serialise_array(self): except NotImplementedError: pass + def test_raises_not_implemented_error_when_trying_to_serialise_numpy_complex_number_type( + self, + ): + complex_log = { + "source_name": "some_source", + "value": np.complex(3, 4), + "timestamp_unix_ns": 1585332414000000000, + } + try: + serialise_f142(**complex_log) + except NotImplementedError: + pass + def test_if_buffer_has_wrong_id_then_throws(self): buf = serialise_f142(**self.original_entry) From 00a57f7f75a23b47e1aa62a026c800c8970c0a2b Mon Sep 17 00:00:00 2001 From: Matthew D Jones Date: Sun, 29 Mar 2020 12:48:46 +0100 Subject: [PATCH 066/363] Make type map private --- streaming_data_types/logdata_f142.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/streaming_data_types/logdata_f142.py b/streaming_data_types/logdata_f142.py index 9bea17a..5169ad1 100644 --- a/streaming_data_types/logdata_f142.py +++ b/streaming_data_types/logdata_f142.py @@ -267,7 +267,7 @@ def serialise_f142( return _complete_buffer(builder, timestamp_unix_ns) -map_fb_enum_to_type = { +_map_fb_enum_to_type = { Value.Byte: Byte, Value.UByte: UByte, Value.Short: Short, @@ -300,7 +300,7 @@ def deserialise_f142(buffer: bytearray) -> NamedTuple: source_name = log_data.SourceName() if log_data.SourceName() else b"" value_offset = log_data.Value() - value_fb = map_fb_enum_to_type[log_data.ValueType()]() + value_fb = _map_fb_enum_to_type[log_data.ValueType()]() value_fb.Init(value_offset.Bytes, value_offset.Pos) try: value = value_fb.ValueAsNumpy() From 267840aea87e3994105dff23db607b48d76916a6 Mon Sep 17 00:00:00 2001 From: 
Matthew D Jones Date: Mon, 30 Mar 2020 07:51:44 +0100 Subject: [PATCH 067/363] Use pytest.raises for expected exception --- tests/test_f142.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/tests/test_f142.py b/tests/test_f142.py index 37846b5..290197d 100644 --- a/tests/test_f142.py +++ b/tests/test_f142.py @@ -79,10 +79,8 @@ def test_raises_not_implemented_error_when_trying_to_serialise_numpy_complex_num "value": np.complex(3, 4), "timestamp_unix_ns": 1585332414000000000, } - try: + with pytest.raises(NotImplementedError): serialise_f142(**complex_log) - except NotImplementedError: - pass def test_if_buffer_has_wrong_id_then_throws(self): buf = serialise_f142(**self.original_entry) From 648fff07fba89ae92db92f0b69cf913c02db84be Mon Sep 17 00:00:00 2001 From: Matthew D Jones Date: Mon, 30 Mar 2020 09:13:49 +0100 Subject: [PATCH 068/363] Add tests for array serialisation and deserialisation --- tests/test_f142.py | 54 ++++++++++++++++++++++++++++++++++++++++------ 1 file changed, 48 insertions(+), 6 deletions(-) diff --git a/tests/test_f142.py b/tests/test_f142.py index 290197d..404a15f 100644 --- a/tests/test_f142.py +++ b/tests/test_f142.py @@ -60,16 +60,58 @@ def test_serialises_and_deserialises_string_f142_message_correctly(self): assert deserialised_tuple.value == string_log["value"] assert deserialised_tuple.timestamp_unix_ns == string_log["timestamp_unix_ns"] - def test_raises_not_implemented_error_when_trying_to_serialise_array(self): - array_log = { + def test_serialises_and_deserialises_native_list_correctly(self): + list_log = { "source_name": "some_source", "value": [1, 2, 3], "timestamp_unix_ns": 1585332414000000000, } - try: - serialise_f142(**array_log) - except NotImplementedError: - pass + buf = serialise_f142(**list_log) + deserialised_tuple = deserialise_f142(buf) + + assert deserialised_tuple.source_name == list_log["source_name"] + # Array values are output as numpy array + assert np.array_equal(deserialised_tuple.value, 
np.array(list_log["value"])) + assert deserialised_tuple.timestamp_unix_ns == list_log["timestamp_unix_ns"] + + def test_serialises_and_deserialises_numpy_array_integers_correctly(self): + array_log = { + "source_name": "some_source", + "value": np.array([1, 2, 3]), + "timestamp_unix_ns": 1585332414000000000, + } + buf = serialise_f142(**array_log) + deserialised_tuple = deserialise_f142(buf) + + assert deserialised_tuple.source_name == array_log["source_name"] + assert np.array_equal(deserialised_tuple.value, array_log["value"]) + assert deserialised_tuple.timestamp_unix_ns == array_log["timestamp_unix_ns"] + + def test_serialises_and_deserialises_numpy_array_floats_correctly(self): + array_log = { + "source_name": "some_source", + "value": np.array([1.1, 2.2, 3.3]), + "timestamp_unix_ns": 1585332414000000000, + } + buf = serialise_f142(**array_log) + deserialised_tuple = deserialise_f142(buf) + + assert deserialised_tuple.source_name == array_log["source_name"] + assert np.array_equal(deserialised_tuple.value, array_log["value"]) + assert deserialised_tuple.timestamp_unix_ns == array_log["timestamp_unix_ns"] + + def test_serialises_and_deserialises_numpy_array_strings_correctly(self): + array_log = { + "source_name": "some_source", + "value": np.array(["1", "2", "3"]), + "timestamp_unix_ns": 1585332414000000000, + } + buf = serialise_f142(**array_log) + deserialised_tuple = deserialise_f142(buf) + + assert deserialised_tuple.source_name == array_log["source_name"] + assert np.array_equal(deserialised_tuple.value, array_log["value"]) + assert deserialised_tuple.timestamp_unix_ns == array_log["timestamp_unix_ns"] def test_raises_not_implemented_error_when_trying_to_serialise_numpy_complex_number_type( self, From b90a879b0e3e38cd950f28f7d3bd5190012f062a Mon Sep 17 00:00:00 2001 From: Matthew D Jones Date: Mon, 30 Mar 2020 09:15:03 +0100 Subject: [PATCH 069/363] Avoid float equality comparison --- tests/test_f142.py | 2 +- 1 file changed, 1 insertion(+), 1 
deletion(-) diff --git a/tests/test_f142.py b/tests/test_f142.py index 404a15f..a6c03c4 100644 --- a/tests/test_f142.py +++ b/tests/test_f142.py @@ -97,7 +97,7 @@ def test_serialises_and_deserialises_numpy_array_floats_correctly(self): deserialised_tuple = deserialise_f142(buf) assert deserialised_tuple.source_name == array_log["source_name"] - assert np.array_equal(deserialised_tuple.value, array_log["value"]) + assert np.allclose(deserialised_tuple.value, array_log["value"]) assert deserialised_tuple.timestamp_unix_ns == array_log["timestamp_unix_ns"] def test_serialises_and_deserialises_numpy_array_strings_correctly(self): From 8952bd80e8cad79e733d04b1c4b9dec5ca5eb7b5 Mon Sep 17 00:00:00 2001 From: Matthew D Jones Date: Mon, 30 Mar 2020 12:48:06 +0100 Subject: [PATCH 070/363] Implement serialising string arrays --- streaming_data_types/logdata_f142.py | 98 ++++++++++++++++++++++++---- 1 file changed, 84 insertions(+), 14 deletions(-) diff --git a/streaming_data_types/logdata_f142.py b/streaming_data_types/logdata_f142.py index 5169ad1..40b83ad 100644 --- a/streaming_data_types/logdata_f142.py +++ b/streaming_data_types/logdata_f142.py @@ -67,6 +67,13 @@ StringAddValue, StringEnd, ) +from streaming_data_types.fbschemas.logdata_f142.ArrayString import ( + ArrayString, + ArrayStringStart, + ArrayStringAddValue, + ArrayStringEnd, + ArrayStringStartValueVector, +) from streaming_data_types.fbschemas.logdata_f142.ArrayByte import ArrayByte from streaming_data_types.fbschemas.logdata_f142.ArrayUByte import ArrayUByte from streaming_data_types.fbschemas.logdata_f142.ArrayShort import ArrayShort @@ -77,10 +84,9 @@ from streaming_data_types.fbschemas.logdata_f142.ArrayULong import ArrayULong from streaming_data_types.fbschemas.logdata_f142.ArrayFloat import ArrayFloat from streaming_data_types.fbschemas.logdata_f142.ArrayDouble import ArrayDouble -from streaming_data_types.fbschemas.logdata_f142.ArrayString import ArrayString from streaming_data_types.utils import 
check_schema_identifier import numpy as np -from typing import Any, Tuple, NamedTuple +from typing import Any, Tuple, NamedTuple, Callable, Dict, Union from collections import namedtuple @@ -213,6 +219,23 @@ def _serialise_string(builder: flatbuffers.Builder, data: np.ndarray, source: in LogData.LogDataAddValueType(builder, Value.String) +def _serialise_stringarray(builder: flatbuffers.Builder, data: np.ndarray, source: int): + string_offsets = [ + builder.CreateString(string_item) for string_item in reversed(data) + ] + ArrayStringStartValueVector(builder, len(data)) + for string_offset in string_offsets: + builder.PrependSOffsetTRelative(string_offset) + string_array_offset = builder.EndVector(len(data)) + ArrayStringStart(builder) + ArrayStringAddValue(builder, string_array_offset) + value_position = ArrayStringEnd(builder) + LogData.LogDataStart(builder) + LogData.LogDataAddSourceName(builder, source) + LogData.LogDataAddValue(builder, value_position) + LogData.LogDataAddValueType(builder, Value.ArrayString) + + _map_scalar_type_to_serialiser = { np.dtype("byte"): _serialise_byte, np.dtype("ubyte"): _serialise_ubyte, @@ -228,6 +251,21 @@ def _serialise_string(builder: flatbuffers.Builder, data: np.ndarray, source: in np.dtype("float64"): _serialise_double, } +_map_array_type_to_serialiser = { + # np.dtype("byte"): _serialise_bytearray, + # np.dtype("ubyte"): _serialise_ubytearray, + # np.dtype("int8"): _serialise_shortarray, + # np.dtype("int16"): _serialise_shortarray, + # np.dtype("int32"): _serialise_intarray, + # np.dtype("int64"): _serialise_longarray, + # np.dtype("uint8"): _serialise_ushortarray, + # np.dtype("uint16"): _serialise_ushortarray, + # np.dtype("uint32"): _serialise_uintarray, + # np.dtype("uint64"): _serialise_ulongarray, + # np.dtype("float32"): _serialise_floatarray, + # np.dtype("float64"): _serialise_doublearray, +} + def serialise_f142( value: Any, source_name: str, timestamp_unix_ns: int = 0 @@ -242,21 +280,42 @@ def serialise_f142( 
:param timestamp_unix_ns: timestamp corresponding to value, e.g. when value was measured, in nanoseconds """ builder, source = _setup_builder(source_name) - value = np.array(value) - if value.ndim != 0: - raise NotImplementedError("serialise_f142 does not yet support array types") + if value.ndim == 0: + _serialise_value( + builder, source, value, _serialise_string, _map_scalar_type_to_serialiser + ) + elif value.ndim == 1: + _serialise_value( + builder, + source, + value, + _serialise_stringarray, + _map_array_type_to_serialiser, + ) + else: + raise NotImplementedError("f142 only supports scalars or 1D array values") + + return _complete_buffer(builder, timestamp_unix_ns) + +def _serialise_value( + builder: flatbuffers.Builder, + source: int, + value: Any, + string_serialiser: Callable, + serilisers_map: Dict, +): # We can use a dictionary to map most numpy types to one of the types defined in the flatbuffer schema # but we have to handle strings separately as there are many subtypes if np.issubdtype(value.dtype, np.unicode_) or np.issubdtype( value.dtype, np.string_ ): - _serialise_string(builder, value, source) + string_serialiser(builder, value, source) else: try: - _map_scalar_type_to_serialiser[value.dtype](builder, value, source) + serilisers_map[value.dtype](builder, value, source) except KeyError: # There are a few numpy types we don't try to handle, for example complex numbers raise NotImplementedError( @@ -264,8 +323,6 @@ def serialise_f142( f"{list(_map_scalar_type_to_serialiser.keys()).append(np.unicode_)}" ) - return _complete_buffer(builder, timestamp_unix_ns) - _map_fb_enum_to_type = { Value.Byte: Byte, @@ -293,6 +350,15 @@ def serialise_f142( } +def _decode_if_scalar_string(value: np.ndarray) -> Union[str, np.ndarray]: + if value.ndim == 0 and ( + np.issubdtype(value.dtype, np.unicode_) + or np.issubdtype(value.dtype, np.string_) + ): + return value.item().decode() + return value + + def deserialise_f142(buffer: bytearray) -> NamedTuple: 
check_schema_identifier(buffer, FILE_IDENTIFIER) @@ -305,12 +371,16 @@ def deserialise_f142(buffer: bytearray) -> NamedTuple: try: value = value_fb.ValueAsNumpy() except AttributeError: - value = np.array(value_fb.Value()) + try: + value = np.array(value_fb.Value()) + except TypeError: + # Must have an array of strings, which for some reason doesn't get a generated ValueAsNumpy method + # So we'll have to extract each element from the buffer manually and construct our own numpy array + value = np.array( + [str(value_fb.Value(n), "utf-8") for n in range(value_fb.ValueLength())] + ) - if np.issubdtype(value.dtype, np.unicode_) or np.issubdtype( - value.dtype, np.string_ - ): - value = value.item().decode() + value = _decode_if_scalar_string(value) timestamp = log_data.Timestamp() From cfcf78f1e4df7fe8c8353d1b4342890bc1884692 Mon Sep 17 00:00:00 2001 From: Matthew D Jones Date: Mon, 30 Mar 2020 13:15:50 +0100 Subject: [PATCH 071/363] Implement serialising numerical arrays --- streaming_data_types/logdata_f142.py | 244 ++++++++++++++++++++++++--- tests/test_f142.py | 15 ++ 2 files changed, 237 insertions(+), 22 deletions(-) diff --git a/streaming_data_types/logdata_f142.py b/streaming_data_types/logdata_f142.py index 40b83ad..8f937a9 100644 --- a/streaming_data_types/logdata_f142.py +++ b/streaming_data_types/logdata_f142.py @@ -7,60 +7,130 @@ UByteAddValue, UByteEnd, ) +from streaming_data_types.fbschemas.logdata_f142.ArrayUByte import ( + ArrayUByte, + ArrayUByteStart, + ArrayUByteAddValue, + ArrayUByteEnd, + ArrayUByteStartValueVector, +) from streaming_data_types.fbschemas.logdata_f142.Byte import ( Byte, ByteStart, ByteAddValue, ByteEnd, ) +from streaming_data_types.fbschemas.logdata_f142.ArrayByte import ( + ArrayByte, + ArrayByteStart, + ArrayByteAddValue, + ArrayByteEnd, + ArrayByteStartValueVector, +) from streaming_data_types.fbschemas.logdata_f142.UShort import ( UShort, UShortStart, UShortAddValue, UShortEnd, ) +from 
streaming_data_types.fbschemas.logdata_f142.ArrayUShort import ( + ArrayUShort, + ArrayUShortStart, + ArrayUShortAddValue, + ArrayUShortEnd, + ArrayUShortStartValueVector, +) from streaming_data_types.fbschemas.logdata_f142.Short import ( Short, ShortStart, ShortAddValue, ShortEnd, ) +from streaming_data_types.fbschemas.logdata_f142.ArrayShort import ( + ArrayShort, + ArrayShortStart, + ArrayShortAddValue, + ArrayShortEnd, + ArrayShortStartValueVector, +) from streaming_data_types.fbschemas.logdata_f142.UInt import ( UInt, UIntStart, UIntAddValue, UIntEnd, ) +from streaming_data_types.fbschemas.logdata_f142.ArrayUInt import ( + ArrayUInt, + ArrayUIntStart, + ArrayUIntAddValue, + ArrayUIntEnd, + ArrayUIntStartValueVector, +) from streaming_data_types.fbschemas.logdata_f142.Int import ( Int, IntStart, IntAddValue, IntEnd, ) +from streaming_data_types.fbschemas.logdata_f142.ArrayInt import ( + ArrayInt, + ArrayIntStart, + ArrayIntAddValue, + ArrayIntEnd, + ArrayIntStartValueVector, +) from streaming_data_types.fbschemas.logdata_f142.ULong import ( ULong, ULongStart, ULongAddValue, ULongEnd, ) +from streaming_data_types.fbschemas.logdata_f142.ArrayULong import ( + ArrayULong, + ArrayULongStart, + ArrayULongAddValue, + ArrayULongEnd, + ArrayULongStartValueVector, +) from streaming_data_types.fbschemas.logdata_f142.Long import ( Long, LongStart, LongAddValue, LongEnd, ) +from streaming_data_types.fbschemas.logdata_f142.ArrayLong import ( + ArrayLong, + ArrayLongStart, + ArrayLongAddValue, + ArrayLongEnd, + ArrayLongStartValueVector, +) from streaming_data_types.fbschemas.logdata_f142.Float import ( Float, FloatStart, FloatAddValue, FloatEnd, ) +from streaming_data_types.fbschemas.logdata_f142.ArrayFloat import ( + ArrayFloat, + ArrayFloatStart, + ArrayFloatAddValue, + ArrayFloatEnd, + ArrayFloatStartValueVector, +) from streaming_data_types.fbschemas.logdata_f142.Double import ( Double, DoubleStart, DoubleAddValue, DoubleEnd, ) +from 
streaming_data_types.fbschemas.logdata_f142.ArrayDouble import ( + ArrayDouble, + ArrayDoubleStart, + ArrayDoubleAddValue, + ArrayDoubleEnd, + ArrayDoubleStartValueVector, +) from streaming_data_types.fbschemas.logdata_f142.String import ( String, StringStart, @@ -74,16 +144,6 @@ ArrayStringEnd, ArrayStringStartValueVector, ) -from streaming_data_types.fbschemas.logdata_f142.ArrayByte import ArrayByte -from streaming_data_types.fbschemas.logdata_f142.ArrayUByte import ArrayUByte -from streaming_data_types.fbschemas.logdata_f142.ArrayShort import ArrayShort -from streaming_data_types.fbschemas.logdata_f142.ArrayUShort import ArrayUShort -from streaming_data_types.fbschemas.logdata_f142.ArrayInt import ArrayInt -from streaming_data_types.fbschemas.logdata_f142.ArrayUInt import ArrayUInt -from streaming_data_types.fbschemas.logdata_f142.ArrayLong import ArrayLong -from streaming_data_types.fbschemas.logdata_f142.ArrayULong import ArrayULong -from streaming_data_types.fbschemas.logdata_f142.ArrayFloat import ArrayFloat -from streaming_data_types.fbschemas.logdata_f142.ArrayDouble import ArrayDouble from streaming_data_types.utils import check_schema_identifier import numpy as np from typing import Any, Tuple, NamedTuple, Callable, Dict, Union @@ -118,6 +178,20 @@ def _serialise_byte(builder: flatbuffers.Builder, data: np.ndarray, source: int) LogData.LogDataAddValueType(builder, Value.Byte) +def _serialise_bytearray(builder: flatbuffers.Builder, data: np.ndarray, source: int): + ArrayByteStartValueVector(builder, len(data)) + for single_value in reversed(data): + builder.PrependInt8(single_value) + array_offset = builder.EndVector(len(data)) + ArrayByteStart(builder) + ArrayByteAddValue(builder, array_offset) + value_position = ArrayByteEnd(builder) + LogData.LogDataStart(builder) + LogData.LogDataAddSourceName(builder, source) + LogData.LogDataAddValue(builder, value_position) + LogData.LogDataAddValueType(builder, Value.ArrayByte) + + def _serialise_ubyte(builder: 
flatbuffers.Builder, data: np.ndarray, source: int): UByteStart(builder) UByteAddValue(builder, data.item()) @@ -128,6 +202,20 @@ def _serialise_ubyte(builder: flatbuffers.Builder, data: np.ndarray, source: int LogData.LogDataAddValueType(builder, Value.UByte) +def _serialise_ubytearray(builder: flatbuffers.Builder, data: np.ndarray, source: int): + ArrayUByteStartValueVector(builder, len(data)) + for single_value in reversed(data): + builder.PrependUint8(single_value) + array_offset = builder.EndVector(len(data)) + ArrayUByteStart(builder) + ArrayUByteAddValue(builder, array_offset) + value_position = ArrayUByteEnd(builder) + LogData.LogDataStart(builder) + LogData.LogDataAddSourceName(builder, source) + LogData.LogDataAddValue(builder, value_position) + LogData.LogDataAddValueType(builder, Value.ArrayUByte) + + def _serialise_short(builder: flatbuffers.Builder, data: np.ndarray, source: int): ShortStart(builder) ShortAddValue(builder, data.item()) @@ -138,6 +226,20 @@ def _serialise_short(builder: flatbuffers.Builder, data: np.ndarray, source: int LogData.LogDataAddValueType(builder, Value.Short) +def _serialise_shortarray(builder: flatbuffers.Builder, data: np.ndarray, source: int): + ArrayShortStartValueVector(builder, len(data)) + for single_value in reversed(data): + builder.PrependInt16(single_value) + array_offset = builder.EndVector(len(data)) + ArrayShortStart(builder) + ArrayShortAddValue(builder, array_offset) + value_position = ArrayShortEnd(builder) + LogData.LogDataStart(builder) + LogData.LogDataAddSourceName(builder, source) + LogData.LogDataAddValue(builder, value_position) + LogData.LogDataAddValueType(builder, Value.ArrayShort) + + def _serialise_ushort(builder: flatbuffers.Builder, data: np.ndarray, source: int): UShortStart(builder) UShortAddValue(builder, data.item()) @@ -148,6 +250,20 @@ def _serialise_ushort(builder: flatbuffers.Builder, data: np.ndarray, source: in LogData.LogDataAddValueType(builder, Value.UShort) +def 
_serialise_ushortarray(builder: flatbuffers.Builder, data: np.ndarray, source: int): + ArrayUShortStartValueVector(builder, len(data)) + for single_value in reversed(data): + builder.PrependUint16(single_value) + array_offset = builder.EndVector(len(data)) + ArrayUShortStart(builder) + ArrayUShortAddValue(builder, array_offset) + value_position = ArrayUShortEnd(builder) + LogData.LogDataStart(builder) + LogData.LogDataAddSourceName(builder, source) + LogData.LogDataAddValue(builder, value_position) + LogData.LogDataAddValueType(builder, Value.ArrayUShort) + + def _serialise_int(builder: flatbuffers.Builder, data: np.ndarray, source: int): IntStart(builder) IntAddValue(builder, data.item()) @@ -158,6 +274,20 @@ def _serialise_int(builder: flatbuffers.Builder, data: np.ndarray, source: int): LogData.LogDataAddValueType(builder, Value.Int) +def _serialise_intarray(builder: flatbuffers.Builder, data: np.ndarray, source: int): + ArrayIntStartValueVector(builder, len(data)) + for single_value in reversed(data): + builder.PrependInt32(single_value) + array_offset = builder.EndVector(len(data)) + ArrayIntStart(builder) + ArrayIntAddValue(builder, array_offset) + value_position = ArrayIntEnd(builder) + LogData.LogDataStart(builder) + LogData.LogDataAddSourceName(builder, source) + LogData.LogDataAddValue(builder, value_position) + LogData.LogDataAddValueType(builder, Value.ArrayInt) + + def _serialise_uint(builder: flatbuffers.Builder, data: np.ndarray, source: int): UIntStart(builder) UIntAddValue(builder, data.item()) @@ -168,6 +298,20 @@ def _serialise_uint(builder: flatbuffers.Builder, data: np.ndarray, source: int) LogData.LogDataAddValueType(builder, Value.UInt) +def _serialise_uintarray(builder: flatbuffers.Builder, data: np.ndarray, source: int): + ArrayUIntStartValueVector(builder, len(data)) + for single_value in reversed(data): + builder.PrependUint32(single_value) + array_offset = builder.EndVector(len(data)) + ArrayUIntStart(builder) + 
ArrayUIntAddValue(builder, array_offset) + value_position = ArrayUIntEnd(builder) + LogData.LogDataStart(builder) + LogData.LogDataAddSourceName(builder, source) + LogData.LogDataAddValue(builder, value_position) + LogData.LogDataAddValueType(builder, Value.ArrayUInt) + + def _serialise_long(builder: flatbuffers.Builder, data: np.ndarray, source: int): LongStart(builder) LongAddValue(builder, data.item()) @@ -178,6 +322,20 @@ def _serialise_long(builder: flatbuffers.Builder, data: np.ndarray, source: int) LogData.LogDataAddValueType(builder, Value.Long) +def _serialise_longarray(builder: flatbuffers.Builder, data: np.ndarray, source: int): + ArrayLongStartValueVector(builder, len(data)) + for single_value in reversed(data): + builder.PrependInt64(single_value) + array_offset = builder.EndVector(len(data)) + ArrayLongStart(builder) + ArrayLongAddValue(builder, array_offset) + value_position = ArrayLongEnd(builder) + LogData.LogDataStart(builder) + LogData.LogDataAddSourceName(builder, source) + LogData.LogDataAddValue(builder, value_position) + LogData.LogDataAddValueType(builder, Value.ArrayLong) + + def _serialise_ulong(builder: flatbuffers.Builder, data: np.ndarray, source: int): ULongStart(builder) ULongAddValue(builder, data.item()) @@ -188,6 +346,20 @@ def _serialise_ulong(builder: flatbuffers.Builder, data: np.ndarray, source: int LogData.LogDataAddValueType(builder, Value.ULong) +def _serialise_ulongarray(builder: flatbuffers.Builder, data: np.ndarray, source: int): + ArrayULongStartValueVector(builder, len(data)) + for single_value in reversed(data): + builder.PrependUint64(single_value) + array_offset = builder.EndVector(len(data)) + ArrayULongStart(builder) + ArrayULongAddValue(builder, array_offset) + value_position = ArrayULongEnd(builder) + LogData.LogDataStart(builder) + LogData.LogDataAddSourceName(builder, source) + LogData.LogDataAddValue(builder, value_position) + LogData.LogDataAddValueType(builder, Value.ArrayULong) + + def 
_serialise_float(builder: flatbuffers.Builder, data: np.ndarray, source: int): FloatStart(builder) FloatAddValue(builder, data.item()) @@ -198,6 +370,20 @@ def _serialise_float(builder: flatbuffers.Builder, data: np.ndarray, source: int LogData.LogDataAddValueType(builder, Value.Float) +def _serialise_floatarray(builder: flatbuffers.Builder, data: np.ndarray, source: int): + ArrayFloatStartValueVector(builder, len(data)) + for single_value in reversed(data): + builder.PrependFloat32(single_value) + array_offset = builder.EndVector(len(data)) + ArrayFloatStart(builder) + ArrayFloatAddValue(builder, array_offset) + value_position = ArrayFloatEnd(builder) + LogData.LogDataStart(builder) + LogData.LogDataAddSourceName(builder, source) + LogData.LogDataAddValue(builder, value_position) + LogData.LogDataAddValueType(builder, Value.ArrayFloat) + + def _serialise_double(builder: flatbuffers.Builder, data: np.ndarray, source: int): DoubleStart(builder) DoubleAddValue(builder, data.item()) @@ -208,6 +394,20 @@ def _serialise_double(builder: flatbuffers.Builder, data: np.ndarray, source: in LogData.LogDataAddValueType(builder, Value.Double) +def _serialise_doublearray(builder: flatbuffers.Builder, data: np.ndarray, source: int): + ArrayDoubleStartValueVector(builder, len(data)) + for single_value in reversed(data): + builder.PrependFloat64(single_value) + array_offset = builder.EndVector(len(data)) + ArrayDoubleStart(builder) + ArrayDoubleAddValue(builder, array_offset) + value_position = ArrayDoubleEnd(builder) + LogData.LogDataStart(builder) + LogData.LogDataAddSourceName(builder, source) + LogData.LogDataAddValue(builder, value_position) + LogData.LogDataAddValueType(builder, Value.ArrayDouble) + + def _serialise_string(builder: flatbuffers.Builder, data: np.ndarray, source: int): string_offset = builder.CreateString(data.item()) StringStart(builder) @@ -252,18 +452,18 @@ def _serialise_stringarray(builder: flatbuffers.Builder, data: np.ndarray, sourc } 
_map_array_type_to_serialiser = { - # np.dtype("byte"): _serialise_bytearray, - # np.dtype("ubyte"): _serialise_ubytearray, - # np.dtype("int8"): _serialise_shortarray, - # np.dtype("int16"): _serialise_shortarray, - # np.dtype("int32"): _serialise_intarray, - # np.dtype("int64"): _serialise_longarray, - # np.dtype("uint8"): _serialise_ushortarray, - # np.dtype("uint16"): _serialise_ushortarray, - # np.dtype("uint32"): _serialise_uintarray, - # np.dtype("uint64"): _serialise_ulongarray, - # np.dtype("float32"): _serialise_floatarray, - # np.dtype("float64"): _serialise_doublearray, + np.dtype("byte"): _serialise_bytearray, + np.dtype("ubyte"): _serialise_ubytearray, + np.dtype("int8"): _serialise_shortarray, + np.dtype("int16"): _serialise_shortarray, + np.dtype("int32"): _serialise_intarray, + np.dtype("int64"): _serialise_longarray, + np.dtype("uint8"): _serialise_ushortarray, + np.dtype("uint16"): _serialise_ushortarray, + np.dtype("uint32"): _serialise_uintarray, + np.dtype("uint64"): _serialise_ulongarray, + np.dtype("float32"): _serialise_floatarray, + np.dtype("float64"): _serialise_doublearray, } diff --git a/tests/test_f142.py b/tests/test_f142.py index a6c03c4..87f21bd 100644 --- a/tests/test_f142.py +++ b/tests/test_f142.py @@ -87,6 +87,21 @@ def test_serialises_and_deserialises_numpy_array_integers_correctly(self): assert np.array_equal(deserialised_tuple.value, array_log["value"]) assert deserialised_tuple.timestamp_unix_ns == array_log["timestamp_unix_ns"] + def test_serialises_and_deserialises_numpy_array_preserves_integer_type_correctly( + self, + ): + specified_type = np.uint8 + array_log = { + "source_name": "some_source", + "value": np.array([1, 2, 3]).astype(specified_type), + "timestamp_unix_ns": 1585332414000000000, + } + buf = serialise_f142(**array_log) + deserialised_tuple = deserialise_f142(buf) + + assert np.array_equal(deserialised_tuple.value, array_log["value"]) + assert array_log["value"].dtype == specified_type + def 
test_serialises_and_deserialises_numpy_array_floats_correctly(self): array_log = { "source_name": "some_source", From 4eaa54d60055c7ac2de1aaa555e930b4ba86e62c Mon Sep 17 00:00:00 2001 From: Matthew D Jones Date: Mon, 30 Mar 2020 13:26:53 +0100 Subject: [PATCH 072/363] Improve some explanatory comments --- streaming_data_types/logdata_f142.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/streaming_data_types/logdata_f142.py b/streaming_data_types/logdata_f142.py index 8f937a9..61d205f 100644 --- a/streaming_data_types/logdata_f142.py +++ b/streaming_data_types/logdata_f142.py @@ -572,9 +572,10 @@ def deserialise_f142(buffer: bytearray) -> NamedTuple: value = value_fb.ValueAsNumpy() except AttributeError: try: + # Must be a scalar value then, so we'll get it like this value = np.array(value_fb.Value()) except TypeError: - # Must have an array of strings, which for some reason doesn't get a generated ValueAsNumpy method + # In that case it is an array of strings, which for some reason doesn't get a generated ValueAsNumpy method # So we'll have to extract each element from the buffer manually and construct our own numpy array value = np.array( [str(value_fb.Value(n), "utf-8") for n in range(value_fb.ValueLength())] From a3ee05bd18a101f36c38b223e05da084032c78ec Mon Sep 17 00:00:00 2001 From: Matthew D Jones Date: Tue, 31 Mar 2020 07:48:05 +0100 Subject: [PATCH 073/363] Add tests for serialising and deserialising EPICS alarms --- tests/test_f142.py | 28 ++++++++++++++++++++++++++++ 1 file changed, 28 insertions(+) diff --git a/tests/test_f142.py b/tests/test_f142.py index 87f21bd..c89f68c 100644 --- a/tests/test_f142.py +++ b/tests/test_f142.py @@ -1,6 +1,8 @@ import pytest import numpy as np from streaming_data_types.logdata_f142 import serialise_f142, deserialise_f142 +from streaming_data_types.fbschemas.logdata_f142.AlarmSeverity import AlarmSeverity +from streaming_data_types.fbschemas.logdata_f142.AlarmStatus import AlarmStatus class 
TestSerialisationf142: @@ -128,6 +130,32 @@ def test_serialises_and_deserialises_numpy_array_strings_correctly(self): assert np.array_equal(deserialised_tuple.value, array_log["value"]) assert deserialised_tuple.timestamp_unix_ns == array_log["timestamp_unix_ns"] + def test_serialises_and_deserialises_epics_alarms_correctly(self): + float_log = { + "source_name": "some_source", + "value": 1.234, + "timestamp_unix_ns": 1585332414000000000, + "alarm_status": AlarmStatus.HIHI, + "alarm_severity": AlarmSeverity.MAJOR, + } + buf = serialise_f142(**float_log) + deserialised_tuple = deserialise_f142(buf) + + assert deserialised_tuple.alarm_status == float_log["alarm_status"] + assert deserialised_tuple.alarm_severity == float_log["alarm_severity"] + + def test_epics_alarms_default_to_no_change_when_not_provided_to_serialiser(self): + float_log = { + "source_name": "some_source", + "value": 1.234, + "timestamp_unix_ns": 1585332414000000000, + } + buf = serialise_f142(**float_log) + deserialised_tuple = deserialise_f142(buf) + + assert deserialised_tuple.alarm_status == AlarmStatus.NO_CHANGE + assert deserialised_tuple.alarm_severity == AlarmSeverity.NO_CHANGE + def test_raises_not_implemented_error_when_trying_to_serialise_numpy_complex_number_type( self, ): From 9449f62a19cb09278f40efa8b77cb0e494d5fc3a Mon Sep 17 00:00:00 2001 From: Matthew D Jones Date: Tue, 31 Mar 2020 07:56:58 +0100 Subject: [PATCH 074/363] Implement serialising and deserialising EPICS alarms --- streaming_data_types/logdata_f142.py | 32 +++++++++++++++++++++++----- 1 file changed, 27 insertions(+), 5 deletions(-) diff --git a/streaming_data_types/logdata_f142.py b/streaming_data_types/logdata_f142.py index 61d205f..24480dd 100644 --- a/streaming_data_types/logdata_f142.py +++ b/streaming_data_types/logdata_f142.py @@ -153,8 +153,20 @@ FILE_IDENTIFIER = b"f142" -def _complete_buffer(builder, timestamp_unix_ns: int) -> bytearray: +def _complete_buffer( + builder, + timestamp_unix_ns: int, + 
alarm_status: Union[int, None] = None, + alarm_severity: Union[int, None] = None, +) -> bytearray: LogData.LogDataAddTimestamp(builder, timestamp_unix_ns) + + if alarm_status is not None: + LogData.LogDataAddStatus(builder, alarm_status) + # Only include severity if status was provided, it would be meaningless by itself + if alarm_severity is not None: + LogData.LogDataAddSeverity(builder, alarm_severity) + log_msg = LogData.LogDataEnd(builder) builder.Finish(log_msg) buff = builder.Output() @@ -468,7 +480,11 @@ def _serialise_stringarray(builder: flatbuffers.Builder, data: np.ndarray, sourc def serialise_f142( - value: Any, source_name: str, timestamp_unix_ns: int = 0 + value: Any, + source_name: str, + timestamp_unix_ns: int = 0, + alarm_status: Union[int, None] = None, + alarm_severity: Union[int, None] = None, ) -> bytearray: """ Serialise value and corresponding timestamp as an f142 Flatbuffer message. @@ -478,6 +494,8 @@ def serialise_f142( :param value: only scalar value currently supported; if ndarray then ndim must be 0 :param source_name: name of the data source :param timestamp_unix_ns: timestamp corresponding to value, e.g. 
when value was measured, in nanoseconds + :param alarm_status: EPICS alarm status, best to provide using enum-like class defined in logdata_f142.AlarmStatus + :param alarm_severity: EPICS alarm severity, best to provide using enum-like class defined in logdata_f142.AlarmSeverity """ builder, source = _setup_builder(source_name) value = np.array(value) @@ -497,7 +515,7 @@ def serialise_f142( else: raise NotImplementedError("f142 only supports scalars or 1D array values") - return _complete_buffer(builder, timestamp_unix_ns) + return _complete_buffer(builder, timestamp_unix_ns, alarm_status, alarm_severity) def _serialise_value( @@ -585,5 +603,9 @@ def deserialise_f142(buffer: bytearray) -> NamedTuple: timestamp = log_data.Timestamp() - LogDataInfo = namedtuple("LogDataInfo", "value source_name timestamp_unix_ns") - return LogDataInfo(value, source_name.decode(), timestamp) + LogDataInfo = namedtuple( + "LogDataInfo", "value source_name timestamp_unix_ns alarm_status alarm_severity" + ) + return LogDataInfo( + value, source_name.decode(), timestamp, log_data.Status(), log_data.Severity() + ) From dd651142ac50bad919862584dba6e6fdd6b4146e Mon Sep 17 00:00:00 2001 From: Matthew D Jones Date: Tue, 31 Mar 2020 07:58:23 +0100 Subject: [PATCH 075/363] Add f142 to table in readme --- README.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index 8ce511e..a3421a7 100644 --- a/README.md +++ b/README.md @@ -12,13 +12,14 @@ https://github.com/ess-dmsc/streaming-data-types |ns10|NICOS cache entry schema|Y| |pl72|Run start|N| |6s4t|Run stop|N| +|f142|Log Data|?| ### hs00 Schema for histogram data. It is one of the more complicated to use schemas. It takes a Python dictionary as its input; this dictionary needs to have correctly named fields. -The input histogram data for serialistation and the output deserialisation data +The input histogram data for serialisation and the output deserialisation data have the same dictionary "layout". 
Example for a 2-D histogram: ```json From 830f8b6503c9313ddd678a4624034438b81cb29e Mon Sep 17 00:00:00 2001 From: Matthew D Jones Date: Tue, 31 Mar 2020 09:20:02 +0100 Subject: [PATCH 076/363] Record that f142 buffer is verifiable --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index a3421a7..2fb9c32 100644 --- a/README.md +++ b/README.md @@ -12,7 +12,7 @@ https://github.com/ess-dmsc/streaming-data-types |ns10|NICOS cache entry schema|Y| |pl72|Run start|N| |6s4t|Run stop|N| -|f142|Log Data|?| +|f142|Log Data|Y| ### hs00 Schema for histogram data. It is one of the more complicated to use schemas. From 9128ecc8adf209e64f243ed37a64c4315583ba6e Mon Sep 17 00:00:00 2001 From: Matthew D Jones Date: Tue, 31 Mar 2020 09:31:08 +0100 Subject: [PATCH 077/363] Fix test which was checking the input rather than output --- tests/test_f142.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/test_f142.py b/tests/test_f142.py index c89f68c..87526fb 100644 --- a/tests/test_f142.py +++ b/tests/test_f142.py @@ -92,7 +92,7 @@ def test_serialises_and_deserialises_numpy_array_integers_correctly(self): def test_serialises_and_deserialises_numpy_array_preserves_integer_type_correctly( self, ): - specified_type = np.uint8 + specified_type = np.uint16 array_log = { "source_name": "some_source", "value": np.array([1, 2, 3]).astype(specified_type), @@ -102,7 +102,7 @@ def test_serialises_and_deserialises_numpy_array_preserves_integer_type_correctl deserialised_tuple = deserialise_f142(buf) assert np.array_equal(deserialised_tuple.value, array_log["value"]) - assert array_log["value"].dtype == specified_type + assert deserialised_tuple.value.dtype == specified_type def test_serialises_and_deserialises_numpy_array_floats_correctly(self): array_log = { From 74362bf8d7ef8cac0c69ecbc539954a5da334f29 Mon Sep 17 00:00:00 2001 From: Matthew D Jones Date: Tue, 31 Mar 2020 09:32:51 +0100 Subject: [PATCH 078/363] Fix test 
which was checking the input rather than output --- tests/test_f142.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/tests/test_f142.py b/tests/test_f142.py index 87526fb..1cf52b7 100644 --- a/tests/test_f142.py +++ b/tests/test_f142.py @@ -92,17 +92,16 @@ def test_serialises_and_deserialises_numpy_array_integers_correctly(self): def test_serialises_and_deserialises_numpy_array_preserves_integer_type_correctly( self, ): - specified_type = np.uint16 array_log = { "source_name": "some_source", - "value": np.array([1, 2, 3]).astype(specified_type), + "value": np.array([1, 2, 3], dtype=np.uint16), "timestamp_unix_ns": 1585332414000000000, } buf = serialise_f142(**array_log) deserialised_tuple = deserialise_f142(buf) assert np.array_equal(deserialised_tuple.value, array_log["value"]) - assert deserialised_tuple.value.dtype == specified_type + assert deserialised_tuple.value.dtype == array_log["value"].dtype def test_serialises_and_deserialises_numpy_array_floats_correctly(self): array_log = { From b5f26b3cc0d06b08f73e883ebdb3c916cebb0de7 Mon Sep 17 00:00:00 2001 From: Matthew D Jones Date: Tue, 31 Mar 2020 10:50:41 +0100 Subject: [PATCH 079/363] Fix typo in variable name --- streaming_data_types/logdata_f142.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/streaming_data_types/logdata_f142.py b/streaming_data_types/logdata_f142.py index 24480dd..bc3c730 100644 --- a/streaming_data_types/logdata_f142.py +++ b/streaming_data_types/logdata_f142.py @@ -523,7 +523,7 @@ def _serialise_value( source: int, value: Any, string_serialiser: Callable, - serilisers_map: Dict, + serialisers_map: Dict, ): # We can use a dictionary to map most numpy types to one of the types defined in the flatbuffer schema # but we have to handle strings separately as there are many subtypes @@ -533,7 +533,7 @@ def _serialise_value( string_serialiser(builder, value, source) else: try: - serilisers_map[value.dtype](builder, value, source) + 
serialisers_map[value.dtype](builder, value, source) except KeyError: # There are a few numpy types we don't try to handle, for example complex numbers raise NotImplementedError( From 142a2923e406388c8f821fe4a31ec15ff065f36a Mon Sep 17 00:00:00 2001 From: Matthew D Jones Date: Tue, 31 Mar 2020 10:02:05 +0100 Subject: [PATCH 080/363] Output buffers in bytes rather than bytearray --- streaming_data_types/histogram_hs00.py | 2 +- streaming_data_types/logdata_f142.py | 8 +++++--- streaming_data_types/nicos_cache_ns10.py | 2 +- streaming_data_types/run_start_pl72.py | 8 ++++---- streaming_data_types/run_stop_6s4t.py | 8 ++++---- tests/test_6s4t.py | 2 ++ tests/test_f142.py | 2 ++ tests/test_hs00.py | 4 +++- tests/test_ns10.py | 2 ++ tests/test_pl72.py | 2 ++ 10 files changed, 26 insertions(+), 14 deletions(-) diff --git a/streaming_data_types/histogram_hs00.py b/streaming_data_types/histogram_hs00.py index d058f18..8a5a064 100644 --- a/streaming_data_types/histogram_hs00.py +++ b/streaming_data_types/histogram_hs00.py @@ -185,4 +185,4 @@ def serialise_hs00(histogram): # Generate the output and replace the file_identifier buffer = builder.Output() buffer[4:8] = FILE_IDENTIFIER - return buffer + return bytes(buffer) diff --git a/streaming_data_types/logdata_f142.py b/streaming_data_types/logdata_f142.py index bc3c730..0f3ecf0 100644 --- a/streaming_data_types/logdata_f142.py +++ b/streaming_data_types/logdata_f142.py @@ -485,7 +485,7 @@ def serialise_f142( timestamp_unix_ns: int = 0, alarm_status: Union[int, None] = None, alarm_severity: Union[int, None] = None, -) -> bytearray: +) -> bytes: """ Serialise value and corresponding timestamp as an f142 Flatbuffer message. 
Should automagically use a sensible type for value in the message, but if @@ -515,7 +515,9 @@ def serialise_f142( else: raise NotImplementedError("f142 only supports scalars or 1D array values") - return _complete_buffer(builder, timestamp_unix_ns, alarm_status, alarm_severity) + return bytes( + _complete_buffer(builder, timestamp_unix_ns, alarm_status, alarm_severity) + ) def _serialise_value( @@ -577,7 +579,7 @@ def _decode_if_scalar_string(value: np.ndarray) -> Union[str, np.ndarray]: return value -def deserialise_f142(buffer: bytearray) -> NamedTuple: +def deserialise_f142(buffer: Union[bytearray, bytes]) -> NamedTuple: check_schema_identifier(buffer, FILE_IDENTIFIER) log_data = LogData.LogData.GetRootAsLogData(buffer, 0) diff --git a/streaming_data_types/nicos_cache_ns10.py b/streaming_data_types/nicos_cache_ns10.py index be76fb3..695bc62 100644 --- a/streaming_data_types/nicos_cache_ns10.py +++ b/streaming_data_types/nicos_cache_ns10.py @@ -28,7 +28,7 @@ def serialise_ns10( buffer = builder.Output() buffer[4:8] = FILE_IDENTIFIER - return buffer + return bytes(buffer) def deserialise_ns10(buffer): diff --git a/streaming_data_types/run_start_pl72.py b/streaming_data_types/run_start_pl72.py index 732512f..bc35a7d 100644 --- a/streaming_data_types/run_start_pl72.py +++ b/streaming_data_types/run_start_pl72.py @@ -1,5 +1,5 @@ import time -from typing import Optional, NamedTuple +from typing import Optional, NamedTuple, Union import flatbuffers from streaming_data_types.fbschemas.run_start_pl72 import RunStart from streaming_data_types.utils import check_schema_identifier @@ -18,7 +18,7 @@ def serialise_pl72( service_id: str = "", instrument_name: str = "TEST", broker: str = "localhost:9092", -) -> bytearray: +) -> bytes: builder = flatbuffers.Builder(136) if start_time is None: @@ -55,10 +55,10 @@ def serialise_pl72( # Generate the output and replace the file_identifier buffer = builder.Output() buffer[4:8] = FILE_IDENTIFIER - return buffer + return bytes(buffer) 
-def deserialise_pl72(buffer: bytearray) -> NamedTuple: +def deserialise_pl72(buffer: Union[bytearray, bytes]) -> NamedTuple: check_schema_identifier(buffer, FILE_IDENTIFIER) run_start = RunStart.RunStart.GetRootAsRunStart(buffer, 0) diff --git a/streaming_data_types/run_stop_6s4t.py b/streaming_data_types/run_stop_6s4t.py index c7d945f..43d0057 100644 --- a/streaming_data_types/run_stop_6s4t.py +++ b/streaming_data_types/run_stop_6s4t.py @@ -1,4 +1,4 @@ -from typing import Optional, NamedTuple +from typing import Optional, NamedTuple, Union import flatbuffers from streaming_data_types.fbschemas.run_stop_6s4t import RunStop from streaming_data_types.utils import check_schema_identifier @@ -12,7 +12,7 @@ def serialise_6s4t( run_name: str = "test_run", service_id: str = "", stop_time: Optional[int] = None, -) -> bytearray: +) -> bytes: builder = flatbuffers.Builder(136) if service_id is None: @@ -37,10 +37,10 @@ def serialise_6s4t( # Generate the output and replace the file_identifier buffer = builder.Output() buffer[4:8] = FILE_IDENTIFIER - return buffer + return bytes(buffer) -def deserialise_6s4t(buffer: bytearray) -> NamedTuple: +def deserialise_6s4t(buffer: Union[bytearray, bytes]) -> NamedTuple: check_schema_identifier(buffer, FILE_IDENTIFIER) run_stop = RunStop.RunStop.GetRootAsRunStop(buffer, 0) diff --git a/tests/test_6s4t.py b/tests/test_6s4t.py index 4e68c5b..1b08e25 100644 --- a/tests/test_6s4t.py +++ b/tests/test_6s4t.py @@ -23,7 +23,9 @@ def test_if_buffer_has_wrong_id_then_throws(self): buf = serialise_6s4t(**self.original_entry) # Manually hack the id + buf = bytearray(buf) buf[4:8] = b"1234" + buf = bytes(buf) with pytest.raises(RuntimeError): deserialise_6s4t(buf) diff --git a/tests/test_f142.py b/tests/test_f142.py index 1cf52b7..7cdabd9 100644 --- a/tests/test_f142.py +++ b/tests/test_f142.py @@ -170,7 +170,9 @@ def test_if_buffer_has_wrong_id_then_throws(self): buf = serialise_f142(**self.original_entry) # Manually hack the id + buf = 
bytearray(buf) buf[4:8] = b"1234" + buf = bytes(buf) with pytest.raises(RuntimeError): deserialise_f142(buf) diff --git a/tests/test_hs00.py b/tests/test_hs00.py index 25853bb..1b2944e 100644 --- a/tests/test_hs00.py +++ b/tests/test_hs00.py @@ -49,7 +49,7 @@ def test_serialises_and_deserialises_hs00_message_correctly_for_full_1d_data(sel ) def test_serialises_and_deserialises_hs00_message_correctly_for_minimal_1d_data( - self + self, ): """ Round-trip to check what we serialise is what we get back. @@ -143,7 +143,9 @@ def test_if_buffer_has_wrong_id_then_throws(self): buf = serialise_hs00(original_hist) # Manually hack the id + buf = bytearray(buf) buf[4:8] = b"1234" + buf = bytes(buf) with pytest.raises(RuntimeError): deserialise_hs00(buf) diff --git a/tests/test_ns10.py b/tests/test_ns10.py index 7892096..3e31d51 100644 --- a/tests/test_ns10.py +++ b/tests/test_ns10.py @@ -35,7 +35,9 @@ def test_if_buffer_has_wrong_id_then_throws(self): buf = serialise_ns10(**original_entry) # Manually hack the id + buf = bytearray(buf) buf[4:8] = b"1234" + buf = bytes(buf) with pytest.raises(RuntimeError): deserialise_ns10(buf) diff --git a/tests/test_pl72.py b/tests/test_pl72.py index f139eea..33007de 100644 --- a/tests/test_pl72.py +++ b/tests/test_pl72.py @@ -37,7 +37,9 @@ def test_if_buffer_has_wrong_id_then_throws(self): buf = serialise_pl72(**self.original_entry) # Manually hack the id + buf = bytearray(buf) buf[4:8] = b"1234" + buf = bytes(buf) with pytest.raises(RuntimeError): deserialise_pl72(buf) From cd2d428e9c44ecff1a86a9ef05abe0e571fbbc56 Mon Sep 17 00:00:00 2001 From: Matthew D Jones Date: Tue, 31 Mar 2020 12:56:33 +0100 Subject: [PATCH 081/363] No need to cast back to bytes --- tests/test_6s4t.py | 1 - tests/test_f142.py | 1 - tests/test_hs00.py | 1 - tests/test_ns10.py | 1 - tests/test_pl72.py | 1 - 5 files changed, 5 deletions(-) diff --git a/tests/test_6s4t.py b/tests/test_6s4t.py index 1b08e25..a68e156 100644 --- a/tests/test_6s4t.py +++ b/tests/test_6s4t.py 
@@ -25,7 +25,6 @@ def test_if_buffer_has_wrong_id_then_throws(self): # Manually hack the id buf = bytearray(buf) buf[4:8] = b"1234" - buf = bytes(buf) with pytest.raises(RuntimeError): deserialise_6s4t(buf) diff --git a/tests/test_f142.py b/tests/test_f142.py index 7cdabd9..720c048 100644 --- a/tests/test_f142.py +++ b/tests/test_f142.py @@ -172,7 +172,6 @@ def test_if_buffer_has_wrong_id_then_throws(self): # Manually hack the id buf = bytearray(buf) buf[4:8] = b"1234" - buf = bytes(buf) with pytest.raises(RuntimeError): deserialise_f142(buf) diff --git a/tests/test_hs00.py b/tests/test_hs00.py index 1b2944e..3e46472 100644 --- a/tests/test_hs00.py +++ b/tests/test_hs00.py @@ -145,7 +145,6 @@ def test_if_buffer_has_wrong_id_then_throws(self): # Manually hack the id buf = bytearray(buf) buf[4:8] = b"1234" - buf = bytes(buf) with pytest.raises(RuntimeError): deserialise_hs00(buf) diff --git a/tests/test_ns10.py b/tests/test_ns10.py index 3e31d51..02d4bdd 100644 --- a/tests/test_ns10.py +++ b/tests/test_ns10.py @@ -37,7 +37,6 @@ def test_if_buffer_has_wrong_id_then_throws(self): # Manually hack the id buf = bytearray(buf) buf[4:8] = b"1234" - buf = bytes(buf) with pytest.raises(RuntimeError): deserialise_ns10(buf) diff --git a/tests/test_pl72.py b/tests/test_pl72.py index 33007de..03b4d02 100644 --- a/tests/test_pl72.py +++ b/tests/test_pl72.py @@ -39,7 +39,6 @@ def test_if_buffer_has_wrong_id_then_throws(self): # Manually hack the id buf = bytearray(buf) buf[4:8] = b"1234" - buf = bytes(buf) with pytest.raises(RuntimeError): deserialise_pl72(buf) From bcb2bae00911227ce46aa804f2cc72f940e0f8ec Mon Sep 17 00:00:00 2001 From: Matthew D Jones Date: Tue, 31 Mar 2020 13:29:38 +0100 Subject: [PATCH 082/363] Bump version to 0.3.0 --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 6345adc..28f7fc3 100644 --- a/setup.py +++ b/setup.py @@ -3,7 +3,7 @@ setup( name="streaming_data_types", - version="0.2.0", + version="0.3.0", 
description="Python utilities for handling ESS streamed data", long_description="Python utilities for serialising and deserialising data via FlatBuffers for the European Spallation Source ERIC", author="ScreamingUdder", From 92530c4ff37c83ea2cfa0fe97fa652f0b12d6110 Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Tue, 7 Apr 2020 13:59:02 +0200 Subject: [PATCH 083/363] Changes required by just-bin-it --- streaming_data_types/histogram_hs00.py | 141 +++++++++++++++------- tests/test_hs00.py | 158 +++++++++++++++++++++++-- 2 files changed, 248 insertions(+), 51 deletions(-) diff --git a/streaming_data_types/histogram_hs00.py b/streaming_data_types/histogram_hs00.py index 8a5a064..ae89551 100644 --- a/streaming_data_types/histogram_hs00.py +++ b/streaming_data_types/histogram_hs00.py @@ -1,7 +1,9 @@ from functools import reduce import operator import flatbuffers +import numpy import streaming_data_types.fbschemas.histogram_hs00.ArrayDouble as ArrayDouble +import streaming_data_types.fbschemas.histogram_hs00.ArrayULong as ArrayULong import streaming_data_types.fbschemas.histogram_hs00.DimensionMetaData as DimensionMetaData import streaming_data_types.fbschemas.histogram_hs00.EventHistogram as EventHistogram from streaming_data_types.fbschemas.histogram_hs00.Array import Array @@ -23,33 +25,40 @@ def deserialise_hs00(buffer): dims = [] for i in range(event_hist.DimMetadataLength()): - bins_offset = event_hist.DimMetadata(i).BinBoundaries() + bins_fb = ArrayDouble.ArrayDouble() + if ( + event_hist.DimMetadata(i).BinBoundariesType() == Array.ArrayUInt + or event_hist.DimMetadata(i).BinBoundariesType() == Array.ArrayULong + ): + bins_fb = ArrayULong.ArrayULong() # Get bins - bins_fb = ArrayDouble.ArrayDouble() + bins_offset = event_hist.DimMetadata(i).BinBoundaries() bins_fb.Init(bins_offset.Bytes, bins_offset.Pos) bin_boundaries = bins_fb.ValueAsNumpy() - # Check type - if event_hist.DimMetadata(i).BinBoundariesType() != Array.ArrayDouble: - raise TypeError("Type of the 
bin boundaries is incorrect, should be double") - hist_info = { "length": event_hist.DimMetadata(i).Length(), "bin_boundaries": bin_boundaries, - "unit": event_hist.DimMetadata(i).Unit().decode("utf-8"), - "label": event_hist.DimMetadata(i).Label().decode("utf-8"), + "unit": event_hist.DimMetadata(i).Unit().decode("utf-8") + if event_hist.DimMetadata(i).Unit() + else "", + "label": event_hist.DimMetadata(i).Label().decode("utf-8") + if event_hist.DimMetadata(i).Label() + else "", } dims.append(hist_info) metadata_timestamp = event_hist.LastMetadataTimestamp() - # Get the data - if event_hist.DataType() != Array.ArrayDouble: - raise TypeError("Type of the data array is incorrect") + data_fb = ArrayDouble.ArrayDouble() + if ( + event_hist.DataType() == Array.ArrayUInt + or event_hist.DataType() == Array.ArrayULong + ): + data_fb = ArrayULong.ArrayULong() data_offset = event_hist.Data() - data_fb = ArrayDouble.ArrayDouble() data_fb.Init(data_offset.Bytes, data_offset.Pos) shape = event_hist.CurrentShapeAsNumpy().tolist() data = data_fb.ValueAsNumpy().reshape(shape) @@ -58,6 +67,11 @@ def deserialise_hs00(buffer): errors_offset = event_hist.Errors() if errors_offset: errors_fb = ArrayDouble.ArrayDouble() + if ( + event_hist.DataType() == Array.ArrayUInt + or event_hist.DataType() == Array.ArrayULong + ): + errors_fb = ArrayULong.ArrayULong() errors_fb.Init(errors_offset.Bytes, errors_offset.Pos) errors = errors_fb.ValueAsNumpy().reshape(shape) else: @@ -80,20 +94,35 @@ def _serialise_metadata(builder, length, edges, unit, label): unit_offset = builder.CreateString(unit) label_offset = builder.CreateString(label) - ArrayDouble.ArrayDoubleStartValueVector(builder, len(edges)) - # FlatBuffers builds arrays backwards - for x in reversed(edges): - builder.PrependFloat64(x) - bins_vector = builder.EndVector(len(edges)) - # Add the bins - ArrayDouble.ArrayDoubleStart(builder) - ArrayDouble.ArrayDoubleAddValue(builder, bins_vector) - bins_offset = 
ArrayDouble.ArrayDoubleEnd(builder) + if isinstance(edges[0], int) or ( + isinstance(edges, numpy.ndarray) and numpy.issubdtype(edges[0], numpy.int64) + ): + bin_type = Array.ArrayULong + ArrayULong.ArrayULongStartValueVector(builder, len(edges)) + # FlatBuffers builds arrays backwards + for x in reversed(edges): + builder.PrependUint64(x) + bins_vector = builder.EndVector(len(edges)) + # Add the bins + ArrayULong.ArrayULongStart(builder) + ArrayULong.ArrayULongAddValue(builder, bins_vector) + bins_offset = ArrayULong.ArrayULongEnd(builder) + else: + bin_type = Array.ArrayDouble + ArrayDouble.ArrayDoubleStartValueVector(builder, len(edges)) + # FlatBuffers builds arrays backwards + for x in reversed(edges): + builder.PrependFloat64(x) + bins_vector = builder.EndVector(len(edges)) + # Add the bins + ArrayDouble.ArrayDoubleStart(builder) + ArrayDouble.ArrayDoubleAddValue(builder, bins_vector) + bins_offset = ArrayDouble.ArrayDoubleEnd(builder) DimensionMetaData.DimensionMetaDataStart(builder) DimensionMetaData.DimensionMetaDataAddLength(builder, length) DimensionMetaData.DimensionMetaDataAddBinBoundaries(builder, bins_offset) - DimensionMetaData.DimensionMetaDataAddBinBoundariesType(builder, Array.ArrayDouble) + DimensionMetaData.DimensionMetaDataAddBinBoundariesType(builder, bin_type) DimensionMetaData.DimensionMetaDataAddLabel(builder, label_offset) DimensionMetaData.DimensionMetaDataAddUnit(builder, unit_offset) return DimensionMetaData.DimensionMetaDataEnd(builder) @@ -141,25 +170,57 @@ def serialise_hs00(histogram): # Build the data data_len = reduce(operator.mul, histogram["current_shape"], 1) - - ArrayDouble.ArrayDoubleStartValueVector(builder, data_len) - # FlatBuffers builds arrays backwards - for x in reversed(histogram["data"].flatten()): - builder.PrependFloat64(x) - data_vector = builder.EndVector(data_len) - ArrayDouble.ArrayDoubleStart(builder) - ArrayDouble.ArrayDoubleAddValue(builder, data_vector) - data_offset = ArrayDouble.ArrayDoubleEnd(builder) - 
- errors_offset = None - if "errors" in histogram: + if isinstance(histogram["data"], numpy.ndarray): + flattened_data = histogram["data"].flatten() + else: + flattened_data = numpy.asarray(histogram["data"]).flatten() + + if numpy.issubdtype(flattened_data[0], numpy.int64): + data_type = Array.ArrayULong + ArrayULong.ArrayULongStartValueVector(builder, data_len) + # FlatBuffers builds arrays backwards + for x in reversed(flattened_data): + builder.PrependUint64(x) + data_vector = builder.EndVector(data_len) + ArrayULong.ArrayULongStart(builder) + ArrayULong.ArrayULongAddValue(builder, data_vector) + data_offset = ArrayULong.ArrayULongEnd(builder) + else: + data_type = Array.ArrayDouble ArrayDouble.ArrayDoubleStartValueVector(builder, data_len) - for x in reversed(histogram["errors"].flatten()): + # FlatBuffers builds arrays backwards + for x in reversed(flattened_data): builder.PrependFloat64(x) - errors = builder.EndVector(data_len) + data_vector = builder.EndVector(data_len) ArrayDouble.ArrayDoubleStart(builder) - ArrayDouble.ArrayDoubleAddValue(builder, errors) - errors_offset = ArrayDouble.ArrayDoubleEnd(builder) + ArrayDouble.ArrayDoubleAddValue(builder, data_vector) + data_offset = ArrayDouble.ArrayDoubleEnd(builder) + + errors_offset = None + if "errors" in histogram: + if isinstance(histogram["errors"], numpy.ndarray): + flattened_data = histogram["errors"].flatten() + else: + flattened_data = numpy.asarray(histogram["errors"]).flatten() + + if numpy.issubdtype(flattened_data[0], numpy.int64): + error_type = Array.ArrayULong + ArrayULong.ArrayULongStartValueVector(builder, data_len) + for x in reversed(flattened_data): + builder.PrependUint64(x) + errors = builder.EndVector(data_len) + ArrayULong.ArrayULongStart(builder) + ArrayULong.ArrayULongAddValue(builder, errors) + errors_offset = ArrayULong.ArrayULongEnd(builder) + else: + error_type = Array.ArrayDouble + ArrayDouble.ArrayDoubleStartValueVector(builder, data_len) + for x in reversed(flattened_data): 
+ builder.PrependFloat64(x) + errors = builder.EndVector(data_len) + ArrayDouble.ArrayDoubleStart(builder) + ArrayDouble.ArrayDoubleAddValue(builder, errors) + errors_offset = ArrayDouble.ArrayDoubleEnd(builder) # Build the actual buffer EventHistogram.EventHistogramStart(builder) @@ -171,10 +232,10 @@ def serialise_hs00(histogram): EventHistogram.EventHistogramAddTimestamp(builder, histogram["timestamp"]) if source_offset: EventHistogram.EventHistogramAddSource(builder, source_offset) - EventHistogram.EventHistogramAddDataType(builder, Array.ArrayDouble) + EventHistogram.EventHistogramAddDataType(builder, data_type) if errors_offset: EventHistogram.EventHistogramAddErrors(builder, errors_offset) - EventHistogram.EventHistogramAddErrorsType(builder, Array.ArrayDouble) + EventHistogram.EventHistogramAddErrorsType(builder, error_type) if "last_metadata_timestamp" in histogram: EventHistogram.EventHistogramAddLastMetadataTimestamp( builder, histogram["last_metadata_timestamp"] diff --git a/tests/test_hs00.py b/tests/test_hs00.py index 3e46472..fa1611c 100644 --- a/tests/test_hs00.py +++ b/tests/test_hs00.py @@ -23,12 +23,12 @@ def test_serialises_and_deserialises_hs00_message_correctly_for_full_1d_data(sel "length": 5, "unit": "m", "label": "some_label", - "bin_boundaries": [0, 1, 2, 3, 4, 5], + "bin_boundaries": np.array([0.0, 1.0, 2.0, 3.0, 4.0, 5.0]), } ], "last_metadata_timestamp": 123456, - "data": np.array([1, 2, 3, 4, 5]), - "errors": np.array([5, 4, 3, 2, 1]), + "data": np.array([1.0, 2.0, 3.0, 4.0, 5.0]), + "errors": np.array([5.0, 4.0, 3.0, 2.0, 1.0]), "info": "info_string", } @@ -62,10 +62,10 @@ def test_serialises_and_deserialises_hs00_message_correctly_for_minimal_1d_data( "length": 5, "unit": "m", "label": "some_label", - "bin_boundaries": [0, 1, 2, 3, 4, 5], + "bin_boundaries": np.array([0.0, 1.0, 2.0, 3.0, 4.0, 5.0]), } ], - "data": np.array([1, 2, 3, 4, 5]), + "data": np.array([1.0, 2.0, 3.0, 4.0, 5.0]), } buf = serialise_hs00(original_hist) @@ -93,18 
+93,18 @@ def test_serialises_and_deserialises_hs00_message_correctly_for_full_2d_data(sel "length": 2, "unit": "b", "label": "y", - "bin_boundaries": np.array([10, 11, 12]), + "bin_boundaries": np.array([10.0, 11.0, 12.0]), }, { "length": 5, "unit": "m", "label": "x", - "bin_boundaries": [0, 1, 2, 3, 4, 5], + "bin_boundaries": np.array([0.0, 1.0, 2.0, 3.0, 4.0, 5.0]), }, ], "last_metadata_timestamp": 123456, - "data": np.array([[1, 2, 3, 4, 5], [6, 7, 8, 9, 10]]), - "errors": np.array([[5, 4, 3, 2, 1], [10, 9, 8, 7, 6]]), + "data": np.array([[1.0, 2.0, 3.0, 4.0, 5.0], [6.0, 7.0, 8.0, 9.0, 10.0]]), + "errors": np.array([[5.0, 4.0, 3.0, 2.0, 1.0], [10.0, 9.0, 8.0, 7.0, 6.0]]), "info": "info_string", } buf = serialise_hs00(original_hist) @@ -135,10 +135,10 @@ def test_if_buffer_has_wrong_id_then_throws(self): "length": 5, "unit": "m", "label": "some_label", - "bin_boundaries": [0, 1, 2, 3, 4, 5], + "bin_boundaries": np.array([0.0, 1.0, 2.0, 3.0, 4.0, 5.0]), } ], - "data": np.array([1, 2, 3, 4, 5]), + "data": np.array([1.0, 2.0, 3.0, 4.0, 5.0]), } buf = serialise_hs00(original_hist) @@ -148,3 +148,139 @@ def test_if_buffer_has_wrong_id_then_throws(self): with pytest.raises(RuntimeError): deserialise_hs00(buf) + + def test_serialises_and_deserialises_hs00_message_correctly_for_int_array_data( + self + ): + """ + Round-trip to check what we serialise is what we get back. 
+ """ + original_hist = { + "source": "some_source", + "timestamp": 123456, + "current_shape": [5], + "dim_metadata": [ + { + "length": 5, + "unit": "m", + "label": "some_label", + "bin_boundaries": np.array([0, 1, 2, 3, 4, 5]), + } + ], + "last_metadata_timestamp": 123456, + "data": np.array([1, 2, 3, 4, 5]), + "errors": np.array([5, 4, 3, 2, 1]), + "info": "info_string", + } + + buf = serialise_hs00(original_hist) + hist = deserialise_hs00(buf) + + assert hist["source"] == original_hist["source"] + assert hist["timestamp"] == original_hist["timestamp"] + assert hist["current_shape"] == original_hist["current_shape"] + self._check_metadata_for_one_dimension( + hist["dim_metadata"][0], original_hist["dim_metadata"][0] + ) + assert np.array_equal(hist["data"], original_hist["data"]) + assert np.array_equal(hist["errors"], original_hist["errors"]) + assert hist["info"] == original_hist["info"] + assert ( + hist["last_metadata_timestamp"] == original_hist["last_metadata_timestamp"] + ) + + def test_serialises_and_deserialises_hs00_message_correctly_when_float_input_is_not_ndarray( + self + ): + """ + Round-trip to check what we serialise is what we get back. 
+ """ + original_hist = { + "source": "some_source", + "timestamp": 123456, + "current_shape": [2, 5], + "dim_metadata": [ + { + "length": 2, + "unit": "b", + "label": "y", + "bin_boundaries": [10.0, 11.0, 12.0], + }, + { + "length": 5, + "unit": "m", + "label": "x", + "bin_boundaries": [0.0, 1.0, 2.0, 3.0, 4.0, 5.0], + }, + ], + "last_metadata_timestamp": 123456, + "data": [[1.0, 2.0, 3.0, 4.0, 5.0], [6.0, 7.0, 8.0, 9.0, 10.0]], + "errors": [[5.0, 4.0, 3.0, 2.0, 1.0], [10.0, 9.0, 8.0, 7.0, 6.0]], + "info": "info_string", + } + buf = serialise_hs00(original_hist) + + hist = deserialise_hs00(buf) + assert hist["source"] == original_hist["source"] + assert hist["timestamp"] == original_hist["timestamp"] + assert hist["current_shape"] == original_hist["current_shape"] + self._check_metadata_for_one_dimension( + hist["dim_metadata"][0], original_hist["dim_metadata"][0] + ) + self._check_metadata_for_one_dimension( + hist["dim_metadata"][1], original_hist["dim_metadata"][1] + ) + assert np.array_equal(hist["data"], original_hist["data"]) + assert np.array_equal(hist["errors"], original_hist["errors"]) + assert hist["info"] == original_hist["info"] + assert ( + hist["last_metadata_timestamp"] == original_hist["last_metadata_timestamp"] + ) + + def test_serialises_and_deserialises_hs00_message_correctly_when_int_input_is_not_ndarray( + self + ): + """ + Round-trip to check what we serialise is what we get back. 
+ """ + original_hist = { + "source": "some_source", + "timestamp": 123456, + "current_shape": [2, 5], + "dim_metadata": [ + { + "length": 2, + "unit": "b", + "label": "y", + "bin_boundaries": [10, 11, 12], + }, + { + "length": 5, + "unit": "m", + "label": "x", + "bin_boundaries": [0, 1, 2, 3, 4, 5], + }, + ], + "last_metadata_timestamp": 123456, + "data": [[1, 2, 3, 4, 5], [6, 7, 8, 9, 10]], + "errors": [[5, 4, 3, 2, 1], [10, 9, 8, 7, 6]], + "info": "info_string", + } + buf = serialise_hs00(original_hist) + + hist = deserialise_hs00(buf) + assert hist["source"] == original_hist["source"] + assert hist["timestamp"] == original_hist["timestamp"] + assert hist["current_shape"] == original_hist["current_shape"] + self._check_metadata_for_one_dimension( + hist["dim_metadata"][0], original_hist["dim_metadata"][0] + ) + self._check_metadata_for_one_dimension( + hist["dim_metadata"][1], original_hist["dim_metadata"][1] + ) + assert np.array_equal(hist["data"], original_hist["data"]) + assert np.array_equal(hist["errors"], original_hist["errors"]) + assert hist["info"] == original_hist["info"] + assert ( + hist["last_metadata_timestamp"] == original_hist["last_metadata_timestamp"] + ) From 8f0941d432ee9b8299a46902910e766144059823 Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Tue, 7 Apr 2020 14:39:34 +0200 Subject: [PATCH 084/363] Update streaming_data_types/histogram_hs00.py Co-Authored-By: Matthew D Jones --- streaming_data_types/histogram_hs00.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/streaming_data_types/histogram_hs00.py b/streaming_data_types/histogram_hs00.py index ae89551..a0f0a80 100644 --- a/streaming_data_types/histogram_hs00.py +++ b/streaming_data_types/histogram_hs00.py @@ -170,10 +170,7 @@ def serialise_hs00(histogram): # Build the data data_len = reduce(operator.mul, histogram["current_shape"], 1) - if isinstance(histogram["data"], numpy.ndarray): - flattened_data = histogram["data"].flatten() - else: - flattened_data = 
numpy.asarray(histogram["data"]).flatten() + flattened_data = numpy.asarray(histogram["data"]).flatten() if numpy.issubdtype(flattened_data[0], numpy.int64): data_type = Array.ArrayULong From fa8521b7d912763401b451637d4545b96ad09996 Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Tue, 7 Apr 2020 15:04:24 +0200 Subject: [PATCH 085/363] Updated version number --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 28f7fc3..eb6fb49 100644 --- a/setup.py +++ b/setup.py @@ -3,7 +3,7 @@ setup( name="streaming_data_types", - version="0.3.0", + version="0.4.0", description="Python utilities for handling ESS streamed data", long_description="Python utilities for serialising and deserialising data via FlatBuffers for the European Spallation Source ERIC", author="ScreamingUdder", From 84f7aa84344b0fdf83a1325abaa561f0ac8cc047 Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Wed, 8 Apr 2020 11:35:04 +0200 Subject: [PATCH 086/363] Preparation for putting on pypi --- MANIFEST.in | 1 + README.md | 37 ++----------------------------------- README_DEV.md | 32 ++++++++++++++++++++++++++++++++ setup.py | 17 ++++++++++++++--- 4 files changed, 49 insertions(+), 38 deletions(-) create mode 100644 README_DEV.md diff --git a/MANIFEST.in b/MANIFEST.in index 408c775..5dd9038 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1,3 +1,4 @@ include LICENSE include requirements*.* include Makefile +include README.md diff --git a/README.md b/README.md index 2fb9c32..d1eb17b 100644 --- a/README.md +++ b/README.md @@ -47,38 +47,5 @@ hist = { "info": "info_string", } ``` -The arrays passed in for `data` and `errors` must be NumPy arrays. `bin_boundaries` -can be passed in as a Python list or a NumPy array, but on deserialisation it will be a -NumPy array. - -## For developers - -### Building the package -``` -python setup.py sdist bdist_wheel -``` - -### Install the commit hooks (important) -There are commit hooks for Black and Flake8. 
- -The commit hooks are handled using [pre-commit](https://pre-commit.com). - -To install the hooks for this project run: -``` -pre-commit install -``` - -To test the hooks run: -``` -pre-commit run --all-files -``` -This command can also be used to run the hooks manually. - -### Tox -Tox allows the unit tests to be run against multiple versions of Python. -See the tox.ini file for which versions are supported. -From the top directory: -``` -tox -``` - +The arrays passed in for `data`, `errors` and `bin_boundaries` can be NumPy arrays +or regular lists, but on deserialisation they will be NumPy arrays. diff --git a/README_DEV.md b/README_DEV.md new file mode 100644 index 0000000..65ed9c9 --- /dev/null +++ b/README_DEV.md @@ -0,0 +1,32 @@ +# Python Streaming Data Types +## For developers + +### Building the package +``` +python setup.py sdist bdist_wheel +``` + +### Install the commit hooks (important) +There are commit hooks for Black and Flake8. + +The commit hooks are handled using [pre-commit](https://pre-commit.com). + +To install the hooks for this project run: +``` +pre-commit install +``` + +To test the hooks run: +``` +pre-commit run --all-files +``` +This command can also be used to run the hooks manually. + +### Tox +Tox allows the unit tests to be run against multiple versions of Python. +See the tox.ini file for which versions are supported. +From the top directory: +``` +tox +``` + diff --git a/setup.py b/setup.py index eb6fb49..4f329f5 100644 --- a/setup.py +++ b/setup.py @@ -1,13 +1,24 @@ +import os from setuptools import setup, find_packages +DESCRIPTION = "Python utilities for handling ESS streamed data" + +here = os.path.abspath(os.path.dirname(__file__)) + +# Import the README and use it as the long-description. 
+try: + with open(os.path.join(here, "README.md"), encoding="utf-8") as f: + LONG_DESCRIPTION = "\n" + f.read() +except Exception as error: + print(error) + LONG_DESCRIPTION = DESCRIPTION setup( name="streaming_data_types", version="0.4.0", - description="Python utilities for handling ESS streamed data", - long_description="Python utilities for serialising and deserialising data via FlatBuffers for the European Spallation Source ERIC", + description=DESCRIPTION, + long_description=LONG_DESCRIPTION, author="ScreamingUdder", - author_email="NoAddress@Nowhere.com", url="https://github.com/ess-dmsc/python-streaming-data-types", license="BSD 2-Clause License", packages=find_packages(exclude="tests"), From 201755e351c7d436c5534856b6b214dc677be596 Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Wed, 8 Apr 2020 11:35:04 +0200 Subject: [PATCH 087/363] Preparation for putting on pypi --- MANIFEST.in | 1 + README.md | 37 ++----------------------------------- README_DEV.md | 32 ++++++++++++++++++++++++++++++++ requirements-dev.txt | 1 + setup.py | 17 ++++++++++++++--- 5 files changed, 50 insertions(+), 38 deletions(-) create mode 100644 README_DEV.md diff --git a/MANIFEST.in b/MANIFEST.in index 408c775..5dd9038 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1,3 +1,4 @@ include LICENSE include requirements*.* include Makefile +include README.md diff --git a/README.md b/README.md index 2fb9c32..d1eb17b 100644 --- a/README.md +++ b/README.md @@ -47,38 +47,5 @@ hist = { "info": "info_string", } ``` -The arrays passed in for `data` and `errors` must be NumPy arrays. `bin_boundaries` -can be passed in as a Python list or a NumPy array, but on deserialisation it will be a -NumPy array. - -## For developers - -### Building the package -``` -python setup.py sdist bdist_wheel -``` - -### Install the commit hooks (important) -There are commit hooks for Black and Flake8. - -The commit hooks are handled using [pre-commit](https://pre-commit.com). 
- -To install the hooks for this project run: -``` -pre-commit install -``` - -To test the hooks run: -``` -pre-commit run --all-files -``` -This command can also be used to run the hooks manually. - -### Tox -Tox allows the unit tests to be run against multiple versions of Python. -See the tox.ini file for which versions are supported. -From the top directory: -``` -tox -``` - +The arrays passed in for `data`, `errors` and `bin_boundaries` can be NumPy arrays +or regular lists, but on deserialisation they will be NumPy arrays. diff --git a/README_DEV.md b/README_DEV.md new file mode 100644 index 0000000..65ed9c9 --- /dev/null +++ b/README_DEV.md @@ -0,0 +1,32 @@ +# Python Streaming Data Types +## For developers + +### Building the package +``` +python setup.py sdist bdist_wheel +``` + +### Install the commit hooks (important) +There are commit hooks for Black and Flake8. + +The commit hooks are handled using [pre-commit](https://pre-commit.com). + +To install the hooks for this project run: +``` +pre-commit install +``` + +To test the hooks run: +``` +pre-commit run --all-files +``` +This command can also be used to run the hooks manually. + +### Tox +Tox allows the unit tests to be run against multiple versions of Python. +See the tox.ini file for which versions are supported. +From the top directory: +``` +tox +``` + diff --git a/requirements-dev.txt b/requirements-dev.txt index 96815bf..5ec9cb4 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -2,3 +2,4 @@ flake8 pre-commit pytest tox +twine diff --git a/setup.py b/setup.py index eb6fb49..4f329f5 100644 --- a/setup.py +++ b/setup.py @@ -1,13 +1,24 @@ +import os from setuptools import setup, find_packages +DESCRIPTION = "Python utilities for handling ESS streamed data" + +here = os.path.abspath(os.path.dirname(__file__)) + +# Import the README and use it as the long-description. 
+try: + with open(os.path.join(here, "README.md"), encoding="utf-8") as f: + LONG_DESCRIPTION = "\n" + f.read() +except Exception as error: + print(error) + LONG_DESCRIPTION = DESCRIPTION setup( name="streaming_data_types", version="0.4.0", - description="Python utilities for handling ESS streamed data", - long_description="Python utilities for serialising and deserialising data via FlatBuffers for the European Spallation Source ERIC", + description=DESCRIPTION, + long_description=LONG_DESCRIPTION, author="ScreamingUdder", - author_email="NoAddress@Nowhere.com", url="https://github.com/ess-dmsc/python-streaming-data-types", license="BSD 2-Clause License", packages=find_packages(exclude="tests"), From a8170bc4467111c822c22107d3193ef32fc4d2eb Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Wed, 8 Apr 2020 11:40:52 +0200 Subject: [PATCH 088/363] more distinct name --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 4f329f5..faa1d6e 100644 --- a/setup.py +++ b/setup.py @@ -14,7 +14,7 @@ LONG_DESCRIPTION = DESCRIPTION setup( - name="streaming_data_types", + name="ess_streaming_data_types", version="0.4.0", description=DESCRIPTION, long_description=LONG_DESCRIPTION, From d6440eede4d75b41a2d2d99d00ffd2b25bad09b9 Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Wed, 8 Apr 2020 12:26:59 +0200 Subject: [PATCH 089/363] Mistake in setup.py --- README_DEV.md | 24 +++++++++++++++++++----- setup.py | 1 + 2 files changed, 20 insertions(+), 5 deletions(-) diff --git a/README_DEV.md b/README_DEV.md index 65ed9c9..dd5a101 100644 --- a/README_DEV.md +++ b/README_DEV.md @@ -1,11 +1,6 @@ # Python Streaming Data Types ## For developers -### Building the package -``` -python setup.py sdist bdist_wheel -``` - ### Install the commit hooks (important) There are commit hooks for Black and Flake8. 
@@ -30,3 +25,22 @@ From the top directory: tox ``` +### Building the package and deploying it +``` +python setup.py sdist bdist_wheel +``` + +Check dist files: +``` +twine check dist/* +``` + +Push to test.pypi.org for testing: +``` +twine upload --repository-url https://test.pypi.org/legacy/ dist/* +``` + +After testing installing from test.pypi.org: +``` +twine upload dist/* +``` diff --git a/setup.py b/setup.py index faa1d6e..bec450c 100644 --- a/setup.py +++ b/setup.py @@ -18,6 +18,7 @@ version="0.4.0", description=DESCRIPTION, long_description=LONG_DESCRIPTION, + long_description_content_type="text/markdown", author="ScreamingUdder", url="https://github.com/ess-dmsc/python-streaming-data-types", license="BSD 2-Clause License", From af29ee362f79c550a1a40e837195fcca6bbe2f9d Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Wed, 8 Apr 2020 12:41:48 +0200 Subject: [PATCH 090/363] Stray s in setup.py --- setup.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/setup.py b/setup.py index bec450c..b4fc024 100644 --- a/setup.py +++ b/setup.py @@ -10,7 +10,7 @@ with open(os.path.join(here, "README.md"), encoding="utf-8") as f: LONG_DESCRIPTION = "\n" + f.read() except Exception as error: - print(error) + print("COULD NOT GET LONG DESC: {}".format(error)) LONG_DESCRIPTION = DESCRIPTION setup( @@ -23,6 +23,7 @@ url="https://github.com/ess-dmsc/python-streaming-data-types", license="BSD 2-Clause License", packages=find_packages(exclude="tests"), + python_requires=">=3.6.0", install_requires=["flatbuffers", "numpy"], - extras_requires={"dev": ["flake8", "pre-commit", "pytest", "tox"]}, + extras_require={"dev": ["flake8", "pre-commit", "pytest", "tox"]}, ) From 423c72013977742c9d71f3acb5fc3807e8c4256a Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Fri, 24 Apr 2020 11:33:40 +0200 Subject: [PATCH 091/363] Can serialise and deserialise ev42 --- README.md | 1 + streaming_data_types/eventdata_ev42.py | 108 ++++++++++++ 
.../fbschemas/eventdata_ev42/EventMessage.py | 165 ++++++++++++++++++ .../fbschemas/eventdata_ev42/FacilityData.py | 9 + .../fbschemas/eventdata_ev42/__init__.py | 0 .../isis_event_info_is84/ISISData.py | 65 +++++++ .../isis_event_info_is84/RunState.py | 8 + tests/test_ev42.py | 93 ++++++++++ 8 files changed, 449 insertions(+) create mode 100644 streaming_data_types/eventdata_ev42.py create mode 100644 streaming_data_types/fbschemas/eventdata_ev42/EventMessage.py create mode 100644 streaming_data_types/fbschemas/eventdata_ev42/FacilityData.py create mode 100644 streaming_data_types/fbschemas/eventdata_ev42/__init__.py create mode 100644 streaming_data_types/fbschemas/isis_event_info_is84/ISISData.py create mode 100644 streaming_data_types/fbschemas/isis_event_info_is84/RunState.py create mode 100644 tests/test_ev42.py diff --git a/README.md b/README.md index d1eb17b..5af82a1 100644 --- a/README.md +++ b/README.md @@ -13,6 +13,7 @@ https://github.com/ess-dmsc/streaming-data-types |pl72|Run start|N| |6s4t|Run stop|N| |f142|Log Data|Y| +|ev42|Event Data|Y| ### hs00 Schema for histogram data. It is one of the more complicated to use schemas. diff --git a/streaming_data_types/eventdata_ev42.py b/streaming_data_types/eventdata_ev42.py new file mode 100644 index 0000000..4350467 --- /dev/null +++ b/streaming_data_types/eventdata_ev42.py @@ -0,0 +1,108 @@ +from collections import namedtuple +import flatbuffers +import streaming_data_types.fbschemas.eventdata_ev42.EventMessage as EventMessage +import streaming_data_types.fbschemas.eventdata_ev42.FacilityData as FacilityData +import streaming_data_types.fbschemas.isis_event_info_is84.ISISData as ISISData +from streaming_data_types.utils import check_schema_identifier + + +FILE_IDENTIFIER = b"ev42" + + +def deserialise_ev42(buffer): + """ + Deserialise FlatBuffer ev42. + + :param buffer: The FlatBuffers buffer. + :return: The deserialised data. 
+ """ + check_schema_identifier(buffer, FILE_IDENTIFIER) + + event = EventMessage.EventMessage.GetRootAsEventMessage(buffer, 0) + + specific_data = None + if event.FacilitySpecificDataType() == FacilityData.FacilityData.ISISData: + specific = event.FacilitySpecificData() + isis_buf = ISISData.ISISData() + isis_buf.Init(specific.Bytes, specific.Pos) + specific_data = { + "period_number": isis_buf.PeriodNumber(), + "run_state": isis_buf.RunState(), + "proton_charge": isis_buf.ProtonCharge(), + } + + EventData = namedtuple( + "EventData", + "source_name message_id pulse_time time_of_flight detector_id specific_data", + ) + + return EventData( + event.SourceName().decode("utf-8"), + event.MessageId(), + event.PulseTime(), + event.TimeOfFlightAsNumpy(), + event.DetectorIdAsNumpy(), + specific_data, + ) + + +def serialise_ev42( + source_name, message_id, pulse_time, time_of_flight, detector_id, isis_specific=None +): + """ + Serialise event data as an ev42 FlatBuffers message. + + :param source_name: + :param message_id: + :param pulse_time: + :param time_of_flight: + :param detector_id: + :param isis_specific: + :return: + """ + builder = flatbuffers.Builder(1024) + + source = builder.CreateString(source_name) + + EventMessage.EventMessageStartTimeOfFlightVector(builder, len(time_of_flight)) + # FlatBuffers builds arrays backwards + for x in reversed(time_of_flight): + builder.PrependInt32(x) + tof_data = builder.EndVector(len(time_of_flight)) + + EventMessage.EventMessageStartDetectorIdVector(builder, len(detector_id)) + # FlatBuffers builds arrays backwards + for x in reversed(detector_id): + builder.PrependInt32(x) + det_data = builder.EndVector(len(detector_id)) + + isis_data = None + if isis_specific: + # isis_builder = flatbuffers.Builder(96) + ISISData.ISISDataStart(builder) + ISISData.ISISDataAddPeriodNumber(builder, isis_specific["period_number"]) + ISISData.ISISDataAddRunState(builder, isis_specific["run_state"]) + ISISData.ISISDataAddProtonCharge(builder, 
isis_specific["proton_charge"]) + isis_data = ISISData.ISISDataEnd(builder) + + # Build the actual buffer + EventMessage.EventMessageStart(builder) + EventMessage.EventMessageAddDetectorId(builder, det_data) + EventMessage.EventMessageAddTimeOfFlight(builder, tof_data) + EventMessage.EventMessageAddPulseTime(builder, pulse_time) + EventMessage.EventMessageAddMessageId(builder, message_id) + EventMessage.EventMessageAddSourceName(builder, source) + + if isis_specific: + EventMessage.EventMessageAddFacilitySpecificDataType( + builder, FacilityData.FacilityData.ISISData + ) + EventMessage.EventMessageAddFacilitySpecificData(builder, isis_data) + + data = EventMessage.EventMessageEnd(builder) + builder.Finish(data) + + # Generate the output and replace the file_identifier + buffer = builder.Output() + buffer[4:8] = FILE_IDENTIFIER + return buffer diff --git a/streaming_data_types/fbschemas/eventdata_ev42/EventMessage.py b/streaming_data_types/fbschemas/eventdata_ev42/EventMessage.py new file mode 100644 index 0000000..3e4290a --- /dev/null +++ b/streaming_data_types/fbschemas/eventdata_ev42/EventMessage.py @@ -0,0 +1,165 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers + + +class EventMessage(object): + __slots__ = ["_tab"] + + @classmethod + def GetRootAsEventMessage(cls, buf, offset): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = EventMessage() + x.Init(buf, n + offset) + return x + + # EventMessage + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # EventMessage + def SourceName(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + # EventMessage + def MessageId(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.Get( + flatbuffers.number_types.Uint64Flags, o + self._tab.Pos + ) + 
return 0 + + # EventMessage + def PulseTime(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return self._tab.Get( + flatbuffers.number_types.Uint64Flags, o + self._tab.Pos + ) + return 0 + + # EventMessage + def TimeOfFlight(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get( + flatbuffers.number_types.Uint32Flags, + a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4), + ) + return 0 + + # EventMessage + def TimeOfFlightAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint32Flags, o) + return 0 + + # EventMessage + def TimeOfFlightLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # EventMessage + def DetectorId(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get( + flatbuffers.number_types.Uint32Flags, + a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4), + ) + return 0 + + # EventMessage + def DetectorIdAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint32Flags, o) + return 0 + + # EventMessage + def DetectorIdLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # EventMessage + def FacilitySpecificDataType(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Uint8Flags, o + self._tab.Pos) + return 0 + + # EventMessage + def FacilitySpecificData(self): + o = 
flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16)) + if o != 0: + from flatbuffers.table import Table + + obj = Table(bytearray(), 0) + self._tab.Union(obj, o) + return obj + return None + + +def EventMessageStart(builder): + builder.StartObject(7) + + +def EventMessageAddSourceName(builder, sourceName): + builder.PrependUOffsetTRelativeSlot( + 0, flatbuffers.number_types.UOffsetTFlags.py_type(sourceName), 0 + ) + + +def EventMessageAddMessageId(builder, messageId): + builder.PrependUint64Slot(1, messageId, 0) + + +def EventMessageAddPulseTime(builder, pulseTime): + builder.PrependUint64Slot(2, pulseTime, 0) + + +def EventMessageAddTimeOfFlight(builder, timeOfFlight): + builder.PrependUOffsetTRelativeSlot( + 3, flatbuffers.number_types.UOffsetTFlags.py_type(timeOfFlight), 0 + ) + + +def EventMessageStartTimeOfFlightVector(builder, numElems): + return builder.StartVector(4, numElems, 4) + + +def EventMessageAddDetectorId(builder, detectorId): + builder.PrependUOffsetTRelativeSlot( + 4, flatbuffers.number_types.UOffsetTFlags.py_type(detectorId), 0 + ) + + +def EventMessageStartDetectorIdVector(builder, numElems): + return builder.StartVector(4, numElems, 4) + + +def EventMessageAddFacilitySpecificDataType(builder, facilitySpecificDataType): + builder.PrependUint8Slot(5, facilitySpecificDataType, 0) + + +def EventMessageAddFacilitySpecificData(builder, facilitySpecificData): + builder.PrependUOffsetTRelativeSlot( + 6, flatbuffers.number_types.UOffsetTFlags.py_type(facilitySpecificData), 0 + ) + + +def EventMessageEnd(builder): + return builder.EndObject() diff --git a/streaming_data_types/fbschemas/eventdata_ev42/FacilityData.py b/streaming_data_types/fbschemas/eventdata_ev42/FacilityData.py new file mode 100644 index 0000000..2be4a10 --- /dev/null +++ b/streaming_data_types/fbschemas/eventdata_ev42/FacilityData.py @@ -0,0 +1,9 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + + +class FacilityData(object): + 
NONE = 0 + ISISData = 1 + AdcPulseDebug = 2 diff --git a/streaming_data_types/fbschemas/eventdata_ev42/__init__.py b/streaming_data_types/fbschemas/eventdata_ev42/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/streaming_data_types/fbschemas/isis_event_info_is84/ISISData.py b/streaming_data_types/fbschemas/isis_event_info_is84/ISISData.py new file mode 100644 index 0000000..79e3c11 --- /dev/null +++ b/streaming_data_types/fbschemas/isis_event_info_is84/ISISData.py @@ -0,0 +1,65 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers + + +class ISISData(object): + __slots__ = ["_tab"] + + @classmethod + def GetRootAsISISData(cls, buf, offset): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = ISISData() + x.Init(buf, n + offset) + return x + + # ISISData + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # ISISData + def PeriodNumber(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.Get( + flatbuffers.number_types.Uint32Flags, o + self._tab.Pos + ) + return 0 + + # ISISData + def RunState(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) + return 0 + + # ISISData + def ProtonCharge(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return self._tab.Get( + flatbuffers.number_types.Float32Flags, o + self._tab.Pos + ) + return 0.0 + + +def ISISDataStart(builder): + builder.StartObject(3) + + +def ISISDataAddPeriodNumber(builder, periodNumber): + builder.PrependUint32Slot(0, periodNumber, 0) + + +def ISISDataAddRunState(builder, runState): + builder.PrependInt8Slot(1, runState, 0) + + +def ISISDataAddProtonCharge(builder, protonCharge): + builder.PrependFloat32Slot(2, protonCharge, 0.0) + + +def 
ISISDataEnd(builder): + return builder.EndObject() diff --git a/streaming_data_types/fbschemas/isis_event_info_is84/RunState.py b/streaming_data_types/fbschemas/isis_event_info_is84/RunState.py new file mode 100644 index 0000000..9d34cd9 --- /dev/null +++ b/streaming_data_types/fbschemas/isis_event_info_is84/RunState.py @@ -0,0 +1,8 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + + +class RunState(object): + SETUP = 0 + RUNNING = 1 diff --git a/tests/test_ev42.py b/tests/test_ev42.py new file mode 100644 index 0000000..1b5b500 --- /dev/null +++ b/tests/test_ev42.py @@ -0,0 +1,93 @@ +import numpy as np +import pytest +from streaming_data_types.eventdata_ev42 import serialise_ev42, deserialise_ev42 + + +class TestSerialisationEv42: + def test_serialises_and_deserialises_ev42_message_correctly(self): + """ + Round-trip to check what we serialise is what we get back. + """ + original_entry = { + "source_name": "some_source", + "message_id": 123456, + "pulse_time": 567890, + "time_of_flight": [1, 2, 3, 4, 5, 6, 7, 8, 9], + "detector_id": [10, 20, 30, 40, 50, 60, 70, 80, 90], + } + + buf = serialise_ev42(**original_entry) + entry = deserialise_ev42(buf) + + assert entry.source_name == original_entry["source_name"] + assert entry.message_id == original_entry["message_id"] + assert entry.pulse_time == original_entry["pulse_time"] + assert np.array_equal(entry.time_of_flight, original_entry["time_of_flight"]) + assert np.array_equal(entry.detector_id, original_entry["detector_id"]) + + def test_serialises_and_deserialises_ev42_message_correctly_for_numpy_arrays(self): + """ + Round-trip to check what we serialise is what we get back. 
+ """ + original_entry = { + "source_name": "some_source", + "message_id": 123456, + "pulse_time": 567890, + "time_of_flight": np.array([1, 2, 3, 4, 5, 6, 7, 8, 9]), + "detector_id": np.array([10, 20, 30, 40, 50, 60, 70, 80, 90]), + } + + buf = serialise_ev42(**original_entry) + entry = deserialise_ev42(buf) + + assert entry.source_name == original_entry["source_name"] + assert entry.message_id == original_entry["message_id"] + assert entry.pulse_time == original_entry["pulse_time"] + assert np.array_equal(entry.time_of_flight, original_entry["time_of_flight"]) + assert np.array_equal(entry.detector_id, original_entry["detector_id"]) + + def test_serialises_and_deserialises_ev42_message_correctly_with_isis_info(self): + """ + Round-trip to check what we serialise is what we get back. + """ + isis_data = {"period_number": 5, "run_state": 1, "proton_charge": 1.234} + + original_entry = { + "source_name": "some_source", + "message_id": 123456, + "pulse_time": 567890, + "time_of_flight": [1, 2, 3, 4, 5, 6, 7, 8, 9], + "detector_id": [10, 20, 30, 40, 50, 60, 70, 80, 90], + "isis_specific": isis_data, + } + + buf = serialise_ev42(**original_entry) + entry = deserialise_ev42(buf) + + assert entry.source_name == original_entry["source_name"] + assert entry.message_id == original_entry["message_id"] + assert entry.pulse_time == original_entry["pulse_time"] + assert np.array_equal(entry.time_of_flight, original_entry["time_of_flight"]) + assert np.array_equal(entry.detector_id, original_entry["detector_id"]) + assert entry.specific_data["period_number"] == isis_data["period_number"] + assert entry.specific_data["run_state"] == isis_data["run_state"] + assert entry.specific_data["proton_charge"] == pytest.approx( + isis_data["proton_charge"] + ) + + def test_if_buffer_has_wrong_id_then_throws(self): + original_entry = { + "source_name": "some_source", + "message_id": 123456, + "pulse_time": 567890, + "time_of_flight": [1, 2, 3, 4, 5, 6, 7, 8, 9], + "detector_id": [10, 20, 30, 
40, 50, 60, 70, 80, 90], + } + buf = serialise_ev42(**original_entry) + + # Manually hack the id + buf = bytearray(buf) + buf[4:8] = b"1234" + + with pytest.raises(RuntimeError): + deserialise_ev42(buf) From f71541fbe86e4007f20233f8e3f3a2974f5ea831 Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Mon, 27 Apr 2020 08:00:33 +0200 Subject: [PATCH 092/363] Use tuple for fields in named --- streaming_data_types/eventdata_ev42.py | 9 ++++++++- streaming_data_types/logdata_f142.py | 3 ++- streaming_data_types/nicos_cache_ns10.py | 2 +- streaming_data_types/run_start_pl72.py | 12 +++++++++++- streaming_data_types/run_stop_6s4t.py | 4 +++- 5 files changed, 25 insertions(+), 5 deletions(-) diff --git a/streaming_data_types/eventdata_ev42.py b/streaming_data_types/eventdata_ev42.py index 4350467..4fe511c 100644 --- a/streaming_data_types/eventdata_ev42.py +++ b/streaming_data_types/eventdata_ev42.py @@ -33,7 +33,14 @@ def deserialise_ev42(buffer): EventData = namedtuple( "EventData", - "source_name message_id pulse_time time_of_flight detector_id specific_data", + ( + "source_name", + "message_id", + "pulse_time", + "time_of_flight", + "detector_id", + "specific_data", + ), ) return EventData( diff --git a/streaming_data_types/logdata_f142.py b/streaming_data_types/logdata_f142.py index 0f3ecf0..a8c2121 100644 --- a/streaming_data_types/logdata_f142.py +++ b/streaming_data_types/logdata_f142.py @@ -606,7 +606,8 @@ def deserialise_f142(buffer: Union[bytearray, bytes]) -> NamedTuple: timestamp = log_data.Timestamp() LogDataInfo = namedtuple( - "LogDataInfo", "value source_name timestamp_unix_ns alarm_status alarm_severity" + "LogDataInfo", + ("value", "source_name", "timestamp_unix_ns", "alarm_status", "alarm_severity"), ) return LogDataInfo( value, source_name.decode(), timestamp, log_data.Status(), log_data.Severity() diff --git a/streaming_data_types/nicos_cache_ns10.py b/streaming_data_types/nicos_cache_ns10.py index 695bc62..8df3ee9 100644 --- 
a/streaming_data_types/nicos_cache_ns10.py +++ b/streaming_data_types/nicos_cache_ns10.py @@ -42,6 +42,6 @@ def deserialise_ns10(buffer): expired = entry.Expired() if entry.Expired() else False value = entry.Value() if entry.Value() else b"" - Entry = namedtuple("Entry", "key time_stamp ttl expired value") + Entry = namedtuple("Entry", ("key", "time_stamp", "ttl", "expired", "value")) return Entry(key.decode().strip(), time_stamp, ttl, expired, value.decode()) diff --git a/streaming_data_types/run_start_pl72.py b/streaming_data_types/run_start_pl72.py index bc35a7d..b2c739d 100644 --- a/streaming_data_types/run_start_pl72.py +++ b/streaming_data_types/run_start_pl72.py @@ -72,7 +72,17 @@ def deserialise_pl72(buffer: Union[bytearray, bytes]) -> NamedTuple: RunStartInfo = namedtuple( "RunStartInfo", - "job_id filename start_time stop_time run_name nexus_structure service_id instrument_name broker", + ( + "job_id", + "filename", + "start_time", + "stop_time", + "run_name", + "nexus_structure", + "service_id", + "instrument_name", + "broker", + ), ) return RunStartInfo( job_id.decode(), diff --git a/streaming_data_types/run_stop_6s4t.py b/streaming_data_types/run_stop_6s4t.py index 43d0057..be8a7f2 100644 --- a/streaming_data_types/run_stop_6s4t.py +++ b/streaming_data_types/run_stop_6s4t.py @@ -49,7 +49,9 @@ def deserialise_6s4t(buffer: Union[bytearray, bytes]) -> NamedTuple: run_name = run_stop.RunName() if run_stop.RunName() else b"" stop_time = run_stop.StopTime() - RunStopInfo = namedtuple("RunStopInfo", "stop_time run_name job_id service_id") + RunStopInfo = namedtuple( + "RunStopInfo", ("stop_time", "run_name", "job_id", "service_id") + ) return RunStopInfo( stop_time, run_name.decode(), job_id.decode(), service_id.decode() ) From 39411a3d9dc005b76e76aaa77769f078dc3b91ef Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Mon, 27 Apr 2020 08:03:12 +0200 Subject: [PATCH 093/363] Bump version number --- requirements-dev.txt | 1 + setup.py | 2 +- 2 files changed, 2 
insertions(+), 1 deletion(-) diff --git a/requirements-dev.txt b/requirements-dev.txt index 5ec9cb4..029ce24 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -1,3 +1,4 @@ +-r requirements.txt flake8 pre-commit pytest diff --git a/setup.py b/setup.py index b4fc024..f95ac36 100644 --- a/setup.py +++ b/setup.py @@ -15,7 +15,7 @@ setup( name="ess_streaming_data_types", - version="0.4.0", + version="0.5.0", description=DESCRIPTION, long_description=LONG_DESCRIPTION, long_description_content_type="text/markdown", From ecb321b48f322e1188f5e02d3f4e36a1097c7e38 Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Mon, 27 Apr 2020 09:26:36 +0200 Subject: [PATCH 094/363] Missed __init__ file --- streaming_data_types/fbschemas/isis_event_info_is84/__init__.py | 0 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 streaming_data_types/fbschemas/isis_event_info_is84/__init__.py diff --git a/streaming_data_types/fbschemas/isis_event_info_is84/__init__.py b/streaming_data_types/fbschemas/isis_event_info_is84/__init__.py new file mode 100644 index 0000000..e69de29 From b7f6ec5d8e0006e64f7dbe7ccc053c59750b3953 Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Mon, 27 Apr 2020 11:21:14 +0200 Subject: [PATCH 095/363] Expose EventData tuple --- streaming_data_types/eventdata_ev42.py | 25 +++++++++++++------------ 1 file changed, 13 insertions(+), 12 deletions(-) diff --git a/streaming_data_types/eventdata_ev42.py b/streaming_data_types/eventdata_ev42.py index 4fe511c..efde381 100644 --- a/streaming_data_types/eventdata_ev42.py +++ b/streaming_data_types/eventdata_ev42.py @@ -9,6 +9,19 @@ FILE_IDENTIFIER = b"ev42" +EventData = namedtuple( + "EventData", + ( + "source_name", + "message_id", + "pulse_time", + "time_of_flight", + "detector_id", + "specific_data", + ), +) + + def deserialise_ev42(buffer): """ Deserialise FlatBuffer ev42. 
@@ -31,18 +44,6 @@ def deserialise_ev42(buffer): "proton_charge": isis_buf.ProtonCharge(), } - EventData = namedtuple( - "EventData", - ( - "source_name", - "message_id", - "pulse_time", - "time_of_flight", - "detector_id", - "specific_data", - ), - ) - return EventData( event.SourceName().decode("utf-8"), event.MessageId(), From 600200e0a4285874071e3604188d19007852ff6b Mon Sep 17 00:00:00 2001 From: michele-brambilla Date: Wed, 6 May 2020 15:01:02 +0200 Subject: [PATCH 096/363] add x5f2 status serialiser --- .../fbschemas/status_x5f2/Status.py | 78 ++++++++++++++++ .../fbschemas/status_x5f2/__init__.py | 0 streaming_data_types/status_x5f2.py | 91 +++++++++++++++++++ tests/test_x52f.py | 51 +++++++++++ 4 files changed, 220 insertions(+) create mode 100644 streaming_data_types/fbschemas/status_x5f2/Status.py create mode 100644 streaming_data_types/fbschemas/status_x5f2/__init__.py create mode 100644 streaming_data_types/status_x5f2.py create mode 100644 tests/test_x52f.py diff --git a/streaming_data_types/fbschemas/status_x5f2/Status.py b/streaming_data_types/fbschemas/status_x5f2/Status.py new file mode 100644 index 0000000..8984dbe --- /dev/null +++ b/streaming_data_types/fbschemas/status_x5f2/Status.py @@ -0,0 +1,78 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers + +class Status(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAsStatus(cls, buf, offset): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = Status() + x.Init(buf, n + offset) + return x + + # Status + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # Status + def SoftwareName(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + # Status + def SoftwareVersion(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return 
self._tab.String(o + self._tab.Pos) + return None + + # Status + def ServiceId(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + # Status + def HostName(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + # Status + def ProcessId(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Uint32Flags, o + self._tab.Pos) + return 0 + + # Status + def UpdateInterval(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Uint32Flags, o + self._tab.Pos) + return 0 + + # Status + def StatusJson(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + +def StatusStart(builder): builder.StartObject(7) +def StatusAddSoftwareName(builder, softwareName): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(softwareName), 0) +def StatusAddSoftwareVersion(builder, softwareVersion): builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(softwareVersion), 0) +def StatusAddServiceId(builder, serviceId): builder.PrependUOffsetTRelativeSlot(2, flatbuffers.number_types.UOffsetTFlags.py_type(serviceId), 0) +def StatusAddHostName(builder, hostName): builder.PrependUOffsetTRelativeSlot(3, flatbuffers.number_types.UOffsetTFlags.py_type(hostName), 0) +def StatusAddProcessId(builder, processId): builder.PrependUint32Slot(4, processId, 0) +def StatusAddUpdateInterval(builder, updateInterval): builder.PrependUint32Slot(5, updateInterval, 0) +def StatusAddStatusJson(builder, statusJson): builder.PrependUOffsetTRelativeSlot(6, 
flatbuffers.number_types.UOffsetTFlags.py_type(statusJson), 0) +def StatusEnd(builder): return builder.EndObject() diff --git a/streaming_data_types/fbschemas/status_x5f2/__init__.py b/streaming_data_types/fbschemas/status_x5f2/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/streaming_data_types/status_x5f2.py b/streaming_data_types/status_x5f2.py new file mode 100644 index 0000000..714c2d1 --- /dev/null +++ b/streaming_data_types/status_x5f2.py @@ -0,0 +1,91 @@ +from collections import namedtuple +import flatbuffers +from streaming_data_types.utils import check_schema_identifier + +from streaming_data_types.fbschemas.status_x5f2 import Status + +FILE_IDENTIFIER = b"x5f2" + +StatusMessage = namedtuple( + "StatusMessage", + ( + "software_name", + "software_version", + "service_id", + "host_name", + "process_id", + "update_interval", + "status_json", + ), +) + + +def deserialise_x5f2(buffer): + """ + Deserialise FlatBuffer x5f2. + + :param buffer: The FlatBuffers buffer. + :return: The deserialised data. + """ + check_schema_identifier(buffer, FILE_IDENTIFIER) + + log_message = Status.Status.GetRootAsStatus(buffer, 0) + + return StatusMessage( + log_message.SoftwareName().decode("utf-8"), + log_message.SoftwareVersion().decode("utf-8"), + log_message.ServiceId().decode("utf-8"), + log_message.HostName().decode("utf-8"), + log_message.ProcessId(), + log_message.UpdateInterval(), + log_message.StatusJson().decode("utf-8") + ) + + +def serialise_x5f2( + software_name, + software_version, + service_id, + host_name, + process_id, + update_interval, + status_json): + """ + Serialise status message as an x5f2 FlatBuffers message. 
+ + :param software_name: + :param software_version: + :param service_id: + :param host_name: + :param process_id: + :param update_interval: + :param status_json: + :return: + """ + + builder = flatbuffers.Builder(1024) + + software_name = builder.CreateString(software_name) + software_version = builder.CreateString(software_version) + service_id = builder.CreateString(service_id) + host_name = builder.CreateString(host_name) + status_json = builder.CreateString(status_json) + + # Build the actual buffer + Status.StatusStart(builder) + + Status.StatusAddSoftwareName(builder, software_name) + Status.StatusAddSoftwareVersion(builder, software_version) + Status.StatusAddServiceId(builder, service_id) + Status.StatusAddHostName(builder, host_name) + Status.StatusAddProcessId(builder, process_id) + Status.StatusAddUpdateInterval(builder, update_interval) + Status.StatusAddStatusJson(builder, status_json) + + data = Status.StatusEnd(builder) + builder.Finish(data) + + # Generate the output and replace the file_identifier + buffer = builder.Output() + buffer[4:8] = FILE_IDENTIFIER + return buffer diff --git a/tests/test_x52f.py b/tests/test_x52f.py new file mode 100644 index 0000000..9dcfe54 --- /dev/null +++ b/tests/test_x52f.py @@ -0,0 +1,51 @@ +import pytest +from streaming_data_types.status_x5f2 import serialise_x5f2, deserialise_x5f2 + + +class TestEncoder(object): + + def test_serialises_and_deserialises_x5f2_message_correctly(self): + # """ + # Round-trip to check what we serialise is what we get back. 
+ # """ + + original_entry = { + "software_name" : "nicos/test", + "software_version" : "1.0", + "service_id" : "1a2b3c", + "host_name" : "//localhost", + "process_id" : 1234, + "update_interval": 0, + "status_json" : '{"content" : "log_or_status_message"}', + } + + buf = serialise_x5f2(**original_entry) + entry = deserialise_x5f2(buf) + + assert entry.software_name == original_entry["software_name"] + assert entry.software_version == original_entry["software_version"] + assert entry.service_id == original_entry["service_id"] + assert entry.host_name == original_entry["host_name"] + assert entry.process_id == original_entry["process_id"] + assert entry.update_interval == original_entry["update_interval"] + assert entry.status_json == original_entry["status_json"] + + def test_if_buffer_has_wrong_id_then_throws(self): + original_entry = { + "software_name" : "nicos/test", + "software_version" : "1.0", + "service_id" : "1a2b3c", + "host_name" : "//localhost", + "process_id" : 1234, + "update_interval": 0, + "status_json" : '{"content" : "log_or_status_message"}', + } + + buf = serialise_x5f2(**original_entry) + + # Manually hack the id + buf = bytearray(buf) + buf[4:8] = b"1234" + + with pytest.raises(RuntimeError): + deserialise_x5f2(buf) From df7d77f5fd6325bd820e65db3f306e25909a38d9 Mon Sep 17 00:00:00 2001 From: michele-brambilla Date: Wed, 6 May 2020 15:10:30 +0200 Subject: [PATCH 097/363] add entry in readme, fix formatting --- README.md | 1 + streaming_data_types/fbschemas/status_x5f2/Status.py | 4 +++- streaming_data_types/status_x5f2.py | 2 +- 3 files changed, 5 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 5af82a1..6dae7f0 100644 --- a/README.md +++ b/README.md @@ -14,6 +14,7 @@ https://github.com/ess-dmsc/streaming-data-types |6s4t|Run stop|N| |f142|Log Data|Y| |ev42|Event Data|Y| +|x5f2|Status messages|Y| ### hs00 Schema for histogram data. It is one of the more complicated to use schemas. 
diff --git a/streaming_data_types/fbschemas/status_x5f2/Status.py b/streaming_data_types/fbschemas/status_x5f2/Status.py index 8984dbe..0cbc4c6 100644 --- a/streaming_data_types/fbschemas/status_x5f2/Status.py +++ b/streaming_data_types/fbschemas/status_x5f2/Status.py @@ -1,9 +1,10 @@ # automatically generated by the FlatBuffers compiler, do not modify -# namespace: +# namespace: import flatbuffers + class Status(object): __slots__ = ['_tab'] @@ -67,6 +68,7 @@ def StatusJson(self): return self._tab.String(o + self._tab.Pos) return None + def StatusStart(builder): builder.StartObject(7) def StatusAddSoftwareName(builder, softwareName): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(softwareName), 0) def StatusAddSoftwareVersion(builder, softwareVersion): builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(softwareVersion), 0) diff --git a/streaming_data_types/status_x5f2.py b/streaming_data_types/status_x5f2.py index 714c2d1..bd3f40e 100644 --- a/streaming_data_types/status_x5f2.py +++ b/streaming_data_types/status_x5f2.py @@ -2,7 +2,7 @@ import flatbuffers from streaming_data_types.utils import check_schema_identifier -from streaming_data_types.fbschemas.status_x5f2 import Status +from streaming_data_types.fbschemas.status_x5f2 import Status FILE_IDENTIFIER = b"x5f2" From 2be767a326e33b3e8a9562118c8f20820448388b Mon Sep 17 00:00:00 2001 From: michele-brambilla Date: Wed, 6 May 2020 16:06:04 +0200 Subject: [PATCH 098/363] add type hints --- streaming_data_types/status_x5f2.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/streaming_data_types/status_x5f2.py b/streaming_data_types/status_x5f2.py index bd3f40e..6e28e03 100644 --- a/streaming_data_types/status_x5f2.py +++ b/streaming_data_types/status_x5f2.py @@ -43,13 +43,13 @@ def deserialise_x5f2(buffer): def serialise_x5f2( - software_name, - software_version, - service_id, - host_name, - process_id, - 
update_interval, - status_json): + software_name: str, + software_version: str, + service_id: str, + host_name: str, + process_id: int, + update_interval: int, + status_json: str): """ Serialise status message as an x5f2 FlatBuffers message. From 0e802b66dd5db8ba6d1b3a6a4bdaf673a101b833 Mon Sep 17 00:00:00 2001 From: michele-brambilla Date: Wed, 6 May 2020 16:19:56 +0200 Subject: [PATCH 099/363] require flatbuffers>=1.12 --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 15438bd..9b4366a 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,2 +1,2 @@ -flatbuffers +flatbuffers>=1.12 numpy From 5159695a2e5ca23517739a69432545a17a827564 Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Thu, 7 May 2020 09:24:54 +0200 Subject: [PATCH 100/363] Update tests/test_x52f.py Co-authored-by: Matthew D Jones --- tests/test_x52f.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tests/test_x52f.py b/tests/test_x52f.py index 9dcfe54..f2c107e 100644 --- a/tests/test_x52f.py +++ b/tests/test_x52f.py @@ -5,9 +5,9 @@ class TestEncoder(object): def test_serialises_and_deserialises_x5f2_message_correctly(self): - # """ - # Round-trip to check what we serialise is what we get back. - # """ + """ + Round-trip to check what we serialise is what we get back. 
+ """ original_entry = { "software_name" : "nicos/test", From c0c94c6fbd95b78f9bcb3bab938f354dd5840991 Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Thu, 7 May 2020 09:25:09 +0200 Subject: [PATCH 101/363] Update tests/test_x52f.py Co-authored-by: Matthew D Jones --- tests/test_x52f.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_x52f.py b/tests/test_x52f.py index f2c107e..615e414 100644 --- a/tests/test_x52f.py +++ b/tests/test_x52f.py @@ -11,7 +11,7 @@ def test_serialises_and_deserialises_x5f2_message_correctly(self): original_entry = { "software_name" : "nicos/test", - "software_version" : "1.0", + "software_version" : "1.0.0", "service_id" : "1a2b3c", "host_name" : "//localhost", "process_id" : 1234, From 79ceb2fa3e31588c4b0a25d6f3462a6b3cd0ffc0 Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Thu, 7 May 2020 09:25:19 +0200 Subject: [PATCH 102/363] Update tests/test_x52f.py Co-authored-by: Matthew D Jones --- tests/test_x52f.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_x52f.py b/tests/test_x52f.py index 615e414..f310ffd 100644 --- a/tests/test_x52f.py +++ b/tests/test_x52f.py @@ -13,7 +13,7 @@ def test_serialises_and_deserialises_x5f2_message_correctly(self): "software_name" : "nicos/test", "software_version" : "1.0.0", "service_id" : "1a2b3c", - "host_name" : "//localhost", + "host_name" : "localhost", "process_id" : 1234, "update_interval": 0, "status_json" : '{"content" : "log_or_status_message"}', From 0ea9c4e03458afa8093cff4f804d4fae937f82a9 Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Thu, 7 May 2020 09:25:28 +0200 Subject: [PATCH 103/363] Update tests/test_x52f.py Co-authored-by: Matthew D Jones --- tests/test_x52f.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_x52f.py b/tests/test_x52f.py index f310ffd..85dcf17 100644 --- a/tests/test_x52f.py +++ b/tests/test_x52f.py @@ -33,7 +33,7 @@ def 
test_serialises_and_deserialises_x5f2_message_correctly(self): def test_if_buffer_has_wrong_id_then_throws(self): original_entry = { "software_name" : "nicos/test", - "software_version" : "1.0", + "software_version" : "1.0.0", "service_id" : "1a2b3c", "host_name" : "//localhost", "process_id" : 1234, From 278a6e0a3dbdfb3e0251d2b05d82908efc9acf84 Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Thu, 7 May 2020 09:25:35 +0200 Subject: [PATCH 104/363] Update tests/test_x52f.py Co-authored-by: Matthew D Jones --- tests/test_x52f.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_x52f.py b/tests/test_x52f.py index 85dcf17..58c7ac5 100644 --- a/tests/test_x52f.py +++ b/tests/test_x52f.py @@ -35,7 +35,7 @@ def test_if_buffer_has_wrong_id_then_throws(self): "software_name" : "nicos/test", "software_version" : "1.0.0", "service_id" : "1a2b3c", - "host_name" : "//localhost", + "host_name" : "localhost", "process_id" : 1234, "update_interval": 0, "status_json" : '{"content" : "log_or_status_message"}', From 7dea97dc4f9c98692c52e3db2a155f5fc505190b Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Thu, 7 May 2020 09:25:45 +0200 Subject: [PATCH 105/363] Update requirements.txt --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 9b4366a..932ca93 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,2 +1,2 @@ -flatbuffers>=1.12 +flatbuffers>=1.11 numpy From 680453e670827dc11d82bd8041d93943c94236b2 Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Thu, 7 May 2020 07:34:41 +0000 Subject: [PATCH 106/363] Clarify what verification means --- README.md | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index 5af82a1..5175bee 100644 --- a/README.md +++ b/README.md @@ -6,7 +6,7 @@ https://github.com/ess-dmsc/streaming-data-types ## FlatBuffer Schemas -|name|description|verifiable| +|name|description|verifiable*| |----|-----------|----------| 
|hs00|Histogram schema|Y| |ns10|NICOS cache entry schema|Y| @@ -15,6 +15,8 @@ https://github.com/ess-dmsc/streaming-data-types |f142|Log Data|Y| |ev42|Event Data|Y| +\* whether it passes verification via the C++ FlatBuffers library. + ### hs00 Schema for histogram data. It is one of the more complicated to use schemas. It takes a Python dictionary as its input; this dictionary needs to have correctly From 5908dfdb889cad1d3198ec61ff7797df5346d8e3 Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Thu, 7 May 2020 07:42:39 +0000 Subject: [PATCH 107/363] Clearer instructions for deploying to pypi --- README_DEV.md | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/README_DEV.md b/README_DEV.md index dd5a101..b77007f 100644 --- a/README_DEV.md +++ b/README_DEV.md @@ -25,7 +25,15 @@ From the top directory: tox ``` -### Building the package and deploying it +### Building the package locally and deploying it to PyPI +**First update the version number in setup.py.** + +Delete any old builds you may have: +``` +rm -rf build dist +``` + +Build it locally: ``` python setup.py sdist bdist_wheel ``` @@ -40,7 +48,7 @@ Push to test.pypi.org for testing: twine upload --repository-url https://test.pypi.org/legacy/ dist/* ``` -After testing installing from test.pypi.org: +After testing installing from test.pypi.org works, push to PyPI: ``` twine upload dist/* ``` From e38d06786f7189b6698a4c5a3f230b94e77d69a0 Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Thu, 7 May 2020 07:44:42 +0000 Subject: [PATCH 108/363] Update README_DEV.md --- README_DEV.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README_DEV.md b/README_DEV.md index b77007f..e388be3 100644 --- a/README_DEV.md +++ b/README_DEV.md @@ -26,7 +26,7 @@ tox ``` ### Building the package locally and deploying it to PyPI -**First update the version number in setup.py.** +**First update the version number in setup.py and push the update to the repository.** Delete any old builds you 
may have: ``` From d694eee2137b613a78ecaad6ca8cd225bd4c29ab Mon Sep 17 00:00:00 2001 From: Matthew D Jones Date: Thu, 7 May 2020 09:16:21 +0100 Subject: [PATCH 109/363] bump version to 0.6.0 --- requirements-dev.txt | 1 + setup.py | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/requirements-dev.txt b/requirements-dev.txt index 029ce24..52a8279 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -4,3 +4,4 @@ pre-commit pytest tox twine +wheel diff --git a/setup.py b/setup.py index f95ac36..0b55690 100644 --- a/setup.py +++ b/setup.py @@ -15,7 +15,7 @@ setup( name="ess_streaming_data_types", - version="0.5.0", + version="0.6.0", description=DESCRIPTION, long_description=LONG_DESCRIPTION, long_description_content_type="text/markdown", From bc372d76b619b83c34f884366c6e768a2d4d4081 Mon Sep 17 00:00:00 2001 From: Matthew D Jones Date: Thu, 7 May 2020 09:40:17 +0100 Subject: [PATCH 110/363] Update README_DEV.md --- README_DEV.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/README_DEV.md b/README_DEV.md index e388be3..5a9d9a1 100644 --- a/README_DEV.md +++ b/README_DEV.md @@ -47,6 +47,10 @@ Push to test.pypi.org for testing: ``` twine upload --repository-url https://test.pypi.org/legacy/ dist/* ``` +The wheel should be downloaded, but you unfortunately flatbuffers is not on TestPyPi so the following error is expected: +``` +ERROR: Could not find a version that satisfies the requirement flatbuffers +``` After testing installing from test.pypi.org works, push to PyPI: ``` From a80ecf50c21c2e454f289f6d127e09a0e5e7609f Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Thu, 7 May 2020 08:55:17 +0000 Subject: [PATCH 111/363] Update README_DEV.md --- README_DEV.md | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/README_DEV.md b/README_DEV.md index 5a9d9a1..715f6c9 100644 --- a/README_DEV.md +++ b/README_DEV.md @@ -47,12 +47,19 @@ Push to test.pypi.org for testing: ``` twine upload --repository-url 
https://test.pypi.org/legacy/ dist/* ``` -The wheel should be downloaded, but you unfortunately flatbuffers is not on TestPyPi so the following error is expected: + +The module can then be installed from test.pypi.org like so: +``` +pip install -i https://test.pypi.org/simple/ ess-streaming-data-types +``` +Unfortunately, flatbuffers is not on test.pypi.org so the following error may occur: ``` ERROR: Could not find a version that satisfies the requirement flatbuffers ``` +The workaround is install flatbuffers manually first using `pip install flatbuffers` and then rerun the previous command. After testing installing from test.pypi.org works, push to PyPI: ``` twine upload dist/* ``` +Finally, create a tag on the GitHub repository with the appropriate name, e.g. `v0.7.0`. From b457496565187790db19ee396fededacd72f43e4 Mon Sep 17 00:00:00 2001 From: Matthew D Jones Date: Sun, 10 May 2020 10:05:22 +0100 Subject: [PATCH 112/363] Add generated module for tdct --- .../fbschemas/timestamps_tdct/__init__.py | 0 .../fbschemas/timestamps_tdct/timestamp.py | 89 +++++++++++++++++++ 2 files changed, 89 insertions(+) create mode 100644 streaming_data_types/fbschemas/timestamps_tdct/__init__.py create mode 100644 streaming_data_types/fbschemas/timestamps_tdct/timestamp.py diff --git a/streaming_data_types/fbschemas/timestamps_tdct/__init__.py b/streaming_data_types/fbschemas/timestamps_tdct/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/streaming_data_types/fbschemas/timestamps_tdct/timestamp.py b/streaming_data_types/fbschemas/timestamps_tdct/timestamp.py new file mode 100644 index 0000000..908a5bb --- /dev/null +++ b/streaming_data_types/fbschemas/timestamps_tdct/timestamp.py @@ -0,0 +1,89 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers + + +class timestamp(object): + __slots__ = ["_tab"] + + @classmethod + def GetRootAstimestamp(cls, buf, offset): + n = 
flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = timestamp() + x.Init(buf, n + offset) + return x + + # timestamp + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # timestamp + def Name(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + # timestamp + def Timestamps(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get( + flatbuffers.number_types.Uint64Flags, + a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8), + ) + return 0 + + # timestamp + def TimestampsAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint64Flags, o) + return 0 + + # timestamp + def TimestampsLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # timestamp + def SequenceCounter(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return self._tab.Get( + flatbuffers.number_types.Uint64Flags, o + self._tab.Pos + ) + return 0 + + +def timestampStart(builder): + builder.StartObject(3) + + +def timestampAddName(builder, name): + builder.PrependUOffsetTRelativeSlot( + 0, flatbuffers.number_types.UOffsetTFlags.py_type(name), 0 + ) + + +def timestampAddTimestamps(builder, timestamps): + builder.PrependUOffsetTRelativeSlot( + 1, flatbuffers.number_types.UOffsetTFlags.py_type(timestamps), 0 + ) + + +def timestampStartTimestampsVector(builder, numElems): + return builder.StartVector(8, numElems, 8) + + +def timestampAddSequenceCounter(builder, sequenceCounter): + builder.PrependUint64Slot(2, sequenceCounter, 0) + + +def timestampEnd(builder): + return builder.EndObject() From 
cbf634da0268e2f1212015cc2e205847d536b324 Mon Sep 17 00:00:00 2001 From: Matthew D Jones Date: Sun, 10 May 2020 10:41:12 +0100 Subject: [PATCH 113/363] Add tests for tdct --- tests/test_tdct.py | 48 ++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 48 insertions(+) create mode 100644 tests/test_tdct.py diff --git a/tests/test_tdct.py b/tests/test_tdct.py new file mode 100644 index 0000000..941c584 --- /dev/null +++ b/tests/test_tdct.py @@ -0,0 +1,48 @@ +import pytest +import numpy as np +from streaming_data_types.timestamps_tdct import serialise_tdct, deserialise_tdct + + +class TestSerialisationTdct: + original_entry = { + "name": "some_name", + "timestamps": [0, 1, 2, 3, 4], + "sequence_counter": 42, + } + + def test_serialises_and_deserialises_tdct_message_with_list_of_timestamps(self): + buf = serialise_tdct(**self.original_entry) + deserialised_tuple = deserialise_tdct(buf) + + assert deserialised_tuple.name == self.original_entry["name"] + assert np.allclose( + deserialised_tuple.timestamps, np.array(self.original_entry["timestamps"]) + ) + assert ( + deserialised_tuple.sequence_counter + == self.original_entry["sequence_counter"] + ) + + def test_serialises_and_deserialises_tdct_message_with_array_of_timestamps(self): + original_entry = { + "name": "some_name", + "timestamps": np.array([0, 1, 2, 3, 4]), + } + + buf = serialise_tdct(**self.original_entry) + deserialised_tuple = deserialise_tdct(buf) + + assert deserialised_tuple.name == original_entry["name"] + assert np.allclose( + deserialised_tuple.timestamps, self.original_entry["timestamps"] + ) + + def test_if_buffer_has_wrong_id_then_throws(self): + buf = serialise_tdct(**self.original_entry) + + # Manually hack the id + buf = bytearray(buf) + buf[4:8] = b"1234" + + with pytest.raises(RuntimeError): + deserialise_tdct(buf) From 7f4df3cf4dca178ef83ebd448785ad691395006c Mon Sep 17 00:00:00 2001 From: Matthew D Jones Date: Sun, 10 May 2020 10:41:50 +0100 Subject: [PATCH 114/363] Add 
serialisation and deserialisation functions for tdct --- streaming_data_types/timestamps_tdct.py | 58 +++++++++++++++++++++++++ 1 file changed, 58 insertions(+) create mode 100644 streaming_data_types/timestamps_tdct.py diff --git a/streaming_data_types/timestamps_tdct.py b/streaming_data_types/timestamps_tdct.py new file mode 100644 index 0000000..ee64587 --- /dev/null +++ b/streaming_data_types/timestamps_tdct.py @@ -0,0 +1,58 @@ +from streaming_data_types.fbschemas.timestamps_tdct.timestamp import ( + timestamp, + timestampStart, + timestampAddName, + timestampAddTimestamps, + timestampAddSequenceCounter, + timestampStartTimestampsVector, + timestampEnd, +) +import flatbuffers +import numpy as np +from collections import namedtuple +from typing import Optional, NamedTuple, Union, List +from streaming_data_types.utils import check_schema_identifier + +FILE_IDENTIFIER = b"tdct" + + +def serialise_tdct( + name: str, + timestamps: Union[np.ndarray, List], + sequence_counter: Optional[int] = None, +) -> bytes: + builder = flatbuffers.Builder(136) + + timestamps = np.array(timestamps).astype(np.uint64) + + name_offset = builder.CreateString(name) + + timestampStartTimestampsVector(builder, len(timestamps)) + for single_value in reversed(timestamps): + builder.PrependUint64(single_value) + array_offset = builder.EndVector(len(timestamps)) + + timestampStart(builder) + timestampAddName(builder, name_offset) + timestampAddTimestamps(builder, array_offset) + if sequence_counter is not None: + timestampAddSequenceCounter(builder, sequence_counter) + timestamps_message = timestampEnd(builder) + builder.Finish(timestamps_message) + + # Generate the output and replace the file_identifier + buffer = builder.Output() + buffer[4:8] = FILE_IDENTIFIER + return bytes(buffer) + + +def deserialise_tdct(buffer: Union[bytearray, bytes]) -> NamedTuple: + check_schema_identifier(buffer, FILE_IDENTIFIER) + + timestamps = timestamp.GetRootAstimestamp(buffer, 0) + name = timestamps.Name() 
if timestamps.Name() else b"" + + timestamps_array = timestamps.TimestampsAsNumpy() + + Timestamps = namedtuple("Timestamps", ("name", "timestamps", "sequence_counter",),) + return Timestamps(name.decode(), timestamps_array, timestamps.SequenceCounter(),) From f07d20825fa1238840cfcd450f9ff33a349d86f9 Mon Sep 17 00:00:00 2001 From: Matthew D Jones Date: Sun, 10 May 2020 11:11:43 +0100 Subject: [PATCH 115/363] Add tdct to table in readme --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index 02bea06..05ee54b 100644 --- a/README.md +++ b/README.md @@ -15,6 +15,7 @@ https://github.com/ess-dmsc/streaming-data-types |f142|Log Data|Y| |ev42|Event Data|Y| |x5f2|Status messages|Y| +|tdct|Timestamps|Y| \* whether it passes verification via the C++ FlatBuffers library. From 6876f183b49523ef1259778b7b94828c0391a4bf Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Mon, 11 May 2020 09:41:26 +0000 Subject: [PATCH 116/363] Status messages are not verifiable --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 05ee54b..7ca8ef6 100644 --- a/README.md +++ b/README.md @@ -14,7 +14,7 @@ https://github.com/ess-dmsc/streaming-data-types |6s4t|Run stop|N| |f142|Log Data|Y| |ev42|Event Data|Y| -|x5f2|Status messages|Y| +|x5f2|Status messages|N| |tdct|Timestamps|Y| \* whether it passes verification via the C++ FlatBuffers library. 
From 5de900be15c64839e6a264e9263832ac4451f648 Mon Sep 17 00:00:00 2001 From: Matthew D Jones Date: Sat, 23 May 2020 09:24:24 +0100 Subject: [PATCH 117/363] Bump version to 0.7.0 --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 0b55690..6ff7d26 100644 --- a/setup.py +++ b/setup.py @@ -15,7 +15,7 @@ setup( name="ess_streaming_data_types", - version="0.6.0", + version="0.7.0", description=DESCRIPTION, long_description=LONG_DESCRIPTION, long_description_content_type="text/markdown", From e01c6eb545af244d6e7d1531f95ba800b61afa0f Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Fri, 29 May 2020 11:31:33 +0200 Subject: [PATCH 118/363] Added way for clients to know which schema are supported. --- README_DEV.md | 4 ++++ setup.py | 2 +- streaming_data_types/__init__.py | 29 +++++++++++++++++++++++++++++ 3 files changed, 34 insertions(+), 1 deletion(-) diff --git a/README_DEV.md b/README_DEV.md index 715f6c9..cffe2b2 100644 --- a/README_DEV.md +++ b/README_DEV.md @@ -17,6 +17,10 @@ pre-commit run --all-files ``` This command can also be used to run the hooks manually. +### Adding new schemas checklist (important) +* Add unit-tests (see existing tests for an example) +* Update fbschemas.__init__ to include the new serialiser and deserialiser + ### Tox Tox allows the unit tests to be run against multiple versions of Python. See the tox.ini file for which versions are supported. 
diff --git a/setup.py b/setup.py index 6ff7d26..70bc14c 100644 --- a/setup.py +++ b/setup.py @@ -15,7 +15,7 @@ setup( name="ess_streaming_data_types", - version="0.7.0", + version="0.7.1", description=DESCRIPTION, long_description=LONG_DESCRIPTION, long_description_content_type="text/markdown", diff --git a/streaming_data_types/__init__.py b/streaming_data_types/__init__.py index e69de29..d1823c6 100644 --- a/streaming_data_types/__init__.py +++ b/streaming_data_types/__init__.py @@ -0,0 +1,29 @@ +from streaming_data_types.eventdata_ev42 import deserialise_ev42, serialise_ev42 +from streaming_data_types.histogram_hs00 import deserialise_hs00, serialise_hs00 +from streaming_data_types.logdata_f142 import deserialise_f142, serialise_f142 +from streaming_data_types.nicos_cache_ns10 import deserialise_ns10, serialise_ns10 +from streaming_data_types.run_start_pl72 import deserialise_pl72, serialise_pl72 +from streaming_data_types.run_stop_6s4t import deserialise_6s4t, serialise_6s4t +from streaming_data_types.status_x5f2 import deserialise_x5f2, serialise_x5f2 + + +SERIALISERS = { + "ev42": serialise_ev42, + "hs00": serialise_hs00, + "f142": serialise_f142, + "ns10": serialise_ns10, + "pl72": serialise_pl72, + "6s4t": serialise_6s4t, + "x5f2": serialise_x5f2, +} + + +DESERIALISERS = { + "ev42": deserialise_ev42, + "hs00": deserialise_hs00, + "f142": deserialise_f142, + "ns10": deserialise_ns10, + "pl72": deserialise_pl72, + "6s4t": deserialise_6s4t, + "x5f2": deserialise_x5f2, +} From 1d0fc70f9f474551e403ed792a7fc5c16498365f Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Fri, 29 May 2020 11:43:19 +0200 Subject: [PATCH 119/363] Was this a mistake? 
--- tox.ini | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tox.ini b/tox.ini index 8ea8384..d9e980b 100644 --- a/tox.ini +++ b/tox.ini @@ -13,4 +13,4 @@ commands = [testenv:flake8] commands = - python -m flake8 tests src + python -m flake8 tests streaming-data-types From 1109424ac97e82fab843bd26f37ade0d2bfb58a4 Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Fri, 29 May 2020 12:08:07 +0200 Subject: [PATCH 120/363] Oops! --- tox.ini | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tox.ini b/tox.ini index d9e980b..ea5302e 100644 --- a/tox.ini +++ b/tox.ini @@ -13,4 +13,4 @@ commands = [testenv:flake8] commands = - python -m flake8 tests streaming-data-types + python -m flake8 tests streaming_data_types From d2e3e7674749f378c9a27f745f07b6cbd6dc3863 Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Tue, 2 Jun 2020 09:43:25 +0200 Subject: [PATCH 121/363] Missing black from reqs --- requirements-dev.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/requirements-dev.txt b/requirements-dev.txt index 52a8279..fdda646 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -1,4 +1,5 @@ -r requirements.txt +black flake8 pre-commit pytest From 7ae15c9e3746d29c7b99b16fc712b22e9cad4524 Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Tue, 2 Jun 2020 09:53:39 +0200 Subject: [PATCH 122/363] Updated instructions for deployment --- README_DEV.md | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/README_DEV.md b/README_DEV.md index cffe2b2..75ee597 100644 --- a/README_DEV.md +++ b/README_DEV.md @@ -32,6 +32,14 @@ tox ### Building the package locally and deploying it to PyPI **First update the version number in setup.py and push the update to the repository.** +#### Requirements +* A [PyPi](https://pypi.org/) account +* A [TestPyPi](https://test.pypi.org/) account (this is separate to the PyPi account) +* Permission to push to the ess-streaming-data-types project on TestPyPi and PyPi +* Installed all requirements in 
`requirements-dev.txt` + +#### Steps + Delete any old builds you may have: ``` rm -rf build dist From b26e8c9dc30a9d8ee7d48d9255bfbcba8d9c3c0d Mon Sep 17 00:00:00 2001 From: Matthew D Jones Date: Mon, 22 Jun 2020 08:03:02 +0100 Subject: [PATCH 123/363] Serialise to bytes not bytearray --- streaming_data_types/status_x5f2.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/streaming_data_types/status_x5f2.py b/streaming_data_types/status_x5f2.py index 6e28e03..05c2ac2 100644 --- a/streaming_data_types/status_x5f2.py +++ b/streaming_data_types/status_x5f2.py @@ -49,7 +49,7 @@ def serialise_x5f2( host_name: str, process_id: int, update_interval: int, - status_json: str): + status_json: str) -> bytes: """ Serialise status message as an x5f2 FlatBuffers message. @@ -88,4 +88,4 @@ def serialise_x5f2( # Generate the output and replace the file_identifier buffer = builder.Output() buffer[4:8] = FILE_IDENTIFIER - return buffer + return bytes(buffer) From 913173b7361e365b3392d9f7968ee49676ec2435 Mon Sep 17 00:00:00 2001 From: Matthew D Jones Date: Mon, 22 Jun 2020 10:48:54 +0100 Subject: [PATCH 124/363] Move namedtuples to module scope so autocomplete works --- streaming_data_types/logdata_f142.py | 14 +++++----- streaming_data_types/run_start_pl72.py | 34 +++++++++++++------------ streaming_data_types/run_stop_6s4t.py | 12 +++++---- streaming_data_types/timestamps_tdct.py | 8 +++--- 4 files changed, 38 insertions(+), 30 deletions(-) diff --git a/streaming_data_types/logdata_f142.py b/streaming_data_types/logdata_f142.py index a8c2121..1adbf59 100644 --- a/streaming_data_types/logdata_f142.py +++ b/streaming_data_types/logdata_f142.py @@ -146,7 +146,7 @@ ) from streaming_data_types.utils import check_schema_identifier import numpy as np -from typing import Any, Tuple, NamedTuple, Callable, Dict, Union +from typing import Any, Tuple, Callable, Dict, Union from collections import namedtuple @@ -570,6 +570,12 @@ def _serialise_value( } +LogDataInfo = 
namedtuple( + "LogDataInfo", + ("value", "source_name", "timestamp_unix_ns", "alarm_status", "alarm_severity"), +) + + def _decode_if_scalar_string(value: np.ndarray) -> Union[str, np.ndarray]: if value.ndim == 0 and ( np.issubdtype(value.dtype, np.unicode_) @@ -579,7 +585,7 @@ def _decode_if_scalar_string(value: np.ndarray) -> Union[str, np.ndarray]: return value -def deserialise_f142(buffer: Union[bytearray, bytes]) -> NamedTuple: +def deserialise_f142(buffer: Union[bytearray, bytes]) -> LogDataInfo: check_schema_identifier(buffer, FILE_IDENTIFIER) log_data = LogData.LogData.GetRootAsLogData(buffer, 0) @@ -605,10 +611,6 @@ def deserialise_f142(buffer: Union[bytearray, bytes]) -> NamedTuple: timestamp = log_data.Timestamp() - LogDataInfo = namedtuple( - "LogDataInfo", - ("value", "source_name", "timestamp_unix_ns", "alarm_status", "alarm_severity"), - ) return LogDataInfo( value, source_name.decode(), timestamp, log_data.Status(), log_data.Severity() ) diff --git a/streaming_data_types/run_start_pl72.py b/streaming_data_types/run_start_pl72.py index b2c739d..30a3490 100644 --- a/streaming_data_types/run_start_pl72.py +++ b/streaming_data_types/run_start_pl72.py @@ -1,5 +1,5 @@ import time -from typing import Optional, NamedTuple, Union +from typing import Optional, Union import flatbuffers from streaming_data_types.fbschemas.run_start_pl72 import RunStart from streaming_data_types.utils import check_schema_identifier @@ -58,7 +58,23 @@ def serialise_pl72( return bytes(buffer) -def deserialise_pl72(buffer: Union[bytearray, bytes]) -> NamedTuple: +RunStartInfo = namedtuple( + "RunStartInfo", + ( + "job_id", + "filename", + "start_time", + "stop_time", + "run_name", + "nexus_structure", + "service_id", + "instrument_name", + "broker", + ), +) + + +def deserialise_pl72(buffer: Union[bytearray, bytes]) -> RunStartInfo: check_schema_identifier(buffer, FILE_IDENTIFIER) run_start = RunStart.RunStart.GetRootAsRunStart(buffer, 0) @@ -70,20 +86,6 @@ def 
deserialise_pl72(buffer: Union[bytearray, bytes]) -> NamedTuple: instrument_name = run_start.InstrumentName() if run_start.InstrumentName() else b"" run_name = run_start.RunName() if run_start.RunName() else b"" - RunStartInfo = namedtuple( - "RunStartInfo", - ( - "job_id", - "filename", - "start_time", - "stop_time", - "run_name", - "nexus_structure", - "service_id", - "instrument_name", - "broker", - ), - ) return RunStartInfo( job_id.decode(), filename.decode(), diff --git a/streaming_data_types/run_stop_6s4t.py b/streaming_data_types/run_stop_6s4t.py index be8a7f2..f37292e 100644 --- a/streaming_data_types/run_stop_6s4t.py +++ b/streaming_data_types/run_stop_6s4t.py @@ -1,4 +1,4 @@ -from typing import Optional, NamedTuple, Union +from typing import Optional, Union import flatbuffers from streaming_data_types.fbschemas.run_stop_6s4t import RunStop from streaming_data_types.utils import check_schema_identifier @@ -40,7 +40,12 @@ def serialise_6s4t( return bytes(buffer) -def deserialise_6s4t(buffer: Union[bytearray, bytes]) -> NamedTuple: +RunStopInfo = namedtuple( + "RunStopInfo", ("stop_time", "run_name", "job_id", "service_id") +) + + +def deserialise_6s4t(buffer: Union[bytearray, bytes]) -> RunStopInfo: check_schema_identifier(buffer, FILE_IDENTIFIER) run_stop = RunStop.RunStop.GetRootAsRunStop(buffer, 0) @@ -49,9 +54,6 @@ def deserialise_6s4t(buffer: Union[bytearray, bytes]) -> NamedTuple: run_name = run_stop.RunName() if run_stop.RunName() else b"" stop_time = run_stop.StopTime() - RunStopInfo = namedtuple( - "RunStopInfo", ("stop_time", "run_name", "job_id", "service_id") - ) return RunStopInfo( stop_time, run_name.decode(), job_id.decode(), service_id.decode() ) diff --git a/streaming_data_types/timestamps_tdct.py b/streaming_data_types/timestamps_tdct.py index ee64587..93ad450 100644 --- a/streaming_data_types/timestamps_tdct.py +++ b/streaming_data_types/timestamps_tdct.py @@ -10,7 +10,7 @@ import flatbuffers import numpy as np from collections import 
namedtuple -from typing import Optional, NamedTuple, Union, List +from typing import Optional, Union, List from streaming_data_types.utils import check_schema_identifier FILE_IDENTIFIER = b"tdct" @@ -46,7 +46,10 @@ def serialise_tdct( return bytes(buffer) -def deserialise_tdct(buffer: Union[bytearray, bytes]) -> NamedTuple: +Timestamps = namedtuple("Timestamps", ("name", "timestamps", "sequence_counter",),) + + +def deserialise_tdct(buffer: Union[bytearray, bytes]) -> Timestamps: check_schema_identifier(buffer, FILE_IDENTIFIER) timestamps = timestamp.GetRootAstimestamp(buffer, 0) @@ -54,5 +57,4 @@ def deserialise_tdct(buffer: Union[bytearray, bytes]) -> NamedTuple: timestamps_array = timestamps.TimestampsAsNumpy() - Timestamps = namedtuple("Timestamps", ("name", "timestamps", "sequence_counter",),) return Timestamps(name.decode(), timestamps_array, timestamps.SequenceCounter(),) From 7bdbe255fc913f8e08596ada0aba417bc348b2d8 Mon Sep 17 00:00:00 2001 From: Matthew D Jones Date: Wed, 1 Jul 2020 17:28:14 +0100 Subject: [PATCH 125/363] Add ep00 schema, serialisation/deserialisation fuctions and tests --- .../epics_connection_info_ep00.py | 63 +++++++++++++++ .../EpicsConnectionInfo.py | 80 +++++++++++++++++++ .../epics_connection_info_ep00/EventType.py | 11 +++ .../epics_connection_info_ep00/__init__.py | 0 tests/test_ep00.py | 34 ++++++++ 5 files changed, 188 insertions(+) create mode 100644 streaming_data_types/epics_connection_info_ep00.py create mode 100644 streaming_data_types/fbschemas/epics_connection_info_ep00/EpicsConnectionInfo.py create mode 100644 streaming_data_types/fbschemas/epics_connection_info_ep00/EventType.py create mode 100644 streaming_data_types/fbschemas/epics_connection_info_ep00/__init__.py create mode 100644 tests/test_ep00.py diff --git a/streaming_data_types/epics_connection_info_ep00.py b/streaming_data_types/epics_connection_info_ep00.py new file mode 100644 index 0000000..7ec350b --- /dev/null +++ 
b/streaming_data_types/epics_connection_info_ep00.py @@ -0,0 +1,63 @@ +from typing import Union, Optional +import flatbuffers +from streaming_data_types.fbschemas.epics_connection_info_ep00 import ( + EpicsConnectionInfo, + EventType, +) +from streaming_data_types.utils import check_schema_identifier +from collections import namedtuple + +FILE_IDENTIFIER = b"ep00" + + +def serialise_ep00( + timestamp_ns: int, + event_type: EventType, + source_name: str, + service_id: Optional[str] = None, +) -> bytes: + builder = flatbuffers.Builder(136) + + if service_id is not None: + service_id_offset = builder.CreateString(service_id) + source_name_offset = builder.CreateString(source_name) + + EpicsConnectionInfo.EpicsConnectionInfoStart(builder) + if service_id is not None: + EpicsConnectionInfo.EpicsConnectionInfoAddServiceId(builder, service_id_offset) + EpicsConnectionInfo.EpicsConnectionInfoAddSourceName(builder, source_name_offset) + EpicsConnectionInfo.EpicsConnectionInfoAddType(builder, event_type) + EpicsConnectionInfo.EpicsConnectionInfoAddTimestamp(builder, timestamp_ns) + + end = EpicsConnectionInfo.EpicsConnectionInfoEnd(builder) + builder.Finish(end) + + # Generate the output and replace the file_identifier + buffer = builder.Output() + buffer[4:8] = FILE_IDENTIFIER + return bytes(buffer) + + +EpicsConnection = namedtuple( + "EpicsConnection", ("timestamp", "type", "source_name", "service_id",), +) + + +def deserialise_ep00(buffer: Union[bytearray, bytes]) -> EpicsConnection: + check_schema_identifier(buffer, FILE_IDENTIFIER) + + epics_connection = EpicsConnectionInfo.EpicsConnectionInfo.GetRootAsEpicsConnectionInfo( + buffer, 0 + ) + + source_name = ( + epics_connection.SourceName() if epics_connection.SourceName() else b"" + ) + service_id = epics_connection.ServiceId() if epics_connection.ServiceId() else b"" + + return EpicsConnection( + epics_connection.Timestamp(), + epics_connection.Type(), + source_name.decode(), + service_id.decode(), + ) diff --git 
a/streaming_data_types/fbschemas/epics_connection_info_ep00/EpicsConnectionInfo.py b/streaming_data_types/fbschemas/epics_connection_info_ep00/EpicsConnectionInfo.py new file mode 100644 index 0000000..f2f1daa --- /dev/null +++ b/streaming_data_types/fbschemas/epics_connection_info_ep00/EpicsConnectionInfo.py @@ -0,0 +1,80 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers + + +class EpicsConnectionInfo(object): + __slots__ = ["_tab"] + + @classmethod + def GetRootAsEpicsConnectionInfo(cls, buf, offset): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = EpicsConnectionInfo() + x.Init(buf, n + offset) + return x + + # EpicsConnectionInfo + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # EpicsConnectionInfo + def Timestamp(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.Get( + flatbuffers.number_types.Uint64Flags, o + self._tab.Pos + ) + return 0 + + # EpicsConnectionInfo + def Type(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.Get( + flatbuffers.number_types.Uint16Flags, o + self._tab.Pos + ) + return 0 + + # EpicsConnectionInfo + def SourceName(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + # EpicsConnectionInfo + def ServiceId(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + +def EpicsConnectionInfoStart(builder): + builder.StartObject(4) + + +def EpicsConnectionInfoAddTimestamp(builder, timestamp): + builder.PrependUint64Slot(0, timestamp, 0) + + +def EpicsConnectionInfoAddType(builder, type): + builder.PrependUint16Slot(1, type, 0) + + +def EpicsConnectionInfoAddSourceName(builder, sourceName): + 
builder.PrependUOffsetTRelativeSlot( + 2, flatbuffers.number_types.UOffsetTFlags.py_type(sourceName), 0 + ) + + +def EpicsConnectionInfoAddServiceId(builder, serviceId): + builder.PrependUOffsetTRelativeSlot( + 3, flatbuffers.number_types.UOffsetTFlags.py_type(serviceId), 0 + ) + + +def EpicsConnectionInfoEnd(builder): + return builder.EndObject() diff --git a/streaming_data_types/fbschemas/epics_connection_info_ep00/EventType.py b/streaming_data_types/fbschemas/epics_connection_info_ep00/EventType.py new file mode 100644 index 0000000..7a25f0f --- /dev/null +++ b/streaming_data_types/fbschemas/epics_connection_info_ep00/EventType.py @@ -0,0 +1,11 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + + +class EventType(object): + UNKNOWN = 0 + NEVER_CONNECTED = 1 + CONNECTED = 2 + DISCONNECTED = 3 + DESTROYED = 4 diff --git a/streaming_data_types/fbschemas/epics_connection_info_ep00/__init__.py b/streaming_data_types/fbschemas/epics_connection_info_ep00/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/test_ep00.py b/tests/test_ep00.py new file mode 100644 index 0000000..12caad7 --- /dev/null +++ b/tests/test_ep00.py @@ -0,0 +1,34 @@ +import pytest +from streaming_data_types.fbschemas.epics_connection_info_ep00 import EventType +from streaming_data_types.epics_connection_info_ep00 import ( + serialise_ep00, + deserialise_ep00, +) + + +class TestSerialisation6s4t: + original_entry = { + "timestamp_ns": 1593620746000000000, + "event_type": EventType.EventType.DISCONNECTED, + "source_name": "test_source", + "service_id": "test_service", + } + + def test_serialises_and_deserialises_ep00_message_correctly(self): + buf = serialise_ep00(**self.original_entry) + deserialised_tuple = deserialise_ep00(buf) + + assert deserialised_tuple.timestamp == self.original_entry["timestamp_ns"] + assert deserialised_tuple.type == self.original_entry["event_type"] + assert deserialised_tuple.source_name == 
self.original_entry["source_name"] + assert deserialised_tuple.service_id == self.original_entry["service_id"] + + def test_if_buffer_has_wrong_id_then_throws(self): + buf = serialise_ep00(**self.original_entry) + + # Manually hack the id + buf = bytearray(buf) + buf[4:8] = b"1234" + + with pytest.raises(RuntimeError): + deserialise_ep00(buf) From fa728e0cb9ae4c1b186561741b361e4e812ae5fd Mon Sep 17 00:00:00 2001 From: Matthew D Jones Date: Wed, 1 Jul 2020 17:35:32 +0100 Subject: [PATCH 126/363] Add ep00 to README --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index 7ca8ef6..abac6d9 100644 --- a/README.md +++ b/README.md @@ -16,6 +16,7 @@ https://github.com/ess-dmsc/streaming-data-types |ev42|Event Data|Y| |x5f2|Status messages|N| |tdct|Timestamps|Y| +|ep00|EPICS Connection Info|Y| \* whether it passes verification via the C++ FlatBuffers library. From 27415b96435a1e1da7cc1528bce7ae1a1ceb4057 Mon Sep 17 00:00:00 2001 From: Matthew D Jones Date: Wed, 1 Jul 2020 17:38:14 +0100 Subject: [PATCH 127/363] Correct test class name --- tests/test_ep00.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_ep00.py b/tests/test_ep00.py index 12caad7..d482129 100644 --- a/tests/test_ep00.py +++ b/tests/test_ep00.py @@ -6,7 +6,7 @@ ) -class TestSerialisation6s4t: +class TestSerialisationEp00: original_entry = { "timestamp_ns": 1593620746000000000, "event_type": EventType.EventType.DISCONNECTED, From 72f9fc12b040e39fa6ed7aaa6e274a58bc228b82 Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Thu, 2 Jul 2020 07:47:40 +0200 Subject: [PATCH 128/363] All serialisation functions should return bytes. 
--- streaming_data_types/eventdata_ev42.py | 2 +- streaming_data_types/logdata_f142.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/streaming_data_types/eventdata_ev42.py b/streaming_data_types/eventdata_ev42.py index efde381..f503763 100644 --- a/streaming_data_types/eventdata_ev42.py +++ b/streaming_data_types/eventdata_ev42.py @@ -113,4 +113,4 @@ def serialise_ev42( # Generate the output and replace the file_identifier buffer = builder.Output() buffer[4:8] = FILE_IDENTIFIER - return buffer + return bytes(buffer) diff --git a/streaming_data_types/logdata_f142.py b/streaming_data_types/logdata_f142.py index 1adbf59..0aeb2fd 100644 --- a/streaming_data_types/logdata_f142.py +++ b/streaming_data_types/logdata_f142.py @@ -158,7 +158,7 @@ def _complete_buffer( timestamp_unix_ns: int, alarm_status: Union[int, None] = None, alarm_severity: Union[int, None] = None, -) -> bytearray: +) -> bytes: LogData.LogDataAddTimestamp(builder, timestamp_unix_ns) if alarm_status is not None: @@ -171,7 +171,7 @@ def _complete_buffer( builder.Finish(log_msg) buff = builder.Output() buff[4:8] = FILE_IDENTIFIER - return buff + return bytes(buff) def _setup_builder(source_name: str) -> Tuple[flatbuffers.Builder, int]: From 2d32e9c3b70acc079a1c2052fba6802270543a46 Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Thu, 2 Jul 2020 08:04:37 +0200 Subject: [PATCH 129/363] Bumped version number --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 70bc14c..82ba41d 100644 --- a/setup.py +++ b/setup.py @@ -15,7 +15,7 @@ setup( name="ess_streaming_data_types", - version="0.7.1", + version="0.8.0", description=DESCRIPTION, long_description=LONG_DESCRIPTION, long_description_content_type="text/markdown", From 377c6f21c7d2024f199289f1c1607e99ecca61c7 Mon Sep 17 00:00:00 2001 From: Matthew D Jones Date: Thu, 2 Jul 2020 07:09:46 +0100 Subject: [PATCH 130/363] Revert early cast to bytes in f142 serialisation --- 
streaming_data_types/logdata_f142.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/streaming_data_types/logdata_f142.py b/streaming_data_types/logdata_f142.py index 0aeb2fd..1adbf59 100644 --- a/streaming_data_types/logdata_f142.py +++ b/streaming_data_types/logdata_f142.py @@ -158,7 +158,7 @@ def _complete_buffer( timestamp_unix_ns: int, alarm_status: Union[int, None] = None, alarm_severity: Union[int, None] = None, -) -> bytes: +) -> bytearray: LogData.LogDataAddTimestamp(builder, timestamp_unix_ns) if alarm_status is not None: @@ -171,7 +171,7 @@ def _complete_buffer( builder.Finish(log_msg) buff = builder.Output() buff[4:8] = FILE_IDENTIFIER - return bytes(buff) + return buff def _setup_builder(source_name: str) -> Tuple[flatbuffers.Builder, int]: From 05d7265ced5d3d8af61c0cd1b6575290b13eb47b Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Thu, 2 Jul 2020 08:44:39 +0200 Subject: [PATCH 131/363] Added tdct and ep00 to global serialisers list. --- streaming_data_types/__init__.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/streaming_data_types/__init__.py b/streaming_data_types/__init__.py index d1823c6..5fb3007 100644 --- a/streaming_data_types/__init__.py +++ b/streaming_data_types/__init__.py @@ -5,6 +5,11 @@ from streaming_data_types.run_start_pl72 import deserialise_pl72, serialise_pl72 from streaming_data_types.run_stop_6s4t import deserialise_6s4t, serialise_6s4t from streaming_data_types.status_x5f2 import deserialise_x5f2, serialise_x5f2 +from streaming_data_types.epics_connection_info_ep00 import ( + deserialise_ep00, + serialise_ep00, +) +from streaming_data_types.timestamps_tdct import deserialise_tdct, serialise_tdct SERIALISERS = { @@ -15,6 +20,8 @@ "pl72": serialise_pl72, "6s4t": serialise_6s4t, "x5f2": serialise_x5f2, + "ep00": serialise_ep00, + "tdct": serialise_tdct, } @@ -26,4 +33,6 @@ "pl72": deserialise_pl72, "6s4t": deserialise_6s4t, "x5f2": deserialise_x5f2, + "ep00": deserialise_ep00, + "tdct": 
deserialise_tdct, } From c95700650fe1a05317c0ce76b9aa2614a9d76d11 Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Thu, 2 Jul 2020 08:45:21 +0200 Subject: [PATCH 132/363] Added tests to check serialisers are in global list. --- tests/test_6s4t.py | 5 +++++ tests/test_ep00.py | 5 +++++ tests/test_ev42.py | 5 +++++ tests/test_f142.py | 5 +++++ tests/test_hs00.py | 5 +++++ tests/test_ns10.py | 5 +++++ tests/test_pl72.py | 5 +++++ tests/test_tdct.py | 10 ++++++---- tests/test_x52f.py | 30 +++++++++++++++++------------- 9 files changed, 58 insertions(+), 17 deletions(-) diff --git a/tests/test_6s4t.py b/tests/test_6s4t.py index a68e156..21bcd17 100644 --- a/tests/test_6s4t.py +++ b/tests/test_6s4t.py @@ -1,5 +1,6 @@ import pytest from streaming_data_types.run_stop_6s4t import serialise_6s4t, deserialise_6s4t +from streaming_data_types import SERIALISERS, DESERIALISERS class TestSerialisation6s4t: @@ -28,3 +29,7 @@ def test_if_buffer_has_wrong_id_then_throws(self): with pytest.raises(RuntimeError): deserialise_6s4t(buf) + + def test_schema_type_is_in_global_serialisers_list(self): + assert "6s4t" in SERIALISERS + assert "6s4t" in DESERIALISERS diff --git a/tests/test_ep00.py b/tests/test_ep00.py index d482129..34857e8 100644 --- a/tests/test_ep00.py +++ b/tests/test_ep00.py @@ -4,6 +4,7 @@ serialise_ep00, deserialise_ep00, ) +from streaming_data_types import SERIALISERS, DESERIALISERS class TestSerialisationEp00: @@ -32,3 +33,7 @@ def test_if_buffer_has_wrong_id_then_throws(self): with pytest.raises(RuntimeError): deserialise_ep00(buf) + + def test_schema_type_is_in_global_serialisers_list(self): + assert "ep00" in SERIALISERS + assert "ep00" in DESERIALISERS diff --git a/tests/test_ev42.py b/tests/test_ev42.py index 1b5b500..d0d3765 100644 --- a/tests/test_ev42.py +++ b/tests/test_ev42.py @@ -1,6 +1,7 @@ import numpy as np import pytest from streaming_data_types.eventdata_ev42 import serialise_ev42, deserialise_ev42 +from streaming_data_types import SERIALISERS, 
DESERIALISERS class TestSerialisationEv42: @@ -91,3 +92,7 @@ def test_if_buffer_has_wrong_id_then_throws(self): with pytest.raises(RuntimeError): deserialise_ev42(buf) + + def test_schema_type_is_in_global_serialisers_list(self): + assert "ev42" in SERIALISERS + assert "ev42" in DESERIALISERS diff --git a/tests/test_f142.py b/tests/test_f142.py index 720c048..04aa46f 100644 --- a/tests/test_f142.py +++ b/tests/test_f142.py @@ -3,6 +3,7 @@ from streaming_data_types.logdata_f142 import serialise_f142, deserialise_f142 from streaming_data_types.fbschemas.logdata_f142.AlarmSeverity import AlarmSeverity from streaming_data_types.fbschemas.logdata_f142.AlarmStatus import AlarmStatus +from streaming_data_types import SERIALISERS, DESERIALISERS class TestSerialisationf142: @@ -175,3 +176,7 @@ def test_if_buffer_has_wrong_id_then_throws(self): with pytest.raises(RuntimeError): deserialise_f142(buf) + + def test_schema_type_is_in_global_serialisers_list(self): + assert "f142" in SERIALISERS + assert "f142" in DESERIALISERS diff --git a/tests/test_hs00.py b/tests/test_hs00.py index fa1611c..29ac38b 100644 --- a/tests/test_hs00.py +++ b/tests/test_hs00.py @@ -1,6 +1,7 @@ import numpy as np import pytest from streaming_data_types.histogram_hs00 import serialise_hs00, deserialise_hs00 +from streaming_data_types import SERIALISERS, DESERIALISERS class TestSerialisationHs00: @@ -284,3 +285,7 @@ def test_serialises_and_deserialises_hs00_message_correctly_when_int_input_is_no assert ( hist["last_metadata_timestamp"] == original_hist["last_metadata_timestamp"] ) + + def test_schema_type_is_in_global_serialisers_list(self): + assert "hs00" in SERIALISERS + assert "hs00" in DESERIALISERS diff --git a/tests/test_ns10.py b/tests/test_ns10.py index 02d4bdd..ff3f921 100644 --- a/tests/test_ns10.py +++ b/tests/test_ns10.py @@ -1,5 +1,6 @@ import pytest from streaming_data_types.nicos_cache_ns10 import serialise_ns10, deserialise_ns10 +from streaming_data_types import SERIALISERS, 
DESERIALISERS class TestSerialisationNs10: @@ -40,3 +41,7 @@ def test_if_buffer_has_wrong_id_then_throws(self): with pytest.raises(RuntimeError): deserialise_ns10(buf) + + def test_schema_type_is_in_global_serialisers_list(self): + assert "ns10" in SERIALISERS + assert "ns10" in DESERIALISERS diff --git a/tests/test_pl72.py b/tests/test_pl72.py index 03b4d02..547cdd9 100644 --- a/tests/test_pl72.py +++ b/tests/test_pl72.py @@ -1,5 +1,6 @@ import pytest from streaming_data_types.run_start_pl72 import serialise_pl72, deserialise_pl72 +from streaming_data_types import SERIALISERS, DESERIALISERS class TestSerialisationPl72: @@ -42,3 +43,7 @@ def test_if_buffer_has_wrong_id_then_throws(self): with pytest.raises(RuntimeError): deserialise_pl72(buf) + + def test_schema_type_is_in_global_serialisers_list(self): + assert "pl72" in SERIALISERS + assert "pl72" in DESERIALISERS diff --git a/tests/test_tdct.py b/tests/test_tdct.py index 941c584..1e96d26 100644 --- a/tests/test_tdct.py +++ b/tests/test_tdct.py @@ -1,6 +1,7 @@ import pytest import numpy as np from streaming_data_types.timestamps_tdct import serialise_tdct, deserialise_tdct +from streaming_data_types import SERIALISERS, DESERIALISERS class TestSerialisationTdct: @@ -24,10 +25,7 @@ def test_serialises_and_deserialises_tdct_message_with_list_of_timestamps(self): ) def test_serialises_and_deserialises_tdct_message_with_array_of_timestamps(self): - original_entry = { - "name": "some_name", - "timestamps": np.array([0, 1, 2, 3, 4]), - } + original_entry = {"name": "some_name", "timestamps": np.array([0, 1, 2, 3, 4])} buf = serialise_tdct(**self.original_entry) deserialised_tuple = deserialise_tdct(buf) @@ -46,3 +44,7 @@ def test_if_buffer_has_wrong_id_then_throws(self): with pytest.raises(RuntimeError): deserialise_tdct(buf) + + def test_schema_type_is_in_global_serialisers_list(self): + assert "tdct" in SERIALISERS + assert "tdct" in DESERIALISERS diff --git a/tests/test_x52f.py b/tests/test_x52f.py index 
58c7ac5..1377ad9 100644 --- a/tests/test_x52f.py +++ b/tests/test_x52f.py @@ -1,22 +1,22 @@ import pytest from streaming_data_types.status_x5f2 import serialise_x5f2, deserialise_x5f2 +from streaming_data_types import SERIALISERS, DESERIALISERS class TestEncoder(object): - def test_serialises_and_deserialises_x5f2_message_correctly(self): """ Round-trip to check what we serialise is what we get back. """ original_entry = { - "software_name" : "nicos/test", - "software_version" : "1.0.0", - "service_id" : "1a2b3c", - "host_name" : "localhost", - "process_id" : 1234, + "software_name": "nicos/test", + "software_version": "1.0.0", + "service_id": "1a2b3c", + "host_name": "localhost", + "process_id": 1234, "update_interval": 0, - "status_json" : '{"content" : "log_or_status_message"}', + "status_json": '{"content" : "log_or_status_message"}', } buf = serialise_x5f2(**original_entry) @@ -32,13 +32,13 @@ def test_serialises_and_deserialises_x5f2_message_correctly(self): def test_if_buffer_has_wrong_id_then_throws(self): original_entry = { - "software_name" : "nicos/test", - "software_version" : "1.0.0", - "service_id" : "1a2b3c", - "host_name" : "localhost", - "process_id" : 1234, + "software_name": "nicos/test", + "software_version": "1.0.0", + "service_id": "1a2b3c", + "host_name": "localhost", + "process_id": 1234, "update_interval": 0, - "status_json" : '{"content" : "log_or_status_message"}', + "status_json": '{"content" : "log_or_status_message"}', } buf = serialise_x5f2(**original_entry) @@ -49,3 +49,7 @@ def test_if_buffer_has_wrong_id_then_throws(self): with pytest.raises(RuntimeError): deserialise_x5f2(buf) + + def test_schema_type_is_in_global_serialisers_list(self): + assert "x5f2" in SERIALISERS + assert "x5f2" in DESERIALISERS From 86357b52cb6cc596a2c630a9655157a4263b8752 Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Thu, 2 Jul 2020 08:45:50 +0200 Subject: [PATCH 133/363] Update README_DEV to explain how to test installed module. 
--- README_DEV.md | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/README_DEV.md b/README_DEV.md index 75ee597..a994bcb 100644 --- a/README_DEV.md +++ b/README_DEV.md @@ -60,8 +60,9 @@ Push to test.pypi.org for testing: twine upload --repository-url https://test.pypi.org/legacy/ dist/* ``` -The module can then be installed from test.pypi.org like so: +The new module can then be installed from test.pypi.org like so: ``` +pip uninstall ess_streaming_data_types pip install -i https://test.pypi.org/simple/ ess-streaming-data-types ``` Unfortunately, flatbuffers is not on test.pypi.org so the following error may occur: @@ -70,6 +71,13 @@ ERROR: Could not find a version that satisfies the requirement flatbuffers ``` The workaround is install flatbuffers manually first using `pip install flatbuffers` and then rerun the previous command. +Test the module using the existing test-suite (from project root): +``` +rm -rf streaming_data_types # Rename the local source directory +pytest # The tests will be run against the pip installed module +git reset --hard origin/master # Put everything back to before +``` + After testing installing from test.pypi.org works, push to PyPI: ``` twine upload dist/* From ebcd2af4a689ceb652546714aac4670819d526a1 Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Thu, 2 Jul 2020 08:47:39 +0200 Subject: [PATCH 134/363] Added comment to testing instructions --- README_DEV.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README_DEV.md b/README_DEV.md index a994bcb..946cc31 100644 --- a/README_DEV.md +++ b/README_DEV.md @@ -62,7 +62,7 @@ twine upload --repository-url https://test.pypi.org/legacy/ dist/* The new module can then be installed from test.pypi.org like so: ``` -pip uninstall ess_streaming_data_types +pip uninstall ess_streaming_data_types # Remove old version if present pip install -i https://test.pypi.org/simple/ ess-streaming-data-types ``` Unfortunately, flatbuffers is not on test.pypi.org so 
the following error may occur: From d1aafaf7d146a29d2126e22d8e3df0edb7ce4ea1 Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Thu, 2 Jul 2020 15:11:35 +0200 Subject: [PATCH 135/363] Bump version number --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 82ba41d..af406bc 100644 --- a/setup.py +++ b/setup.py @@ -15,7 +15,7 @@ setup( name="ess_streaming_data_types", - version="0.8.0", + version="0.8.1", description=DESCRIPTION, long_description=LONG_DESCRIPTION, long_description_content_type="text/markdown", From bccd731aa488fb24937d36f86b5bb1934373a839 Mon Sep 17 00:00:00 2001 From: Matthew D Jones Date: Thu, 9 Jul 2020 13:19:45 +0100 Subject: [PATCH 136/363] Add generated files for rf5k schema --- .../ConfigUpdate.py | 72 +++++++++++++++++++ .../forwarder_config_update_rf5k/Stream.py | 67 +++++++++++++++++ .../UpdateType.py | 9 +++ .../forwarder_config_update_rf5k/__init__.py | 0 4 files changed, 148 insertions(+) create mode 100644 streaming_data_types/fbschemas/forwarder_config_update_rf5k/ConfigUpdate.py create mode 100644 streaming_data_types/fbschemas/forwarder_config_update_rf5k/Stream.py create mode 100644 streaming_data_types/fbschemas/forwarder_config_update_rf5k/UpdateType.py create mode 100644 streaming_data_types/fbschemas/forwarder_config_update_rf5k/__init__.py diff --git a/streaming_data_types/fbschemas/forwarder_config_update_rf5k/ConfigUpdate.py b/streaming_data_types/fbschemas/forwarder_config_update_rf5k/ConfigUpdate.py new file mode 100644 index 0000000..f9522c3 --- /dev/null +++ b/streaming_data_types/fbschemas/forwarder_config_update_rf5k/ConfigUpdate.py @@ -0,0 +1,72 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers + + +class ConfigUpdate(object): + __slots__ = ["_tab"] + + @classmethod + def GetRootAsConfigUpdate(cls, buf, offset): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = ConfigUpdate() + 
x.Init(buf, n + offset) + return x + + # ConfigUpdate + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # ConfigUpdate + def ConfigChange(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.Get( + flatbuffers.number_types.Uint16Flags, o + self._tab.Pos + ) + return 0 + + # ConfigUpdate + def Streams(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + x = self._tab.Vector(o) + x += flatbuffers.number_types.UOffsetTFlags.py_type(j) * 4 + x = self._tab.Indirect(x) + from .Stream import Stream + + obj = Stream() + obj.Init(self._tab.Bytes, x) + return obj + return None + + # ConfigUpdate + def StreamsLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + +def ConfigUpdateStart(builder): + builder.StartObject(2) + + +def ConfigUpdateAddConfigChange(builder, configChange): + builder.PrependUint16Slot(0, configChange, 0) + + +def ConfigUpdateAddStreams(builder, streams): + builder.PrependUOffsetTRelativeSlot( + 1, flatbuffers.number_types.UOffsetTFlags.py_type(streams), 0 + ) + + +def ConfigUpdateStartStreamsVector(builder, numElems): + return builder.StartVector(4, numElems, 4) + + +def ConfigUpdateEnd(builder): + return builder.EndObject() diff --git a/streaming_data_types/fbschemas/forwarder_config_update_rf5k/Stream.py b/streaming_data_types/fbschemas/forwarder_config_update_rf5k/Stream.py new file mode 100644 index 0000000..cc31f3e --- /dev/null +++ b/streaming_data_types/fbschemas/forwarder_config_update_rf5k/Stream.py @@ -0,0 +1,67 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers + + +class Stream(object): + __slots__ = ["_tab"] + + @classmethod + def GetRootAsStream(cls, buf, offset): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = Stream() + 
x.Init(buf, n + offset) + return x + + # Stream + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # Stream + def Channel(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + # Stream + def Schema(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + # Stream + def Topic(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + +def StreamStart(builder): + builder.StartObject(3) + + +def StreamAddChannel(builder, channel): + builder.PrependUOffsetTRelativeSlot( + 0, flatbuffers.number_types.UOffsetTFlags.py_type(channel), 0 + ) + + +def StreamAddSchema(builder, schema): + builder.PrependUOffsetTRelativeSlot( + 1, flatbuffers.number_types.UOffsetTFlags.py_type(schema), 0 + ) + + +def StreamAddTopic(builder, topic): + builder.PrependUOffsetTRelativeSlot( + 2, flatbuffers.number_types.UOffsetTFlags.py_type(topic), 0 + ) + + +def StreamEnd(builder): + return builder.EndObject() diff --git a/streaming_data_types/fbschemas/forwarder_config_update_rf5k/UpdateType.py b/streaming_data_types/fbschemas/forwarder_config_update_rf5k/UpdateType.py new file mode 100644 index 0000000..e69b8e4 --- /dev/null +++ b/streaming_data_types/fbschemas/forwarder_config_update_rf5k/UpdateType.py @@ -0,0 +1,9 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + + +class UpdateType(object): + ADD = 0 + REMOVE = 1 + REMOVEALL = 2 diff --git a/streaming_data_types/fbschemas/forwarder_config_update_rf5k/__init__.py b/streaming_data_types/fbschemas/forwarder_config_update_rf5k/__init__.py new file mode 100644 index 0000000..e69de29 From 471e7d8cd414c86e91e53d8404e990d46aa977e3 Mon Sep 17 00:00:00 2001 From: Matthew D Jones Date: 
Fri, 10 Jul 2020 14:38:37 +0100 Subject: [PATCH 137/363] Add rf5k tests --- streaming_data_types/__init__.py | 6 ++ .../forwarder_config_update_rf5k.py | 68 +++++++++++++++++++ tests/test_rf5k.py | 46 +++++++++++++ 3 files changed, 120 insertions(+) create mode 100644 streaming_data_types/forwarder_config_update_rf5k.py create mode 100644 tests/test_rf5k.py diff --git a/streaming_data_types/__init__.py b/streaming_data_types/__init__.py index 5fb3007..0fcc72d 100644 --- a/streaming_data_types/__init__.py +++ b/streaming_data_types/__init__.py @@ -10,6 +10,10 @@ serialise_ep00, ) from streaming_data_types.timestamps_tdct import deserialise_tdct, serialise_tdct +from streaming_data_types.forwarder_config_update_rf5k import ( + deserialise_rf5k, + serialise_rf5k, +) SERIALISERS = { @@ -22,6 +26,7 @@ "x5f2": serialise_x5f2, "ep00": serialise_ep00, "tdct": serialise_tdct, + "rf5k": serialise_rf5k, } @@ -35,4 +40,5 @@ "x5f2": deserialise_x5f2, "ep00": deserialise_ep00, "tdct": deserialise_tdct, + "rf5k": deserialise_rf5k, } diff --git a/streaming_data_types/forwarder_config_update_rf5k.py b/streaming_data_types/forwarder_config_update_rf5k.py new file mode 100644 index 0000000..c90e2e4 --- /dev/null +++ b/streaming_data_types/forwarder_config_update_rf5k.py @@ -0,0 +1,68 @@ +from collections import namedtuple +import flatbuffers +from streaming_data_types.utils import check_schema_identifier +from streaming_data_types.fbschemas.forwarder_config_update_rf5k import ( + UpdateType, + ConfigUpdate, + Stream, +) +from typing import List + +FILE_IDENTIFIER = b"rf5k" + +ConfigurationUpdate = namedtuple("ConfigurationUpdate", ("config_change", "streams"),) + +StreamInfo = namedtuple("StreamInfo", ("channel", "schema", "topic"),) + + +def deserialise_rf5k(buffer): + """ + Deserialise FlatBuffer rf5k. + + :param buffer: The FlatBuffers buffer. + :return: The deserialised data. 
+ """ + check_schema_identifier(buffer, FILE_IDENTIFIER) + + config_message = ConfigUpdate.ConfigUpdate.GetRootAsConfigUpdate(buffer, 0) + + return ConfigurationUpdate(config_message.ConfigChange(), []) + + +def serialise_rf5k(config_change: UpdateType, streams: List[StreamInfo]) -> bytes: + """ + Serialise config update message as an rf5k FlatBuffers message. + + :param config_change: + :param streams: channel, schema and output topic configurations + :return: + """ + + builder = flatbuffers.Builder(1024) + + if streams: + streams_offset = ConfigUpdate.ConfigUpdateStartStreamsVector( + builder, len(streams) + ) + for stream in streams: + channel_offset = builder.CreateString(stream.channel) + schema_offset = builder.CreateString(stream.schema) + topic_offset = builder.CreateString(stream.topic) + Stream.StreamStart(builder) + Stream.StreamAddTopic(builder, topic_offset) + Stream.StreamAddSchema(builder, schema_offset) + Stream.StreamAddChannel(builder, channel_offset) + stream_offset = Stream.StreamEnd(builder) + builder.PrependUOffsetTRelative(stream_offset) + ConfigUpdate.ConfigUpdateAddStreams(builder, streams_offset) + + # Build the actual buffer + ConfigUpdate.ConfigUpdateStart(builder) + data = ConfigUpdate.ConfigUpdateEnd(builder) + ConfigUpdate.ConfigUpdateAddConfigChange(builder, config_change) + builder.Finish(data) + + # Generate the output and replace the file_identifier + buffer = builder.Output() + buffer[4:8] = FILE_IDENTIFIER + return bytes(buffer) diff --git a/tests/test_rf5k.py b/tests/test_rf5k.py new file mode 100644 index 0000000..41514c9 --- /dev/null +++ b/tests/test_rf5k.py @@ -0,0 +1,46 @@ +import pytest +from streaming_data_types.forwarder_config_update_rf5k import ( + serialise_rf5k, + deserialise_rf5k, + StreamInfo, +) +from streaming_data_types import SERIALISERS, DESERIALISERS +from streaming_data_types.fbschemas.forwarder_config_update_rf5k.UpdateType import ( + UpdateType, +) + + +class TestEncoder(object): + def 
test_serialises_and_deserialises_rf5k_message_correctly(self): + """ + Round-trip to check what we serialise is what we get back. + """ + stream_1 = StreamInfo("channel1", "f142", "topic1") + stream_2 = StreamInfo("channel2", "TdcTime", "topic2") + original_entry = { + "config_change": UpdateType.ADD, + "streams": [stream_1, stream_2], + } + + buf = serialise_rf5k(**original_entry) + entry = deserialise_rf5k(buf) + + assert entry.config_change == original_entry["config_change"] + assert stream_1 in entry.streams + assert stream_2 in entry.streams + + def test_if_buffer_has_wrong_id_then_throws(self): + original_entry = {"config_change": UpdateType.REMOVEALL, "streams": []} + + buf = serialise_rf5k(**original_entry) + + # Manually hack the id + buf = bytearray(buf) + buf[4:8] = b"1234" + + with pytest.raises(RuntimeError): + deserialise_rf5k(buf) + + def test_schema_type_is_in_global_serialisers_list(self): + assert "rf5k" in SERIALISERS + assert "rf5k" in DESERIALISERS From a5460a96434e51e0c3b5dac1aa43a24cc177f958 Mon Sep 17 00:00:00 2001 From: Matthew D Jones Date: Fri, 10 Jul 2020 15:06:49 +0100 Subject: [PATCH 138/363] Avoid nesting builders during serialisation --- .../forwarder_config_update_rf5k.py | 44 ++++++++++++------- 1 file changed, 29 insertions(+), 15 deletions(-) diff --git a/streaming_data_types/forwarder_config_update_rf5k.py b/streaming_data_types/forwarder_config_update_rf5k.py index c90e2e4..7ccccd6 100644 --- a/streaming_data_types/forwarder_config_update_rf5k.py +++ b/streaming_data_types/forwarder_config_update_rf5k.py @@ -29,6 +29,14 @@ def deserialise_rf5k(buffer): return ConfigurationUpdate(config_message.ConfigChange(), []) +def serialise_stream(builder, topic_offset, schema_offset, channel_offset): + Stream.StreamStart(builder) + Stream.StreamAddTopic(builder, topic_offset) + Stream.StreamAddSchema(builder, schema_offset) + Stream.StreamAddChannel(builder, channel_offset) + return Stream.StreamEnd(builder) + + def 
serialise_rf5k(config_change: UpdateType, streams: List[StreamInfo]) -> bytes: """ Serialise config update message as an rf5k FlatBuffers message. @@ -37,29 +45,35 @@ def serialise_rf5k(config_change: UpdateType, streams: List[StreamInfo]) -> byte :param streams: channel, schema and output topic configurations :return: """ - builder = flatbuffers.Builder(1024) if streams: - streams_offset = ConfigUpdate.ConfigUpdateStartStreamsVector( - builder, len(streams) - ) - for stream in streams: - channel_offset = builder.CreateString(stream.channel) - schema_offset = builder.CreateString(stream.schema) - topic_offset = builder.CreateString(stream.topic) - Stream.StreamStart(builder) - Stream.StreamAddTopic(builder, topic_offset) - Stream.StreamAddSchema(builder, schema_offset) - Stream.StreamAddChannel(builder, channel_offset) - stream_offset = Stream.StreamEnd(builder) + # We have to use multiple loops/list comprehensions here because we cannot create strings after we have + # called StreamStart and cannot create streams after we have called StartVector + stream_field_offsets = [ + ( + builder.CreateString(stream.channel), + builder.CreateString(stream.schema), + builder.CreateString(stream.topic), + ) + for stream in streams + ] + stream_offsets = [ + serialise_stream(builder, *stream_fields) + for stream_fields in stream_field_offsets + ] + + ConfigUpdate.ConfigUpdateStartStreamsVector(builder, len(streams)) + for stream_offset in stream_offsets: builder.PrependUOffsetTRelative(stream_offset) - ConfigUpdate.ConfigUpdateAddStreams(builder, streams_offset) + streams_offset = builder.EndVector(len(streams)) # Build the actual buffer ConfigUpdate.ConfigUpdateStart(builder) - data = ConfigUpdate.ConfigUpdateEnd(builder) + if streams: + ConfigUpdate.ConfigUpdateAddStreams(builder, streams_offset) ConfigUpdate.ConfigUpdateAddConfigChange(builder, config_change) + data = ConfigUpdate.ConfigUpdateEnd(builder) builder.Finish(data) # Generate the output and replace the 
file_identifier From 86710f6f138e98c9cbaa0eb93df811614d746fab Mon Sep 17 00:00:00 2001 From: Matthew D Jones Date: Fri, 10 Jul 2020 15:19:00 +0100 Subject: [PATCH 139/363] Deserialise streams in rf5k --- .../forwarder_config_update_rf5k.py | 21 +++++++++++++++++-- 1 file changed, 19 insertions(+), 2 deletions(-) diff --git a/streaming_data_types/forwarder_config_update_rf5k.py b/streaming_data_types/forwarder_config_update_rf5k.py index 7ccccd6..f94692c 100644 --- a/streaming_data_types/forwarder_config_update_rf5k.py +++ b/streaming_data_types/forwarder_config_update_rf5k.py @@ -26,10 +26,27 @@ def deserialise_rf5k(buffer): config_message = ConfigUpdate.ConfigUpdate.GetRootAsConfigUpdate(buffer, 0) - return ConfigurationUpdate(config_message.ConfigChange(), []) + streams = [] + for i in range(config_message.StreamsLength()): + stream_message = config_message.Streams(i) + streams.append( + StreamInfo( + stream_message.Channel().decode("utf-8") + if stream_message.Channel() + else "", + stream_message.Schema().decode("utf-8") + if stream_message.Schema() + else "", + stream_message.Topic().decode("utf-8") + if stream_message.Topic() + else "", + ) + ) + + return ConfigurationUpdate(config_message.ConfigChange(), streams) -def serialise_stream(builder, topic_offset, schema_offset, channel_offset): +def serialise_stream(builder, channel_offset, schema_offset, topic_offset): Stream.StreamStart(builder) Stream.StreamAddTopic(builder, topic_offset) Stream.StreamAddSchema(builder, schema_offset) From 18a8500cc8a33990f7b7c96570895b91a7ca8ee2 Mon Sep 17 00:00:00 2001 From: Matthew D Jones Date: Fri, 10 Jul 2020 15:20:50 +0100 Subject: [PATCH 140/363] Add rf5k to table in readme --- README.md | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index abac6d9..8936bee 100644 --- a/README.md +++ b/README.md @@ -12,11 +12,12 @@ https://github.com/ess-dmsc/streaming-data-types |ns10|NICOS cache entry schema|Y| |pl72|Run start|N| 
|6s4t|Run stop|N| -|f142|Log Data|Y| -|ev42|Event Data|Y| +|f142|Log data|Y| +|ev42|Event data|Y| |x5f2|Status messages|N| |tdct|Timestamps|Y| -|ep00|EPICS Connection Info|Y| +|ep00|EPICS connection info|Y| +|rf5k|Forwarder configuration update|Y| \* whether it passes verification via the C++ FlatBuffers library. From dd3c88ab2ed07fb7b0e3e234caac018dab12602e Mon Sep 17 00:00:00 2001 From: Matthew D Jones Date: Fri, 10 Jul 2020 15:24:51 +0100 Subject: [PATCH 141/363] Add missing type hints in rf5k --- streaming_data_types/forwarder_config_update_rf5k.py | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/streaming_data_types/forwarder_config_update_rf5k.py b/streaming_data_types/forwarder_config_update_rf5k.py index f94692c..c90be2f 100644 --- a/streaming_data_types/forwarder_config_update_rf5k.py +++ b/streaming_data_types/forwarder_config_update_rf5k.py @@ -6,7 +6,7 @@ ConfigUpdate, Stream, ) -from typing import List +from typing import List, Union FILE_IDENTIFIER = b"rf5k" @@ -15,7 +15,7 @@ StreamInfo = namedtuple("StreamInfo", ("channel", "schema", "topic"),) -def deserialise_rf5k(buffer): +def deserialise_rf5k(buffer: Union[bytearray, bytes]) -> ConfigurationUpdate: """ Deserialise FlatBuffer rf5k. 
@@ -46,7 +46,12 @@ def deserialise_rf5k(buffer): return ConfigurationUpdate(config_message.ConfigChange(), streams) -def serialise_stream(builder, channel_offset, schema_offset, topic_offset): +def serialise_stream( + builder: flatbuffers.Builder, + channel_offset: int, + schema_offset: int, + topic_offset: int, +) -> int: Stream.StreamStart(builder) Stream.StreamAddTopic(builder, topic_offset) Stream.StreamAddSchema(builder, schema_offset) From bee53cefdf244b052ae8cf9c60407d5037d91ad9 Mon Sep 17 00:00:00 2001 From: Matthew D Jones Date: Fri, 10 Jul 2020 16:58:43 +0100 Subject: [PATCH 142/363] Refactor: extract methods --- streaming_data_types/histogram_hs00.py | 68 +++++++++++++------------- 1 file changed, 34 insertions(+), 34 deletions(-) diff --git a/streaming_data_types/histogram_hs00.py b/streaming_data_types/histogram_hs00.py index a0f0a80..57523ea 100644 --- a/streaming_data_types/histogram_hs00.py +++ b/streaming_data_types/histogram_hs00.py @@ -173,25 +173,9 @@ def serialise_hs00(histogram): flattened_data = numpy.asarray(histogram["data"]).flatten() if numpy.issubdtype(flattened_data[0], numpy.int64): - data_type = Array.ArrayULong - ArrayULong.ArrayULongStartValueVector(builder, data_len) - # FlatBuffers builds arrays backwards - for x in reversed(flattened_data): - builder.PrependUint64(x) - data_vector = builder.EndVector(data_len) - ArrayULong.ArrayULongStart(builder) - ArrayULong.ArrayULongAddValue(builder, data_vector) - data_offset = ArrayULong.ArrayULongEnd(builder) + data_offset, data_type = _serialise_uint64(builder, data_len, flattened_data) else: - data_type = Array.ArrayDouble - ArrayDouble.ArrayDoubleStartValueVector(builder, data_len) - # FlatBuffers builds arrays backwards - for x in reversed(flattened_data): - builder.PrependFloat64(x) - data_vector = builder.EndVector(data_len) - ArrayDouble.ArrayDoubleStart(builder) - ArrayDouble.ArrayDoubleAddValue(builder, data_vector) - data_offset = ArrayDouble.ArrayDoubleEnd(builder) + 
data_offset, data_type = _serialise_double(builder, data_len, flattened_data) errors_offset = None if "errors" in histogram: @@ -201,23 +185,13 @@ def serialise_hs00(histogram): flattened_data = numpy.asarray(histogram["errors"]).flatten() if numpy.issubdtype(flattened_data[0], numpy.int64): - error_type = Array.ArrayULong - ArrayULong.ArrayULongStartValueVector(builder, data_len) - for x in reversed(flattened_data): - builder.PrependUint64(x) - errors = builder.EndVector(data_len) - ArrayULong.ArrayULongStart(builder) - ArrayULong.ArrayULongAddValue(builder, errors) - errors_offset = ArrayULong.ArrayULongEnd(builder) + errors_offset, error_type = _serialise_uint64( + builder, data_len, flattened_data + ) else: - error_type = Array.ArrayDouble - ArrayDouble.ArrayDoubleStartValueVector(builder, data_len) - for x in reversed(flattened_data): - builder.PrependFloat64(x) - errors = builder.EndVector(data_len) - ArrayDouble.ArrayDoubleStart(builder) - ArrayDouble.ArrayDoubleAddValue(builder, errors) - errors_offset = ArrayDouble.ArrayDoubleEnd(builder) + errors_offset, error_type = _serialise_double( + builder, data_len, flattened_data + ) # Build the actual buffer EventHistogram.EventHistogramStart(builder) @@ -244,3 +218,29 @@ def serialise_hs00(histogram): buffer = builder.Output() buffer[4:8] = FILE_IDENTIFIER return bytes(buffer) + + +def _serialise_double(builder, data_len, flattened_data): + data_type = Array.ArrayDouble + ArrayDouble.ArrayDoubleStartValueVector(builder, data_len) + # FlatBuffers builds arrays backwards + for x in reversed(flattened_data): + builder.PrependFloat64(x) + data_vector = builder.EndVector(data_len) + ArrayDouble.ArrayDoubleStart(builder) + ArrayDouble.ArrayDoubleAddValue(builder, data_vector) + data_offset = ArrayDouble.ArrayDoubleEnd(builder) + return data_offset, data_type + + +def _serialise_uint64(builder, data_len, flattened_data): + data_type = Array.ArrayULong + ArrayULong.ArrayULongStartValueVector(builder, data_len) + # 
FlatBuffers builds arrays backwards + for x in reversed(flattened_data): + builder.PrependUint64(x) + data_vector = builder.EndVector(data_len) + ArrayULong.ArrayULongStart(builder) + ArrayULong.ArrayULongAddValue(builder, data_vector) + data_offset = ArrayULong.ArrayULongEnd(builder) + return data_offset, data_type From f4596a8ddffe51060823d313a2a88651fbdd0007 Mon Sep 17 00:00:00 2001 From: Matthew D Jones Date: Fri, 10 Jul 2020 17:21:58 +0100 Subject: [PATCH 143/363] Refactor: extract methods --- streaming_data_types/histogram_hs00.py | 22 ++-------------------- 1 file changed, 2 insertions(+), 20 deletions(-) diff --git a/streaming_data_types/histogram_hs00.py b/streaming_data_types/histogram_hs00.py index 57523ea..a10e006 100644 --- a/streaming_data_types/histogram_hs00.py +++ b/streaming_data_types/histogram_hs00.py @@ -97,27 +97,9 @@ def _serialise_metadata(builder, length, edges, unit, label): if isinstance(edges[0], int) or ( isinstance(edges, numpy.ndarray) and numpy.issubdtype(edges[0], numpy.int64) ): - bin_type = Array.ArrayULong - ArrayULong.ArrayULongStartValueVector(builder, len(edges)) - # FlatBuffers builds arrays backwards - for x in reversed(edges): - builder.PrependUint64(x) - bins_vector = builder.EndVector(len(edges)) - # Add the bins - ArrayULong.ArrayULongStart(builder) - ArrayULong.ArrayULongAddValue(builder, bins_vector) - bins_offset = ArrayULong.ArrayULongEnd(builder) + bins_offset, bin_type = _serialise_uint64(builder, len(edges), edges) else: - bin_type = Array.ArrayDouble - ArrayDouble.ArrayDoubleStartValueVector(builder, len(edges)) - # FlatBuffers builds arrays backwards - for x in reversed(edges): - builder.PrependFloat64(x) - bins_vector = builder.EndVector(len(edges)) - # Add the bins - ArrayDouble.ArrayDoubleStart(builder) - ArrayDouble.ArrayDoubleAddValue(builder, bins_vector) - bins_offset = ArrayDouble.ArrayDoubleEnd(builder) + bins_offset, bin_type = _serialise_double(builder, len(edges), edges) 
DimensionMetaData.DimensionMetaDataStart(builder) DimensionMetaData.DimensionMetaDataAddLength(builder, length) From 2a8bfc75496b23baa9542bf0c8a0bebeb210b30d Mon Sep 17 00:00:00 2001 From: Matthew D Jones Date: Fri, 10 Jul 2020 17:30:58 +0100 Subject: [PATCH 144/363] Refactor: extract methods --- streaming_data_types/histogram_hs00.py | 39 +++++++++----------------- 1 file changed, 14 insertions(+), 25 deletions(-) diff --git a/streaming_data_types/histogram_hs00.py b/streaming_data_types/histogram_hs00.py index a10e006..a592468 100644 --- a/streaming_data_types/histogram_hs00.py +++ b/streaming_data_types/histogram_hs00.py @@ -94,12 +94,7 @@ def _serialise_metadata(builder, length, edges, unit, label): unit_offset = builder.CreateString(unit) label_offset = builder.CreateString(label) - if isinstance(edges[0], int) or ( - isinstance(edges, numpy.ndarray) and numpy.issubdtype(edges[0], numpy.int64) - ): - bins_offset, bin_type = _serialise_uint64(builder, len(edges), edges) - else: - bins_offset, bin_type = _serialise_double(builder, len(edges), edges) + bins_offset, bin_type = _serialise_array(builder, len(edges), edges) DimensionMetaData.DimensionMetaDataStart(builder) DimensionMetaData.DimensionMetaDataAddLength(builder, length) @@ -152,28 +147,13 @@ def serialise_hs00(histogram): # Build the data data_len = reduce(operator.mul, histogram["current_shape"], 1) - flattened_data = numpy.asarray(histogram["data"]).flatten() - - if numpy.issubdtype(flattened_data[0], numpy.int64): - data_offset, data_type = _serialise_uint64(builder, data_len, flattened_data) - else: - data_offset, data_type = _serialise_double(builder, data_len, flattened_data) + data_offset, data_type = _serialise_array(builder, data_len, histogram["data"]) errors_offset = None if "errors" in histogram: - if isinstance(histogram["errors"], numpy.ndarray): - flattened_data = histogram["errors"].flatten() - else: - flattened_data = numpy.asarray(histogram["errors"]).flatten() - - if 
numpy.issubdtype(flattened_data[0], numpy.int64): - errors_offset, error_type = _serialise_uint64( - builder, data_len, flattened_data - ) - else: - errors_offset, error_type = _serialise_double( - builder, data_len, flattened_data - ) + errors_offset, error_type = _serialise_array( + builder, data_len, histogram["errors"] + ) # Build the actual buffer EventHistogram.EventHistogramStart(builder) @@ -202,6 +182,15 @@ def serialise_hs00(histogram): return bytes(buffer) +def _serialise_array(builder, data_len, data): + flattened_data = numpy.asarray(data).flatten() + + if numpy.issubdtype(flattened_data[0], numpy.int64): + return _serialise_uint64(builder, data_len, flattened_data) + else: + return _serialise_double(builder, data_len, flattened_data) + + def _serialise_double(builder, data_len, flattened_data): data_type = Array.ArrayDouble ArrayDouble.ArrayDoubleStartValueVector(builder, data_len) From 66d2d1db83e6a577a38924007fe482336cf121c3 Mon Sep 17 00:00:00 2001 From: Matthew D Jones Date: Fri, 10 Jul 2020 17:45:32 +0100 Subject: [PATCH 145/363] Add tests that hs00 preserves 32 bit types --- tests/test_hs00.py | 70 ++++++++++++++++++++++++++++++++++++++++++++-- 1 file changed, 67 insertions(+), 3 deletions(-) diff --git a/tests/test_hs00.py b/tests/test_hs00.py index 29ac38b..f00641c 100644 --- a/tests/test_hs00.py +++ b/tests/test_hs00.py @@ -151,7 +151,7 @@ def test_if_buffer_has_wrong_id_then_throws(self): deserialise_hs00(buf) def test_serialises_and_deserialises_hs00_message_correctly_for_int_array_data( - self + self, ): """ Round-trip to check what we serialise is what we get back. @@ -190,8 +190,72 @@ def test_serialises_and_deserialises_hs00_message_correctly_for_int_array_data( hist["last_metadata_timestamp"] == original_hist["last_metadata_timestamp"] ) + def test_serialise_and_deserialise_hs00_message_returns_int32_type(self): + """ + Round-trip to check what we serialise is what we get back. 
+ """ + original_hist = { + "source": "some_source", + "timestamp": 123456, + "current_shape": [5], + "dim_metadata": [ + { + "length": 5, + "unit": "m", + "label": "some_label", + "bin_boundaries": np.array([0, 1, 2, 3, 4, 5]).astype(np.int32), + } + ], + "last_metadata_timestamp": 123456, + "data": np.array([1, 2, 3, 4, 5]).astype(np.int32), + "errors": np.array([5, 4, 3, 2, 1]).astype(np.int32), + "info": "info_string", + } + + buf = serialise_hs00(original_hist) + hist = deserialise_hs00(buf) + + assert np.issubdtype( + hist["dim_metadata"][0]["bin_boundaries"].dtype, + original_hist["dim_metadata"][0]["bin_boundaries"].dtype, + ) + assert np.issubdtype(hist["data"].dtype, original_hist["data"].dtype) + assert np.issubdtype(hist["errors"].dtype, original_hist["errors"].dtype) + + def test_serialise_and_deserialise_hs00_message_returns_float32_type(self): + """ + Round-trip to check what we serialise is what we get back. + """ + original_hist = { + "source": "some_source", + "timestamp": 123456, + "current_shape": [5], + "dim_metadata": [ + { + "length": 5, + "unit": "m", + "label": "some_label", + "bin_boundaries": np.array([0, 1, 2, 3, 4, 5]).astype(np.float32), + } + ], + "last_metadata_timestamp": 123456, + "data": np.array([1, 2, 3, 4, 5]).astype(np.float32), + "errors": np.array([5, 4, 3, 2, 1]).astype(np.float32), + "info": "info_string", + } + + buf = serialise_hs00(original_hist) + hist = deserialise_hs00(buf) + + assert np.issubdtype( + hist["dim_metadata"][0]["bin_boundaries"].dtype, + original_hist["dim_metadata"][0]["bin_boundaries"].dtype, + ) + assert np.issubdtype(hist["data"].dtype, original_hist["data"].dtype) + assert np.issubdtype(hist["errors"].dtype, original_hist["errors"].dtype) + def test_serialises_and_deserialises_hs00_message_correctly_when_float_input_is_not_ndarray( - self + self, ): """ Round-trip to check what we serialise is what we get back. 
@@ -239,7 +303,7 @@ def test_serialises_and_deserialises_hs00_message_correctly_when_float_input_is_ ) def test_serialises_and_deserialises_hs00_message_correctly_when_int_input_is_not_ndarray( - self + self, ): """ Round-trip to check what we serialise is what we get back. From 155b9e5dbe41d7c28a84e2151f513ed97741297f Mon Sep 17 00:00:00 2001 From: Matthew D Jones Date: Fri, 10 Jul 2020 17:53:51 +0100 Subject: [PATCH 146/363] Add tests to cover all types supported by hs00 --- tests/test_hs00.py | 90 +++++++++++++++++++++++++--------------------- 1 file changed, 49 insertions(+), 41 deletions(-) diff --git a/tests/test_hs00.py b/tests/test_hs00.py index f00641c..b710e1a 100644 --- a/tests/test_hs00.py +++ b/tests/test_hs00.py @@ -4,6 +4,26 @@ from streaming_data_types import SERIALISERS, DESERIALISERS +def create_test_data_with_type(numpy_type): + return { + "source": "some_source", + "timestamp": 123456, + "current_shape": [5], + "dim_metadata": [ + { + "length": 5, + "unit": "m", + "label": "some_label", + "bin_boundaries": np.array([0, 1, 2, 3, 4, 5]).astype(numpy_type), + } + ], + "last_metadata_timestamp": 123456, + "data": np.array([1, 2, 3, 4, 5]).astype(numpy_type), + "errors": np.array([5, 4, 3, 2, 1]).astype(numpy_type), + "info": "info_string", + } + + class TestSerialisationHs00: def _check_metadata_for_one_dimension(self, data, original_data): assert np.array_equal(data["bin_boundaries"], original_data["bin_boundaries"]) @@ -190,27 +210,21 @@ def test_serialises_and_deserialises_hs00_message_correctly_for_int_array_data( hist["last_metadata_timestamp"] == original_hist["last_metadata_timestamp"] ) - def test_serialise_and_deserialise_hs00_message_returns_int32_type(self): - """ - Round-trip to check what we serialise is what we get back. 
- """ - original_hist = { - "source": "some_source", - "timestamp": 123456, - "current_shape": [5], - "dim_metadata": [ - { - "length": 5, - "unit": "m", - "label": "some_label", - "bin_boundaries": np.array([0, 1, 2, 3, 4, 5]).astype(np.int32), - } - ], - "last_metadata_timestamp": 123456, - "data": np.array([1, 2, 3, 4, 5]).astype(np.int32), - "errors": np.array([5, 4, 3, 2, 1]).astype(np.int32), - "info": "info_string", - } + def test_serialise_and_deserialise_hs00_message_returns_uint32_type(self): + original_hist = create_test_data_with_type(np.uint32) + + buf = serialise_hs00(original_hist) + hist = deserialise_hs00(buf) + + assert np.issubdtype( + hist["dim_metadata"][0]["bin_boundaries"].dtype, + original_hist["dim_metadata"][0]["bin_boundaries"].dtype, + ) + assert np.issubdtype(hist["data"].dtype, original_hist["data"].dtype) + assert np.issubdtype(hist["errors"].dtype, original_hist["errors"].dtype) + + def test_serialise_and_deserialise_hs00_message_returns_uint64_type(self): + original_hist = create_test_data_with_type(np.uint64) buf = serialise_hs00(original_hist) hist = deserialise_hs00(buf) @@ -223,26 +237,20 @@ def test_serialise_and_deserialise_hs00_message_returns_int32_type(self): assert np.issubdtype(hist["errors"].dtype, original_hist["errors"].dtype) def test_serialise_and_deserialise_hs00_message_returns_float32_type(self): - """ - Round-trip to check what we serialise is what we get back. 
- """ - original_hist = { - "source": "some_source", - "timestamp": 123456, - "current_shape": [5], - "dim_metadata": [ - { - "length": 5, - "unit": "m", - "label": "some_label", - "bin_boundaries": np.array([0, 1, 2, 3, 4, 5]).astype(np.float32), - } - ], - "last_metadata_timestamp": 123456, - "data": np.array([1, 2, 3, 4, 5]).astype(np.float32), - "errors": np.array([5, 4, 3, 2, 1]).astype(np.float32), - "info": "info_string", - } + original_hist = create_test_data_with_type(np.float32) + + buf = serialise_hs00(original_hist) + hist = deserialise_hs00(buf) + + assert np.issubdtype( + hist["dim_metadata"][0]["bin_boundaries"].dtype, + original_hist["dim_metadata"][0]["bin_boundaries"].dtype, + ) + assert np.issubdtype(hist["data"].dtype, original_hist["data"].dtype) + assert np.issubdtype(hist["errors"].dtype, original_hist["errors"].dtype) + + def test_serialise_and_deserialise_hs00_message_returns_float64_type(self): + original_hist = create_test_data_with_type(np.float64) buf = serialise_hs00(original_hist) hist = deserialise_hs00(buf) From 181046b8a5bea7b82617a1842ff550ed2def647c Mon Sep 17 00:00:00 2001 From: Matthew D Jones Date: Fri, 10 Jul 2020 18:10:39 +0100 Subject: [PATCH 147/363] Preserve array types when serialising hs00 --- streaming_data_types/histogram_hs00.py | 44 +++++++++++++++++++++++++- 1 file changed, 43 insertions(+), 1 deletion(-) diff --git a/streaming_data_types/histogram_hs00.py b/streaming_data_types/histogram_hs00.py index a592468..dc16ad7 100644 --- a/streaming_data_types/histogram_hs00.py +++ b/streaming_data_types/histogram_hs00.py @@ -2,7 +2,9 @@ import operator import flatbuffers import numpy +import streaming_data_types.fbschemas.histogram_hs00.ArrayFloat as ArrayFloat import streaming_data_types.fbschemas.histogram_hs00.ArrayDouble as ArrayDouble +import streaming_data_types.fbschemas.histogram_hs00.ArrayUInt as ArrayUInt import streaming_data_types.fbschemas.histogram_hs00.ArrayULong as ArrayULong import 
streaming_data_types.fbschemas.histogram_hs00.DimensionMetaData as DimensionMetaData import streaming_data_types.fbschemas.histogram_hs00.EventHistogram as EventHistogram @@ -109,6 +111,9 @@ def serialise_hs00(histogram): """ Serialise a histogram as an hs00 FlatBuffers message. + If arrays are provided as numpy arrays with type np.uint32, np.uint64, np.float32 + or np.float64 then type is preserved in output buffer. + :param histogram: A dictionary containing the histogram to serialise. """ source_offset = None @@ -185,12 +190,36 @@ def serialise_hs00(histogram): def _serialise_array(builder, data_len, data): flattened_data = numpy.asarray(data).flatten() - if numpy.issubdtype(flattened_data[0], numpy.int64): + # Carefully preserve explicitly supported types + if numpy.issubdtype(flattened_data.dtype, numpy.uint32): + return _serialise_uint32(builder, data_len, flattened_data) + elif numpy.issubdtype(flattened_data.dtype, numpy.uint64): + return _serialise_uint64(builder, data_len, flattened_data) + elif numpy.issubdtype(flattened_data.dtype, numpy.float32): + return _serialise_float(builder, data_len, flattened_data) + elif numpy.issubdtype(flattened_data.dtype, numpy.float64): + return _serialise_double(builder, data_len, flattened_data) + + # Otherwise if it looks like an int then use uint64, or use double as last resort + elif numpy.issubdtype(flattened_data.dtype, numpy.int64): return _serialise_uint64(builder, data_len, flattened_data) else: return _serialise_double(builder, data_len, flattened_data) +def _serialise_float(builder, data_len, flattened_data): + data_type = Array.ArrayFloat + ArrayFloat.ArrayFloatStartValueVector(builder, data_len) + # FlatBuffers builds arrays backwards + for x in reversed(flattened_data): + builder.PrependFloat32(x) + data_vector = builder.EndVector(data_len) + ArrayFloat.ArrayFloatStart(builder) + ArrayFloat.ArrayFloatAddValue(builder, data_vector) + data_offset = ArrayFloat.ArrayFloatEnd(builder) + return data_offset, 
data_type + + def _serialise_double(builder, data_len, flattened_data): data_type = Array.ArrayDouble ArrayDouble.ArrayDoubleStartValueVector(builder, data_len) @@ -204,6 +233,19 @@ def _serialise_double(builder, data_len, flattened_data): return data_offset, data_type +def _serialise_uint32(builder, data_len, flattened_data): + data_type = Array.ArrayUInt + ArrayUInt.ArrayUIntStartValueVector(builder, data_len) + # FlatBuffers builds arrays backwards + for x in reversed(flattened_data): + builder.PrependUint32(x) + data_vector = builder.EndVector(data_len) + ArrayUInt.ArrayUIntStart(builder) + ArrayUInt.ArrayUIntAddValue(builder, data_vector) + data_offset = ArrayUInt.ArrayUIntEnd(builder) + return data_offset, data_type + + def _serialise_uint64(builder, data_len, flattened_data): data_type = Array.ArrayULong ArrayULong.ArrayULongStartValueVector(builder, data_len) From c1563ff2d019eac55fba8cc551e910cf071ee98c Mon Sep 17 00:00:00 2001 From: Matthew D Jones Date: Fri, 10 Jul 2020 19:30:14 +0100 Subject: [PATCH 148/363] Preserve array types when deserialising hs00 --- streaming_data_types/histogram_hs00.py | 35 ++++++++++++-------------- 1 file changed, 16 insertions(+), 19 deletions(-) diff --git a/streaming_data_types/histogram_hs00.py b/streaming_data_types/histogram_hs00.py index dc16ad7..49a4200 100644 --- a/streaming_data_types/histogram_hs00.py +++ b/streaming_data_types/histogram_hs00.py @@ -15,6 +15,17 @@ FILE_IDENTIFIER = b"hs00" +def _create_array_object_for_type(array_type): + if array_type == Array.ArrayUInt: + return ArrayUInt.ArrayUInt() + elif array_type == Array.ArrayULong: + return ArrayULong.ArrayULong() + elif array_type == Array.ArrayFloat: + return ArrayFloat.ArrayFloat() + else: + return ArrayDouble.ArrayDouble() + + def deserialise_hs00(buffer): """ Deserialise flatbuffer hs10 into a histogram. 
@@ -27,12 +38,9 @@ def deserialise_hs00(buffer): dims = [] for i in range(event_hist.DimMetadataLength()): - bins_fb = ArrayDouble.ArrayDouble() - if ( - event_hist.DimMetadata(i).BinBoundariesType() == Array.ArrayUInt - or event_hist.DimMetadata(i).BinBoundariesType() == Array.ArrayULong - ): - bins_fb = ArrayULong.ArrayULong() + bins_fb = _create_array_object_for_type( + event_hist.DimMetadata(i).BinBoundariesType() + ) # Get bins bins_offset = event_hist.DimMetadata(i).BinBoundaries() @@ -53,13 +61,7 @@ def deserialise_hs00(buffer): metadata_timestamp = event_hist.LastMetadataTimestamp() - data_fb = ArrayDouble.ArrayDouble() - if ( - event_hist.DataType() == Array.ArrayUInt - or event_hist.DataType() == Array.ArrayULong - ): - data_fb = ArrayULong.ArrayULong() - + data_fb = _create_array_object_for_type(event_hist.DataType()) data_offset = event_hist.Data() data_fb.Init(data_offset.Bytes, data_offset.Pos) shape = event_hist.CurrentShapeAsNumpy().tolist() @@ -68,12 +70,7 @@ def deserialise_hs00(buffer): # Get the errors errors_offset = event_hist.Errors() if errors_offset: - errors_fb = ArrayDouble.ArrayDouble() - if ( - event_hist.DataType() == Array.ArrayUInt - or event_hist.DataType() == Array.ArrayULong - ): - errors_fb = ArrayULong.ArrayULong() + errors_fb = _create_array_object_for_type(event_hist.ErrorsType()) errors_fb.Init(errors_offset.Bytes, errors_offset.Pos) errors = errors_fb.ValueAsNumpy().reshape(shape) else: From 55921b0802fff68f16286145ce4493a49da62b0c Mon Sep 17 00:00:00 2001 From: Matthew D Jones Date: Sun, 12 Jul 2020 14:27:45 +0100 Subject: [PATCH 149/363] Serialise protocol for Forwarder --- .../forwarder_config_update_rf5k/Protocol.py | 9 +++++++++ .../forwarder_config_update_rf5k/Stream.py | 15 ++++++++++++++- .../forwarder_config_update_rf5k.py | 10 +++++++--- tests/test_rf5k.py | 5 +++-- 4 files changed, 33 insertions(+), 6 deletions(-) create mode 100644 streaming_data_types/fbschemas/forwarder_config_update_rf5k/Protocol.py diff --git 
a/streaming_data_types/fbschemas/forwarder_config_update_rf5k/Protocol.py b/streaming_data_types/fbschemas/forwarder_config_update_rf5k/Protocol.py new file mode 100644 index 0000000..f8231fa --- /dev/null +++ b/streaming_data_types/fbschemas/forwarder_config_update_rf5k/Protocol.py @@ -0,0 +1,9 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + + +class Protocol(object): + PVA = 0 + CA = 1 + FAKE = 2 diff --git a/streaming_data_types/fbschemas/forwarder_config_update_rf5k/Stream.py b/streaming_data_types/fbschemas/forwarder_config_update_rf5k/Stream.py index cc31f3e..afcdb86 100644 --- a/streaming_data_types/fbschemas/forwarder_config_update_rf5k/Stream.py +++ b/streaming_data_types/fbschemas/forwarder_config_update_rf5k/Stream.py @@ -40,9 +40,18 @@ def Topic(self): return self._tab.String(o + self._tab.Pos) return None + # Stream + def Protocol(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + if o != 0: + return self._tab.Get( + flatbuffers.number_types.Uint16Flags, o + self._tab.Pos + ) + return 0 + def StreamStart(builder): - builder.StartObject(3) + builder.StartObject(4) def StreamAddChannel(builder, channel): @@ -63,5 +72,9 @@ def StreamAddTopic(builder, topic): ) +def StreamAddProtocol(builder, protocol): + builder.PrependUint16Slot(3, protocol, 0) + + def StreamEnd(builder): return builder.EndObject() diff --git a/streaming_data_types/forwarder_config_update_rf5k.py b/streaming_data_types/forwarder_config_update_rf5k.py index c90be2f..df9d2de 100644 --- a/streaming_data_types/forwarder_config_update_rf5k.py +++ b/streaming_data_types/forwarder_config_update_rf5k.py @@ -5,6 +5,7 @@ UpdateType, ConfigUpdate, Stream, + Protocol, ) from typing import List, Union @@ -12,7 +13,7 @@ ConfigurationUpdate = namedtuple("ConfigurationUpdate", ("config_change", "streams"),) -StreamInfo = namedtuple("StreamInfo", ("channel", "schema", "topic"),) +StreamInfo = namedtuple("StreamInfo", ("channel", 
"schema", "topic", "protocol"),) def deserialise_rf5k(buffer: Union[bytearray, bytes]) -> ConfigurationUpdate: @@ -40,6 +41,7 @@ def deserialise_rf5k(buffer: Union[bytearray, bytes]) -> ConfigurationUpdate: stream_message.Topic().decode("utf-8") if stream_message.Topic() else "", + stream_message.Protocol(), ) ) @@ -48,11 +50,13 @@ def deserialise_rf5k(buffer: Union[bytearray, bytes]) -> ConfigurationUpdate: def serialise_stream( builder: flatbuffers.Builder, + protocol: Protocol, channel_offset: int, schema_offset: int, topic_offset: int, ) -> int: Stream.StreamStart(builder) + Stream.StreamAddProtocol(builder, protocol) Stream.StreamAddTopic(builder, topic_offset) Stream.StreamAddSchema(builder, schema_offset) Stream.StreamAddChannel(builder, channel_offset) @@ -81,8 +85,8 @@ def serialise_rf5k(config_change: UpdateType, streams: List[StreamInfo]) -> byte for stream in streams ] stream_offsets = [ - serialise_stream(builder, *stream_fields) - for stream_fields in stream_field_offsets + serialise_stream(builder, stream.protocol, *stream_fields) + for stream, stream_fields in zip(streams, stream_field_offsets) ] ConfigUpdate.ConfigUpdateStartStreamsVector(builder, len(streams)) diff --git a/tests/test_rf5k.py b/tests/test_rf5k.py index 41514c9..ffd8784 100644 --- a/tests/test_rf5k.py +++ b/tests/test_rf5k.py @@ -3,6 +3,7 @@ serialise_rf5k, deserialise_rf5k, StreamInfo, + Protocol, ) from streaming_data_types import SERIALISERS, DESERIALISERS from streaming_data_types.fbschemas.forwarder_config_update_rf5k.UpdateType import ( @@ -15,8 +16,8 @@ def test_serialises_and_deserialises_rf5k_message_correctly(self): """ Round-trip to check what we serialise is what we get back. 
""" - stream_1 = StreamInfo("channel1", "f142", "topic1") - stream_2 = StreamInfo("channel2", "TdcTime", "topic2") + stream_1 = StreamInfo("channel1", "f142", "topic1", Protocol.Protocol.PVA) + stream_2 = StreamInfo("channel2", "TdcTime", "topic2", Protocol.Protocol.CA) original_entry = { "config_change": UpdateType.ADD, "streams": [stream_1, stream_2], From 879a78295c15b15d81769a2da339dddb9951c7e7 Mon Sep 17 00:00:00 2001 From: Matthew D Jones Date: Sun, 12 Jul 2020 14:56:12 +0100 Subject: [PATCH 150/363] Test and impl to handle case of no streams in flatbuffer --- .../forwarder_config_update_rf5k.py | 34 +++++++++++-------- tests/test_rf5k.py | 16 ++++++++- 2 files changed, 34 insertions(+), 16 deletions(-) diff --git a/streaming_data_types/forwarder_config_update_rf5k.py b/streaming_data_types/forwarder_config_update_rf5k.py index df9d2de..a7b9f0e 100644 --- a/streaming_data_types/forwarder_config_update_rf5k.py +++ b/streaming_data_types/forwarder_config_update_rf5k.py @@ -1,5 +1,6 @@ from collections import namedtuple import flatbuffers +from flatbuffers.packer import struct as flatbuffer_struct from streaming_data_types.utils import check_schema_identifier from streaming_data_types.fbschemas.forwarder_config_update_rf5k import ( UpdateType, @@ -28,22 +29,25 @@ def deserialise_rf5k(buffer: Union[bytearray, bytes]) -> ConfigurationUpdate: config_message = ConfigUpdate.ConfigUpdate.GetRootAsConfigUpdate(buffer, 0) streams = [] - for i in range(config_message.StreamsLength()): - stream_message = config_message.Streams(i) - streams.append( - StreamInfo( - stream_message.Channel().decode("utf-8") - if stream_message.Channel() - else "", - stream_message.Schema().decode("utf-8") - if stream_message.Schema() - else "", - stream_message.Topic().decode("utf-8") - if stream_message.Topic() - else "", - stream_message.Protocol(), + try: + for i in range(config_message.StreamsLength()): + stream_message = config_message.Streams(i) + streams.append( + StreamInfo( + 
stream_message.Channel().decode("utf-8") + if stream_message.Channel() + else "", + stream_message.Schema().decode("utf-8") + if stream_message.Schema() + else "", + stream_message.Topic().decode("utf-8") + if stream_message.Topic() + else "", + stream_message.Protocol(), + ) ) - ) + except flatbuffer_struct.error: + pass # No streams in buffer return ConfigurationUpdate(config_message.ConfigChange(), streams) diff --git a/tests/test_rf5k.py b/tests/test_rf5k.py index ffd8784..1f012e3 100644 --- a/tests/test_rf5k.py +++ b/tests/test_rf5k.py @@ -12,7 +12,7 @@ class TestEncoder(object): - def test_serialises_and_deserialises_rf5k_message_correctly(self): + def test_serialises_and_deserialises_rf5k_message_with_streams_correctly(self): """ Round-trip to check what we serialise is what we get back. """ @@ -30,6 +30,20 @@ def test_serialises_and_deserialises_rf5k_message_correctly(self): assert stream_1 in entry.streams assert stream_2 in entry.streams + def test_serialises_and_deserialises_rf5k_message_without_streams_correctly(self): + """ + Round-trip to check what we serialise is what we get back. 
+ """ + original_entry = { + "config_change": UpdateType.REMOVEALL, + "streams": [], + } + + buf = serialise_rf5k(**original_entry) + entry = deserialise_rf5k(buf) + + assert entry.config_change == original_entry["config_change"] + def test_if_buffer_has_wrong_id_then_throws(self): original_entry = {"config_change": UpdateType.REMOVEALL, "streams": []} From 3ea47bb4a9027812e81a2985d3c4fc7f98acf5cb Mon Sep 17 00:00:00 2001 From: Matthew D Jones Date: Thu, 16 Jul 2020 10:33:13 +0100 Subject: [PATCH 151/363] Small refactor from PR suggestions --- streaming_data_types/histogram_hs00.py | 28 +++++++++++++------------- 1 file changed, 14 insertions(+), 14 deletions(-) diff --git a/streaming_data_types/histogram_hs00.py b/streaming_data_types/histogram_hs00.py index 49a4200..52b3238 100644 --- a/streaming_data_types/histogram_hs00.py +++ b/streaming_data_types/histogram_hs00.py @@ -15,15 +15,15 @@ FILE_IDENTIFIER = b"hs00" +_array_for_type = { + Array.ArrayUInt: ArrayUInt.ArrayUInt(), + Array.ArrayULong: ArrayULong.ArrayULong(), + Array.ArrayFloat: ArrayFloat.ArrayFloat(), +} + + def _create_array_object_for_type(array_type): - if array_type == Array.ArrayUInt: - return ArrayUInt.ArrayUInt() - elif array_type == Array.ArrayULong: - return ArrayULong.ArrayULong() - elif array_type == Array.ArrayFloat: - return ArrayFloat.ArrayFloat() - else: - return ArrayDouble.ArrayDouble() + return _array_for_type.get(array_type, default=ArrayDouble.ArrayDouble()) def deserialise_hs00(buffer): @@ -190,18 +190,18 @@ def _serialise_array(builder, data_len, data): # Carefully preserve explicitly supported types if numpy.issubdtype(flattened_data.dtype, numpy.uint32): return _serialise_uint32(builder, data_len, flattened_data) - elif numpy.issubdtype(flattened_data.dtype, numpy.uint64): + if numpy.issubdtype(flattened_data.dtype, numpy.uint64): return _serialise_uint64(builder, data_len, flattened_data) - elif numpy.issubdtype(flattened_data.dtype, numpy.float32): + if 
numpy.issubdtype(flattened_data.dtype, numpy.float32): return _serialise_float(builder, data_len, flattened_data) - elif numpy.issubdtype(flattened_data.dtype, numpy.float64): + if numpy.issubdtype(flattened_data.dtype, numpy.float64): return _serialise_double(builder, data_len, flattened_data) # Otherwise if it looks like an int then use uint64, or use double as last resort - elif numpy.issubdtype(flattened_data.dtype, numpy.int64): + if numpy.issubdtype(flattened_data.dtype, numpy.int64): return _serialise_uint64(builder, data_len, flattened_data) - else: - return _serialise_double(builder, data_len, flattened_data) + + return _serialise_double(builder, data_len, flattened_data) def _serialise_float(builder, data_len, flattened_data): From 1eaf86f7269c6a88dd2da2c35307d70336772305 Mon Sep 17 00:00:00 2001 From: Matthew D Jones Date: Thu, 16 Jul 2020 15:35:23 +0100 Subject: [PATCH 152/363] Small refactor from PR suggestions --- streaming_data_types/histogram_hs00.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/streaming_data_types/histogram_hs00.py b/streaming_data_types/histogram_hs00.py index 52b3238..480c84d 100644 --- a/streaming_data_types/histogram_hs00.py +++ b/streaming_data_types/histogram_hs00.py @@ -23,7 +23,7 @@ def _create_array_object_for_type(array_type): - return _array_for_type.get(array_type, default=ArrayDouble.ArrayDouble()) + return _array_for_type.get(array_type, ArrayDouble.ArrayDouble()) def deserialise_hs00(buffer): From 72a05e048c09185e2456d1e93aa68d5a5494571a Mon Sep 17 00:00:00 2001 From: Matthew D Jones Date: Thu, 16 Jul 2020 15:45:42 +0100 Subject: [PATCH 153/363] Bump version to 0.9.0 --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index af406bc..c0d9928 100644 --- a/setup.py +++ b/setup.py @@ -15,7 +15,7 @@ setup( name="ess_streaming_data_types", - version="0.8.1", + version="0.9.0", description=DESCRIPTION, long_description=LONG_DESCRIPTION, 
long_description_content_type="text/markdown", From 668a34b2dbfc7b0d2b3a421efc46f56ac526ba0c Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Thu, 30 Jul 2020 10:36:04 +0200 Subject: [PATCH 154/363] Use official flake8 repo for pre-commit hook --- .pre-commit-config.yaml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index e97f183..038f378 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -4,7 +4,7 @@ repos: hooks: - id: black language_version: python3.7 -- repo: https://github.com/pre-commit/pre-commit-hooks - rev: v1.2.3 +- repo: https://github.com/pycqa/flake8 + rev: 3.8.3 hooks: - - id: flake8 \ No newline at end of file + - id: flake8 From 665a028384b09116e31ac3e31590733e44f67c62 Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Wed, 5 Aug 2020 07:12:43 +0000 Subject: [PATCH 155/363] Formatting mistake --- README_DEV.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README_DEV.md b/README_DEV.md index 946cc31..51b6af9 100644 --- a/README_DEV.md +++ b/README_DEV.md @@ -19,7 +19,7 @@ This command can also be used to run the hooks manually. ### Adding new schemas checklist (important) * Add unit-tests (see existing tests for an example) -* Update fbschemas.__init__ to include the new serialiser and deserialiser +* Update `fbschemas.__init__.py` to include the new serialiser and deserialiser ### Tox Tox allows the unit tests to be run against multiple versions of Python. From 3e50ba1980667ceff064d62792217c4bbfcdddb5 Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Wed, 5 Aug 2020 07:26:21 +0000 Subject: [PATCH 156/363] More specific details for devs --- README_DEV.md | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/README_DEV.md b/README_DEV.md index 51b6af9..70f9aa3 100644 --- a/README_DEV.md +++ b/README_DEV.md @@ -18,8 +18,13 @@ pre-commit run --all-files This command can also be used to run the hooks manually. 
### Adding new schemas checklist (important) +* Generate Python bindings for the schema using FlatBuffers' `flatc` executable +* Add the generated bindings to the project * Add unit-tests (see existing tests for an example) * Update `fbschemas.__init__.py` to include the new serialiser and deserialiser +* Check whether the serialised data produced by the new code can be verified in C++ + * Don't worry if it cannot be - it seems to be an inherent FlatBuffers issue +* Add the schema and verifiability to the table of schemas in `README.md` ### Tox Tox allows the unit tests to be run against multiple versions of Python. From 156d2f250ee0ba3bc40a36874edc0c22af7a0d87 Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Wed, 5 Aug 2020 07:32:07 +0000 Subject: [PATCH 157/363] Update README_DEV.md --- README_DEV.md | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/README_DEV.md b/README_DEV.md index 70f9aa3..34176d4 100644 --- a/README_DEV.md +++ b/README_DEV.md @@ -23,8 +23,9 @@ This command can also be used to run the hooks manually. * Add unit-tests (see existing tests for an example) * Update `fbschemas.__init__.py` to include the new serialiser and deserialiser * Check whether the serialised data produced by the new code can be verified in C++ - * Don't worry if it cannot be - it seems to be an inherent FlatBuffers issue -* Add the schema and verifiability to the table of schemas in `README.md` + * There is a helper program in the [FlatBufferVerification](https://github.com/ess-dmsc/FlatBufferVerification) repository + * Don't worry if it fails verification - it seems to be an inherent FlatBuffers issue +* Add the schema and verifiability result to the table of schemas in `README.md` ### Tox Tox allows the unit tests to be run against multiple versions of Python. 
From 387f88b5ec7d71752b6c2841a748bd721acbc326 Mon Sep 17 00:00:00 2001 From: Matthew D Jones Date: Tue, 11 Aug 2020 09:18:41 +0100 Subject: [PATCH 158/363] Add test case for single timestamp --- tests/test_tdct.py | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/tests/test_tdct.py b/tests/test_tdct.py index 1e96d26..034f9b7 100644 --- a/tests/test_tdct.py +++ b/tests/test_tdct.py @@ -27,13 +27,20 @@ def test_serialises_and_deserialises_tdct_message_with_list_of_timestamps(self): def test_serialises_and_deserialises_tdct_message_with_array_of_timestamps(self): original_entry = {"name": "some_name", "timestamps": np.array([0, 1, 2, 3, 4])} - buf = serialise_tdct(**self.original_entry) + buf = serialise_tdct(**original_entry) deserialised_tuple = deserialise_tdct(buf) assert deserialised_tuple.name == original_entry["name"] - assert np.allclose( - deserialised_tuple.timestamps, self.original_entry["timestamps"] - ) + assert np.allclose(deserialised_tuple.timestamps, original_entry["timestamps"]) + + def test_serialises_and_deserialises_tdct_message_with_single_timestamp(self): + original_entry = {"name": "some_name", "timestamps": np.array(0)} + + buf = serialise_tdct(**original_entry) + deserialised_tuple = deserialise_tdct(buf) + + assert deserialised_tuple.name == original_entry["name"] + assert np.allclose(deserialised_tuple.timestamps, original_entry["timestamps"]) def test_if_buffer_has_wrong_id_then_throws(self): buf = serialise_tdct(**self.original_entry) From f3274b1058ce4e07bb47f8a9db22f143d9db5eba Mon Sep 17 00:00:00 2001 From: Matthew D Jones Date: Tue, 11 Aug 2020 09:19:20 +0100 Subject: [PATCH 159/363] Fix serialising for single timestamp to tdct --- streaming_data_types/timestamps_tdct.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/streaming_data_types/timestamps_tdct.py b/streaming_data_types/timestamps_tdct.py index 93ad450..ab28f83 100644 --- a/streaming_data_types/timestamps_tdct.py +++ 
b/streaming_data_types/timestamps_tdct.py @@ -23,7 +23,7 @@ def serialise_tdct( ) -> bytes: builder = flatbuffers.Builder(136) - timestamps = np.array(timestamps).astype(np.uint64) + timestamps = np.atleast_1d(np.array(timestamps)).astype(np.uint64) name_offset = builder.CreateString(name) From ee74e4411cf601cf872a12332bba5ff9af9c4caf Mon Sep 17 00:00:00 2001 From: Matthew D Jones Date: Tue, 11 Aug 2020 10:40:18 +0100 Subject: [PATCH 160/363] Bump version to 0.9.1 --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index c0d9928..e4c8871 100644 --- a/setup.py +++ b/setup.py @@ -15,7 +15,7 @@ setup( name="ess_streaming_data_types", - version="0.9.0", + version="0.9.1", description=DESCRIPTION, long_description=LONG_DESCRIPTION, long_description_content_type="text/markdown", From bf3233c6ad72fbde061bd903c7f8baae135b7edd Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Wed, 26 Aug 2020 09:16:29 +0200 Subject: [PATCH 161/363] Excluding `tests` folder is not sufficient to stop the tests appearing in the whl and being installed in the root of site-packages. --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index e4c8871..1681264 100644 --- a/setup.py +++ b/setup.py @@ -22,7 +22,7 @@ author="ScreamingUdder", url="https://github.com/ess-dmsc/python-streaming-data-types", license="BSD 2-Clause License", - packages=find_packages(exclude="tests"), + packages=find_packages(exclude=["tests", "tests.*"]), python_requires=">=3.6.0", install_requires=["flatbuffers", "numpy"], extras_require={"dev": ["flake8", "pre-commit", "pytest", "tox"]}, From 1458e1516e11ff3d66a707e2dd16c8c8125ef1f3 Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Wed, 26 Aug 2020 09:20:55 +0200 Subject: [PATCH 162/363] Make the importance of cleaning the build clearer! 
--- README_DEV.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README_DEV.md b/README_DEV.md index 34176d4..03f67d9 100644 --- a/README_DEV.md +++ b/README_DEV.md @@ -46,7 +46,7 @@ tox #### Steps -Delete any old builds you may have: +Delete any old builds you may have (IMPORTANT!): ``` rm -rf build dist ``` From e0ad6b10b67f238bd6adc934cf16bcfb815b164a Mon Sep 17 00:00:00 2001 From: Jonas Nilsson Date: Mon, 7 Sep 2020 15:57:47 +0200 Subject: [PATCH 163/363] Action response code. --- requirements.txt | 2 +- streaming_data_types/__init__.py | 3 + streaming_data_types/action_response_answ.py | 39 +++++++++++ .../action_response_answ/ActionOutcome.py | 8 +++ .../action_response_answ/ActionResponse.py | 68 +++++++++++++++++++ .../action_response_answ/ActionType.py | 10 +++ .../action_response_answ/__init__.py | 0 tests/test_answ.py | 49 +++++++++++++ 8 files changed, 178 insertions(+), 1 deletion(-) create mode 100644 streaming_data_types/action_response_answ.py create mode 100644 streaming_data_types/fbschemas/action_response_answ/ActionOutcome.py create mode 100644 streaming_data_types/fbschemas/action_response_answ/ActionResponse.py create mode 100644 streaming_data_types/fbschemas/action_response_answ/ActionType.py create mode 100644 streaming_data_types/fbschemas/action_response_answ/__init__.py create mode 100644 tests/test_answ.py diff --git a/requirements.txt b/requirements.txt index 932ca93..9b4366a 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,2 +1,2 @@ -flatbuffers>=1.11 +flatbuffers>=1.12 numpy diff --git a/streaming_data_types/__init__.py b/streaming_data_types/__init__.py index 0fcc72d..75bdeca 100644 --- a/streaming_data_types/__init__.py +++ b/streaming_data_types/__init__.py @@ -5,6 +5,7 @@ from streaming_data_types.run_start_pl72 import deserialise_pl72, serialise_pl72 from streaming_data_types.run_stop_6s4t import deserialise_6s4t, serialise_6s4t from streaming_data_types.status_x5f2 import deserialise_x5f2, 
serialise_x5f2 +from streaming_data_types.action_response_answ import deserialise_answ, serialise_answ from streaming_data_types.epics_connection_info_ep00 import ( deserialise_ep00, serialise_ep00, @@ -27,6 +28,7 @@ "ep00": serialise_ep00, "tdct": serialise_tdct, "rf5k": serialise_rf5k, + "answ": serialise_answ, } @@ -41,4 +43,5 @@ "ep00": deserialise_ep00, "tdct": deserialise_tdct, "rf5k": deserialise_rf5k, + "answ": deserialise_answ, } diff --git a/streaming_data_types/action_response_answ.py b/streaming_data_types/action_response_answ.py new file mode 100644 index 0000000..ae82e5c --- /dev/null +++ b/streaming_data_types/action_response_answ.py @@ -0,0 +1,39 @@ +import flatbuffers +import streaming_data_types.fbschemas.action_response_answ.ActionOutcome as ActionOutcome +import streaming_data_types.fbschemas.action_response_answ.ActionResponse as ActionResponse +import streaming_data_types.fbschemas.action_response_answ.ActionType as ActionType +from streaming_data_types.utils import check_schema_identifier +from typing import Optional, Union +from collections import namedtuple + +FILE_IDENTIFIER = b"answ" + + +def serialise_answ(service_id: str, job_id: str, action: ActionType, outcome: ActionOutcome, message: str) -> bytes: + builder = flatbuffers.Builder(500) + service_id_offset = builder.CreateString(service_id) + job_id_offset = builder.CreateString(job_id) + message_offset = builder.CreateString(message) + + ActionResponse.ActionResponseStart(builder) + ActionResponse.ActionResponseAddServiceId(builder, service_id_offset) + ActionResponse.ActionResponseAddJobId(builder, job_id_offset) + ActionResponse.ActionResponseAddAction(builder, action) + ActionResponse.ActionResponseAddOutcome(builder, outcome) + ActionResponse.ActionResponseAddMessage(builder, message_offset) + + out_message = ActionResponse.ActionResponseEnd(builder) + builder.Finish(out_message) + output_buffer = builder.Output() + output_buffer[4:8] = FILE_IDENTIFIER + + return 
bytes(output_buffer) + + +Response = namedtuple("Response", ("service_id", "job_id", "action", "outcome", "message",),) + + +def deserialise_answ(buffer: Union[bytearray, bytes]): + check_schema_identifier(buffer, FILE_IDENTIFIER) + answ_message = ActionResponse.ActionResponse.GetRootAsActionResponse(buffer, 0) + return Response(answ_message.ServiceId().decode("utf-8"), answ_message.JobId().decode("utf-8"), answ_message.Action(), answ_message.Outcome(), answ_message.Message().decode("utf-8")) diff --git a/streaming_data_types/fbschemas/action_response_answ/ActionOutcome.py b/streaming_data_types/fbschemas/action_response_answ/ActionOutcome.py new file mode 100644 index 0000000..c2f2f39 --- /dev/null +++ b/streaming_data_types/fbschemas/action_response_answ/ActionOutcome.py @@ -0,0 +1,8 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +class ActionOutcome(object): + Success = 0 + Failure = 1 + diff --git a/streaming_data_types/fbschemas/action_response_answ/ActionResponse.py b/streaming_data_types/fbschemas/action_response_answ/ActionResponse.py new file mode 100644 index 0000000..0df2023 --- /dev/null +++ b/streaming_data_types/fbschemas/action_response_answ/ActionResponse.py @@ -0,0 +1,68 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers +from flatbuffers.compat import import_numpy +np = import_numpy() + +class ActionResponse(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAsActionResponse(cls, buf, offset): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = ActionResponse() + x.Init(buf, n + offset) + return x + + @classmethod + def ActionResponseBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x61\x6E\x73\x77", size_prefixed=size_prefixed) + + # ActionResponse + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # 
ActionResponse + def ServiceId(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + # ActionResponse + def JobId(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + # ActionResponse + def Action(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) + return 0 + + # ActionResponse + def Outcome(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) + return 0 + + # ActionResponse + def Message(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + +def ActionResponseStart(builder): builder.StartObject(5) +def ActionResponseAddServiceId(builder, serviceId): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(serviceId), 0) +def ActionResponseAddJobId(builder, jobId): builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(jobId), 0) +def ActionResponseAddAction(builder, action): builder.PrependInt8Slot(2, action, 0) +def ActionResponseAddOutcome(builder, outcome): builder.PrependInt8Slot(3, outcome, 0) +def ActionResponseAddMessage(builder, message): builder.PrependUOffsetTRelativeSlot(4, flatbuffers.number_types.UOffsetTFlags.py_type(message), 0) +def ActionResponseEnd(builder): return builder.EndObject() diff --git a/streaming_data_types/fbschemas/action_response_answ/ActionType.py b/streaming_data_types/fbschemas/action_response_answ/ActionType.py new file mode 100644 index 0000000..27a2370 --- /dev/null +++ 
b/streaming_data_types/fbschemas/action_response_answ/ActionType.py @@ -0,0 +1,10 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +class ActionType(object): + StartJob = 0 + SetStopTime = 1 + StopNow = 2 + HasStopped = 3 + diff --git a/streaming_data_types/fbschemas/action_response_answ/__init__.py b/streaming_data_types/fbschemas/action_response_answ/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/test_answ.py b/tests/test_answ.py new file mode 100644 index 0000000..17fd0a6 --- /dev/null +++ b/tests/test_answ.py @@ -0,0 +1,49 @@ +import pytest +from streaming_data_types.action_response_answ import serialise_answ, deserialise_answ, ActionType, ActionOutcome +from streaming_data_types import SERIALISERS, DESERIALISERS + + +class TestEncoder(object): + def test_serialise_and_deserialise_answ_message(self): + """ + Round-trip to check what we serialise is what we get back. + """ + + original_entry = { + "service_id": "some_service_id_1234", + "job_id": "some_job_id_abcdef", + "action": ActionType.ActionType.SetStopTime, + "outcome": ActionOutcome.ActionOutcome.Failure, + "message": "some random error message", + } + + buf = serialise_answ(**original_entry) + entry = deserialise_answ(buf) + + assert entry.service_id == original_entry["service_id"] + assert entry.job_id == original_entry["job_id"] + assert entry.message == original_entry["message"] + assert entry.action == original_entry["action"] + assert entry.outcome == original_entry["outcome"] + + def test_if_buffer_has_wrong_id_then_throws(self): + original_entry = { + "service_id": "some_service_id_1234", + "job_id": "some_job_id_abcdef", + "action": ActionType.ActionType.SetStopTime, + "outcome": ActionOutcome.ActionOutcome.Failure, + "message": "some random error message", + } + + buf = serialise_answ(**original_entry) + + # Manually hack the id + buf = bytearray(buf) + buf[4:8] = b"1234" + + with pytest.raises(RuntimeError): + 
deserialise_answ(buf) + + def test_schema_type_is_in_global_serialisers_list(self): + assert "answ" in SERIALISERS + assert "answ" in DESERIALISERS From 04675a9b06a50679fb17106978adbad9f2601af4 Mon Sep 17 00:00:00 2001 From: Jonas Nilsson Date: Mon, 7 Sep 2020 16:08:32 +0200 Subject: [PATCH 164/363] Minor import change. --- streaming_data_types/action_response_answ.py | 6 +++--- tests/test_answ.py | 8 ++++---- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/streaming_data_types/action_response_answ.py b/streaming_data_types/action_response_answ.py index ae82e5c..b3f6cc6 100644 --- a/streaming_data_types/action_response_answ.py +++ b/streaming_data_types/action_response_answ.py @@ -1,9 +1,9 @@ import flatbuffers -import streaming_data_types.fbschemas.action_response_answ.ActionOutcome as ActionOutcome +from streaming_data_types.fbschemas.action_response_answ.ActionOutcome import ActionOutcome import streaming_data_types.fbschemas.action_response_answ.ActionResponse as ActionResponse -import streaming_data_types.fbschemas.action_response_answ.ActionType as ActionType +from streaming_data_types.fbschemas.action_response_answ.ActionType import ActionType from streaming_data_types.utils import check_schema_identifier -from typing import Optional, Union +from typing import Union from collections import namedtuple FILE_IDENTIFIER = b"answ" diff --git a/tests/test_answ.py b/tests/test_answ.py index 17fd0a6..93f83ac 100644 --- a/tests/test_answ.py +++ b/tests/test_answ.py @@ -12,8 +12,8 @@ def test_serialise_and_deserialise_answ_message(self): original_entry = { "service_id": "some_service_id_1234", "job_id": "some_job_id_abcdef", - "action": ActionType.ActionType.SetStopTime, - "outcome": ActionOutcome.ActionOutcome.Failure, + "action": ActionType.SetStopTime, + "outcome": ActionOutcome.Failure, "message": "some random error message", } @@ -30,8 +30,8 @@ def test_if_buffer_has_wrong_id_then_throws(self): original_entry = { "service_id": 
"some_service_id_1234", "job_id": "some_job_id_abcdef", - "action": ActionType.ActionType.SetStopTime, - "outcome": ActionOutcome.ActionOutcome.Failure, + "action": ActionType.SetStopTime, + "outcome": ActionOutcome.Failure, "message": "some random error message", } From 854bbc503c81de91bc16d7d7bba68cd4ccefe35b Mon Sep 17 00:00:00 2001 From: Jonas Nilsson Date: Mon, 7 Sep 2020 18:10:42 +0200 Subject: [PATCH 165/363] Updated the version. --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 1681264..6869ae9 100644 --- a/setup.py +++ b/setup.py @@ -15,7 +15,7 @@ setup( name="ess_streaming_data_types", - version="0.9.1", + version="0.9.2", description=DESCRIPTION, long_description=LONG_DESCRIPTION, long_description_content_type="text/markdown", From 61ca9553940f3a7e42d32968c40320633e13cbd5 Mon Sep 17 00:00:00 2001 From: Jonas Nilsson Date: Thu, 17 Sep 2020 20:06:52 +0200 Subject: [PATCH 166/363] Added command id. --- setup.py | 2 +- streaming_data_types/action_response_answ.py | 8 ++- .../action_response_answ/ActionResponse.py | 10 +++- .../fbschemas/run_stop_6s4t/RunStop.py | 56 ++++++++----------- streaming_data_types/run_stop_6s4t.py | 8 ++- tests/test_6s4t.py | 2 + tests/test_answ.py | 3 + 7 files changed, 48 insertions(+), 41 deletions(-) diff --git a/setup.py b/setup.py index 6869ae9..0091d36 100644 --- a/setup.py +++ b/setup.py @@ -15,7 +15,7 @@ setup( name="ess_streaming_data_types", - version="0.9.2", + version="0.9.3", description=DESCRIPTION, long_description=LONG_DESCRIPTION, long_description_content_type="text/markdown", diff --git a/streaming_data_types/action_response_answ.py b/streaming_data_types/action_response_answ.py index b3f6cc6..a2396ed 100644 --- a/streaming_data_types/action_response_answ.py +++ b/streaming_data_types/action_response_answ.py @@ -9,11 +9,12 @@ FILE_IDENTIFIER = b"answ" -def serialise_answ(service_id: str, job_id: str, action: ActionType, outcome: ActionOutcome, message: str) -> 
bytes: +def serialise_answ(service_id: str, job_id: str, command_id: str, action: ActionType, outcome: ActionOutcome, message: str) -> bytes: builder = flatbuffers.Builder(500) service_id_offset = builder.CreateString(service_id) job_id_offset = builder.CreateString(job_id) message_offset = builder.CreateString(message) + command_id_offset = builder.CreateString(command_id) ActionResponse.ActionResponseStart(builder) ActionResponse.ActionResponseAddServiceId(builder, service_id_offset) @@ -21,6 +22,7 @@ def serialise_answ(service_id: str, job_id: str, action: ActionType, outcome: Ac ActionResponse.ActionResponseAddAction(builder, action) ActionResponse.ActionResponseAddOutcome(builder, outcome) ActionResponse.ActionResponseAddMessage(builder, message_offset) + ActionResponse.ActionResponseAddCommandId(builder, command_id_offset) out_message = ActionResponse.ActionResponseEnd(builder) builder.Finish(out_message) @@ -30,10 +32,10 @@ def serialise_answ(service_id: str, job_id: str, action: ActionType, outcome: Ac return bytes(output_buffer) -Response = namedtuple("Response", ("service_id", "job_id", "action", "outcome", "message",),) +Response = namedtuple("Response", ("service_id", "job_id", "command_id", "action", "outcome", "message",),) def deserialise_answ(buffer: Union[bytearray, bytes]): check_schema_identifier(buffer, FILE_IDENTIFIER) answ_message = ActionResponse.ActionResponse.GetRootAsActionResponse(buffer, 0) - return Response(answ_message.ServiceId().decode("utf-8"), answ_message.JobId().decode("utf-8"), answ_message.Action(), answ_message.Outcome(), answ_message.Message().decode("utf-8")) + return Response(answ_message.ServiceId().decode("utf-8"), answ_message.JobId().decode("utf-8"), answ_message.CommandId().decode("utf-8"), answ_message.Action(), answ_message.Outcome(), answ_message.Message().decode("utf-8")) diff --git a/streaming_data_types/fbschemas/action_response_answ/ActionResponse.py 
b/streaming_data_types/fbschemas/action_response_answ/ActionResponse.py index 0df2023..fdb696d 100644 --- a/streaming_data_types/fbschemas/action_response_answ/ActionResponse.py +++ b/streaming_data_types/fbschemas/action_response_answ/ActionResponse.py @@ -59,10 +59,18 @@ def Message(self): return self._tab.String(o + self._tab.Pos) return None -def ActionResponseStart(builder): builder.StartObject(5) + # ActionResponse + def CommandId(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + +def ActionResponseStart(builder): builder.StartObject(6) def ActionResponseAddServiceId(builder, serviceId): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(serviceId), 0) def ActionResponseAddJobId(builder, jobId): builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(jobId), 0) def ActionResponseAddAction(builder, action): builder.PrependInt8Slot(2, action, 0) def ActionResponseAddOutcome(builder, outcome): builder.PrependInt8Slot(3, outcome, 0) def ActionResponseAddMessage(builder, message): builder.PrependUOffsetTRelativeSlot(4, flatbuffers.number_types.UOffsetTFlags.py_type(message), 0) +def ActionResponseAddCommandId(builder, commandId): builder.PrependUOffsetTRelativeSlot(5, flatbuffers.number_types.UOffsetTFlags.py_type(commandId), 0) def ActionResponseEnd(builder): return builder.EndObject() diff --git a/streaming_data_types/fbschemas/run_stop_6s4t/RunStop.py b/streaming_data_types/fbschemas/run_stop_6s4t/RunStop.py index d30d41d..36c45f8 100644 --- a/streaming_data_types/fbschemas/run_stop_6s4t/RunStop.py +++ b/streaming_data_types/fbschemas/run_stop_6s4t/RunStop.py @@ -1,12 +1,13 @@ # automatically generated by the FlatBuffers compiler, do not modify -# namespace: +# namespace: import flatbuffers - +from flatbuffers.compat import import_numpy +np = import_numpy() class RunStop(object): - __slots__ = 
["_tab"] + __slots__ = ['_tab'] @classmethod def GetRootAsRunStop(cls, buf, offset): @@ -15,6 +16,10 @@ def GetRootAsRunStop(cls, buf, offset): x.Init(buf, n + offset) return x + @classmethod + def RunStopBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x36\x73\x34\x74", size_prefixed=size_prefixed) + # RunStop def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) @@ -23,9 +28,7 @@ def Init(self, buf, pos): def StopTime(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) if o != 0: - return self._tab.Get( - flatbuffers.number_types.Uint64Flags, o + self._tab.Pos - ) + return self._tab.Get(flatbuffers.number_types.Uint64Flags, o + self._tab.Pos) return 0 # RunStop @@ -49,32 +52,17 @@ def ServiceId(self): return self._tab.String(o + self._tab.Pos) return None + # RunStop + def CommandId(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None -def RunStopStart(builder): - builder.StartObject(4) - - -def RunStopAddStopTime(builder, stopTime): - builder.PrependUint64Slot(0, stopTime, 0) - - -def RunStopAddRunName(builder, runName): - builder.PrependUOffsetTRelativeSlot( - 1, flatbuffers.number_types.UOffsetTFlags.py_type(runName), 0 - ) - - -def RunStopAddJobId(builder, jobId): - builder.PrependUOffsetTRelativeSlot( - 2, flatbuffers.number_types.UOffsetTFlags.py_type(jobId), 0 - ) - - -def RunStopAddServiceId(builder, serviceId): - builder.PrependUOffsetTRelativeSlot( - 3, flatbuffers.number_types.UOffsetTFlags.py_type(serviceId), 0 - ) - - -def RunStopEnd(builder): - return builder.EndObject() +def RunStopStart(builder): builder.StartObject(5) +def RunStopAddStopTime(builder, stopTime): builder.PrependUint64Slot(0, stopTime, 0) +def RunStopAddRunName(builder, runName): builder.PrependUOffsetTRelativeSlot(1, 
flatbuffers.number_types.UOffsetTFlags.py_type(runName), 0) +def RunStopAddJobId(builder, jobId): builder.PrependUOffsetTRelativeSlot(2, flatbuffers.number_types.UOffsetTFlags.py_type(jobId), 0) +def RunStopAddServiceId(builder, serviceId): builder.PrependUOffsetTRelativeSlot(3, flatbuffers.number_types.UOffsetTFlags.py_type(serviceId), 0) +def RunStopAddCommandId(builder, commandId): builder.PrependUOffsetTRelativeSlot(4, flatbuffers.number_types.UOffsetTFlags.py_type(commandId), 0) +def RunStopEnd(builder): return builder.EndObject() diff --git a/streaming_data_types/run_stop_6s4t.py b/streaming_data_types/run_stop_6s4t.py index f37292e..e4965e9 100644 --- a/streaming_data_types/run_stop_6s4t.py +++ b/streaming_data_types/run_stop_6s4t.py @@ -11,6 +11,7 @@ def serialise_6s4t( job_id: str, run_name: str = "test_run", service_id: str = "", + command_id: str = "", stop_time: Optional[int] = None, ) -> bytes: builder = flatbuffers.Builder(136) @@ -23,6 +24,7 @@ def serialise_6s4t( service_id_offset = builder.CreateString(service_id) job_id_offset = builder.CreateString(job_id) run_name_offset = builder.CreateString(run_name) + command_id_offset = builder.CreateString(command_id) # Build the actual buffer RunStop.RunStopStart(builder) @@ -30,6 +32,7 @@ def serialise_6s4t( RunStop.RunStopAddJobId(builder, job_id_offset) RunStop.RunStopAddRunName(builder, run_name_offset) RunStop.RunStopAddStopTime(builder, stop_time) + RunStop.RunStopAddCommandId(builder, command_id_offset) run_stop_message = RunStop.RunStopEnd(builder) builder.Finish(run_stop_message) @@ -41,7 +44,7 @@ def serialise_6s4t( RunStopInfo = namedtuple( - "RunStopInfo", ("stop_time", "run_name", "job_id", "service_id") + "RunStopInfo", ("stop_time", "run_name", "job_id", "service_id", "command_id") ) @@ -53,7 +56,8 @@ def deserialise_6s4t(buffer: Union[bytearray, bytes]) -> RunStopInfo: job_id = run_stop.JobId() if run_stop.JobId() else b"" run_name = run_stop.RunName() if run_stop.RunName() else b"" 
stop_time = run_stop.StopTime() + command_id = run_stop.CommandId() return RunStopInfo( - stop_time, run_name.decode(), job_id.decode(), service_id.decode() + stop_time, run_name.decode(), job_id.decode(), service_id.decode(), command_id.decode() ) diff --git a/tests/test_6s4t.py b/tests/test_6s4t.py index 21bcd17..54d747b 100644 --- a/tests/test_6s4t.py +++ b/tests/test_6s4t.py @@ -9,6 +9,7 @@ class TestSerialisation6s4t: "stop_time": 578214, "run_name": "test_run", "service_id": "filewriter1", + "command_id": "some command id", } def test_serialises_and_deserialises_6s4t_message_correctly(self): @@ -19,6 +20,7 @@ def test_serialises_and_deserialises_6s4t_message_correctly(self): assert deserialised_tuple.stop_time == self.original_entry["stop_time"] assert deserialised_tuple.run_name == self.original_entry["run_name"] assert deserialised_tuple.service_id == self.original_entry["service_id"] + assert deserialised_tuple.command_id == self.original_entry["command_id"] def test_if_buffer_has_wrong_id_then_throws(self): buf = serialise_6s4t(**self.original_entry) diff --git a/tests/test_answ.py b/tests/test_answ.py index 93f83ac..5324a14 100644 --- a/tests/test_answ.py +++ b/tests/test_answ.py @@ -12,6 +12,7 @@ def test_serialise_and_deserialise_answ_message(self): original_entry = { "service_id": "some_service_id_1234", "job_id": "some_job_id_abcdef", + "command_id": "some command id", "action": ActionType.SetStopTime, "outcome": ActionOutcome.Failure, "message": "some random error message", @@ -21,6 +22,7 @@ def test_serialise_and_deserialise_answ_message(self): entry = deserialise_answ(buf) assert entry.service_id == original_entry["service_id"] + assert entry.command_id == original_entry["command_id"] assert entry.job_id == original_entry["job_id"] assert entry.message == original_entry["message"] assert entry.action == original_entry["action"] @@ -30,6 +32,7 @@ def test_if_buffer_has_wrong_id_then_throws(self): original_entry = { "service_id": 
"some_service_id_1234", "job_id": "some_job_id_abcdef", + "command_id": "some command id", "action": ActionType.SetStopTime, "outcome": ActionOutcome.Failure, "message": "some random error message", From edcbb697aebd04e1db7e83bb780e210b2c51e443 Mon Sep 17 00:00:00 2001 From: Jonas Nilsson Date: Thu, 17 Sep 2020 20:19:18 +0200 Subject: [PATCH 167/363] Documentation update. --- README_DEV.md | 15 ++++++++++++++- 1 file changed, 14 insertions(+), 1 deletion(-) diff --git a/README_DEV.md b/README_DEV.md index 03f67d9..1794024 100644 --- a/README_DEV.md +++ b/README_DEV.md @@ -1,6 +1,19 @@ # Python Streaming Data Types ## For developers +### Install local (development) version + +First, uninstall existing version of the Python streaming data types pacakge. + +``` +pip uninstall ess-streaming-data-types +``` + +Then, from the _python-streaming-data-types_ root directory, run the following command: + +``` +pip install --user -e ./ +``` ### Install the commit hooks (important) There are commit hooks for Black and Flake8. 
@@ -68,7 +81,7 @@ twine upload --repository-url https://test.pypi.org/legacy/ dist/* The new module can then be installed from test.pypi.org like so: ``` -pip uninstall ess_streaming_data_types # Remove old version if present +pip uninstall ess-streaming-data-types # Remove old version if present pip install -i https://test.pypi.org/simple/ ess-streaming-data-types ``` Unfortunately, flatbuffers is not on test.pypi.org so the following error may occur: From 97c249ca7a845678ea9d8fe9705b5c1607e7492b Mon Sep 17 00:00:00 2001 From: Jonas Nilsson Date: Mon, 21 Sep 2020 13:52:20 +0200 Subject: [PATCH 168/363] Update setup.py --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 0091d36..6869ae9 100644 --- a/setup.py +++ b/setup.py @@ -15,7 +15,7 @@ setup( name="ess_streaming_data_types", - version="0.9.3", + version="0.9.2", description=DESCRIPTION, long_description=LONG_DESCRIPTION, long_description_content_type="text/markdown", From 67fffbafbd3c30f3fea5f1b0c7b1935a049008d2 Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Mon, 21 Sep 2020 13:55:13 +0200 Subject: [PATCH 169/363] Update README_DEV.md --- README_DEV.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README_DEV.md b/README_DEV.md index 1794024..771a69f 100644 --- a/README_DEV.md +++ b/README_DEV.md @@ -3,7 +3,7 @@ ### Install local (development) version -First, uninstall existing version of the Python streaming data types pacakge. +First, uninstall existing version of the Python streaming data types package. 
``` pip uninstall ess-streaming-data-types From 26b7fffa63a7846f6f3a7870e4c78cd80169fa36 Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Mon, 21 Sep 2020 11:58:31 +0000 Subject: [PATCH 170/363] Moved new instructions to more relevant location --- README_DEV.md | 25 ++++++++++++------------- 1 file changed, 12 insertions(+), 13 deletions(-) diff --git a/README_DEV.md b/README_DEV.md index 771a69f..1bf34b6 100644 --- a/README_DEV.md +++ b/README_DEV.md @@ -1,19 +1,6 @@ # Python Streaming Data Types ## For developers -### Install local (development) version - -First, uninstall existing version of the Python streaming data types package. - -``` -pip uninstall ess-streaming-data-types -``` - -Then, from the _python-streaming-data-types_ root directory, run the following command: - -``` -pip install --user -e ./ -``` ### Install the commit hooks (important) There are commit hooks for Black and Flake8. @@ -48,6 +35,18 @@ From the top directory: tox ``` +### Installing the development version locally +First, uninstall any existing versions of the Python streaming data types package: + +``` +pip uninstall ess-streaming-data-types +``` +Then, from the _python-streaming-data-types_ root directory, run the following command: + +``` +pip install --user -e ./ +``` + ### Building the package locally and deploying it to PyPI **First update the version number in setup.py and push the update to the repository.** From 2af628ca42cf133daabb1a1f145a999101c14f0d Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Mon, 21 Sep 2020 14:09:03 +0200 Subject: [PATCH 171/363] Added `answ` to the table in the README.md - still needs to be checked for verifiability --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index 8936bee..c532125 100644 --- a/README.md +++ b/README.md @@ -18,6 +18,7 @@ https://github.com/ess-dmsc/streaming-data-types |tdct|Timestamps|Y| |ep00|EPICS connection info|Y| |rf5k|Forwarder configuration update|Y| +|answ|File-writer command 
response|?| \* whether it passes verification via the C++ FlatBuffers library. From 68da85cd59f4691230a67dad5381f554be4eee8e Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Mon, 21 Sep 2020 14:27:46 +0200 Subject: [PATCH 172/363] Noticed some stray Py2 style and some uncommon naming. --- tests/test_f142.py | 2 +- tests/test_rf5k.py | 7 ++----- tests/test_x52f.py | 3 +-- 3 files changed, 4 insertions(+), 8 deletions(-) diff --git a/tests/test_f142.py b/tests/test_f142.py index 04aa46f..bf1e1ce 100644 --- a/tests/test_f142.py +++ b/tests/test_f142.py @@ -6,7 +6,7 @@ from streaming_data_types import SERIALISERS, DESERIALISERS -class TestSerialisationf142: +class TestSerialisationF142: original_entry = { "source_name": "some_source", "value": 578214, diff --git a/tests/test_rf5k.py b/tests/test_rf5k.py index 1f012e3..2614444 100644 --- a/tests/test_rf5k.py +++ b/tests/test_rf5k.py @@ -11,7 +11,7 @@ ) -class TestEncoder(object): +class TestSerialisationRf5k: def test_serialises_and_deserialises_rf5k_message_with_streams_correctly(self): """ Round-trip to check what we serialise is what we get back. @@ -34,10 +34,7 @@ def test_serialises_and_deserialises_rf5k_message_without_streams_correctly(self """ Round-trip to check what we serialise is what we get back. """ - original_entry = { - "config_change": UpdateType.REMOVEALL, - "streams": [], - } + original_entry = {"config_change": UpdateType.REMOVEALL, "streams": []} buf = serialise_rf5k(**original_entry) entry = deserialise_rf5k(buf) diff --git a/tests/test_x52f.py b/tests/test_x52f.py index 1377ad9..c7942c1 100644 --- a/tests/test_x52f.py +++ b/tests/test_x52f.py @@ -3,12 +3,11 @@ from streaming_data_types import SERIALISERS, DESERIALISERS -class TestEncoder(object): +class TestSerialisationX52f: def test_serialises_and_deserialises_x5f2_message_correctly(self): """ Round-trip to check what we serialise is what we get back. 
""" - original_entry = { "software_name": "nicos/test", "software_version": "1.0.0", From c8322cff2363e2c3016d90cc9325a6f823acb942 Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Mon, 21 Sep 2020 12:30:37 +0000 Subject: [PATCH 173/363] Undo formatting change --- tests/test_rf5k.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/tests/test_rf5k.py b/tests/test_rf5k.py index 2614444..c5b8627 100644 --- a/tests/test_rf5k.py +++ b/tests/test_rf5k.py @@ -34,7 +34,10 @@ def test_serialises_and_deserialises_rf5k_message_without_streams_correctly(self """ Round-trip to check what we serialise is what we get back. """ - original_entry = {"config_change": UpdateType.REMOVEALL, "streams": []} + original_entry = { + "config_change": UpdateType.REMOVEALL, + "streams": [], + } buf = serialise_rf5k(**original_entry) entry = deserialise_rf5k(buf) From 4d34099b4eea90a88c40e40a8ad4cc07d5226767 Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Mon, 21 Sep 2020 12:35:34 +0000 Subject: [PATCH 174/363] Removed trailing whitespace --- tests/test_rf5k.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_rf5k.py b/tests/test_rf5k.py index c5b8627..933dcc3 100644 --- a/tests/test_rf5k.py +++ b/tests/test_rf5k.py @@ -35,7 +35,7 @@ def test_serialises_and_deserialises_rf5k_message_without_streams_correctly(self Round-trip to check what we serialise is what we get back. 
""" original_entry = { - "config_change": UpdateType.REMOVEALL, + "config_change": UpdateType.REMOVEALL, "streams": [], } From 56cc3c700c503cbdd303bc3af279bf7e197e54c4 Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Mon, 21 Sep 2020 12:38:32 +0000 Subject: [PATCH 175/363] Copy and paste error rectified --- tests/test_answ.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/tests/test_answ.py b/tests/test_answ.py index 5324a14..f2f4326 100644 --- a/tests/test_answ.py +++ b/tests/test_answ.py @@ -3,12 +3,11 @@ from streaming_data_types import SERIALISERS, DESERIALISERS -class TestEncoder(object): +class TestSerialisationAnsw: def test_serialise_and_deserialise_answ_message(self): """ Round-trip to check what we serialise is what we get back. """ - original_entry = { "service_id": "some_service_id_1234", "job_id": "some_job_id_abcdef", From 56f043207091a28c6234949897818c4dbcbbe2d9 Mon Sep 17 00:00:00 2001 From: Jonas Nilsson Date: Sat, 3 Oct 2020 00:33:07 +0200 Subject: [PATCH 176/363] Schema changes. 
--- streaming_data_types/__init__.py | 3 + .../action_response_answ/ActionResponse.py | 26 +++- .../action_response_answ/ActionType.py | 2 - .../finished_writing_wrdn/FinishedWriting.py | 76 ++++++++++++ .../finished_writing_wrdn/__init__.py | 0 .../fbschemas/run_start_pl72/RunStart.py | 112 +++++------------- streaming_data_types/finished_writing_wrdn.py | 66 +++++++++++ tests/test_wrdn.py | 52 ++++++++ 8 files changed, 250 insertions(+), 87 deletions(-) create mode 100644 streaming_data_types/fbschemas/finished_writing_wrdn/FinishedWriting.py create mode 100644 streaming_data_types/fbschemas/finished_writing_wrdn/__init__.py create mode 100644 streaming_data_types/finished_writing_wrdn.py create mode 100644 tests/test_wrdn.py diff --git a/streaming_data_types/__init__.py b/streaming_data_types/__init__.py index 75bdeca..1a20d0c 100644 --- a/streaming_data_types/__init__.py +++ b/streaming_data_types/__init__.py @@ -6,6 +6,7 @@ from streaming_data_types.run_stop_6s4t import deserialise_6s4t, serialise_6s4t from streaming_data_types.status_x5f2 import deserialise_x5f2, serialise_x5f2 from streaming_data_types.action_response_answ import deserialise_answ, serialise_answ +from streaming_data_types.finished_writing_wrdn import deserialise_wrdn, serialise_wrdn from streaming_data_types.epics_connection_info_ep00 import ( deserialise_ep00, serialise_ep00, @@ -29,6 +30,7 @@ "tdct": serialise_tdct, "rf5k": serialise_rf5k, "answ": serialise_answ, + "wrdn": serialise_wrdn, } @@ -44,4 +46,5 @@ "tdct": deserialise_tdct, "rf5k": deserialise_rf5k, "answ": deserialise_answ, + "wrdn": deserialise_wrdn, } diff --git a/streaming_data_types/fbschemas/action_response_answ/ActionResponse.py b/streaming_data_types/fbschemas/action_response_answ/ActionResponse.py index fdb696d..21ea999 100644 --- a/streaming_data_types/fbschemas/action_response_answ/ActionResponse.py +++ b/streaming_data_types/fbschemas/action_response_answ/ActionResponse.py @@ -53,24 +53,40 @@ def Outcome(self): return 
0 # ActionResponse - def Message(self): + def StatusCode(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) + return 0 + + # ActionResponse + def StopTime(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Uint64Flags, o + self._tab.Pos) + return 0 + + # ActionResponse + def Message(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16)) if o != 0: return self._tab.String(o + self._tab.Pos) return None # ActionResponse def CommandId(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(18)) if o != 0: return self._tab.String(o + self._tab.Pos) return None -def ActionResponseStart(builder): builder.StartObject(6) +def ActionResponseStart(builder): builder.StartObject(8) def ActionResponseAddServiceId(builder, serviceId): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(serviceId), 0) def ActionResponseAddJobId(builder, jobId): builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(jobId), 0) def ActionResponseAddAction(builder, action): builder.PrependInt8Slot(2, action, 0) def ActionResponseAddOutcome(builder, outcome): builder.PrependInt8Slot(3, outcome, 0) -def ActionResponseAddMessage(builder, message): builder.PrependUOffsetTRelativeSlot(4, flatbuffers.number_types.UOffsetTFlags.py_type(message), 0) -def ActionResponseAddCommandId(builder, commandId): builder.PrependUOffsetTRelativeSlot(5, flatbuffers.number_types.UOffsetTFlags.py_type(commandId), 0) +def ActionResponseAddStatusCode(builder, statusCode): builder.PrependInt32Slot(4, statusCode, 0) +def ActionResponseAddStopTime(builder, stopTime): builder.PrependUint64Slot(5, stopTime, 0) +def 
ActionResponseAddMessage(builder, message): builder.PrependUOffsetTRelativeSlot(6, flatbuffers.number_types.UOffsetTFlags.py_type(message), 0) +def ActionResponseAddCommandId(builder, commandId): builder.PrependUOffsetTRelativeSlot(7, flatbuffers.number_types.UOffsetTFlags.py_type(commandId), 0) def ActionResponseEnd(builder): return builder.EndObject() diff --git a/streaming_data_types/fbschemas/action_response_answ/ActionType.py b/streaming_data_types/fbschemas/action_response_answ/ActionType.py index 27a2370..b9141d0 100644 --- a/streaming_data_types/fbschemas/action_response_answ/ActionType.py +++ b/streaming_data_types/fbschemas/action_response_answ/ActionType.py @@ -5,6 +5,4 @@ class ActionType(object): StartJob = 0 SetStopTime = 1 - StopNow = 2 - HasStopped = 3 diff --git a/streaming_data_types/fbschemas/finished_writing_wrdn/FinishedWriting.py b/streaming_data_types/fbschemas/finished_writing_wrdn/FinishedWriting.py new file mode 100644 index 0000000..1dec3ed --- /dev/null +++ b/streaming_data_types/fbschemas/finished_writing_wrdn/FinishedWriting.py @@ -0,0 +1,76 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers +from flatbuffers.compat import import_numpy +np = import_numpy() + +class FinishedWriting(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAsFinishedWriting(cls, buf, offset): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = FinishedWriting() + x.Init(buf, n + offset) + return x + + @classmethod + def FinishedWritingBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x77\x72\x64\x6E", size_prefixed=size_prefixed) + + # FinishedWriting + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # FinishedWriting + def ServiceId(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.String(o + 
self._tab.Pos) + return None + + # FinishedWriting + def JobId(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + # FinishedWriting + def ErrorEncountered(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos)) + return False + + # FinishedWriting + def FileName(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + # FinishedWriting + def Metadata(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + # FinishedWriting + def Message(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + +def FinishedWritingStart(builder): builder.StartObject(6) +def FinishedWritingAddServiceId(builder, serviceId): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(serviceId), 0) +def FinishedWritingAddJobId(builder, jobId): builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(jobId), 0) +def FinishedWritingAddErrorEncountered(builder, errorEncountered): builder.PrependBoolSlot(2, errorEncountered, 0) +def FinishedWritingAddFileName(builder, fileName): builder.PrependUOffsetTRelativeSlot(3, flatbuffers.number_types.UOffsetTFlags.py_type(fileName), 0) +def FinishedWritingAddMetadata(builder, metadata): builder.PrependUOffsetTRelativeSlot(4, flatbuffers.number_types.UOffsetTFlags.py_type(metadata), 0) +def FinishedWritingAddMessage(builder, message): builder.PrependUOffsetTRelativeSlot(5, flatbuffers.number_types.UOffsetTFlags.py_type(message), 0) +def FinishedWritingEnd(builder): return 
builder.EndObject() diff --git a/streaming_data_types/fbschemas/finished_writing_wrdn/__init__.py b/streaming_data_types/fbschemas/finished_writing_wrdn/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/streaming_data_types/fbschemas/run_start_pl72/RunStart.py b/streaming_data_types/fbschemas/run_start_pl72/RunStart.py index 6a5192e..9c70a17 100644 --- a/streaming_data_types/fbschemas/run_start_pl72/RunStart.py +++ b/streaming_data_types/fbschemas/run_start_pl72/RunStart.py @@ -1,12 +1,13 @@ # automatically generated by the FlatBuffers compiler, do not modify -# namespace: +# namespace: import flatbuffers - +from flatbuffers.compat import import_numpy +np = import_numpy() class RunStart(object): - __slots__ = ["_tab"] + __slots__ = ['_tab'] @classmethod def GetRootAsRunStart(cls, buf, offset): @@ -15,6 +16,10 @@ def GetRootAsRunStart(cls, buf, offset): x.Init(buf, n + offset) return x + @classmethod + def RunStartBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x70\x6C\x37\x32", size_prefixed=size_prefixed) + # RunStart def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) @@ -23,18 +28,14 @@ def Init(self, buf, pos): def StartTime(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) if o != 0: - return self._tab.Get( - flatbuffers.number_types.Uint64Flags, o + self._tab.Pos - ) + return self._tab.Get(flatbuffers.number_types.Uint64Flags, o + self._tab.Pos) return 0 # RunStart def StopTime(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) if o != 0: - return self._tab.Get( - flatbuffers.number_types.Uint64Flags, o + self._tab.Pos - ) + return self._tab.Get(flatbuffers.number_types.Uint64Flags, o + self._tab.Pos) return 0 # RunStart @@ -90,9 +91,7 @@ def Filename(self): def NPeriods(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(22)) if o != 0: - return self._tab.Get( - 
flatbuffers.number_types.Uint32Flags, o + self._tab.Pos - ) + return self._tab.Get(flatbuffers.number_types.Uint32Flags, o + self._tab.Pos) return 1 # RunStart @@ -100,77 +99,30 @@ def DetectorSpectrumMap(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(24)) if o != 0: x = self._tab.Indirect(o + self._tab.Pos) - from .SpectraDetectorMapping import SpectraDetectorMapping - + from SpectraDetectorMapping import SpectraDetectorMapping obj = SpectraDetectorMapping() obj.Init(self._tab.Bytes, x) return obj return None + # RunStart + def Metadata(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(26)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None -def RunStartStart(builder): - builder.StartObject(11) - - -def RunStartAddStartTime(builder, startTime): - builder.PrependUint64Slot(0, startTime, 0) - - -def RunStartAddStopTime(builder, stopTime): - builder.PrependUint64Slot(1, stopTime, 0) - - -def RunStartAddRunName(builder, runName): - builder.PrependUOffsetTRelativeSlot( - 2, flatbuffers.number_types.UOffsetTFlags.py_type(runName), 0 - ) - - -def RunStartAddInstrumentName(builder, instrumentName): - builder.PrependUOffsetTRelativeSlot( - 3, flatbuffers.number_types.UOffsetTFlags.py_type(instrumentName), 0 - ) - - -def RunStartAddNexusStructure(builder, nexusStructure): - builder.PrependUOffsetTRelativeSlot( - 4, flatbuffers.number_types.UOffsetTFlags.py_type(nexusStructure), 0 - ) - - -def RunStartAddJobId(builder, jobId): - builder.PrependUOffsetTRelativeSlot( - 5, flatbuffers.number_types.UOffsetTFlags.py_type(jobId), 0 - ) - - -def RunStartAddBroker(builder, broker): - builder.PrependUOffsetTRelativeSlot( - 6, flatbuffers.number_types.UOffsetTFlags.py_type(broker), 0 - ) - - -def RunStartAddServiceId(builder, serviceId): - builder.PrependUOffsetTRelativeSlot( - 7, flatbuffers.number_types.UOffsetTFlags.py_type(serviceId), 0 - ) - - -def RunStartAddFilename(builder, filename): - 
builder.PrependUOffsetTRelativeSlot( - 8, flatbuffers.number_types.UOffsetTFlags.py_type(filename), 0 - ) - - -def RunStartAddNPeriods(builder, nPeriods): - builder.PrependUint32Slot(9, nPeriods, 1) - - -def RunStartAddDetectorSpectrumMap(builder, detectorSpectrumMap): - builder.PrependUOffsetTRelativeSlot( - 10, flatbuffers.number_types.UOffsetTFlags.py_type(detectorSpectrumMap), 0 - ) - - -def RunStartEnd(builder): - return builder.EndObject() +def RunStartStart(builder): builder.StartObject(12) +def RunStartAddStartTime(builder, startTime): builder.PrependUint64Slot(0, startTime, 0) +def RunStartAddStopTime(builder, stopTime): builder.PrependUint64Slot(1, stopTime, 0) +def RunStartAddRunName(builder, runName): builder.PrependUOffsetTRelativeSlot(2, flatbuffers.number_types.UOffsetTFlags.py_type(runName), 0) +def RunStartAddInstrumentName(builder, instrumentName): builder.PrependUOffsetTRelativeSlot(3, flatbuffers.number_types.UOffsetTFlags.py_type(instrumentName), 0) +def RunStartAddNexusStructure(builder, nexusStructure): builder.PrependUOffsetTRelativeSlot(4, flatbuffers.number_types.UOffsetTFlags.py_type(nexusStructure), 0) +def RunStartAddJobId(builder, jobId): builder.PrependUOffsetTRelativeSlot(5, flatbuffers.number_types.UOffsetTFlags.py_type(jobId), 0) +def RunStartAddBroker(builder, broker): builder.PrependUOffsetTRelativeSlot(6, flatbuffers.number_types.UOffsetTFlags.py_type(broker), 0) +def RunStartAddServiceId(builder, serviceId): builder.PrependUOffsetTRelativeSlot(7, flatbuffers.number_types.UOffsetTFlags.py_type(serviceId), 0) +def RunStartAddFilename(builder, filename): builder.PrependUOffsetTRelativeSlot(8, flatbuffers.number_types.UOffsetTFlags.py_type(filename), 0) +def RunStartAddNPeriods(builder, nPeriods): builder.PrependUint32Slot(9, nPeriods, 1) +def RunStartAddDetectorSpectrumMap(builder, detectorSpectrumMap): builder.PrependUOffsetTRelativeSlot(10, flatbuffers.number_types.UOffsetTFlags.py_type(detectorSpectrumMap), 0) +def 
RunStartAddMetadata(builder, metadata): builder.PrependUOffsetTRelativeSlot(11, flatbuffers.number_types.UOffsetTFlags.py_type(metadata), 0) +def RunStartEnd(builder): return builder.EndObject() diff --git a/streaming_data_types/finished_writing_wrdn.py b/streaming_data_types/finished_writing_wrdn.py new file mode 100644 index 0000000..d09f6bc --- /dev/null +++ b/streaming_data_types/finished_writing_wrdn.py @@ -0,0 +1,66 @@ +from typing import Optional, Union +import flatbuffers +from streaming_data_types.fbschemas.finished_writing_wrdn import FinishedWriting +from streaming_data_types.utils import check_schema_identifier +from collections import namedtuple + +FILE_IDENTIFIER = b"wrdn" + + +def serialise_wrdn( + service_id: str, + job_id: str, + error_encountered: bool, + file_name: str, + metadata: str = "", + message: str = "" +) -> bytes: + builder = flatbuffers.Builder(500) + + service_id_offset = builder.CreateString(service_id) + job_id_offset = builder.CreateString(job_id) + file_name_offset = builder.CreateString(file_name) + if metadata: + metadata_offset = builder.CreateString(metadata) + if message: + message_offset = builder.CreateString(message) + + # Build the actual buffer + FinishedWriting.FinishedWritingStart(builder) + FinishedWriting.FinishedWritingAddServiceId(builder, service_id_offset) + FinishedWriting.FinishedWritingAddJobId(builder, job_id_offset) + FinishedWriting.FinishedWritingAddErrorEncountered(builder, error_encountered) + FinishedWriting.FinishedWritingAddFileName(builder, file_name_offset) + if metadata: + FinishedWriting.FinishedWritingAddMetadata(builder, metadata_offset) + if message: + FinishedWriting.FinishedWritingAddMessage(builder, message_offset) + + finished_writing_message = FinishedWriting.FinishedWritingEnd(builder) + builder.Finish(finished_writing_message) + + # Generate the output and replace the file_identifier + buffer = builder.Output() + buffer[4:8] = FILE_IDENTIFIER + return bytes(buffer) + + +WritingFinished = 
namedtuple( + "FinishedWriting", ("service_id", "job_id", "error_encountered", "file_name", "metadata", "message") +) + + +def deserialise_wrdn(buffer: Union[bytearray, bytes]) -> FinishedWriting: + check_schema_identifier(buffer, FILE_IDENTIFIER) + + finished_writing = FinishedWriting.FinishedWriting.GetRootAsFinishedWriting(buffer, 0) + service_id = finished_writing.ServiceId() + job_id = finished_writing.JobId() + has_error = finished_writing.ErrorEncountered() + file_name = finished_writing.FileName() if finished_writing.FileName() else b"" + metadata = finished_writing.Metadata() if finished_writing.Metadata() else b"" + message = finished_writing.Message() if finished_writing.Message() else b"" + + return WritingFinished( + service_id.decode(), job_id.decode(), has_error, file_name.decode(), metadata.decode(), message.decode() + ) diff --git a/tests/test_wrdn.py b/tests/test_wrdn.py new file mode 100644 index 0000000..0e7e14b --- /dev/null +++ b/tests/test_wrdn.py @@ -0,0 +1,52 @@ +import pytest +from streaming_data_types.finished_writing_wrdn import serialise_wrdn, deserialise_wrdn +from streaming_data_types import SERIALISERS, DESERIALISERS + + +class TestEncoder(object): + def test_serialise_and_deserialise_wrdn_message(self): + """ + Round-trip to check what we serialise is what we get back. 
+ """ + + original_entry = { + "service_id": "some_service_id_1234", + "job_id": "some_job_id_abcdef", + "error_encountered": True, + "file_name": "somefile.nxs", + "metadata": '{"hello":4}', + "message": "some random error message", + } + + buf = serialise_wrdn(**original_entry) + entry = deserialise_wrdn(buf) + + assert entry.service_id == original_entry["service_id"] + assert entry.job_id == original_entry["job_id"] + assert entry.error_encountered == original_entry["error_encountered"] + assert entry.file_name == original_entry["file_name"] + assert entry.metadata == original_entry["metadata"] + assert entry.message == original_entry["message"] + + def test_if_buffer_has_wrong_id_then_throws(self): + original_entry = { + "service_id": "some_service_id_1234", + "job_id": "some_job_id_abcdef", + "error_encountered": True, + "file_name": "somefile.nxs", + "metadata": '{"hello":4}', + "message": "some random error message", + } + + buf = serialise_wrdn(**original_entry) + + # Manually hack the id + buf = bytearray(buf) + buf[4:8] = b"1234" + + with pytest.raises(RuntimeError): + deserialise_wrdn(buf) + + def test_schema_type_is_in_global_serialisers_list(self): + assert "wrdn" in SERIALISERS + assert "wrdn" in DESERIALISERS From ebf9dea50032dddee9682c44f583bab9c592f2d4 Mon Sep 17 00:00:00 2001 From: Jonas Nilsson Date: Sat, 3 Oct 2020 00:35:20 +0200 Subject: [PATCH 177/363] Black. 
--- streaming_data_types/action_response_answ.py | 26 ++++- .../epics_connection_info_ep00.py | 2 +- .../action_response_answ/ActionOutcome.py | 4 +- .../action_response_answ/ActionResponse.py | 71 ++++++++--- .../action_response_answ/ActionType.py | 4 +- .../finished_writing_wrdn/FinishedWriting.py | 63 ++++++++-- .../fbschemas/run_start_pl72/RunStart.py | 110 ++++++++++++++---- .../fbschemas/run_stop_6s4t/RunStop.py | 56 +++++++-- .../fbschemas/status_x5f2/Status.py | 63 ++++++++-- streaming_data_types/finished_writing_wrdn.py | 16 ++- .../forwarder_config_update_rf5k.py | 4 +- streaming_data_types/run_stop_6s4t.py | 6 +- streaming_data_types/status_x5f2.py | 17 +-- streaming_data_types/timestamps_tdct.py | 4 +- tests/test_answ.py | 7 +- tests/test_rf5k.py | 5 +- 16 files changed, 358 insertions(+), 100 deletions(-) diff --git a/streaming_data_types/action_response_answ.py b/streaming_data_types/action_response_answ.py index a2396ed..20693a0 100644 --- a/streaming_data_types/action_response_answ.py +++ b/streaming_data_types/action_response_answ.py @@ -1,5 +1,7 @@ import flatbuffers -from streaming_data_types.fbschemas.action_response_answ.ActionOutcome import ActionOutcome +from streaming_data_types.fbschemas.action_response_answ.ActionOutcome import ( + ActionOutcome, +) import streaming_data_types.fbschemas.action_response_answ.ActionResponse as ActionResponse from streaming_data_types.fbschemas.action_response_answ.ActionType import ActionType from streaming_data_types.utils import check_schema_identifier @@ -9,7 +11,14 @@ FILE_IDENTIFIER = b"answ" -def serialise_answ(service_id: str, job_id: str, command_id: str, action: ActionType, outcome: ActionOutcome, message: str) -> bytes: +def serialise_answ( + service_id: str, + job_id: str, + command_id: str, + action: ActionType, + outcome: ActionOutcome, + message: str, +) -> bytes: builder = flatbuffers.Builder(500) service_id_offset = builder.CreateString(service_id) job_id_offset = builder.CreateString(job_id) 
@@ -32,10 +41,19 @@ def serialise_answ(service_id: str, job_id: str, command_id: str, action: Action return bytes(output_buffer) -Response = namedtuple("Response", ("service_id", "job_id", "command_id", "action", "outcome", "message",),) +Response = namedtuple( + "Response", ("service_id", "job_id", "command_id", "action", "outcome", "message") +) def deserialise_answ(buffer: Union[bytearray, bytes]): check_schema_identifier(buffer, FILE_IDENTIFIER) answ_message = ActionResponse.ActionResponse.GetRootAsActionResponse(buffer, 0) - return Response(answ_message.ServiceId().decode("utf-8"), answ_message.JobId().decode("utf-8"), answ_message.CommandId().decode("utf-8"), answ_message.Action(), answ_message.Outcome(), answ_message.Message().decode("utf-8")) + return Response( + answ_message.ServiceId().decode("utf-8"), + answ_message.JobId().decode("utf-8"), + answ_message.CommandId().decode("utf-8"), + answ_message.Action(), + answ_message.Outcome(), + answ_message.Message().decode("utf-8"), + ) diff --git a/streaming_data_types/epics_connection_info_ep00.py b/streaming_data_types/epics_connection_info_ep00.py index 7ec350b..b59cc16 100644 --- a/streaming_data_types/epics_connection_info_ep00.py +++ b/streaming_data_types/epics_connection_info_ep00.py @@ -39,7 +39,7 @@ def serialise_ep00( EpicsConnection = namedtuple( - "EpicsConnection", ("timestamp", "type", "source_name", "service_id",), + "EpicsConnection", ("timestamp", "type", "source_name", "service_id") ) diff --git a/streaming_data_types/fbschemas/action_response_answ/ActionOutcome.py b/streaming_data_types/fbschemas/action_response_answ/ActionOutcome.py index c2f2f39..c7f4d3a 100644 --- a/streaming_data_types/fbschemas/action_response_answ/ActionOutcome.py +++ b/streaming_data_types/fbschemas/action_response_answ/ActionOutcome.py @@ -1,8 +1,8 @@ # automatically generated by the FlatBuffers compiler, do not modify -# namespace: +# namespace: + class ActionOutcome(object): Success = 0 Failure = 1 - diff --git 
a/streaming_data_types/fbschemas/action_response_answ/ActionResponse.py b/streaming_data_types/fbschemas/action_response_answ/ActionResponse.py index 21ea999..92c5280 100644 --- a/streaming_data_types/fbschemas/action_response_answ/ActionResponse.py +++ b/streaming_data_types/fbschemas/action_response_answ/ActionResponse.py @@ -1,13 +1,15 @@ # automatically generated by the FlatBuffers compiler, do not modify -# namespace: +# namespace: import flatbuffers from flatbuffers.compat import import_numpy + np = import_numpy() + class ActionResponse(object): - __slots__ = ['_tab'] + __slots__ = ["_tab"] @classmethod def GetRootAsActionResponse(cls, buf, offset): @@ -18,7 +20,9 @@ def GetRootAsActionResponse(cls, buf, offset): @classmethod def ActionResponseBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x61\x6E\x73\x77", size_prefixed=size_prefixed) + return flatbuffers.util.BufferHasIdentifier( + buf, offset, b"\x61\x6E\x73\x77", size_prefixed=size_prefixed + ) # ActionResponse def Init(self, buf, pos): @@ -63,7 +67,9 @@ def StatusCode(self): def StopTime(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) if o != 0: - return self._tab.Get(flatbuffers.number_types.Uint64Flags, o + self._tab.Pos) + return self._tab.Get( + flatbuffers.number_types.Uint64Flags, o + self._tab.Pos + ) return 0 # ActionResponse @@ -80,13 +86,50 @@ def CommandId(self): return self._tab.String(o + self._tab.Pos) return None -def ActionResponseStart(builder): builder.StartObject(8) -def ActionResponseAddServiceId(builder, serviceId): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(serviceId), 0) -def ActionResponseAddJobId(builder, jobId): builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(jobId), 0) -def ActionResponseAddAction(builder, action): builder.PrependInt8Slot(2, action, 0) -def ActionResponseAddOutcome(builder, 
outcome): builder.PrependInt8Slot(3, outcome, 0) -def ActionResponseAddStatusCode(builder, statusCode): builder.PrependInt32Slot(4, statusCode, 0) -def ActionResponseAddStopTime(builder, stopTime): builder.PrependUint64Slot(5, stopTime, 0) -def ActionResponseAddMessage(builder, message): builder.PrependUOffsetTRelativeSlot(6, flatbuffers.number_types.UOffsetTFlags.py_type(message), 0) -def ActionResponseAddCommandId(builder, commandId): builder.PrependUOffsetTRelativeSlot(7, flatbuffers.number_types.UOffsetTFlags.py_type(commandId), 0) -def ActionResponseEnd(builder): return builder.EndObject() + +def ActionResponseStart(builder): + builder.StartObject(8) + + +def ActionResponseAddServiceId(builder, serviceId): + builder.PrependUOffsetTRelativeSlot( + 0, flatbuffers.number_types.UOffsetTFlags.py_type(serviceId), 0 + ) + + +def ActionResponseAddJobId(builder, jobId): + builder.PrependUOffsetTRelativeSlot( + 1, flatbuffers.number_types.UOffsetTFlags.py_type(jobId), 0 + ) + + +def ActionResponseAddAction(builder, action): + builder.PrependInt8Slot(2, action, 0) + + +def ActionResponseAddOutcome(builder, outcome): + builder.PrependInt8Slot(3, outcome, 0) + + +def ActionResponseAddStatusCode(builder, statusCode): + builder.PrependInt32Slot(4, statusCode, 0) + + +def ActionResponseAddStopTime(builder, stopTime): + builder.PrependUint64Slot(5, stopTime, 0) + + +def ActionResponseAddMessage(builder, message): + builder.PrependUOffsetTRelativeSlot( + 6, flatbuffers.number_types.UOffsetTFlags.py_type(message), 0 + ) + + +def ActionResponseAddCommandId(builder, commandId): + builder.PrependUOffsetTRelativeSlot( + 7, flatbuffers.number_types.UOffsetTFlags.py_type(commandId), 0 + ) + + +def ActionResponseEnd(builder): + return builder.EndObject() diff --git a/streaming_data_types/fbschemas/action_response_answ/ActionType.py b/streaming_data_types/fbschemas/action_response_answ/ActionType.py index b9141d0..7120bb0 100644 --- 
a/streaming_data_types/fbschemas/action_response_answ/ActionType.py +++ b/streaming_data_types/fbschemas/action_response_answ/ActionType.py @@ -1,8 +1,8 @@ # automatically generated by the FlatBuffers compiler, do not modify -# namespace: +# namespace: + class ActionType(object): StartJob = 0 SetStopTime = 1 - diff --git a/streaming_data_types/fbschemas/finished_writing_wrdn/FinishedWriting.py b/streaming_data_types/fbschemas/finished_writing_wrdn/FinishedWriting.py index 1dec3ed..a7f2996 100644 --- a/streaming_data_types/fbschemas/finished_writing_wrdn/FinishedWriting.py +++ b/streaming_data_types/fbschemas/finished_writing_wrdn/FinishedWriting.py @@ -1,13 +1,15 @@ # automatically generated by the FlatBuffers compiler, do not modify -# namespace: +# namespace: import flatbuffers from flatbuffers.compat import import_numpy + np = import_numpy() + class FinishedWriting(object): - __slots__ = ['_tab'] + __slots__ = ["_tab"] @classmethod def GetRootAsFinishedWriting(cls, buf, offset): @@ -18,7 +20,9 @@ def GetRootAsFinishedWriting(cls, buf, offset): @classmethod def FinishedWritingBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x77\x72\x64\x6E", size_prefixed=size_prefixed) + return flatbuffers.util.BufferHasIdentifier( + buf, offset, b"\x77\x72\x64\x6E", size_prefixed=size_prefixed + ) # FinishedWriting def Init(self, buf, pos): @@ -42,7 +46,9 @@ def JobId(self): def ErrorEncountered(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) if o != 0: - return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos)) + return bool( + self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos) + ) return False # FinishedWriting @@ -66,11 +72,44 @@ def Message(self): return self._tab.String(o + self._tab.Pos) return None -def FinishedWritingStart(builder): builder.StartObject(6) -def FinishedWritingAddServiceId(builder, serviceId): 
builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(serviceId), 0) -def FinishedWritingAddJobId(builder, jobId): builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(jobId), 0) -def FinishedWritingAddErrorEncountered(builder, errorEncountered): builder.PrependBoolSlot(2, errorEncountered, 0) -def FinishedWritingAddFileName(builder, fileName): builder.PrependUOffsetTRelativeSlot(3, flatbuffers.number_types.UOffsetTFlags.py_type(fileName), 0) -def FinishedWritingAddMetadata(builder, metadata): builder.PrependUOffsetTRelativeSlot(4, flatbuffers.number_types.UOffsetTFlags.py_type(metadata), 0) -def FinishedWritingAddMessage(builder, message): builder.PrependUOffsetTRelativeSlot(5, flatbuffers.number_types.UOffsetTFlags.py_type(message), 0) -def FinishedWritingEnd(builder): return builder.EndObject() + +def FinishedWritingStart(builder): + builder.StartObject(6) + + +def FinishedWritingAddServiceId(builder, serviceId): + builder.PrependUOffsetTRelativeSlot( + 0, flatbuffers.number_types.UOffsetTFlags.py_type(serviceId), 0 + ) + + +def FinishedWritingAddJobId(builder, jobId): + builder.PrependUOffsetTRelativeSlot( + 1, flatbuffers.number_types.UOffsetTFlags.py_type(jobId), 0 + ) + + +def FinishedWritingAddErrorEncountered(builder, errorEncountered): + builder.PrependBoolSlot(2, errorEncountered, 0) + + +def FinishedWritingAddFileName(builder, fileName): + builder.PrependUOffsetTRelativeSlot( + 3, flatbuffers.number_types.UOffsetTFlags.py_type(fileName), 0 + ) + + +def FinishedWritingAddMetadata(builder, metadata): + builder.PrependUOffsetTRelativeSlot( + 4, flatbuffers.number_types.UOffsetTFlags.py_type(metadata), 0 + ) + + +def FinishedWritingAddMessage(builder, message): + builder.PrependUOffsetTRelativeSlot( + 5, flatbuffers.number_types.UOffsetTFlags.py_type(message), 0 + ) + + +def FinishedWritingEnd(builder): + return builder.EndObject() diff --git 
a/streaming_data_types/fbschemas/run_start_pl72/RunStart.py b/streaming_data_types/fbschemas/run_start_pl72/RunStart.py index 9c70a17..9cf7231 100644 --- a/streaming_data_types/fbschemas/run_start_pl72/RunStart.py +++ b/streaming_data_types/fbschemas/run_start_pl72/RunStart.py @@ -1,13 +1,15 @@ # automatically generated by the FlatBuffers compiler, do not modify -# namespace: +# namespace: import flatbuffers from flatbuffers.compat import import_numpy + np = import_numpy() + class RunStart(object): - __slots__ = ['_tab'] + __slots__ = ["_tab"] @classmethod def GetRootAsRunStart(cls, buf, offset): @@ -18,7 +20,9 @@ def GetRootAsRunStart(cls, buf, offset): @classmethod def RunStartBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x70\x6C\x37\x32", size_prefixed=size_prefixed) + return flatbuffers.util.BufferHasIdentifier( + buf, offset, b"\x70\x6C\x37\x32", size_prefixed=size_prefixed + ) # RunStart def Init(self, buf, pos): @@ -28,14 +32,18 @@ def Init(self, buf, pos): def StartTime(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) if o != 0: - return self._tab.Get(flatbuffers.number_types.Uint64Flags, o + self._tab.Pos) + return self._tab.Get( + flatbuffers.number_types.Uint64Flags, o + self._tab.Pos + ) return 0 # RunStart def StopTime(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) if o != 0: - return self._tab.Get(flatbuffers.number_types.Uint64Flags, o + self._tab.Pos) + return self._tab.Get( + flatbuffers.number_types.Uint64Flags, o + self._tab.Pos + ) return 0 # RunStart @@ -91,7 +99,9 @@ def Filename(self): def NPeriods(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(22)) if o != 0: - return self._tab.Get(flatbuffers.number_types.Uint32Flags, o + self._tab.Pos) + return self._tab.Get( + flatbuffers.number_types.Uint32Flags, o + self._tab.Pos + ) return 1 # RunStart @@ -100,6 +110,7 @@ def 
DetectorSpectrumMap(self): if o != 0: x = self._tab.Indirect(o + self._tab.Pos) from SpectraDetectorMapping import SpectraDetectorMapping + obj = SpectraDetectorMapping() obj.Init(self._tab.Bytes, x) return obj @@ -112,17 +123,76 @@ def Metadata(self): return self._tab.String(o + self._tab.Pos) return None -def RunStartStart(builder): builder.StartObject(12) -def RunStartAddStartTime(builder, startTime): builder.PrependUint64Slot(0, startTime, 0) -def RunStartAddStopTime(builder, stopTime): builder.PrependUint64Slot(1, stopTime, 0) -def RunStartAddRunName(builder, runName): builder.PrependUOffsetTRelativeSlot(2, flatbuffers.number_types.UOffsetTFlags.py_type(runName), 0) -def RunStartAddInstrumentName(builder, instrumentName): builder.PrependUOffsetTRelativeSlot(3, flatbuffers.number_types.UOffsetTFlags.py_type(instrumentName), 0) -def RunStartAddNexusStructure(builder, nexusStructure): builder.PrependUOffsetTRelativeSlot(4, flatbuffers.number_types.UOffsetTFlags.py_type(nexusStructure), 0) -def RunStartAddJobId(builder, jobId): builder.PrependUOffsetTRelativeSlot(5, flatbuffers.number_types.UOffsetTFlags.py_type(jobId), 0) -def RunStartAddBroker(builder, broker): builder.PrependUOffsetTRelativeSlot(6, flatbuffers.number_types.UOffsetTFlags.py_type(broker), 0) -def RunStartAddServiceId(builder, serviceId): builder.PrependUOffsetTRelativeSlot(7, flatbuffers.number_types.UOffsetTFlags.py_type(serviceId), 0) -def RunStartAddFilename(builder, filename): builder.PrependUOffsetTRelativeSlot(8, flatbuffers.number_types.UOffsetTFlags.py_type(filename), 0) -def RunStartAddNPeriods(builder, nPeriods): builder.PrependUint32Slot(9, nPeriods, 1) -def RunStartAddDetectorSpectrumMap(builder, detectorSpectrumMap): builder.PrependUOffsetTRelativeSlot(10, flatbuffers.number_types.UOffsetTFlags.py_type(detectorSpectrumMap), 0) -def RunStartAddMetadata(builder, metadata): builder.PrependUOffsetTRelativeSlot(11, flatbuffers.number_types.UOffsetTFlags.py_type(metadata), 0) -def 
RunStartEnd(builder): return builder.EndObject() + +def RunStartStart(builder): + builder.StartObject(12) + + +def RunStartAddStartTime(builder, startTime): + builder.PrependUint64Slot(0, startTime, 0) + + +def RunStartAddStopTime(builder, stopTime): + builder.PrependUint64Slot(1, stopTime, 0) + + +def RunStartAddRunName(builder, runName): + builder.PrependUOffsetTRelativeSlot( + 2, flatbuffers.number_types.UOffsetTFlags.py_type(runName), 0 + ) + + +def RunStartAddInstrumentName(builder, instrumentName): + builder.PrependUOffsetTRelativeSlot( + 3, flatbuffers.number_types.UOffsetTFlags.py_type(instrumentName), 0 + ) + + +def RunStartAddNexusStructure(builder, nexusStructure): + builder.PrependUOffsetTRelativeSlot( + 4, flatbuffers.number_types.UOffsetTFlags.py_type(nexusStructure), 0 + ) + + +def RunStartAddJobId(builder, jobId): + builder.PrependUOffsetTRelativeSlot( + 5, flatbuffers.number_types.UOffsetTFlags.py_type(jobId), 0 + ) + + +def RunStartAddBroker(builder, broker): + builder.PrependUOffsetTRelativeSlot( + 6, flatbuffers.number_types.UOffsetTFlags.py_type(broker), 0 + ) + + +def RunStartAddServiceId(builder, serviceId): + builder.PrependUOffsetTRelativeSlot( + 7, flatbuffers.number_types.UOffsetTFlags.py_type(serviceId), 0 + ) + + +def RunStartAddFilename(builder, filename): + builder.PrependUOffsetTRelativeSlot( + 8, flatbuffers.number_types.UOffsetTFlags.py_type(filename), 0 + ) + + +def RunStartAddNPeriods(builder, nPeriods): + builder.PrependUint32Slot(9, nPeriods, 1) + + +def RunStartAddDetectorSpectrumMap(builder, detectorSpectrumMap): + builder.PrependUOffsetTRelativeSlot( + 10, flatbuffers.number_types.UOffsetTFlags.py_type(detectorSpectrumMap), 0 + ) + + +def RunStartAddMetadata(builder, metadata): + builder.PrependUOffsetTRelativeSlot( + 11, flatbuffers.number_types.UOffsetTFlags.py_type(metadata), 0 + ) + + +def RunStartEnd(builder): + return builder.EndObject() diff --git a/streaming_data_types/fbschemas/run_stop_6s4t/RunStop.py 
b/streaming_data_types/fbschemas/run_stop_6s4t/RunStop.py index 36c45f8..55faf97 100644 --- a/streaming_data_types/fbschemas/run_stop_6s4t/RunStop.py +++ b/streaming_data_types/fbschemas/run_stop_6s4t/RunStop.py @@ -1,13 +1,15 @@ # automatically generated by the FlatBuffers compiler, do not modify -# namespace: +# namespace: import flatbuffers from flatbuffers.compat import import_numpy + np = import_numpy() + class RunStop(object): - __slots__ = ['_tab'] + __slots__ = ["_tab"] @classmethod def GetRootAsRunStop(cls, buf, offset): @@ -18,7 +20,9 @@ def GetRootAsRunStop(cls, buf, offset): @classmethod def RunStopBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x36\x73\x34\x74", size_prefixed=size_prefixed) + return flatbuffers.util.BufferHasIdentifier( + buf, offset, b"\x36\x73\x34\x74", size_prefixed=size_prefixed + ) # RunStop def Init(self, buf, pos): @@ -28,7 +32,9 @@ def Init(self, buf, pos): def StopTime(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) if o != 0: - return self._tab.Get(flatbuffers.number_types.Uint64Flags, o + self._tab.Pos) + return self._tab.Get( + flatbuffers.number_types.Uint64Flags, o + self._tab.Pos + ) return 0 # RunStop @@ -59,10 +65,38 @@ def CommandId(self): return self._tab.String(o + self._tab.Pos) return None -def RunStopStart(builder): builder.StartObject(5) -def RunStopAddStopTime(builder, stopTime): builder.PrependUint64Slot(0, stopTime, 0) -def RunStopAddRunName(builder, runName): builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(runName), 0) -def RunStopAddJobId(builder, jobId): builder.PrependUOffsetTRelativeSlot(2, flatbuffers.number_types.UOffsetTFlags.py_type(jobId), 0) -def RunStopAddServiceId(builder, serviceId): builder.PrependUOffsetTRelativeSlot(3, flatbuffers.number_types.UOffsetTFlags.py_type(serviceId), 0) -def RunStopAddCommandId(builder, commandId): 
builder.PrependUOffsetTRelativeSlot(4, flatbuffers.number_types.UOffsetTFlags.py_type(commandId), 0) -def RunStopEnd(builder): return builder.EndObject() + +def RunStopStart(builder): + builder.StartObject(5) + + +def RunStopAddStopTime(builder, stopTime): + builder.PrependUint64Slot(0, stopTime, 0) + + +def RunStopAddRunName(builder, runName): + builder.PrependUOffsetTRelativeSlot( + 1, flatbuffers.number_types.UOffsetTFlags.py_type(runName), 0 + ) + + +def RunStopAddJobId(builder, jobId): + builder.PrependUOffsetTRelativeSlot( + 2, flatbuffers.number_types.UOffsetTFlags.py_type(jobId), 0 + ) + + +def RunStopAddServiceId(builder, serviceId): + builder.PrependUOffsetTRelativeSlot( + 3, flatbuffers.number_types.UOffsetTFlags.py_type(serviceId), 0 + ) + + +def RunStopAddCommandId(builder, commandId): + builder.PrependUOffsetTRelativeSlot( + 4, flatbuffers.number_types.UOffsetTFlags.py_type(commandId), 0 + ) + + +def RunStopEnd(builder): + return builder.EndObject() diff --git a/streaming_data_types/fbschemas/status_x5f2/Status.py b/streaming_data_types/fbschemas/status_x5f2/Status.py index 0cbc4c6..e0d273a 100644 --- a/streaming_data_types/fbschemas/status_x5f2/Status.py +++ b/streaming_data_types/fbschemas/status_x5f2/Status.py @@ -6,7 +6,7 @@ class Status(object): - __slots__ = ['_tab'] + __slots__ = ["_tab"] @classmethod def GetRootAsStatus(cls, buf, offset): @@ -51,14 +51,18 @@ def HostName(self): def ProcessId(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) if o != 0: - return self._tab.Get(flatbuffers.number_types.Uint32Flags, o + self._tab.Pos) + return self._tab.Get( + flatbuffers.number_types.Uint32Flags, o + self._tab.Pos + ) return 0 # Status def UpdateInterval(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) if o != 0: - return self._tab.Get(flatbuffers.number_types.Uint32Flags, o + self._tab.Pos) + return self._tab.Get( + flatbuffers.number_types.Uint32Flags, o + self._tab.Pos + ) return 0 # 
Status @@ -69,12 +73,47 @@ def StatusJson(self): return None -def StatusStart(builder): builder.StartObject(7) -def StatusAddSoftwareName(builder, softwareName): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(softwareName), 0) -def StatusAddSoftwareVersion(builder, softwareVersion): builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(softwareVersion), 0) -def StatusAddServiceId(builder, serviceId): builder.PrependUOffsetTRelativeSlot(2, flatbuffers.number_types.UOffsetTFlags.py_type(serviceId), 0) -def StatusAddHostName(builder, hostName): builder.PrependUOffsetTRelativeSlot(3, flatbuffers.number_types.UOffsetTFlags.py_type(hostName), 0) -def StatusAddProcessId(builder, processId): builder.PrependUint32Slot(4, processId, 0) -def StatusAddUpdateInterval(builder, updateInterval): builder.PrependUint32Slot(5, updateInterval, 0) -def StatusAddStatusJson(builder, statusJson): builder.PrependUOffsetTRelativeSlot(6, flatbuffers.number_types.UOffsetTFlags.py_type(statusJson), 0) -def StatusEnd(builder): return builder.EndObject() +def StatusStart(builder): + builder.StartObject(7) + + +def StatusAddSoftwareName(builder, softwareName): + builder.PrependUOffsetTRelativeSlot( + 0, flatbuffers.number_types.UOffsetTFlags.py_type(softwareName), 0 + ) + + +def StatusAddSoftwareVersion(builder, softwareVersion): + builder.PrependUOffsetTRelativeSlot( + 1, flatbuffers.number_types.UOffsetTFlags.py_type(softwareVersion), 0 + ) + + +def StatusAddServiceId(builder, serviceId): + builder.PrependUOffsetTRelativeSlot( + 2, flatbuffers.number_types.UOffsetTFlags.py_type(serviceId), 0 + ) + + +def StatusAddHostName(builder, hostName): + builder.PrependUOffsetTRelativeSlot( + 3, flatbuffers.number_types.UOffsetTFlags.py_type(hostName), 0 + ) + + +def StatusAddProcessId(builder, processId): + builder.PrependUint32Slot(4, processId, 0) + + +def StatusAddUpdateInterval(builder, updateInterval): + 
builder.PrependUint32Slot(5, updateInterval, 0) + + +def StatusAddStatusJson(builder, statusJson): + builder.PrependUOffsetTRelativeSlot( + 6, flatbuffers.number_types.UOffsetTFlags.py_type(statusJson), 0 + ) + + +def StatusEnd(builder): + return builder.EndObject() diff --git a/streaming_data_types/finished_writing_wrdn.py b/streaming_data_types/finished_writing_wrdn.py index d09f6bc..f72f2ce 100644 --- a/streaming_data_types/finished_writing_wrdn.py +++ b/streaming_data_types/finished_writing_wrdn.py @@ -13,7 +13,7 @@ def serialise_wrdn( error_encountered: bool, file_name: str, metadata: str = "", - message: str = "" + message: str = "", ) -> bytes: builder = flatbuffers.Builder(500) @@ -46,14 +46,17 @@ def serialise_wrdn( WritingFinished = namedtuple( - "FinishedWriting", ("service_id", "job_id", "error_encountered", "file_name", "metadata", "message") + "FinishedWriting", + ("service_id", "job_id", "error_encountered", "file_name", "metadata", "message"), ) def deserialise_wrdn(buffer: Union[bytearray, bytes]) -> FinishedWriting: check_schema_identifier(buffer, FILE_IDENTIFIER) - finished_writing = FinishedWriting.FinishedWriting.GetRootAsFinishedWriting(buffer, 0) + finished_writing = FinishedWriting.FinishedWriting.GetRootAsFinishedWriting( + buffer, 0 + ) service_id = finished_writing.ServiceId() job_id = finished_writing.JobId() has_error = finished_writing.ErrorEncountered() @@ -62,5 +65,10 @@ def deserialise_wrdn(buffer: Union[bytearray, bytes]) -> FinishedWriting: message = finished_writing.Message() if finished_writing.Message() else b"" return WritingFinished( - service_id.decode(), job_id.decode(), has_error, file_name.decode(), metadata.decode(), message.decode() + service_id.decode(), + job_id.decode(), + has_error, + file_name.decode(), + metadata.decode(), + message.decode(), ) diff --git a/streaming_data_types/forwarder_config_update_rf5k.py b/streaming_data_types/forwarder_config_update_rf5k.py index a7b9f0e..b19db6d 100644 --- 
a/streaming_data_types/forwarder_config_update_rf5k.py +++ b/streaming_data_types/forwarder_config_update_rf5k.py @@ -12,9 +12,9 @@ FILE_IDENTIFIER = b"rf5k" -ConfigurationUpdate = namedtuple("ConfigurationUpdate", ("config_change", "streams"),) +ConfigurationUpdate = namedtuple("ConfigurationUpdate", ("config_change", "streams")) -StreamInfo = namedtuple("StreamInfo", ("channel", "schema", "topic", "protocol"),) +StreamInfo = namedtuple("StreamInfo", ("channel", "schema", "topic", "protocol")) def deserialise_rf5k(buffer: Union[bytearray, bytes]) -> ConfigurationUpdate: diff --git a/streaming_data_types/run_stop_6s4t.py b/streaming_data_types/run_stop_6s4t.py index e4965e9..70cc34f 100644 --- a/streaming_data_types/run_stop_6s4t.py +++ b/streaming_data_types/run_stop_6s4t.py @@ -59,5 +59,9 @@ def deserialise_6s4t(buffer: Union[bytearray, bytes]) -> RunStopInfo: command_id = run_stop.CommandId() return RunStopInfo( - stop_time, run_name.decode(), job_id.decode(), service_id.decode(), command_id.decode() + stop_time, + run_name.decode(), + job_id.decode(), + service_id.decode(), + command_id.decode(), ) diff --git a/streaming_data_types/status_x5f2.py b/streaming_data_types/status_x5f2.py index 05c2ac2..01b9de0 100644 --- a/streaming_data_types/status_x5f2.py +++ b/streaming_data_types/status_x5f2.py @@ -38,18 +38,19 @@ def deserialise_x5f2(buffer): log_message.HostName().decode("utf-8"), log_message.ProcessId(), log_message.UpdateInterval(), - log_message.StatusJson().decode("utf-8") + log_message.StatusJson().decode("utf-8"), ) def serialise_x5f2( - software_name: str, - software_version: str, - service_id: str, - host_name: str, - process_id: int, - update_interval: int, - status_json: str) -> bytes: + software_name: str, + software_version: str, + service_id: str, + host_name: str, + process_id: int, + update_interval: int, + status_json: str, +) -> bytes: """ Serialise status message as an x5f2 FlatBuffers message. 
diff --git a/streaming_data_types/timestamps_tdct.py b/streaming_data_types/timestamps_tdct.py index ab28f83..fc7fe3f 100644 --- a/streaming_data_types/timestamps_tdct.py +++ b/streaming_data_types/timestamps_tdct.py @@ -46,7 +46,7 @@ def serialise_tdct( return bytes(buffer) -Timestamps = namedtuple("Timestamps", ("name", "timestamps", "sequence_counter",),) +Timestamps = namedtuple("Timestamps", ("name", "timestamps", "sequence_counter")) def deserialise_tdct(buffer: Union[bytearray, bytes]) -> Timestamps: @@ -57,4 +57,4 @@ def deserialise_tdct(buffer: Union[bytearray, bytes]) -> Timestamps: timestamps_array = timestamps.TimestampsAsNumpy() - return Timestamps(name.decode(), timestamps_array, timestamps.SequenceCounter(),) + return Timestamps(name.decode(), timestamps_array, timestamps.SequenceCounter()) diff --git a/tests/test_answ.py b/tests/test_answ.py index f2f4326..de88adc 100644 --- a/tests/test_answ.py +++ b/tests/test_answ.py @@ -1,5 +1,10 @@ import pytest -from streaming_data_types.action_response_answ import serialise_answ, deserialise_answ, ActionType, ActionOutcome +from streaming_data_types.action_response_answ import ( + serialise_answ, + deserialise_answ, + ActionType, + ActionOutcome, +) from streaming_data_types import SERIALISERS, DESERIALISERS diff --git a/tests/test_rf5k.py b/tests/test_rf5k.py index 933dcc3..2614444 100644 --- a/tests/test_rf5k.py +++ b/tests/test_rf5k.py @@ -34,10 +34,7 @@ def test_serialises_and_deserialises_rf5k_message_without_streams_correctly(self """ Round-trip to check what we serialise is what we get back. """ - original_entry = { - "config_change": UpdateType.REMOVEALL, - "streams": [], - } + original_entry = {"config_change": UpdateType.REMOVEALL, "streams": []} buf = serialise_rf5k(**original_entry) entry = deserialise_rf5k(buf) From 4a49157b433fe3e440cc42c1fa2bae01913a8050 Mon Sep 17 00:00:00 2001 From: Jonas Nilsson Date: Sat, 3 Oct 2020 00:37:19 +0200 Subject: [PATCH 178/363] Minor fix. 
--- streaming_data_types/finished_writing_wrdn.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/streaming_data_types/finished_writing_wrdn.py b/streaming_data_types/finished_writing_wrdn.py index f72f2ce..ee2710b 100644 --- a/streaming_data_types/finished_writing_wrdn.py +++ b/streaming_data_types/finished_writing_wrdn.py @@ -1,4 +1,4 @@ -from typing import Optional, Union +from typing import Union import flatbuffers from streaming_data_types.fbschemas.finished_writing_wrdn import FinishedWriting from streaming_data_types.utils import check_schema_identifier From 1499a65c29c865477dd50c5128b30d13872d8e77 Mon Sep 17 00:00:00 2001 From: Jonas Nilsson Date: Wed, 7 Oct 2020 16:56:33 +0200 Subject: [PATCH 179/363] Add missing metadata code. --- streaming_data_types/run_start_pl72.py | 6 ++++++ tests/test_pl72.py | 2 ++ 2 files changed, 8 insertions(+) diff --git a/streaming_data_types/run_start_pl72.py b/streaming_data_types/run_start_pl72.py index 30a3490..263b5b8 100644 --- a/streaming_data_types/run_start_pl72.py +++ b/streaming_data_types/run_start_pl72.py @@ -18,6 +18,7 @@ def serialise_pl72( service_id: str = "", instrument_name: str = "TEST", broker: str = "localhost:9092", + metadata: str = "{}" ) -> bytes: builder = flatbuffers.Builder(136) @@ -35,6 +36,7 @@ def serialise_pl72( instrument_name_offset = builder.CreateString(instrument_name) run_name_offset = builder.CreateString(run_name) filename_offset = builder.CreateString(filename) + metadata_offset = builder.CreateString(metadata) # Build the actual buffer RunStart.RunStartStart(builder) @@ -48,6 +50,7 @@ def serialise_pl72( RunStart.RunStartAddStartTime(builder, start_time) RunStart.RunStartAddFilename(builder, filename_offset) RunStart.RunStartAddNPeriods(builder, 1) + RunStart.RunStartAddMetadata(builder, metadata_offset) run_start_message = RunStart.RunStartEnd(builder) builder.Finish(run_start_message) @@ -70,6 +73,7 @@ def serialise_pl72( "service_id", "instrument_name", "broker", 
+ "metadata" ), ) @@ -85,6 +89,7 @@ def deserialise_pl72(buffer: Union[bytearray, bytes]) -> RunStartInfo: nexus_structure = run_start.NexusStructure() if run_start.NexusStructure() else b"" instrument_name = run_start.InstrumentName() if run_start.InstrumentName() else b"" run_name = run_start.RunName() if run_start.RunName() else b"" + metadata = run_start.Metadata() if run_start.Metadata() else b"" return RunStartInfo( job_id.decode(), @@ -96,4 +101,5 @@ def deserialise_pl72(buffer: Union[bytearray, bytes]) -> RunStartInfo: service_id.decode(), instrument_name.decode(), broker.decode(), + metadata.decode() ) diff --git a/tests/test_pl72.py b/tests/test_pl72.py index 547cdd9..7676cb4 100644 --- a/tests/test_pl72.py +++ b/tests/test_pl72.py @@ -14,6 +14,7 @@ class TestSerialisationPl72: "service_id": "filewriter1", "instrument_name": "LOKI", "broker": "localhost:9092", + "metadata": "{3:1}" } def test_serialises_and_deserialises_pl72_message_correctly(self): @@ -33,6 +34,7 @@ def test_serialises_and_deserialises_pl72_message_correctly(self): deserialised_tuple.instrument_name == self.original_entry["instrument_name"] ) assert deserialised_tuple.broker == self.original_entry["broker"] + assert deserialised_tuple.metadata == self.original_entry["metadata"] def test_if_buffer_has_wrong_id_then_throws(self): buf = serialise_pl72(**self.original_entry) From f640c5c2a823680e8c7ea3faebc5a3bb0b0bafec Mon Sep 17 00:00:00 2001 From: Jonas Nilsson Date: Tue, 20 Oct 2020 09:01:49 +0200 Subject: [PATCH 180/363] Documentation and other minor changes. 
--- README.md | 3 ++- streaming_data_types/fbschemas/status_x5f2/Status.py | 9 +++++++++ streaming_data_types/run_start_pl72.py | 6 +++--- tests/test_pl72.py | 2 +- 4 files changed, 15 insertions(+), 5 deletions(-) diff --git a/README.md b/README.md index c532125..0ad8e15 100644 --- a/README.md +++ b/README.md @@ -18,7 +18,8 @@ https://github.com/ess-dmsc/streaming-data-types |tdct|Timestamps|Y| |ep00|EPICS connection info|Y| |rf5k|Forwarder configuration update|Y| -|answ|File-writer command response|?| +|answ|File-writer command response|n/a| +|wrdn|File-writer finished writing|n/a| \* whether it passes verification via the C++ FlatBuffers library. diff --git a/streaming_data_types/fbschemas/status_x5f2/Status.py b/streaming_data_types/fbschemas/status_x5f2/Status.py index e0d273a..24eee0f 100644 --- a/streaming_data_types/fbschemas/status_x5f2/Status.py +++ b/streaming_data_types/fbschemas/status_x5f2/Status.py @@ -3,6 +3,9 @@ # namespace: import flatbuffers +from flatbuffers.compat import import_numpy + +np = import_numpy() class Status(object): @@ -15,6 +18,12 @@ def GetRootAsStatus(cls, buf, offset): x.Init(buf, n + offset) return x + @classmethod + def StatusBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier( + buf, offset, b"\x78\x35\x66\x32", size_prefixed=size_prefixed + ) + # Status def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) diff --git a/streaming_data_types/run_start_pl72.py b/streaming_data_types/run_start_pl72.py index 263b5b8..9aa834b 100644 --- a/streaming_data_types/run_start_pl72.py +++ b/streaming_data_types/run_start_pl72.py @@ -18,7 +18,7 @@ def serialise_pl72( service_id: str = "", instrument_name: str = "TEST", broker: str = "localhost:9092", - metadata: str = "{}" + metadata: str = "{}", ) -> bytes: builder = flatbuffers.Builder(136) @@ -73,7 +73,7 @@ def serialise_pl72( "service_id", "instrument_name", "broker", - "metadata" + "metadata", ), ) @@ -101,5 
+101,5 @@ def deserialise_pl72(buffer: Union[bytearray, bytes]) -> RunStartInfo: service_id.decode(), instrument_name.decode(), broker.decode(), - metadata.decode() + metadata.decode(), ) diff --git a/tests/test_pl72.py b/tests/test_pl72.py index 7676cb4..c9d1762 100644 --- a/tests/test_pl72.py +++ b/tests/test_pl72.py @@ -14,7 +14,7 @@ class TestSerialisationPl72: "service_id": "filewriter1", "instrument_name": "LOKI", "broker": "localhost:9092", - "metadata": "{3:1}" + "metadata": "{3:1}", } def test_serialises_and_deserialises_pl72_message_correctly(self): From 16572a14313a105fb1e7105675cdcf5d5b15df6e Mon Sep 17 00:00:00 2001 From: Jonas Nilsson Date: Tue, 20 Oct 2020 09:40:01 +0200 Subject: [PATCH 181/363] Updated to typed code. --- streaming_data_types/action_response_answ.py | 35 +++++++++++---- streaming_data_types/finished_writing_wrdn.py | 25 +++++++---- streaming_data_types/run_start_pl72.py | 44 +++++++++---------- streaming_data_types/run_stop_6s4t.py | 23 ++++++---- tests/test_answ.py | 11 +++++ 5 files changed, 90 insertions(+), 48 deletions(-) diff --git a/streaming_data_types/action_response_answ.py b/streaming_data_types/action_response_answ.py index 20693a0..2dee054 100644 --- a/streaming_data_types/action_response_answ.py +++ b/streaming_data_types/action_response_answ.py @@ -6,7 +6,8 @@ from streaming_data_types.fbschemas.action_response_answ.ActionType import ActionType from streaming_data_types.utils import check_schema_identifier from typing import Union -from collections import namedtuple +from typing import NamedTuple +from datetime import datetime FILE_IDENTIFIER = b"answ" @@ -18,6 +19,8 @@ def serialise_answ( action: ActionType, outcome: ActionOutcome, message: str, + status_code: int, + stop_time: datetime, ) -> bytes: builder = flatbuffers.Builder(500) service_id_offset = builder.CreateString(service_id) @@ -32,6 +35,8 @@ def serialise_answ( ActionResponse.ActionResponseAddOutcome(builder, outcome) 
ActionResponse.ActionResponseAddMessage(builder, message_offset) ActionResponse.ActionResponseAddCommandId(builder, command_id_offset) + ActionResponse.ActionResponseAddStatusCode(builder, status_code) + ActionResponse.ActionResponseAddStopTime(builder, int(stop_time.timestamp() * 1000)) out_message = ActionResponse.ActionResponseEnd(builder) builder.Finish(out_message) @@ -41,8 +46,18 @@ def serialise_answ( return bytes(output_buffer) -Response = namedtuple( - "Response", ("service_id", "job_id", "command_id", "action", "outcome", "message") +Response = NamedTuple( + "Response", + ( + ("service_id", str), + ("job_id", str), + ("command_id", str), + ("action", ActionType), + ("outcome", ActionOutcome), + ("message", str), + ("status_code", int), + ("stop_time", datetime), + ), ) @@ -50,10 +65,12 @@ def deserialise_answ(buffer: Union[bytearray, bytes]): check_schema_identifier(buffer, FILE_IDENTIFIER) answ_message = ActionResponse.ActionResponse.GetRootAsActionResponse(buffer, 0) return Response( - answ_message.ServiceId().decode("utf-8"), - answ_message.JobId().decode("utf-8"), - answ_message.CommandId().decode("utf-8"), - answ_message.Action(), - answ_message.Outcome(), - answ_message.Message().decode("utf-8"), + service_id=answ_message.ServiceId().decode("utf-8"), + job_id=answ_message.JobId().decode("utf-8"), + command_id=answ_message.CommandId().decode("utf-8"), + action=answ_message.Action(), + outcome=answ_message.Outcome(), + message=answ_message.Message().decode("utf-8"), + status_code=answ_message.StatusCode(), + stop_time=datetime.fromtimestamp(answ_message.StopTime() / 1000), ) diff --git a/streaming_data_types/finished_writing_wrdn.py b/streaming_data_types/finished_writing_wrdn.py index ee2710b..62933d7 100644 --- a/streaming_data_types/finished_writing_wrdn.py +++ b/streaming_data_types/finished_writing_wrdn.py @@ -2,7 +2,7 @@ import flatbuffers from streaming_data_types.fbschemas.finished_writing_wrdn import FinishedWriting from 
streaming_data_types.utils import check_schema_identifier -from collections import namedtuple +from typing import NamedTuple FILE_IDENTIFIER = b"wrdn" @@ -45,9 +45,16 @@ def serialise_wrdn( return bytes(buffer) -WritingFinished = namedtuple( +WritingFinished = NamedTuple( "FinishedWriting", - ("service_id", "job_id", "error_encountered", "file_name", "metadata", "message"), + ( + ("service_id", str), + ("job_id", str), + ("error_encountered", bool), + ("file_name", str), + ("metadata", str), + ("message", str), + ), ) @@ -65,10 +72,10 @@ def deserialise_wrdn(buffer: Union[bytearray, bytes]) -> FinishedWriting: message = finished_writing.Message() if finished_writing.Message() else b"" return WritingFinished( - service_id.decode(), - job_id.decode(), - has_error, - file_name.decode(), - metadata.decode(), - message.decode(), + service_id=service_id.decode(), + job_id=job_id.decode(), + error_encountered=has_error, + file_name=file_name.decode(), + metadata=metadata.decode(), + message=message.decode(), ) diff --git a/streaming_data_types/run_start_pl72.py b/streaming_data_types/run_start_pl72.py index 9aa834b..bb9ca2c 100644 --- a/streaming_data_types/run_start_pl72.py +++ b/streaming_data_types/run_start_pl72.py @@ -3,7 +3,7 @@ import flatbuffers from streaming_data_types.fbschemas.run_start_pl72 import RunStart from streaming_data_types.utils import check_schema_identifier -from collections import namedtuple +from typing import NamedTuple FILE_IDENTIFIER = b"pl72" @@ -61,19 +61,19 @@ def serialise_pl72( return bytes(buffer) -RunStartInfo = namedtuple( +RunStartInfo = NamedTuple( "RunStartInfo", ( - "job_id", - "filename", - "start_time", - "stop_time", - "run_name", - "nexus_structure", - "service_id", - "instrument_name", - "broker", - "metadata", + ("job_id", str), + ("filename", str), + ("start_time", int), + ("stop_time", int), + ("run_name", str), + ("nexus_structure", str), + ("service_id", str), + ("instrument_name", str), + ("broker", str), + ("metadata", 
str), ), ) @@ -92,14 +92,14 @@ def deserialise_pl72(buffer: Union[bytearray, bytes]) -> RunStartInfo: metadata = run_start.Metadata() if run_start.Metadata() else b"" return RunStartInfo( - job_id.decode(), - filename.decode(), - run_start.StartTime(), - run_start.StopTime(), - run_name.decode(), - nexus_structure.decode(), - service_id.decode(), - instrument_name.decode(), - broker.decode(), - metadata.decode(), + job_id=job_id.decode(), + filename=filename.decode(), + start_time=run_start.StartTime(), + stop_time=run_start.StopTime(), + run_name=run_name.decode(), + nexus_structure=nexus_structure.decode(), + service_id=service_id.decode(), + instrument_name=instrument_name.decode(), + broker=broker.decode(), + metadata=metadata.decode(), ) diff --git a/streaming_data_types/run_stop_6s4t.py b/streaming_data_types/run_stop_6s4t.py index 70cc34f..4dd4d82 100644 --- a/streaming_data_types/run_stop_6s4t.py +++ b/streaming_data_types/run_stop_6s4t.py @@ -2,7 +2,7 @@ import flatbuffers from streaming_data_types.fbschemas.run_stop_6s4t import RunStop from streaming_data_types.utils import check_schema_identifier -from collections import namedtuple +from typing import NamedTuple FILE_IDENTIFIER = b"6s4t" @@ -43,8 +43,15 @@ def serialise_6s4t( return bytes(buffer) -RunStopInfo = namedtuple( - "RunStopInfo", ("stop_time", "run_name", "job_id", "service_id", "command_id") +RunStopInfo = NamedTuple( + "RunStopInfo", + ( + ("stop_time", int), + ("run_name", str), + ("job_id", str), + ("service_id", str), + ("command_id", str), + ), ) @@ -59,9 +66,9 @@ def deserialise_6s4t(buffer: Union[bytearray, bytes]) -> RunStopInfo: command_id = run_stop.CommandId() return RunStopInfo( - stop_time, - run_name.decode(), - job_id.decode(), - service_id.decode(), - command_id.decode(), + stop_time=stop_time, + run_name=run_name.decode(), + job_id=job_id.decode(), + service_id=service_id.decode(), + command_id=command_id.decode(), ) diff --git a/tests/test_answ.py b/tests/test_answ.py index 
de88adc..f9f5d9d 100644 --- a/tests/test_answ.py +++ b/tests/test_answ.py @@ -6,6 +6,7 @@ ActionOutcome, ) from streaming_data_types import SERIALISERS, DESERIALISERS +from datetime import datetime class TestSerialisationAnsw: @@ -20,6 +21,10 @@ def test_serialise_and_deserialise_answ_message(self): "action": ActionType.SetStopTime, "outcome": ActionOutcome.Failure, "message": "some random error message", + "status_code": 123456789, + "stop_time": datetime( + year=2021, month=2, day=12, hour=2, minute=12, second=12 + ), } buf = serialise_answ(**original_entry) @@ -31,6 +36,8 @@ def test_serialise_and_deserialise_answ_message(self): assert entry.message == original_entry["message"] assert entry.action == original_entry["action"] assert entry.outcome == original_entry["outcome"] + assert entry.status_code == original_entry["status_code"] + assert entry.stop_time == original_entry["stop_time"] def test_if_buffer_has_wrong_id_then_throws(self): original_entry = { @@ -40,6 +47,10 @@ def test_if_buffer_has_wrong_id_then_throws(self): "action": ActionType.SetStopTime, "outcome": ActionOutcome.Failure, "message": "some random error message", + "status_code": 123456789, + "stop_time": datetime( + year=2021, month=2, day=12, hour=2, minute=12, second=12 + ), } buf = serialise_answ(**original_entry) From 830dd17cfdc511d8689769fbe3d3c3faa80df22a Mon Sep 17 00:00:00 2001 From: Jonas Nilsson Date: Tue, 20 Oct 2020 10:34:06 +0200 Subject: [PATCH 182/363] Code review changes. 
--- streaming_data_types/finished_writing_wrdn.py | 25 +++++++++++-------- 1 file changed, 15 insertions(+), 10 deletions(-) diff --git a/streaming_data_types/finished_writing_wrdn.py b/streaming_data_types/finished_writing_wrdn.py index 62933d7..9040568 100644 --- a/streaming_data_types/finished_writing_wrdn.py +++ b/streaming_data_types/finished_writing_wrdn.py @@ -3,6 +3,7 @@ from streaming_data_types.fbschemas.finished_writing_wrdn import FinishedWriting from streaming_data_types.utils import check_schema_identifier from typing import NamedTuple +from typing import Union FILE_IDENTIFIER = b"wrdn" @@ -12,17 +13,17 @@ def serialise_wrdn( job_id: str, error_encountered: bool, file_name: str, - metadata: str = "", - message: str = "", + metadata: Union[str, None] = None, + message: Union[str, None] = None, ) -> bytes: builder = flatbuffers.Builder(500) service_id_offset = builder.CreateString(service_id) job_id_offset = builder.CreateString(job_id) file_name_offset = builder.CreateString(file_name) - if metadata: + if metadata is not None: metadata_offset = builder.CreateString(metadata) - if message: + if message is not None: message_offset = builder.CreateString(message) # Build the actual buffer @@ -52,8 +53,8 @@ def serialise_wrdn( ("job_id", str), ("error_encountered", bool), ("file_name", str), - ("metadata", str), - ("message", str), + ("metadata", Union[str, None]), + ("message", Union[str, None]), ), ) @@ -68,14 +69,18 @@ def deserialise_wrdn(buffer: Union[bytearray, bytes]) -> FinishedWriting: job_id = finished_writing.JobId() has_error = finished_writing.ErrorEncountered() file_name = finished_writing.FileName() if finished_writing.FileName() else b"" - metadata = finished_writing.Metadata() if finished_writing.Metadata() else b"" - message = finished_writing.Message() if finished_writing.Message() else b"" + metadata = ( + finished_writing.Metadata().decode() if finished_writing.Metadata() else None + ) + message = ( + 
finished_writing.Message().decode() if finished_writing.Message() else None + ) return WritingFinished( service_id=service_id.decode(), job_id=job_id.decode(), error_encountered=has_error, file_name=file_name.decode(), - metadata=metadata.decode(), - message=message.decode(), + metadata=metadata, + message=message, ) From 541b00035772904e41b5a6b1e024b5073dbdbcb0 Mon Sep 17 00:00:00 2001 From: Jonas Nilsson Date: Tue, 20 Oct 2020 10:42:01 +0200 Subject: [PATCH 183/363] Minor typing change. --- streaming_data_types/finished_writing_wrdn.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/streaming_data_types/finished_writing_wrdn.py b/streaming_data_types/finished_writing_wrdn.py index 9040568..82b54e7 100644 --- a/streaming_data_types/finished_writing_wrdn.py +++ b/streaming_data_types/finished_writing_wrdn.py @@ -3,7 +3,7 @@ from streaming_data_types.fbschemas.finished_writing_wrdn import FinishedWriting from streaming_data_types.utils import check_schema_identifier from typing import NamedTuple -from typing import Union +from typing import Optional FILE_IDENTIFIER = b"wrdn" @@ -13,8 +13,8 @@ def serialise_wrdn( job_id: str, error_encountered: bool, file_name: str, - metadata: Union[str, None] = None, - message: Union[str, None] = None, + metadata: Optional[str] = None, + message: Optional[str] = None, ) -> bytes: builder = flatbuffers.Builder(500) @@ -53,8 +53,8 @@ def serialise_wrdn( ("job_id", str), ("error_encountered", bool), ("file_name", str), - ("metadata", Union[str, None]), - ("message", Union[str, None]), + ("metadata", Optional[str]), + ("message", Optional[str]), ), ) From 70ac0d73974d046233de5823d9268d9a63bd7dbb Mon Sep 17 00:00:00 2001 From: Dominic Oram Date: Tue, 20 Oct 2020 16:59:44 +0100 Subject: [PATCH 184/363] Added deserialiser for NDArray --- streaming_data_types/area_detector_NDAr.py | 34 +++++ .../fbschemas/NDAr_NDArray_schema/DType.py | 17 +++ .../fbschemas/NDAr_NDArray_schema/NDArray.py | 128 ++++++++++++++++++ 
.../NDAr_NDArray_schema/NDAttribute.py | 78 +++++++++++ .../fbschemas/NDAr_NDArray_schema/__init__.py | 0 .../NDAr_NDArray_schema/epicsTimeStamp.py | 23 ++++ 6 files changed, 280 insertions(+) create mode 100644 streaming_data_types/area_detector_NDAr.py create mode 100644 streaming_data_types/fbschemas/NDAr_NDArray_schema/DType.py create mode 100644 streaming_data_types/fbschemas/NDAr_NDArray_schema/NDArray.py create mode 100644 streaming_data_types/fbschemas/NDAr_NDArray_schema/NDAttribute.py create mode 100644 streaming_data_types/fbschemas/NDAr_NDArray_schema/__init__.py create mode 100644 streaming_data_types/fbschemas/NDAr_NDArray_schema/epicsTimeStamp.py diff --git a/streaming_data_types/area_detector_NDAr.py b/streaming_data_types/area_detector_NDAr.py new file mode 100644 index 0000000..5418a38 --- /dev/null +++ b/streaming_data_types/area_detector_NDAr.py @@ -0,0 +1,34 @@ +from typing import Union +from streaming_data_types.fbschemas.NDAr_NDArray_schema import NDArray, NDAttribute +from streaming_data_types.utils import check_schema_identifier +from collections import namedtuple + +FILE_IDENTIFIER = b"NDAr" + + +nd_Array = namedtuple( + "NDArray", + ( + "id", + "timestamp", + "dims", + "data", + ), +) + + +def deserialise_ndar(buffer: Union[bytearray, bytes]) -> NDArray: + check_schema_identifier(buffer, FILE_IDENTIFIER) + + nd_array = NDArray.NDArray.GetRootAsNDArray(buffer, 0) + id = nd_array.Id() if nd_array.Id() else b"" + timestamp = nd_array.TimeStamp() if nd_array.TimeStamp() else b"" + dims = nd_array.DimsAsNumpy() + data = nd_array.PDataAsNumpy() + + return nd_Array( + id=id, + timestamp=timestamp, + dims=dims, + data=data, + ) diff --git a/streaming_data_types/fbschemas/NDAr_NDArray_schema/DType.py b/streaming_data_types/fbschemas/NDAr_NDArray_schema/DType.py new file mode 100644 index 0000000..8b46fce --- /dev/null +++ b/streaming_data_types/fbschemas/NDAr_NDArray_schema/DType.py @@ -0,0 +1,17 @@ +# automatically generated by the FlatBuffers 
compiler, do not modify + +# namespace: FB_Tables + +class DType(object): + Int8 = 0 + Uint8 = 1 + Int16 = 2 + Uint16 = 3 + Int32 = 4 + Uint32 = 5 + Int64 = 6 + Uint64 = 7 + Float32 = 8 + Float64 = 9 + c_string = 10 + diff --git a/streaming_data_types/fbschemas/NDAr_NDArray_schema/NDArray.py b/streaming_data_types/fbschemas/NDAr_NDArray_schema/NDArray.py new file mode 100644 index 0000000..013dd1a --- /dev/null +++ b/streaming_data_types/fbschemas/NDAr_NDArray_schema/NDArray.py @@ -0,0 +1,128 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: FB_Tables + +import flatbuffers + +class NDArray(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAsNDArray(cls, buf, offset): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = NDArray() + x.Init(buf, n + offset) + return x + + # NDArray + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # NDArray + def Id(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) + return 0 + + # NDArray + def TimeStamp(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Float64Flags, o + self._tab.Pos) + return 0.0 + + # NDArray + def EpicsTS(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + x = o + self._tab.Pos + from .epicsTimeStamp import epicsTimeStamp + obj = epicsTimeStamp() + obj.Init(self._tab.Bytes, x) + return obj + return None + + # NDArray + def Dims(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get(flatbuffers.number_types.Uint64Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8)) + return 0 + + # NDArray + def DimsAsNumpy(self): + o = 
flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint64Flags, o) + return 0 + + # NDArray + def DimsLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # NDArray + def DataType(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) + return 0 + + # NDArray + def PData(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get(flatbuffers.number_types.Uint8Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1)) + return 0 + + # NDArray + def PDataAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint8Flags, o) + return 0 + + # NDArray + def PDataLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # NDArray + def PAttributeList(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16)) + if o != 0: + x = self._tab.Vector(o) + x += flatbuffers.number_types.UOffsetTFlags.py_type(j) * 4 + x = self._tab.Indirect(x) + from .NDAttribute import NDAttribute + obj = NDAttribute() + obj.Init(self._tab.Bytes, x) + return obj + return None + + # NDArray + def PAttributeListLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + +def NDArrayStart(builder): builder.StartObject(7) +def NDArrayAddId(builder, id): builder.PrependInt32Slot(0, id, 0) +def NDArrayAddTimeStamp(builder, timeStamp): builder.PrependFloat64Slot(1, timeStamp, 0.0) +def 
NDArrayAddEpicsTS(builder, epicsTS): builder.PrependStructSlot(2, flatbuffers.number_types.UOffsetTFlags.py_type(epicsTS), 0) +def NDArrayAddDims(builder, dims): builder.PrependUOffsetTRelativeSlot(3, flatbuffers.number_types.UOffsetTFlags.py_type(dims), 0) +def NDArrayStartDimsVector(builder, numElems): return builder.StartVector(8, numElems, 8) +def NDArrayAddDataType(builder, dataType): builder.PrependInt8Slot(4, dataType, 0) +def NDArrayAddPData(builder, pData): builder.PrependUOffsetTRelativeSlot(5, flatbuffers.number_types.UOffsetTFlags.py_type(pData), 0) +def NDArrayStartPDataVector(builder, numElems): return builder.StartVector(1, numElems, 1) +def NDArrayAddPAttributeList(builder, pAttributeList): builder.PrependUOffsetTRelativeSlot(6, flatbuffers.number_types.UOffsetTFlags.py_type(pAttributeList), 0) +def NDArrayStartPAttributeListVector(builder, numElems): return builder.StartVector(4, numElems, 4) +def NDArrayEnd(builder): return builder.EndObject() diff --git a/streaming_data_types/fbschemas/NDAr_NDArray_schema/NDAttribute.py b/streaming_data_types/fbschemas/NDAr_NDArray_schema/NDAttribute.py new file mode 100644 index 0000000..901e520 --- /dev/null +++ b/streaming_data_types/fbschemas/NDAr_NDArray_schema/NDAttribute.py @@ -0,0 +1,78 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: FB_Tables + +import flatbuffers + +class NDAttribute(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAsNDAttribute(cls, buf, offset): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = NDAttribute() + x.Init(buf, n + offset) + return x + + # NDAttribute + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # NDAttribute + def PName(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + # NDAttribute + def PDescription(self): + o = 
flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + # NDAttribute + def PSource(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + # NDAttribute + def DataType(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) + return 0 + + # NDAttribute + def PData(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get(flatbuffers.number_types.Uint8Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1)) + return 0 + + # NDAttribute + def PDataAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint8Flags, o) + return 0 + + # NDAttribute + def PDataLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + +def NDAttributeStart(builder): builder.StartObject(5) +def NDAttributeAddPName(builder, pName): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(pName), 0) +def NDAttributeAddPDescription(builder, pDescription): builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(pDescription), 0) +def NDAttributeAddPSource(builder, pSource): builder.PrependUOffsetTRelativeSlot(2, flatbuffers.number_types.UOffsetTFlags.py_type(pSource), 0) +def NDAttributeAddDataType(builder, dataType): builder.PrependInt8Slot(3, dataType, 0) +def NDAttributeAddPData(builder, pData): builder.PrependUOffsetTRelativeSlot(4, flatbuffers.number_types.UOffsetTFlags.py_type(pData), 0) +def NDAttributeStartPDataVector(builder, numElems): 
return builder.StartVector(1, numElems, 1) +def NDAttributeEnd(builder): return builder.EndObject() diff --git a/streaming_data_types/fbschemas/NDAr_NDArray_schema/__init__.py b/streaming_data_types/fbschemas/NDAr_NDArray_schema/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/streaming_data_types/fbschemas/NDAr_NDArray_schema/epicsTimeStamp.py b/streaming_data_types/fbschemas/NDAr_NDArray_schema/epicsTimeStamp.py new file mode 100644 index 0000000..1877faf --- /dev/null +++ b/streaming_data_types/fbschemas/NDAr_NDArray_schema/epicsTimeStamp.py @@ -0,0 +1,23 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: FB_Tables + +import flatbuffers + +class epicsTimeStamp(object): + __slots__ = ['_tab'] + + # epicsTimeStamp + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # epicsTimeStamp + def SecPastEpoch(self): return self._tab.Get(flatbuffers.number_types.Int32Flags, self._tab.Pos + flatbuffers.number_types.UOffsetTFlags.py_type(0)) + # epicsTimeStamp + def Nsec(self): return self._tab.Get(flatbuffers.number_types.Int32Flags, self._tab.Pos + flatbuffers.number_types.UOffsetTFlags.py_type(4)) + +def CreateepicsTimeStamp(builder, secPastEpoch, nsec): + builder.Prep(4, 8) + builder.PrependInt32(nsec) + builder.PrependInt32(secPastEpoch) + return builder.Offset() From d4999399ebe4bc81e4b7e83091e555860901faf0 Mon Sep 17 00:00:00 2001 From: Jonas Nilsson Date: Thu, 22 Oct 2020 07:55:03 +0200 Subject: [PATCH 185/363] Bug workaround, code update and use of datetime. 
--- setup.py | 2 +- streaming_data_types/action_response_answ.py | 8 +++--- .../epics_connection_info_ep00.py | 9 +++---- streaming_data_types/eventdata_ev42.py | 8 +++--- streaming_data_types/finished_writing_wrdn.py | 8 +++--- .../forwarder_config_update_rf5k.py | 8 +++--- streaming_data_types/histogram_hs00.py | 8 +++--- streaming_data_types/logdata_f142.py | 8 +++--- streaming_data_types/nicos_cache_ns10.py | 9 +++---- streaming_data_types/run_start_pl72.py | 25 +++++++++++-------- streaming_data_types/run_stop_6s4t.py | 19 +++++++------- streaming_data_types/status_x5f2.py | 8 +++--- streaming_data_types/timestamps_tdct.py | 10 +++----- 13 files changed, 57 insertions(+), 73 deletions(-) diff --git a/setup.py b/setup.py index 6869ae9..0091d36 100644 --- a/setup.py +++ b/setup.py @@ -15,7 +15,7 @@ setup( name="ess_streaming_data_types", - version="0.9.2", + version="0.9.3", description=DESCRIPTION, long_description=LONG_DESCRIPTION, long_description_content_type="text/markdown", diff --git a/streaming_data_types/action_response_answ.py b/streaming_data_types/action_response_answ.py index 2dee054..f99148d 100644 --- a/streaming_data_types/action_response_answ.py +++ b/streaming_data_types/action_response_answ.py @@ -23,6 +23,7 @@ def serialise_answ( stop_time: datetime, ) -> bytes: builder = flatbuffers.Builder(500) + builder.ForceDefaults(True) service_id_offset = builder.CreateString(service_id) job_id_offset = builder.CreateString(job_id) message_offset = builder.CreateString(message) @@ -39,11 +40,8 @@ def serialise_answ( ActionResponse.ActionResponseAddStopTime(builder, int(stop_time.timestamp() * 1000)) out_message = ActionResponse.ActionResponseEnd(builder) - builder.Finish(out_message) - output_buffer = builder.Output() - output_buffer[4:8] = FILE_IDENTIFIER - - return bytes(output_buffer) + builder.Finish(out_message, file_identifier=FILE_IDENTIFIER) + return bytes(builder.Output()) Response = NamedTuple( diff --git 
a/streaming_data_types/epics_connection_info_ep00.py b/streaming_data_types/epics_connection_info_ep00.py index b59cc16..590fc54 100644 --- a/streaming_data_types/epics_connection_info_ep00.py +++ b/streaming_data_types/epics_connection_info_ep00.py @@ -17,6 +17,7 @@ def serialise_ep00( service_id: Optional[str] = None, ) -> bytes: builder = flatbuffers.Builder(136) + builder.ForceDefaults(True) if service_id is not None: service_id_offset = builder.CreateString(service_id) @@ -30,12 +31,8 @@ def serialise_ep00( EpicsConnectionInfo.EpicsConnectionInfoAddTimestamp(builder, timestamp_ns) end = EpicsConnectionInfo.EpicsConnectionInfoEnd(builder) - builder.Finish(end) - - # Generate the output and replace the file_identifier - buffer = builder.Output() - buffer[4:8] = FILE_IDENTIFIER - return bytes(buffer) + builder.Finish(end, file_identifier=FILE_IDENTIFIER) + return bytes(builder.Output()) EpicsConnection = namedtuple( diff --git a/streaming_data_types/eventdata_ev42.py b/streaming_data_types/eventdata_ev42.py index f503763..da00ffe 100644 --- a/streaming_data_types/eventdata_ev42.py +++ b/streaming_data_types/eventdata_ev42.py @@ -69,6 +69,7 @@ def serialise_ev42( :return: """ builder = flatbuffers.Builder(1024) + builder.ForceDefaults(True) source = builder.CreateString(source_name) @@ -108,9 +109,6 @@ def serialise_ev42( EventMessage.EventMessageAddFacilitySpecificData(builder, isis_data) data = EventMessage.EventMessageEnd(builder) - builder.Finish(data) - # Generate the output and replace the file_identifier - buffer = builder.Output() - buffer[4:8] = FILE_IDENTIFIER - return bytes(buffer) + builder.Finish(data, file_identifier=FILE_IDENTIFIER) + return bytes(builder.Output()) diff --git a/streaming_data_types/finished_writing_wrdn.py b/streaming_data_types/finished_writing_wrdn.py index 82b54e7..0887893 100644 --- a/streaming_data_types/finished_writing_wrdn.py +++ b/streaming_data_types/finished_writing_wrdn.py @@ -17,6 +17,7 @@ def serialise_wrdn( message: 
Optional[str] = None, ) -> bytes: builder = flatbuffers.Builder(500) + builder.ForceDefaults(True) service_id_offset = builder.CreateString(service_id) job_id_offset = builder.CreateString(job_id) @@ -38,12 +39,9 @@ def serialise_wrdn( FinishedWriting.FinishedWritingAddMessage(builder, message_offset) finished_writing_message = FinishedWriting.FinishedWritingEnd(builder) - builder.Finish(finished_writing_message) - # Generate the output and replace the file_identifier - buffer = builder.Output() - buffer[4:8] = FILE_IDENTIFIER - return bytes(buffer) + builder.Finish(finished_writing_message, file_identifier=FILE_IDENTIFIER) + return bytes(builder.Output()) WritingFinished = NamedTuple( diff --git a/streaming_data_types/forwarder_config_update_rf5k.py b/streaming_data_types/forwarder_config_update_rf5k.py index b19db6d..598d0da 100644 --- a/streaming_data_types/forwarder_config_update_rf5k.py +++ b/streaming_data_types/forwarder_config_update_rf5k.py @@ -76,6 +76,7 @@ def serialise_rf5k(config_change: UpdateType, streams: List[StreamInfo]) -> byte :return: """ builder = flatbuffers.Builder(1024) + builder.ForceDefaults(True) if streams: # We have to use multiple loops/list comprehensions here because we cannot create strings after we have @@ -104,9 +105,6 @@ def serialise_rf5k(config_change: UpdateType, streams: List[StreamInfo]) -> byte ConfigUpdate.ConfigUpdateAddStreams(builder, streams_offset) ConfigUpdate.ConfigUpdateAddConfigChange(builder, config_change) data = ConfigUpdate.ConfigUpdateEnd(builder) - builder.Finish(data) - # Generate the output and replace the file_identifier - buffer = builder.Output() - buffer[4:8] = FILE_IDENTIFIER - return bytes(buffer) + builder.Finish(data, file_identifier=FILE_IDENTIFIER) + return bytes(builder.Output()) diff --git a/streaming_data_types/histogram_hs00.py b/streaming_data_types/histogram_hs00.py index 480c84d..b1a8dc1 100644 --- a/streaming_data_types/histogram_hs00.py +++ b/streaming_data_types/histogram_hs00.py @@ 
-117,6 +117,7 @@ def serialise_hs00(histogram): info_offset = None builder = flatbuffers.Builder(1024) + builder.ForceDefaults(True) if "source" in histogram: source_offset = builder.CreateString(histogram["source"]) if "info" in histogram: @@ -176,12 +177,9 @@ def serialise_hs00(histogram): builder, histogram["last_metadata_timestamp"] ) hist_message = EventHistogram.EventHistogramEnd(builder) - builder.Finish(hist_message) - # Generate the output and replace the file_identifier - buffer = builder.Output() - buffer[4:8] = FILE_IDENTIFIER - return bytes(buffer) + builder.Finish(hist_message, file_identifier=FILE_IDENTIFIER) + return bytes(builder.Output()) def _serialise_array(builder, data_len, data): diff --git a/streaming_data_types/logdata_f142.py b/streaming_data_types/logdata_f142.py index 1adbf59..993693c 100644 --- a/streaming_data_types/logdata_f142.py +++ b/streaming_data_types/logdata_f142.py @@ -168,14 +168,14 @@ def _complete_buffer( LogData.LogDataAddSeverity(builder, alarm_severity) log_msg = LogData.LogDataEnd(builder) - builder.Finish(log_msg) - buff = builder.Output() - buff[4:8] = FILE_IDENTIFIER - return buff + + builder.Finish(log_msg, file_identifier=FILE_IDENTIFIER) + return bytes(builder.Output()) def _setup_builder(source_name: str) -> Tuple[flatbuffers.Builder, int]: builder = flatbuffers.Builder(1024) + builder.ForceDefaults(True) source = builder.CreateString(source_name) return builder, source diff --git a/streaming_data_types/nicos_cache_ns10.py b/streaming_data_types/nicos_cache_ns10.py index 8df3ee9..7e7f237 100644 --- a/streaming_data_types/nicos_cache_ns10.py +++ b/streaming_data_types/nicos_cache_ns10.py @@ -11,6 +11,7 @@ def serialise_ns10( key: str, value: str, time_stamp: float = 0, ttl: float = 0, expired: bool = False ): builder = flatbuffers.Builder(128) + builder.ForceDefaults(True) value_offset = builder.CreateString(value) key_offset = builder.CreateString(key) @@ -22,13 +23,9 @@ def serialise_ns10( 
CacheEntry.CacheEntryAddTime(builder, time_stamp) CacheEntry.CacheEntryAddKey(builder, key_offset) cache_entry_message = CacheEntry.CacheEntryEnd(builder) - builder.Finish(cache_entry_message) - # Generate the output and replace the file_identifier - buffer = builder.Output() - buffer[4:8] = FILE_IDENTIFIER - - return bytes(buffer) + builder.Finish(cache_entry_message, file_identifier=FILE_IDENTIFIER) + return bytes(builder.Output()) def deserialise_ns10(buffer): diff --git a/streaming_data_types/run_start_pl72.py b/streaming_data_types/run_start_pl72.py index bb9ca2c..6763028 100644 --- a/streaming_data_types/run_start_pl72.py +++ b/streaming_data_types/run_start_pl72.py @@ -1,9 +1,10 @@ import time -from typing import Optional, Union +from typing import Union import flatbuffers from streaming_data_types.fbschemas.run_start_pl72 import RunStart from streaming_data_types.utils import check_schema_identifier from typing import NamedTuple +from datetime import datetime FILE_IDENTIFIER = b"pl72" @@ -11,8 +12,8 @@ def serialise_pl72( job_id: str, filename: str, - start_time: Optional[int] = None, - stop_time: Optional[int] = None, + start_time: Union[int, datetime, None] = None, + stop_time: Union[int, datetime, None] = None, run_name: str = "test_run", nexus_structure: str = "{}", service_id: str = "", @@ -20,13 +21,18 @@ def serialise_pl72( broker: str = "localhost:9092", metadata: str = "{}", ) -> bytes: - builder = flatbuffers.Builder(136) + builder = flatbuffers.Builder(512) + builder.ForceDefaults(True) - if start_time is None: + if type(start_time) is datetime: + start_time = int(start_time.timestamp() * 1000) + elif start_time is None: start_time = int(time.time() * 1000) if service_id is None: service_id = "" - if stop_time is None: + if type(stop_time) is datetime: + stop_time = int(stop_time.timestamp() * 1000) + elif stop_time is None: stop_time = 0 service_id_offset = builder.CreateString(service_id) @@ -53,12 +59,9 @@ def serialise_pl72( 
RunStart.RunStartAddMetadata(builder, metadata_offset) run_start_message = RunStart.RunStartEnd(builder) - builder.Finish(run_start_message) - # Generate the output and replace the file_identifier - buffer = builder.Output() - buffer[4:8] = FILE_IDENTIFIER - return bytes(buffer) + builder.Finish(run_start_message, file_identifier=FILE_IDENTIFIER) + return bytes(builder.Output()) RunStartInfo = NamedTuple( diff --git a/streaming_data_types/run_stop_6s4t.py b/streaming_data_types/run_stop_6s4t.py index 4dd4d82..a957f10 100644 --- a/streaming_data_types/run_stop_6s4t.py +++ b/streaming_data_types/run_stop_6s4t.py @@ -1,8 +1,9 @@ -from typing import Optional, Union +from typing import Union import flatbuffers from streaming_data_types.fbschemas.run_stop_6s4t import RunStop from streaming_data_types.utils import check_schema_identifier from typing import NamedTuple +from datetime import datetime FILE_IDENTIFIER = b"6s4t" @@ -12,13 +13,16 @@ def serialise_6s4t( run_name: str = "test_run", service_id: str = "", command_id: str = "", - stop_time: Optional[int] = None, + stop_time: Union[int, datetime, None] = None, ) -> bytes: - builder = flatbuffers.Builder(136) + builder = flatbuffers.Builder(500) + builder.ForceDefaults(True) if service_id is None: service_id = "" - if stop_time is None: + if type(stop_time) is datetime: + stop_time = int(stop_time.timestamp() * 1000) + elif stop_time is None: stop_time = 0 service_id_offset = builder.CreateString(service_id) @@ -35,12 +39,9 @@ def serialise_6s4t( RunStop.RunStopAddCommandId(builder, command_id_offset) run_stop_message = RunStop.RunStopEnd(builder) - builder.Finish(run_stop_message) + builder.Finish(run_stop_message, file_identifier=FILE_IDENTIFIER) - # Generate the output and replace the file_identifier - buffer = builder.Output() - buffer[4:8] = FILE_IDENTIFIER - return bytes(buffer) + return bytes(builder.Output()) RunStopInfo = NamedTuple( diff --git a/streaming_data_types/status_x5f2.py 
b/streaming_data_types/status_x5f2.py index 01b9de0..a25d16e 100644 --- a/streaming_data_types/status_x5f2.py +++ b/streaming_data_types/status_x5f2.py @@ -65,6 +65,7 @@ def serialise_x5f2( """ builder = flatbuffers.Builder(1024) + builder.ForceDefaults(True) software_name = builder.CreateString(software_name) software_version = builder.CreateString(software_version) @@ -84,9 +85,6 @@ def serialise_x5f2( Status.StatusAddStatusJson(builder, status_json) data = Status.StatusEnd(builder) - builder.Finish(data) + builder.Finish(data, file_identifier=FILE_IDENTIFIER) - # Generate the output and replace the file_identifier - buffer = builder.Output() - buffer[4:8] = FILE_IDENTIFIER - return bytes(buffer) + return bytes(builder.Output()) diff --git a/streaming_data_types/timestamps_tdct.py b/streaming_data_types/timestamps_tdct.py index fc7fe3f..61e0cf5 100644 --- a/streaming_data_types/timestamps_tdct.py +++ b/streaming_data_types/timestamps_tdct.py @@ -21,7 +21,8 @@ def serialise_tdct( timestamps: Union[np.ndarray, List], sequence_counter: Optional[int] = None, ) -> bytes: - builder = flatbuffers.Builder(136) + builder = flatbuffers.Builder(1024) + builder.ForceDefaults(True) timestamps = np.atleast_1d(np.array(timestamps)).astype(np.uint64) @@ -38,12 +39,9 @@ def serialise_tdct( if sequence_counter is not None: timestampAddSequenceCounter(builder, sequence_counter) timestamps_message = timestampEnd(builder) - builder.Finish(timestamps_message) - # Generate the output and replace the file_identifier - buffer = builder.Output() - buffer[4:8] = FILE_IDENTIFIER - return bytes(buffer) + builder.Finish(timestamps_message, file_identifier=FILE_IDENTIFIER) + return bytes(builder.Output()) Timestamps = namedtuple("Timestamps", ("name", "timestamps", "sequence_counter")) From faa9b4a80ae8cb2a814df86565fdb76697559f56 Mon Sep 17 00:00:00 2001 From: Dominic Oram Date: Thu, 22 Oct 2020 14:14:00 +0100 Subject: [PATCH 186/363] Added NDAr datatype and deserialiser --- 
streaming_data_types/__init__.py | 2 ++ streaming_data_types/area_detector_NDAr.py | 3 +++ 2 files changed, 5 insertions(+) diff --git a/streaming_data_types/__init__.py b/streaming_data_types/__init__.py index 1a20d0c..79cf25e 100644 --- a/streaming_data_types/__init__.py +++ b/streaming_data_types/__init__.py @@ -16,6 +16,7 @@ deserialise_rf5k, serialise_rf5k, ) +from streaming_data_types.area_detector_NDAr import deserialise_ndar SERIALISERS = { @@ -47,4 +48,5 @@ "rf5k": deserialise_rf5k, "answ": deserialise_answ, "wrdn": deserialise_wrdn, + "NDAr": deserialise_ndar, } diff --git a/streaming_data_types/area_detector_NDAr.py b/streaming_data_types/area_detector_NDAr.py index 5418a38..7c0a462 100644 --- a/streaming_data_types/area_detector_NDAr.py +++ b/streaming_data_types/area_detector_NDAr.py @@ -9,6 +9,7 @@ nd_Array = namedtuple( "NDArray", ( + "data_type", "id", "timestamp", "dims", @@ -23,10 +24,12 @@ def deserialise_ndar(buffer: Union[bytearray, bytes]) -> NDArray: nd_array = NDArray.NDArray.GetRootAsNDArray(buffer, 0) id = nd_array.Id() if nd_array.Id() else b"" timestamp = nd_array.TimeStamp() if nd_array.TimeStamp() else b"" + data_type = nd_array.DataType() if nd_array.DataType() else b"" dims = nd_array.DimsAsNumpy() data = nd_array.PDataAsNumpy() return nd_Array( + data_type=data_type, id=id, timestamp=timestamp, dims=dims, From 1e0cbedacecf4e380bf35a96f7ed61e65e5a8c77 Mon Sep 17 00:00:00 2001 From: Dominic Oram Date: Thu, 22 Oct 2020 15:06:14 +0100 Subject: [PATCH 187/363] Added NDAr deserialiser and tests --- streaming_data_types/__init__.py | 3 +- streaming_data_types/area_detector_NDAr.py | 42 +++++++++++++++++++- tests/test_NDAr.py | 46 ++++++++++++++++++++++ 3 files changed, 89 insertions(+), 2 deletions(-) create mode 100644 tests/test_NDAr.py diff --git a/streaming_data_types/__init__.py b/streaming_data_types/__init__.py index 79cf25e..8f0eacd 100644 --- a/streaming_data_types/__init__.py +++ b/streaming_data_types/__init__.py @@ -16,7 +16,7 
@@ deserialise_rf5k, serialise_rf5k, ) -from streaming_data_types.area_detector_NDAr import deserialise_ndar +from streaming_data_types.area_detector_NDAr import deserialise_ndar, serialise_ndar SERIALISERS = { @@ -32,6 +32,7 @@ "rf5k": serialise_rf5k, "answ": serialise_answ, "wrdn": serialise_wrdn, + "NDAr": serialise_ndar, } diff --git a/streaming_data_types/area_detector_NDAr.py b/streaming_data_types/area_detector_NDAr.py index 7c0a462..6fd1ac3 100644 --- a/streaming_data_types/area_detector_NDAr.py +++ b/streaming_data_types/area_detector_NDAr.py @@ -1,11 +1,51 @@ from typing import Union -from streaming_data_types.fbschemas.NDAr_NDArray_schema import NDArray, NDAttribute +import flatbuffers +from streaming_data_types.fbschemas.NDAr_NDArray_schema import NDArray from streaming_data_types.utils import check_schema_identifier from collections import namedtuple +import time FILE_IDENTIFIER = b"NDAr" +def serialise_ndar( + id: str, + dims: list, + data_type: int, + data: list, +) -> bytes: + builder = flatbuffers.Builder(1024) + + # Build dims + NDArray.NDArrayStartDimsVector(builder, len(dims)) + # FlatBuffers builds arrays backwards + for s in reversed(dims): + builder.PrependUint64(s) + dims_offset = builder.EndVector(len(dims)) + + # Build data + NDArray.NDArrayStartPDataVector(builder, len(data)) + # FlatBuffers builds arrays backwards + for s in reversed(data): + builder.PrependUint8(s) + data_offset = builder.EndVector(len(data)) + + # Build the actual buffer + NDArray.NDArrayStart(builder) + NDArray.NDArrayAddDataType(builder, data_type) + NDArray.NDArrayAddDims(builder, dims_offset) + NDArray.NDArrayAddId(builder, id) + NDArray.NDArrayAddPData(builder, data_offset) + NDArray.NDArrayAddTimeStamp(builder, int(time.time() * 1000)) + nd_array_message = NDArray.NDArrayEnd(builder) + builder.Finish(nd_array_message) + + # Generate the output and replace the file_identifier + buffer = builder.Output() + buffer[4:8] = FILE_IDENTIFIER + return bytes(buffer) + + 
nd_Array = namedtuple( "NDArray", ( diff --git a/tests/test_NDAr.py b/tests/test_NDAr.py new file mode 100644 index 0000000..0404568 --- /dev/null +++ b/tests/test_NDAr.py @@ -0,0 +1,46 @@ +import pytest +from streaming_data_types.area_detector_NDAr import serialise_ndar, deserialise_ndar +from streaming_data_types import SERIALISERS, DESERIALISERS +import numpy as np + + +class TestSerialisationNDAr: + def test_serialises_and_deserialises_NDAr_message_correctly(self): + """ + Round-trip to check what we serialise is what we get back. + """ + original_entry = { + "id": 754, + "dims": [10, 10], + "data_type": 1, + "data": [0, 0, 100, 200, 250], + } + + buf = serialise_ndar(**original_entry) + entry = deserialise_ndar(buf) + + assert entry.id == original_entry["id"] + assert np.array_equal(entry.dims, original_entry["dims"]) + assert entry.data_type == original_entry["data_type"] + assert np.array_equal(entry.data, original_entry["data"]) + + def test_if_buffer_has_wrong_id_then_throws(self): + original_entry = { + "id": 754, + "dims": [10, 10], + "data_type": 0, + "data": [0, 0, 100, 200, 300], + } + + buf = serialise_ndar(**original_entry) + + # Manually hack the id + buf = bytearray(buf) + buf[4:8] = b"1234" + + with pytest.raises(RuntimeError): + deserialise_ndar(buf) + + def test_schema_type_is_in_global_serialisers_list(self): + assert "NDAr" in SERIALISERS + assert "NDAr" in DESERIALISERS From 1c07430c6f4edc28554f863b387665ecea687fee Mon Sep 17 00:00:00 2001 From: Dominic Oram Date: Thu, 22 Oct 2020 15:10:13 +0100 Subject: [PATCH 188/363] Add this line to work around a bug in the flat buffers library. 
Co-authored-by: Jonas Nilsson --- streaming_data_types/area_detector_NDAr.py | 1 + 1 file changed, 1 insertion(+) diff --git a/streaming_data_types/area_detector_NDAr.py b/streaming_data_types/area_detector_NDAr.py index 6fd1ac3..8574ca2 100644 --- a/streaming_data_types/area_detector_NDAr.py +++ b/streaming_data_types/area_detector_NDAr.py @@ -15,6 +15,7 @@ def serialise_ndar( data: list, ) -> bytes: builder = flatbuffers.Builder(1024) +builder.ForceDefaults(True) # Build dims NDArray.NDArrayStartDimsVector(builder, len(dims)) From b4214724765b20e34b06cd53b0499f5ef8fd2d9a Mon Sep 17 00:00:00 2001 From: Dominic Oram Date: Thu, 22 Oct 2020 15:18:00 +0100 Subject: [PATCH 189/363] Addressed rework comments --- streaming_data_types/area_detector_NDAr.py | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/streaming_data_types/area_detector_NDAr.py b/streaming_data_types/area_detector_NDAr.py index 8574ca2..b8b0316 100644 --- a/streaming_data_types/area_detector_NDAr.py +++ b/streaming_data_types/area_detector_NDAr.py @@ -15,7 +15,7 @@ def serialise_ndar( data: list, ) -> bytes: builder = flatbuffers.Builder(1024) -builder.ForceDefaults(True) + builder.ForceDefaults(True) # Build dims NDArray.NDArrayStartDimsVector(builder, len(dims)) @@ -39,12 +39,9 @@ def serialise_ndar( NDArray.NDArrayAddPData(builder, data_offset) NDArray.NDArrayAddTimeStamp(builder, int(time.time() * 1000)) nd_array_message = NDArray.NDArrayEnd(builder) - builder.Finish(nd_array_message) - # Generate the output and replace the file_identifier - buffer = builder.Output() - buffer[4:8] = FILE_IDENTIFIER - return bytes(buffer) + builder.Finish(nd_array_message, file_identifier=FILE_IDENTIFIER) + return bytes(builder.Output()) nd_Array = namedtuple( From 6afe028835ce9a7254f64b7119387fc7ec6dc6e9 Mon Sep 17 00:00:00 2001 From: Dominic Oram Date: Thu, 22 Oct 2020 22:01:42 +0100 Subject: [PATCH 190/363] Added type correction in NDAr deserialisation --- 
streaming_data_types/area_detector_NDAr.py | 23 +++++++++------ tests/test_NDAr.py | 34 ++++++++++++++++------ 2 files changed, 39 insertions(+), 18 deletions(-) diff --git a/streaming_data_types/area_detector_NDAr.py b/streaming_data_types/area_detector_NDAr.py index b8b0316..4b5d7ba 100644 --- a/streaming_data_types/area_detector_NDAr.py +++ b/streaming_data_types/area_detector_NDAr.py @@ -4,6 +4,7 @@ from streaming_data_types.utils import check_schema_identifier from collections import namedtuple import time +import numpy as np FILE_IDENTIFIER = b"NDAr" @@ -47,29 +48,33 @@ def serialise_ndar( nd_Array = namedtuple( "NDArray", ( - "data_type", "id", "timestamp", - "dims", "data", ), ) +def get_data(fb_arr): + """ + Converts the data array into the correct type. + """ + raw_data = fb_arr.PDataAsNumpy() + numpy_arr_type = [np.int8, np.uint8, np.int16, np.uint16, np.int32, np.uint32, np.int64, np.uint64, + np.float32, np.float64] + return raw_data.view(numpy_arr_type[fb_arr.DataType()]).reshape(fb_arr.DimsAsNumpy()) + + def deserialise_ndar(buffer: Union[bytearray, bytes]) -> NDArray: check_schema_identifier(buffer, FILE_IDENTIFIER) nd_array = NDArray.NDArray.GetRootAsNDArray(buffer, 0) - id = nd_array.Id() if nd_array.Id() else b"" - timestamp = nd_array.TimeStamp() if nd_array.TimeStamp() else b"" - data_type = nd_array.DataType() if nd_array.DataType() else b"" - dims = nd_array.DimsAsNumpy() - data = nd_array.PDataAsNumpy() + id = nd_array.Id() + timestamp = nd_array.TimeStamp() + data = get_data(nd_array) return nd_Array( - data_type=data_type, id=id, timestamp=timestamp, - dims=dims, data=data, ) diff --git a/tests/test_NDAr.py b/tests/test_NDAr.py index 0404568..f47d2a8 100644 --- a/tests/test_NDAr.py +++ b/tests/test_NDAr.py @@ -1,35 +1,51 @@ import pytest -from streaming_data_types.area_detector_NDAr import serialise_ndar, deserialise_ndar +from streaming_data_types.area_detector_NDAr import serialise_ndar, deserialise_ndar, get_data +from 
streaming_data_types.fbschemas.NDAr_NDArray_schema.DType import DType from streaming_data_types import SERIALISERS, DESERIALISERS import numpy as np class TestSerialisationNDAr: - def test_serialises_and_deserialises_NDAr_message_correctly(self): + def test_serialises_and_deserialises_NDAr_message_correctly_float64_1_pixel(self): """ Round-trip to check what we serialise is what we get back. """ original_entry = { "id": 754, - "dims": [10, 10], - "data_type": 1, - "data": [0, 0, 100, 200, 250], + "dims": [1, 1], + "data_type": DType.Float64, + "data": [54, 78, 100, 156, 43, 1, 23, 0], } buf = serialise_ndar(**original_entry) entry = deserialise_ndar(buf) assert entry.id == original_entry["id"] - assert np.array_equal(entry.dims, original_entry["dims"]) - assert entry.data_type == original_entry["data_type"] - assert np.array_equal(entry.data, original_entry["data"]) + assert np.array_equal(entry.data, [[3.1991794446845865e-308]]) + + def test_serialises_and_deserialises_NDAr_message_correctly_int32_3_pixel(self): + """ + Round-trip to check what we serialise is what we get back. 
+ """ + original_entry = { + "id": 754, + "dims": [1, 3], + "data_type": DType.Int32, + "data": [54, 78, 100, 200, 32, 19, 2, 156, 43, 1, 23, 0], + } + + buf = serialise_ndar(**original_entry) + entry = deserialise_ndar(buf) + + assert entry.id == original_entry["id"] + assert np.array_equal(entry.data, [[-932950474, -1677585632, 1507627]]) def test_if_buffer_has_wrong_id_then_throws(self): original_entry = { "id": 754, "dims": [10, 10], "data_type": 0, - "data": [0, 0, 100, 200, 300], + "data": [0, 0, 100, 200, 250], } buf = serialise_ndar(**original_entry) From d0d05bb5334eee1a05ffeb15238c9b3f2ec4c453 Mon Sep 17 00:00:00 2001 From: Dominic Oram Date: Thu, 22 Oct 2020 22:14:46 +0100 Subject: [PATCH 191/363] Fix flake8 error --- tests/test_NDAr.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_NDAr.py b/tests/test_NDAr.py index f47d2a8..5a1c8ed 100644 --- a/tests/test_NDAr.py +++ b/tests/test_NDAr.py @@ -1,5 +1,5 @@ import pytest -from streaming_data_types.area_detector_NDAr import serialise_ndar, deserialise_ndar, get_data +from streaming_data_types.area_detector_NDAr import serialise_ndar, deserialise_ndar from streaming_data_types.fbschemas.NDAr_NDArray_schema.DType import DType from streaming_data_types import SERIALISERS, DESERIALISERS import numpy as np From 3ecbab03f3d7c010057a0f25714dfb0034075031 Mon Sep 17 00:00:00 2001 From: Dominic Oram Date: Fri, 23 Oct 2020 09:55:47 +0100 Subject: [PATCH 192/363] Added NDAr to README --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index 0ad8e15..08886bd 100644 --- a/README.md +++ b/README.md @@ -20,6 +20,7 @@ https://github.com/ess-dmsc/streaming-data-types |rf5k|Forwarder configuration update|Y| |answ|File-writer command response|n/a| |wrdn|File-writer finished writing|n/a| +|NDAr|NDArray schema for area detector images|n/a| \* whether it passes verification via the C++ FlatBuffers library. 
From 5bb36f53199e630a266cb23cf0ad4be8ce7aec5f Mon Sep 17 00:00:00 2001 From: Jonas Nilsson Date: Tue, 27 Oct 2020 23:57:29 +0100 Subject: [PATCH 193/363] Improve performance of tdct serialiser. --- setup.py | 2 +- streaming_data_types/timestamps_tdct.py | 6 +----- 2 files changed, 2 insertions(+), 6 deletions(-) diff --git a/setup.py b/setup.py index 0091d36..00c51c4 100644 --- a/setup.py +++ b/setup.py @@ -15,7 +15,7 @@ setup( name="ess_streaming_data_types", - version="0.9.3", + version="0.9.4", description=DESCRIPTION, long_description=LONG_DESCRIPTION, long_description_content_type="text/markdown", diff --git a/streaming_data_types/timestamps_tdct.py b/streaming_data_types/timestamps_tdct.py index 61e0cf5..6360542 100644 --- a/streaming_data_types/timestamps_tdct.py +++ b/streaming_data_types/timestamps_tdct.py @@ -4,7 +4,6 @@ timestampAddName, timestampAddTimestamps, timestampAddSequenceCounter, - timestampStartTimestampsVector, timestampEnd, ) import flatbuffers @@ -28,10 +27,7 @@ def serialise_tdct( name_offset = builder.CreateString(name) - timestampStartTimestampsVector(builder, len(timestamps)) - for single_value in reversed(timestamps): - builder.PrependUint64(single_value) - array_offset = builder.EndVector(len(timestamps)) + array_offset = builder.CreateNumpyVector(timestamps) timestampStart(builder) timestampAddName(builder, name_offset) From fe9abed17f1d9eca439bfc9058629b88b9c25019 Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Fri, 30 Oct 2020 08:35:31 +0100 Subject: [PATCH 194/363] Use CreateNumpyVector where appropriate in hs00 --- streaming_data_types/histogram_hs00.py | 67 +++++++++----------------- 1 file changed, 22 insertions(+), 45 deletions(-) diff --git a/streaming_data_types/histogram_hs00.py b/streaming_data_types/histogram_hs00.py index b1a8dc1..2ba6312 100644 --- a/streaming_data_types/histogram_hs00.py +++ b/streaming_data_types/histogram_hs00.py @@ -1,5 +1,3 @@ -from functools import reduce -import operator import flatbuffers 
import numpy import streaming_data_types.fbschemas.histogram_hs00.ArrayFloat as ArrayFloat @@ -93,7 +91,7 @@ def _serialise_metadata(builder, length, edges, unit, label): unit_offset = builder.CreateString(unit) label_offset = builder.CreateString(label) - bins_offset, bin_type = _serialise_array(builder, len(edges), edges) + bins_offset, bin_type = _serialise_array(builder, edges) DimensionMetaData.DimensionMetaDataStart(builder) DimensionMetaData.DimensionMetaDataAddLength(builder, length) @@ -124,12 +122,9 @@ def serialise_hs00(histogram): info_offset = builder.CreateString(histogram["info"]) # Build shape array - rank = len(histogram["current_shape"]) - EventHistogram.EventHistogramStartCurrentShapeVector(builder, rank) - # FlatBuffers builds arrays backwards - for s in reversed(histogram["current_shape"]): - builder.PrependUint32(s) - shape_offset = builder.EndVector(rank) + shape_offset = builder.CreateNumpyVector( + numpy.array(histogram["current_shape"]).astype(numpy.uint32) + ) # Build dimensions metadata metadata = [] @@ -142,6 +137,7 @@ def serialise_hs00(histogram): ) ) + rank = len(histogram["current_shape"]) EventHistogram.EventHistogramStartDimMetadataVector(builder, rank) # FlatBuffers builds arrays backwards for m in reversed(metadata): @@ -149,14 +145,11 @@ def serialise_hs00(histogram): metadata_vector = builder.EndVector(rank) # Build the data - data_len = reduce(operator.mul, histogram["current_shape"], 1) - data_offset, data_type = _serialise_array(builder, data_len, histogram["data"]) + data_offset, data_type = _serialise_array(builder, histogram["data"]) errors_offset = None if "errors" in histogram: - errors_offset, error_type = _serialise_array( - builder, data_len, histogram["errors"] - ) + errors_offset, error_type = _serialise_array(builder, histogram["errors"]) # Build the actual buffer EventHistogram.EventHistogramStart(builder) @@ -182,72 +175,56 @@ def serialise_hs00(histogram): return bytes(builder.Output()) -def 
_serialise_array(builder, data_len, data): +def _serialise_array(builder, data): flattened_data = numpy.asarray(data).flatten() # Carefully preserve explicitly supported types if numpy.issubdtype(flattened_data.dtype, numpy.uint32): - return _serialise_uint32(builder, data_len, flattened_data) + return _serialise_uint32(builder, flattened_data) if numpy.issubdtype(flattened_data.dtype, numpy.uint64): - return _serialise_uint64(builder, data_len, flattened_data) + return _serialise_uint64(builder, flattened_data) if numpy.issubdtype(flattened_data.dtype, numpy.float32): - return _serialise_float(builder, data_len, flattened_data) + return _serialise_float(builder, flattened_data) if numpy.issubdtype(flattened_data.dtype, numpy.float64): - return _serialise_double(builder, data_len, flattened_data) + return _serialise_double(builder, flattened_data) # Otherwise if it looks like an int then use uint64, or use double as last resort if numpy.issubdtype(flattened_data.dtype, numpy.int64): - return _serialise_uint64(builder, data_len, flattened_data) + return _serialise_uint64(builder, flattened_data) - return _serialise_double(builder, data_len, flattened_data) + return _serialise_double(builder, flattened_data) -def _serialise_float(builder, data_len, flattened_data): +def _serialise_float(builder, flattened_data): data_type = Array.ArrayFloat - ArrayFloat.ArrayFloatStartValueVector(builder, data_len) - # FlatBuffers builds arrays backwards - for x in reversed(flattened_data): - builder.PrependFloat32(x) - data_vector = builder.EndVector(data_len) + data_vector = builder.CreateNumpyVector(flattened_data) ArrayFloat.ArrayFloatStart(builder) ArrayFloat.ArrayFloatAddValue(builder, data_vector) data_offset = ArrayFloat.ArrayFloatEnd(builder) return data_offset, data_type -def _serialise_double(builder, data_len, flattened_data): +def _serialise_double(builder, flattened_data): data_type = Array.ArrayDouble - ArrayDouble.ArrayDoubleStartValueVector(builder, data_len) - # 
FlatBuffers builds arrays backwards - for x in reversed(flattened_data): - builder.PrependFloat64(x) - data_vector = builder.EndVector(data_len) + data_vector = builder.CreateNumpyVector(flattened_data) ArrayDouble.ArrayDoubleStart(builder) ArrayDouble.ArrayDoubleAddValue(builder, data_vector) data_offset = ArrayDouble.ArrayDoubleEnd(builder) return data_offset, data_type -def _serialise_uint32(builder, data_len, flattened_data): +def _serialise_uint32(builder, flattened_data): data_type = Array.ArrayUInt - ArrayUInt.ArrayUIntStartValueVector(builder, data_len) - # FlatBuffers builds arrays backwards - for x in reversed(flattened_data): - builder.PrependUint32(x) - data_vector = builder.EndVector(data_len) + data_vector = builder.CreateNumpyVector(flattened_data) ArrayUInt.ArrayUIntStart(builder) ArrayUInt.ArrayUIntAddValue(builder, data_vector) data_offset = ArrayUInt.ArrayUIntEnd(builder) return data_offset, data_type -def _serialise_uint64(builder, data_len, flattened_data): +def _serialise_uint64(builder, flattened_data): data_type = Array.ArrayULong - ArrayULong.ArrayULongStartValueVector(builder, data_len) - # FlatBuffers builds arrays backwards - for x in reversed(flattened_data): - builder.PrependUint64(x) - data_vector = builder.EndVector(data_len) + data_vector = builder.CreateNumpyVector(flattened_data) ArrayULong.ArrayULongStart(builder) ArrayULong.ArrayULongAddValue(builder, data_vector) data_offset = ArrayULong.ArrayULongEnd(builder) From 0160ae8c819584d0fa6ee943a9a5f21a99795327 Mon Sep 17 00:00:00 2001 From: Matthew D Jones Date: Fri, 30 Oct 2020 09:46:21 +0000 Subject: [PATCH 195/363] Specify flatbuffers 1.12 needed in install require --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 00c51c4..d1633be 100644 --- a/setup.py +++ b/setup.py @@ -24,6 +24,6 @@ license="BSD 2-Clause License", packages=find_packages(exclude=["tests", "tests.*"]), python_requires=">=3.6.0", - 
install_requires=["flatbuffers", "numpy"], + install_requires=["flatbuffers>=1.12", "numpy"], extras_require={"dev": ["flake8", "pre-commit", "pytest", "tox"]}, ) From b1514eec7d599ebb5cd62deaf5a9adc2b115c9f8 Mon Sep 17 00:00:00 2001 From: Matthew D Jones Date: Fri, 30 Oct 2020 09:57:06 +0000 Subject: [PATCH 196/363] Update version --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index d1633be..57d9378 100644 --- a/setup.py +++ b/setup.py @@ -15,7 +15,7 @@ setup( name="ess_streaming_data_types", - version="0.9.4", + version="0.9.5", description=DESCRIPTION, long_description=LONG_DESCRIPTION, long_description_content_type="text/markdown", From af13336f97655150d2d25b35cc904c44f0e94b4a Mon Sep 17 00:00:00 2001 From: Jonas Nilsson Date: Wed, 4 Nov 2020 11:08:33 +0100 Subject: [PATCH 197/363] Work around for datetime limitations. --- streaming_data_types/action_response_answ.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/streaming_data_types/action_response_answ.py b/streaming_data_types/action_response_answ.py index f99148d..87d8d55 100644 --- a/streaming_data_types/action_response_answ.py +++ b/streaming_data_types/action_response_answ.py @@ -62,6 +62,10 @@ def serialise_answ( def deserialise_answ(buffer: Union[bytearray, bytes]): check_schema_identifier(buffer, FILE_IDENTIFIER) answ_message = ActionResponse.ActionResponse.GetRootAsActionResponse(buffer, 0) + max_time = datetime(year=3000, month=1, day=1, hour=0, minute=0, second=0).timestamp() + used_timestamp = answ_message.StopTime() / 1000 + if used_timestamp > max_time: + used_timestamp = max_time return Response( service_id=answ_message.ServiceId().decode("utf-8"), job_id=answ_message.JobId().decode("utf-8"), @@ -70,5 +74,5 @@ def deserialise_answ(buffer: Union[bytearray, bytes]): outcome=answ_message.Outcome(), message=answ_message.Message().decode("utf-8"), status_code=answ_message.StatusCode(), - 
stop_time=datetime.fromtimestamp(answ_message.StopTime() / 1000), + stop_time=datetime.fromtimestamp(used_timestamp), ) From f8b8c291d003c67ec6e8702f0281faf72bf6b159 Mon Sep 17 00:00:00 2001 From: Jonas Nilsson Date: Sat, 14 Nov 2020 00:07:19 +0100 Subject: [PATCH 198/363] Update streaming_data_types/action_response_answ.py --- streaming_data_types/action_response_answ.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/streaming_data_types/action_response_answ.py b/streaming_data_types/action_response_answ.py index 87d8d55..2b8a526 100644 --- a/streaming_data_types/action_response_answ.py +++ b/streaming_data_types/action_response_answ.py @@ -62,7 +62,7 @@ def serialise_answ( def deserialise_answ(buffer: Union[bytearray, bytes]): check_schema_identifier(buffer, FILE_IDENTIFIER) answ_message = ActionResponse.ActionResponse.GetRootAsActionResponse(buffer, 0) - max_time = datetime(year=3000, month=1, day=1, hour=0, minute=0, second=0).timestamp() + max_time = datetime(year=9000, month=1, day=1, hour=0, minute=0, second=0).timestamp() used_timestamp = answ_message.StopTime() / 1000 if used_timestamp > max_time: used_timestamp = max_time From aa762e1ffe42da5117be7f572166b8af056977e0 Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Wed, 18 Nov 2020 12:09:01 +0100 Subject: [PATCH 199/363] Use CreateNumpyVector where appropriate --- streaming_data_types/exceptions.py | 10 +++++++++ streaming_data_types/utils.py | 7 +++++- tests/test_utils.py | 9 ++++++++ tests/test_x52f.py | 35 ++++++++++++------------------ 4 files changed, 39 insertions(+), 22 deletions(-) create mode 100644 streaming_data_types/exceptions.py create mode 100644 tests/test_utils.py diff --git a/streaming_data_types/exceptions.py b/streaming_data_types/exceptions.py new file mode 100644 index 0000000..0efb440 --- /dev/null +++ b/streaming_data_types/exceptions.py @@ -0,0 +1,10 @@ +class StreamingDataTypesException(Exception): + pass + + +class 
WrongSchemaException(StreamingDataTypesException): + pass + + +class ShortBufferException(StreamingDataTypesException): + pass diff --git a/streaming_data_types/utils.py b/streaming_data_types/utils.py index c56d312..22f8234 100644 --- a/streaming_data_types/utils.py +++ b/streaming_data_types/utils.py @@ -1,3 +1,6 @@ +from streaming_data_types.exceptions import ShortBufferException, WrongSchemaException + + def _get_schema(buffer) -> str: """ Extract the schema code embedded in the buffer @@ -5,6 +8,8 @@ def _get_schema(buffer) -> str: :param buffer: The raw buffer of the FlatBuffers message. :return: The schema identifier """ + if len(buffer) < 8: + raise ShortBufferException("Could not retrieve schema as buffer too short") return buffer[4:8].decode("utf-8") @@ -16,6 +21,6 @@ def check_schema_identifier(buffer, expected_identifer: bytes): :param expected_identifer: The expected flatbuffer identifier """ if _get_schema(buffer) != expected_identifer.decode(): - raise RuntimeError( + raise WrongSchemaException( f"Incorrect schema: expected {expected_identifer} but got {_get_schema(buffer)}" ) diff --git a/tests/test_utils.py b/tests/test_utils.py new file mode 100644 index 0000000..93f62d2 --- /dev/null +++ b/tests/test_utils.py @@ -0,0 +1,9 @@ +import pytest +from streaming_data_types.exceptions import ShortBufferException +from streaming_data_types.utils import check_schema_identifier + + +def test_schema_check_throws_if_buffer_too_short(): + short_buffer = b"1234567" + with pytest.raises(ShortBufferException): + check_schema_identifier(short_buffer, b"1234") diff --git a/tests/test_x52f.py b/tests/test_x52f.py index c7942c1..961d006 100644 --- a/tests/test_x52f.py +++ b/tests/test_x52f.py @@ -1,23 +1,26 @@ import pytest + +from streaming_data_types.exceptions import WrongSchemaException from streaming_data_types.status_x5f2 import serialise_x5f2, deserialise_x5f2 from streaming_data_types import SERIALISERS, DESERIALISERS +original_entry = { + "software_name": 
"nicos/test", + "software_version": "1.0.0", + "service_id": "1a2b3c", + "host_name": "localhost", + "process_id": 1234, + "update_interval": 0, + "status_json": '{"content" : "log_or_status_message"}', +} + + class TestSerialisationX52f: def test_serialises_and_deserialises_x5f2_message_correctly(self): """ Round-trip to check what we serialise is what we get back. """ - original_entry = { - "software_name": "nicos/test", - "software_version": "1.0.0", - "service_id": "1a2b3c", - "host_name": "localhost", - "process_id": 1234, - "update_interval": 0, - "status_json": '{"content" : "log_or_status_message"}', - } - buf = serialise_x5f2(**original_entry) entry = deserialise_x5f2(buf) @@ -30,23 +33,13 @@ def test_serialises_and_deserialises_x5f2_message_correctly(self): assert entry.status_json == original_entry["status_json"] def test_if_buffer_has_wrong_id_then_throws(self): - original_entry = { - "software_name": "nicos/test", - "software_version": "1.0.0", - "service_id": "1a2b3c", - "host_name": "localhost", - "process_id": 1234, - "update_interval": 0, - "status_json": '{"content" : "log_or_status_message"}', - } - buf = serialise_x5f2(**original_entry) # Manually hack the id buf = bytearray(buf) buf[4:8] = b"1234" - with pytest.raises(RuntimeError): + with pytest.raises(WrongSchemaException): deserialise_x5f2(buf) def test_schema_type_is_in_global_serialisers_list(self): From 552e659dcf34720256ebebcae359f197435b90b8 Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Wed, 18 Nov 2020 12:17:10 +0100 Subject: [PATCH 200/363] Update tests --- tests/test_6s4t.py | 4 +++- tests/test_NDAr.py | 3 ++- tests/test_answ.py | 5 +++-- tests/test_ep00.py | 3 ++- tests/test_ev42.py | 3 ++- tests/test_f142.py | 3 ++- tests/test_hs00.py | 3 ++- tests/test_ns10.py | 3 ++- tests/test_pl72.py | 3 ++- tests/test_rf5k.py | 3 ++- tests/test_tdct.py | 4 +++- tests/test_wrdn.py | 3 ++- tests/test_x52f.py | 1 - 13 files changed, 27 insertions(+), 14 deletions(-) diff --git 
a/tests/test_6s4t.py b/tests/test_6s4t.py index 54d747b..7ec16f9 100644 --- a/tests/test_6s4t.py +++ b/tests/test_6s4t.py @@ -1,4 +1,6 @@ import pytest + +from streaming_data_types.exceptions import WrongSchemaException from streaming_data_types.run_stop_6s4t import serialise_6s4t, deserialise_6s4t from streaming_data_types import SERIALISERS, DESERIALISERS @@ -29,7 +31,7 @@ def test_if_buffer_has_wrong_id_then_throws(self): buf = bytearray(buf) buf[4:8] = b"1234" - with pytest.raises(RuntimeError): + with pytest.raises(WrongSchemaException): deserialise_6s4t(buf) def test_schema_type_is_in_global_serialisers_list(self): diff --git a/tests/test_NDAr.py b/tests/test_NDAr.py index 5a1c8ed..8c914cf 100644 --- a/tests/test_NDAr.py +++ b/tests/test_NDAr.py @@ -1,5 +1,6 @@ import pytest from streaming_data_types.area_detector_NDAr import serialise_ndar, deserialise_ndar +from streaming_data_types.exceptions import WrongSchemaException from streaming_data_types.fbschemas.NDAr_NDArray_schema.DType import DType from streaming_data_types import SERIALISERS, DESERIALISERS import numpy as np @@ -54,7 +55,7 @@ def test_if_buffer_has_wrong_id_then_throws(self): buf = bytearray(buf) buf[4:8] = b"1234" - with pytest.raises(RuntimeError): + with pytest.raises(WrongSchemaException): deserialise_ndar(buf) def test_schema_type_is_in_global_serialisers_list(self): diff --git a/tests/test_answ.py b/tests/test_answ.py index f9f5d9d..3c19695 100644 --- a/tests/test_answ.py +++ b/tests/test_answ.py @@ -1,3 +1,4 @@ +from datetime import datetime import pytest from streaming_data_types.action_response_answ import ( serialise_answ, @@ -6,7 +7,7 @@ ActionOutcome, ) from streaming_data_types import SERIALISERS, DESERIALISERS -from datetime import datetime +from streaming_data_types.exceptions import WrongSchemaException class TestSerialisationAnsw: @@ -59,7 +60,7 @@ def test_if_buffer_has_wrong_id_then_throws(self): buf = bytearray(buf) buf[4:8] = b"1234" - with pytest.raises(RuntimeError): + 
with pytest.raises(WrongSchemaException): deserialise_answ(buf) def test_schema_type_is_in_global_serialisers_list(self): diff --git a/tests/test_ep00.py b/tests/test_ep00.py index 34857e8..2abfa31 100644 --- a/tests/test_ep00.py +++ b/tests/test_ep00.py @@ -1,4 +1,5 @@ import pytest +from streaming_data_types.exceptions import WrongSchemaException from streaming_data_types.fbschemas.epics_connection_info_ep00 import EventType from streaming_data_types.epics_connection_info_ep00 import ( serialise_ep00, @@ -31,7 +32,7 @@ def test_if_buffer_has_wrong_id_then_throws(self): buf = bytearray(buf) buf[4:8] = b"1234" - with pytest.raises(RuntimeError): + with pytest.raises(WrongSchemaException): deserialise_ep00(buf) def test_schema_type_is_in_global_serialisers_list(self): diff --git a/tests/test_ev42.py b/tests/test_ev42.py index d0d3765..545f8dd 100644 --- a/tests/test_ev42.py +++ b/tests/test_ev42.py @@ -2,6 +2,7 @@ import pytest from streaming_data_types.eventdata_ev42 import serialise_ev42, deserialise_ev42 from streaming_data_types import SERIALISERS, DESERIALISERS +from streaming_data_types.exceptions import WrongSchemaException class TestSerialisationEv42: @@ -90,7 +91,7 @@ def test_if_buffer_has_wrong_id_then_throws(self): buf = bytearray(buf) buf[4:8] = b"1234" - with pytest.raises(RuntimeError): + with pytest.raises(WrongSchemaException): deserialise_ev42(buf) def test_schema_type_is_in_global_serialisers_list(self): diff --git a/tests/test_f142.py b/tests/test_f142.py index bf1e1ce..e6221e5 100644 --- a/tests/test_f142.py +++ b/tests/test_f142.py @@ -1,5 +1,6 @@ import pytest import numpy as np +from streaming_data_types.exceptions import WrongSchemaException from streaming_data_types.logdata_f142 import serialise_f142, deserialise_f142 from streaming_data_types.fbschemas.logdata_f142.AlarmSeverity import AlarmSeverity from streaming_data_types.fbschemas.logdata_f142.AlarmStatus import AlarmStatus @@ -174,7 +175,7 @@ def 
test_if_buffer_has_wrong_id_then_throws(self): buf = bytearray(buf) buf[4:8] = b"1234" - with pytest.raises(RuntimeError): + with pytest.raises(WrongSchemaException): deserialise_f142(buf) def test_schema_type_is_in_global_serialisers_list(self): diff --git a/tests/test_hs00.py b/tests/test_hs00.py index b710e1a..f552869 100644 --- a/tests/test_hs00.py +++ b/tests/test_hs00.py @@ -1,5 +1,6 @@ import numpy as np import pytest +from streaming_data_types.exceptions import WrongSchemaException from streaming_data_types.histogram_hs00 import serialise_hs00, deserialise_hs00 from streaming_data_types import SERIALISERS, DESERIALISERS @@ -167,7 +168,7 @@ def test_if_buffer_has_wrong_id_then_throws(self): buf = bytearray(buf) buf[4:8] = b"1234" - with pytest.raises(RuntimeError): + with pytest.raises(WrongSchemaException): deserialise_hs00(buf) def test_serialises_and_deserialises_hs00_message_correctly_for_int_array_data( diff --git a/tests/test_ns10.py b/tests/test_ns10.py index ff3f921..7d90174 100644 --- a/tests/test_ns10.py +++ b/tests/test_ns10.py @@ -1,4 +1,5 @@ import pytest +from streaming_data_types.exceptions import WrongSchemaException from streaming_data_types.nicos_cache_ns10 import serialise_ns10, deserialise_ns10 from streaming_data_types import SERIALISERS, DESERIALISERS @@ -39,7 +40,7 @@ def test_if_buffer_has_wrong_id_then_throws(self): buf = bytearray(buf) buf[4:8] = b"1234" - with pytest.raises(RuntimeError): + with pytest.raises(WrongSchemaException): deserialise_ns10(buf) def test_schema_type_is_in_global_serialisers_list(self): diff --git a/tests/test_pl72.py b/tests/test_pl72.py index c9d1762..543c875 100644 --- a/tests/test_pl72.py +++ b/tests/test_pl72.py @@ -1,4 +1,5 @@ import pytest +from streaming_data_types.exceptions import WrongSchemaException from streaming_data_types.run_start_pl72 import serialise_pl72, deserialise_pl72 from streaming_data_types import SERIALISERS, DESERIALISERS @@ -43,7 +44,7 @@ def 
test_if_buffer_has_wrong_id_then_throws(self): buf = bytearray(buf) buf[4:8] = b"1234" - with pytest.raises(RuntimeError): + with pytest.raises(WrongSchemaException): deserialise_pl72(buf) def test_schema_type_is_in_global_serialisers_list(self): diff --git a/tests/test_rf5k.py b/tests/test_rf5k.py index 2614444..74c21f9 100644 --- a/tests/test_rf5k.py +++ b/tests/test_rf5k.py @@ -1,4 +1,5 @@ import pytest +from streaming_data_types.exceptions import WrongSchemaException from streaming_data_types.forwarder_config_update_rf5k import ( serialise_rf5k, deserialise_rf5k, @@ -50,7 +51,7 @@ def test_if_buffer_has_wrong_id_then_throws(self): buf = bytearray(buf) buf[4:8] = b"1234" - with pytest.raises(RuntimeError): + with pytest.raises(WrongSchemaException): deserialise_rf5k(buf) def test_schema_type_is_in_global_serialisers_list(self): diff --git a/tests/test_tdct.py b/tests/test_tdct.py index 034f9b7..0858695 100644 --- a/tests/test_tdct.py +++ b/tests/test_tdct.py @@ -1,5 +1,7 @@ import pytest import numpy as np + +from streaming_data_types.exceptions import WrongSchemaException from streaming_data_types.timestamps_tdct import serialise_tdct, deserialise_tdct from streaming_data_types import SERIALISERS, DESERIALISERS @@ -49,7 +51,7 @@ def test_if_buffer_has_wrong_id_then_throws(self): buf = bytearray(buf) buf[4:8] = b"1234" - with pytest.raises(RuntimeError): + with pytest.raises(WrongSchemaException): deserialise_tdct(buf) def test_schema_type_is_in_global_serialisers_list(self): diff --git a/tests/test_wrdn.py b/tests/test_wrdn.py index 0e7e14b..7a99bca 100644 --- a/tests/test_wrdn.py +++ b/tests/test_wrdn.py @@ -1,4 +1,5 @@ import pytest +from streaming_data_types.exceptions import WrongSchemaException from streaming_data_types.finished_writing_wrdn import serialise_wrdn, deserialise_wrdn from streaming_data_types import SERIALISERS, DESERIALISERS @@ -44,7 +45,7 @@ def test_if_buffer_has_wrong_id_then_throws(self): buf = bytearray(buf) buf[4:8] = b"1234" - with 
pytest.raises(RuntimeError): + with pytest.raises(WrongSchemaException): deserialise_wrdn(buf) def test_schema_type_is_in_global_serialisers_list(self): diff --git a/tests/test_x52f.py b/tests/test_x52f.py index 961d006..9e4c03f 100644 --- a/tests/test_x52f.py +++ b/tests/test_x52f.py @@ -1,5 +1,4 @@ import pytest - from streaming_data_types.exceptions import WrongSchemaException from streaming_data_types.status_x5f2 import serialise_x5f2, deserialise_x5f2 from streaming_data_types import SERIALISERS, DESERIALISERS From cf226b8740de9385231ad0581bf8c18e7958d330 Mon Sep 17 00:00:00 2001 From: Jonas Nilsson Date: Fri, 20 Nov 2020 00:52:30 +0100 Subject: [PATCH 201/363] ADArray implementation. --- streaming_data_types/__init__.py | 3 + streaming_data_types/area_detector_ADAr.py | 122 +++++++++++++++ .../fbschemas/ADAr_ADArray_schema/ADArray.py | 145 ++++++++++++++++++ .../ADAr_ADArray_schema/Attribute.py | 89 +++++++++++ .../fbschemas/ADAr_ADArray_schema/DType.py | 17 ++ .../fbschemas/ADAr_ADArray_schema/__init__.py | 0 tests/test_ADAr.py | 68 ++++++++ 7 files changed, 444 insertions(+) create mode 100644 streaming_data_types/area_detector_ADAr.py create mode 100644 streaming_data_types/fbschemas/ADAr_ADArray_schema/ADArray.py create mode 100644 streaming_data_types/fbschemas/ADAr_ADArray_schema/Attribute.py create mode 100644 streaming_data_types/fbschemas/ADAr_ADArray_schema/DType.py create mode 100644 streaming_data_types/fbschemas/ADAr_ADArray_schema/__init__.py create mode 100644 tests/test_ADAr.py diff --git a/streaming_data_types/__init__.py b/streaming_data_types/__init__.py index 8f0eacd..3959cd9 100644 --- a/streaming_data_types/__init__.py +++ b/streaming_data_types/__init__.py @@ -17,6 +17,7 @@ serialise_rf5k, ) from streaming_data_types.area_detector_NDAr import deserialise_ndar, serialise_ndar +from streaming_data_types.area_detector_ADAr import deserialise_ADAr, serialise_ADAr SERIALISERS = { @@ -33,6 +34,7 @@ "answ": serialise_answ, "wrdn": 
serialise_wrdn, "NDAr": serialise_ndar, + "ADAr": serialise_ADAr, } @@ -50,4 +52,5 @@ "answ": deserialise_answ, "wrdn": deserialise_wrdn, "NDAr": deserialise_ndar, + "ADAr": deserialise_ADAr, } diff --git a/streaming_data_types/area_detector_ADAr.py b/streaming_data_types/area_detector_ADAr.py new file mode 100644 index 0000000..3ffdf44 --- /dev/null +++ b/streaming_data_types/area_detector_ADAr.py @@ -0,0 +1,122 @@ +from typing import Union, NamedTuple, List +import flatbuffers +from streaming_data_types.fbschemas.ADAr_ADArray_schema import ADArray +from streaming_data_types.fbschemas.ADAr_ADArray_schema.DType import DType +from streaming_data_types.utils import check_schema_identifier +import numpy as np +from datetime import datetime + +FILE_IDENTIFIER = b"ADAr" + +Attribute = NamedTuple( + "Attribute", ( + ("name", str), + ("description", str), + ("source", str), + ("data", Union[np.ndarray, str]) + ) +) + + +def serialise_ADAr( + source_name: str, + unique_id: int, + timestamp: datetime, + data: Union[np.ndarray, str], + attributes: List[Attribute] = [] +) -> bytes: + builder = flatbuffers.Builder(1024) + builder.ForceDefaults(True) + + type_map = {np.dtype("uint8"): DType.uint8, + np.dtype("int8"): DType.int8, + np.dtype("uint16"): DType.uint16, + np.dtype("int16"): DType.int16, + np.dtype("uint32"): DType.uint32, + np.dtype("int32"): DType.int32, + np.dtype("uint64"): DType.uint64, + np.dtype("int64"): DType.int64, + np.dtype("float32"): DType.float32, + np.dtype("float64"): DType.float64, + } + + if type(data) is str: + data = np.frombuffer(data.encode(), np.uint8) + data_type = DType.c_string + else: + data_type = type_map[data.dtype] + + # Build dims + dims_offset = builder.CreateNumpyVector(np.array(data.shape)) + + # Build data + data_offset = builder.CreateNumpyVector(data.flatten().view(np.uint8)) + + source_name_offset = builder.CreateString(source_name) + + # Build the actual buffer + ADArray.ADArrayStart(builder) + 
ADArray.ADArrayAddSourceName(builder, source_name_offset) + ADArray.ADArrayAddDataType(builder, data_type) + ADArray.ADArrayAddDimensions(builder, dims_offset) + ADArray.ADArrayAddId(builder, unique_id) + ADArray.ADArrayAddData(builder, data_offset) + ADArray.ADArrayAddTimestamp(builder, int(timestamp.timestamp()*1e9)) + array_message = ADArray.ADArrayEnd(builder) + + builder.Finish(array_message, file_identifier=FILE_IDENTIFIER) + return bytes(builder.Output()) + + +ADArray_t= NamedTuple( + "ADArray", + ( + ("source_name", str), + ("unique_id", int), + ("timestamp", datetime), + ("data", np.ndarray), + ("attributes", List[Attribute]) + ), +) + + +def get_data(fb_arr): + """ + Converts the data array into the correct type. + """ + raw_data = fb_arr.DataAsNumpy() + type_map = {DType.uint8: np.uint8, + DType.int8: np.int8, + DType.uint16: np.uint16, + DType.int16: np.int16, + DType.uint32: np.uint32, + DType.int32: np.int32, + DType.uint64: np.uint64, + DType.int64: np.int64, + DType.float32: np.float32, + DType.float64: np.float64, + } + return raw_data.view(type_map[fb_arr.DataType()]).reshape(fb_arr.DimensionsAsNumpy()) + + +def deserialise_ADAr(buffer: Union[bytearray, bytes]) -> ADArray: + check_schema_identifier(buffer, FILE_IDENTIFIER) + + ad_array = ADArray.ADArray.GetRootAsADArray(buffer, 0) + unique_id = ad_array.Id() + max_time = datetime(year=9000, month=1, day=1, hour=0, minute=0, second=0).timestamp() + used_timestamp = ad_array.Timestamp() / 1e9 + if used_timestamp > max_time: + used_timestamp = max_time + if ad_array.DataType() == DType.c_string: + data = ad_array.DataAsNumpy().tobytes().decode() + else: + data = get_data(ad_array) + + return ADArray_t( + source_name=ad_array.SourceName().decode(), + unique_id=unique_id, + timestamp=datetime.fromtimestamp(used_timestamp), + data=data, + attributes=[] + ) diff --git a/streaming_data_types/fbschemas/ADAr_ADArray_schema/ADArray.py b/streaming_data_types/fbschemas/ADAr_ADArray_schema/ADArray.py new file 
mode 100644 index 0000000..98c1b22 --- /dev/null +++ b/streaming_data_types/fbschemas/ADAr_ADArray_schema/ADArray.py @@ -0,0 +1,145 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers +from flatbuffers.compat import import_numpy +np = import_numpy() + +class ADArray(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAsADArray(cls, buf, offset): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = ADArray() + x.Init(buf, n + offset) + return x + + @classmethod + def ADArrayBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x41\x44\x41\x72", size_prefixed=size_prefixed) + + # ADArray + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # ADArray + def SourceName(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + # ADArray + def Id(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) + return 0 + + # ADArray + def Timestamp(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Uint64Flags, o + self._tab.Pos) + return 0 + + # ADArray + def Dimensions(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get(flatbuffers.number_types.Uint64Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8)) + return 0 + + # ADArray + def DimensionsAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint64Flags, o) + return 0 + + # ADArray + def DimensionsLength(self): + o = 
flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # ADArray + def DimensionsIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + return o == 0 + + # ADArray + def DataType(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) + return 0 + + # ADArray + def Data(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get(flatbuffers.number_types.Uint8Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1)) + return 0 + + # ADArray + def DataAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint8Flags, o) + return 0 + + # ADArray + def DataLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # ADArray + def DataIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) + return o == 0 + + # ADArray + def Attributes(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16)) + if o != 0: + x = self._tab.Vector(o) + x += flatbuffers.number_types.UOffsetTFlags.py_type(j) * 4 + x = self._tab.Indirect(x) + from Attribute import Attribute + obj = Attribute() + obj.Init(self._tab.Bytes, x) + return obj + return None + + # ADArray + def AttributesLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # ADArray + def AttributesIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16)) + return o == 0 + +def ADArrayStart(builder): builder.StartObject(7) +def 
ADArrayAddSourceName(builder, sourceName): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(sourceName), 0) +def ADArrayAddId(builder, id): builder.PrependInt32Slot(1, id, 0) +def ADArrayAddTimestamp(builder, timestamp): builder.PrependUint64Slot(2, timestamp, 0) +def ADArrayAddDimensions(builder, dimensions): builder.PrependUOffsetTRelativeSlot(3, flatbuffers.number_types.UOffsetTFlags.py_type(dimensions), 0) +def ADArrayStartDimensionsVector(builder, numElems): return builder.StartVector(8, numElems, 8) +def ADArrayAddDataType(builder, dataType): builder.PrependInt8Slot(4, dataType, 0) +def ADArrayAddData(builder, data): builder.PrependUOffsetTRelativeSlot(5, flatbuffers.number_types.UOffsetTFlags.py_type(data), 0) +def ADArrayStartDataVector(builder, numElems): return builder.StartVector(1, numElems, 1) +def ADArrayAddAttributes(builder, attributes): builder.PrependUOffsetTRelativeSlot(6, flatbuffers.number_types.UOffsetTFlags.py_type(attributes), 0) +def ADArrayStartAttributesVector(builder, numElems): return builder.StartVector(4, numElems, 4) +def ADArrayEnd(builder): return builder.EndObject() diff --git a/streaming_data_types/fbschemas/ADAr_ADArray_schema/Attribute.py b/streaming_data_types/fbschemas/ADAr_ADArray_schema/Attribute.py new file mode 100644 index 0000000..7b12306 --- /dev/null +++ b/streaming_data_types/fbschemas/ADAr_ADArray_schema/Attribute.py @@ -0,0 +1,89 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers +from flatbuffers.compat import import_numpy +np = import_numpy() + +class Attribute(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAsAttribute(cls, buf, offset): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = Attribute() + x.Init(buf, n + offset) + return x + + @classmethod + def AttributeBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, 
offset, b"\x41\x44\x41\x72", size_prefixed=size_prefixed) + + # Attribute + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # Attribute + def Name(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + # Attribute + def Description(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + # Attribute + def Source(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + # Attribute + def DataType(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) + return 0 + + # Attribute + def Data(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get(flatbuffers.number_types.Uint8Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1)) + return 0 + + # Attribute + def DataAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint8Flags, o) + return 0 + + # Attribute + def DataLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # Attribute + def DataIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + return o == 0 + +def AttributeStart(builder): builder.StartObject(5) +def AttributeAddName(builder, name): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(name), 0) +def AttributeAddDescription(builder, description): builder.PrependUOffsetTRelativeSlot(1, 
flatbuffers.number_types.UOffsetTFlags.py_type(description), 0) +def AttributeAddSource(builder, source): builder.PrependUOffsetTRelativeSlot(2, flatbuffers.number_types.UOffsetTFlags.py_type(source), 0) +def AttributeAddDataType(builder, dataType): builder.PrependInt8Slot(3, dataType, 0) +def AttributeAddData(builder, data): builder.PrependUOffsetTRelativeSlot(4, flatbuffers.number_types.UOffsetTFlags.py_type(data), 0) +def AttributeStartDataVector(builder, numElems): return builder.StartVector(1, numElems, 1) +def AttributeEnd(builder): return builder.EndObject() diff --git a/streaming_data_types/fbschemas/ADAr_ADArray_schema/DType.py b/streaming_data_types/fbschemas/ADAr_ADArray_schema/DType.py new file mode 100644 index 0000000..c1ee51a --- /dev/null +++ b/streaming_data_types/fbschemas/ADAr_ADArray_schema/DType.py @@ -0,0 +1,17 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +class DType(object): + int8 = 0 + uint8 = 1 + int16 = 2 + uint16 = 3 + int32 = 4 + uint32 = 5 + int64 = 6 + uint64 = 7 + float32 = 8 + float64 = 9 + c_string = 10 + diff --git a/streaming_data_types/fbschemas/ADAr_ADArray_schema/__init__.py b/streaming_data_types/fbschemas/ADAr_ADArray_schema/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/test_ADAr.py b/tests/test_ADAr.py new file mode 100644 index 0000000..a8d9cf5 --- /dev/null +++ b/tests/test_ADAr.py @@ -0,0 +1,68 @@ +import pytest +from streaming_data_types.area_detector_ADAr import serialise_ADAr, deserialise_ADAr +from streaming_data_types.fbschemas.ADAr_ADArray_schema.DType import DType +from streaming_data_types import SERIALISERS, DESERIALISERS +import numpy as np +from datetime import datetime + + +class TestSerialisationNDAr: + def test_serialises_and_deserialises_ADAr_message_correctly_array(self): + """ + Round-trip to check what we serialise is what we get back. 
+ """ + original_entry = { + "source_name": "some source name", + "unique_id": 754, + "data": np.array([[1, 2, 3], [3, 4, 5]], dtype=np.uint64), + "timestamp": datetime.now() + } + + buf = serialise_ADAr(**original_entry) + entry = deserialise_ADAr(buf) + + assert entry.unique_id == original_entry["unique_id"] + assert entry.source_name == original_entry["source_name"] + assert entry.timestamp == original_entry["timestamp"] + assert np.array_equal(entry.data, original_entry["data"]) + assert entry.data.dtype == original_entry["data"].dtype + + def test_serialises_and_deserialises_ADAr_message_correctly_string(self): + """ + Round-trip to check what we serialise is what we get back. + """ + original_entry = { + "source_name": "some source name", + "unique_id": 754, + "data": "hi, this is a string", + "timestamp": datetime.now() + } + + buf = serialise_ADAr(**original_entry) + entry = deserialise_ADAr(buf) + + assert entry.unique_id == original_entry["unique_id"] + assert entry.source_name == original_entry["source_name"] + assert entry.timestamp == original_entry["timestamp"] + assert entry.data == original_entry["data"] + + def test_if_buffer_has_wrong_id_then_throws(self): + original_entry = { + "source_name": "some source name", + "unique_id": 754, + "data": np.array([[1, 2, 3], [3, 4, 5]], dtype=np.uint64), + "timestamp": datetime.now() + } + + buf = serialise_ADAr(**original_entry) + + # Manually hack the id + buf = bytearray(buf) + buf[4:8] = b"1234" + + with pytest.raises(RuntimeError): + deserialise_ADAr(buf) + + def test_schema_type_is_in_global_serialisers_list(self): + assert "ADAr" in SERIALISERS + assert "ADAr" in DESERIALISERS From d62d3f80253debd518d448cfe2285da2cbdc90ed Mon Sep 17 00:00:00 2001 From: Jonas Nilsson Date: Mon, 23 Nov 2020 13:33:39 +0100 Subject: [PATCH 202/363] Added attributes. 
--- streaming_data_types/area_detector_ADAr.py | 100 ++++++++++++++---- .../fbschemas/ADAr_ADArray_schema/ADArray.py | 2 +- tests/test_ADAr.py | 35 +++++- 3 files changed, 113 insertions(+), 24 deletions(-) diff --git a/streaming_data_types/area_detector_ADAr.py b/streaming_data_types/area_detector_ADAr.py index 3ffdf44..468b723 100644 --- a/streaming_data_types/area_detector_ADAr.py +++ b/streaming_data_types/area_detector_ADAr.py @@ -2,28 +2,37 @@ import flatbuffers from streaming_data_types.fbschemas.ADAr_ADArray_schema import ADArray from streaming_data_types.fbschemas.ADAr_ADArray_schema.DType import DType +import streaming_data_types.fbschemas.ADAr_ADArray_schema.Attribute as ADArAttribute from streaming_data_types.utils import check_schema_identifier import numpy as np from datetime import datetime +from struct import pack FILE_IDENTIFIER = b"ADAr" -Attribute = NamedTuple( - "Attribute", ( - ("name", str), - ("description", str), - ("source", str), - ("data", Union[np.ndarray, str]) - ) -) + +class Attribute: + def __init__(self, name: str, description: str, source: str, data: Union[np.ndarray, str, int, float]): + self.name = name + self.description = description + self.source = source + self.data = data + + def __eq__(self, other): + data_is_equal = type(self.data) == type(other.data) + if type(self.data) is np.ndarray: + data_is_equal = data_is_equal and np.array_equal(self.data, other.data) + else: + data_is_equal = data_is_equal and self.data == other.data + return self.name == other.name and self.description == other.description and self.source == other.source and data_is_equal def serialise_ADAr( - source_name: str, - unique_id: int, - timestamp: datetime, - data: Union[np.ndarray, str], - attributes: List[Attribute] = [] + source_name: str, + unique_id: int, + timestamp: datetime, + data: Union[np.ndarray, str], + attributes: List[Attribute] = [] ) -> bytes: builder = flatbuffers.Builder(1024) builder.ForceDefaults(True) @@ -54,6 +63,38 @@ def 
serialise_ADAr( source_name_offset = builder.CreateString(source_name) + temp_attributes = [] + for item in attributes: + if type(item.data) is np.ndarray: + attr_data_type = type_map[item.data.dtype] + attr_data = item.data + elif type(item.data) is str: + attr_data_type = DType.c_string + attr_data = np.frombuffer(item.data.encode(), np.uint8) + elif type(item.data) is int: + attr_data_type = DType.int64 + attr_data = np.frombuffer(pack("q", item.data), np.uint8) + elif type(item.data) is float: + attr_data_type = DType.float64 + attr_data = np.frombuffer(pack("d", item.data), np.uint8) + attr_name_offset = builder.CreateString(item.name) + attr_desc_offset = builder.CreateString(item.description) + attr_src_offset = builder.CreateString(item.source) + attr_data_offset = builder.CreateNumpyVector(attr_data.flatten().view(np.uint8)) + ADArAttribute.AttributeStart(builder) + ADArAttribute.AttributeAddName(builder, attr_name_offset) + ADArAttribute.AttributeAddDescription(builder, attr_desc_offset) + ADArAttribute.AttributeAddSource(builder, attr_src_offset) + ADArAttribute.AttributeAddDataType(builder, attr_data_type) + ADArAttribute.AttributeAddData(builder, attr_data_offset) + attr_offset = ADArAttribute.AttributeEnd(builder) + temp_attributes.append(attr_offset) + + ADArray.ADArrayStartAttributesVector(builder, len(attributes)) + for item in reversed(temp_attributes): + builder.PrependUOffsetTRelative(item) + attributes_offset = builder.EndVector(len(attributes)) + # Build the actual buffer ADArray.ADArrayStart(builder) ADArray.ADArrayAddSourceName(builder, source_name_offset) @@ -61,14 +102,15 @@ def serialise_ADAr( ADArray.ADArrayAddDimensions(builder, dims_offset) ADArray.ADArrayAddId(builder, unique_id) ADArray.ADArrayAddData(builder, data_offset) - ADArray.ADArrayAddTimestamp(builder, int(timestamp.timestamp()*1e9)) + ADArray.ADArrayAddTimestamp(builder, int(timestamp.timestamp() * 1e9)) + ADArray.ADArrayAddAttributes(builder, attributes_offset) 
array_message = ADArray.ADArrayEnd(builder) builder.Finish(array_message, file_identifier=FILE_IDENTIFIER) return bytes(builder.Output()) -ADArray_t= NamedTuple( +ADArray_t = NamedTuple( "ADArray", ( ("source_name", str), @@ -80,7 +122,11 @@ def serialise_ADAr( ) -def get_data(fb_arr): +def get_payload_data(fb_arr) -> np.ndarray: + return get_data(fb_arr).reshape(fb_arr.DimensionsAsNumpy()) + + +def get_data(fb_arr) -> np.ndarray: """ Converts the data array into the correct type. """ @@ -96,7 +142,7 @@ def get_data(fb_arr): DType.float32: np.float32, DType.float64: np.float64, } - return raw_data.view(type_map[fb_arr.DataType()]).reshape(fb_arr.DimensionsAsNumpy()) + return raw_data.view(type_map[fb_arr.DataType()]) def deserialise_ADAr(buffer: Union[bytearray, bytes]) -> ADArray: @@ -111,12 +157,28 @@ def deserialise_ADAr(buffer: Union[bytearray, bytes]) -> ADArray: if ad_array.DataType() == DType.c_string: data = ad_array.DataAsNumpy().tobytes().decode() else: - data = get_data(ad_array) + data = get_payload_data(ad_array) + + attributes_list = [] + for i in range(ad_array.AttributesLength()): + attribute_ptr = ad_array.Attributes(i) + if attribute_ptr.DataType() == DType.c_string: + attr_data = attribute_ptr.DataAsNumpy().tobytes().decode() + else: + attr_data = get_data(attribute_ptr) + temp_attribute = Attribute(name=attribute_ptr.Name().decode(), description=attribute_ptr.Description().decode(), + source=attribute_ptr.Source().decode(), data=attr_data) + if type(temp_attribute.data) is np.ndarray and len(temp_attribute.data) == 1: + if np.issubdtype(temp_attribute.data.dtype, np.floating): + temp_attribute.data = float(temp_attribute.data[0]) + elif np.issubdtype(temp_attribute.data.dtype, np.integer): + temp_attribute.data = int(temp_attribute.data[0]) + attributes_list.append(temp_attribute) return ADArray_t( source_name=ad_array.SourceName().decode(), unique_id=unique_id, timestamp=datetime.fromtimestamp(used_timestamp), data=data, - attributes=[] + 
attributes=attributes_list ) diff --git a/streaming_data_types/fbschemas/ADAr_ADArray_schema/ADArray.py b/streaming_data_types/fbschemas/ADAr_ADArray_schema/ADArray.py index 98c1b22..13acb55 100644 --- a/streaming_data_types/fbschemas/ADAr_ADArray_schema/ADArray.py +++ b/streaming_data_types/fbschemas/ADAr_ADArray_schema/ADArray.py @@ -113,7 +113,7 @@ def Attributes(self, j): x = self._tab.Vector(o) x += flatbuffers.number_types.UOffsetTFlags.py_type(j) * 4 x = self._tab.Indirect(x) - from Attribute import Attribute + from .Attribute import Attribute obj = Attribute() obj.Init(self._tab.Bytes, x) return obj diff --git a/tests/test_ADAr.py b/tests/test_ADAr.py index a8d9cf5..fce4cd2 100644 --- a/tests/test_ADAr.py +++ b/tests/test_ADAr.py @@ -1,5 +1,5 @@ import pytest -from streaming_data_types.area_detector_ADAr import serialise_ADAr, deserialise_ADAr +from streaming_data_types.area_detector_ADAr import serialise_ADAr, deserialise_ADAr, Attribute from streaming_data_types.fbschemas.ADAr_ADArray_schema.DType import DType from streaming_data_types import SERIALISERS, DESERIALISERS import numpy as np @@ -7,7 +7,7 @@ class TestSerialisationNDAr: - def test_serialises_and_deserialises_ADAr_message_correctly_array(self): + def test_serialises_and_deserialises_ADAr_int_array(self): """ Round-trip to check what we serialise is what we get back. 
""" @@ -15,7 +15,34 @@ def test_serialises_and_deserialises_ADAr_message_correctly_array(self): "source_name": "some source name", "unique_id": 754, "data": np.array([[1, 2, 3], [3, 4, 5]], dtype=np.uint64), - "timestamp": datetime.now() + "timestamp": datetime.now(), + "attributes": [Attribute("name1", "desc1", "src1", "value"), + Attribute("name2", "desc2", "src2", 11), + Attribute("name3", "desc3", "src3", 3.14), + Attribute("name4", "desc4", "src4", np.linspace(0, 10))] + } + + buf = serialise_ADAr(**original_entry) + entry = deserialise_ADAr(buf) + + assert entry.unique_id == original_entry["unique_id"] + assert entry.source_name == original_entry["source_name"] + assert entry.timestamp == original_entry["timestamp"] + assert np.array_equal(entry.data, original_entry["data"]) + assert entry.data.dtype == original_entry["data"].dtype + assert len(entry.attributes) == len(original_entry["attributes"]) + for i in range(len(entry.attributes)): + assert entry.attributes[i] == original_entry["attributes"][i] + + def test_serialises_and_deserialises_ADAr_float_array(self): + """ + Round-trip to check what we serialise is what we get back. + """ + original_entry = { + "source_name": "some other source name", + "unique_id": 789679, + "data": np.array([[1.1, 2.2, 3.3], [4.4, 5.5, 6.6]], dtype=np.float32), + "timestamp": datetime(year=1992, month=8, day=11, hour=3, minute=34, second=57) } buf = serialise_ADAr(**original_entry) @@ -27,7 +54,7 @@ def test_serialises_and_deserialises_ADAr_message_correctly_array(self): assert np.array_equal(entry.data, original_entry["data"]) assert entry.data.dtype == original_entry["data"].dtype - def test_serialises_and_deserialises_ADAr_message_correctly_string(self): + def test_serialises_and_deserialises_ADAr_string(self): """ Round-trip to check what we serialise is what we get back. 
""" From 623d438967e33ca37dd671be96010f320a7f40fe Mon Sep 17 00:00:00 2001 From: Jonas Nilsson Date: Mon, 23 Nov 2020 13:34:59 +0100 Subject: [PATCH 203/363] black --- streaming_data_types/area_detector_ADAr.py | 87 +++++++++++------- .../fbschemas/ADAr_ADArray_schema/ADArray.py | 92 +++++++++++++++---- .../ADAr_ADArray_schema/Attribute.py | 62 ++++++++++--- .../fbschemas/ADAr_ADArray_schema/DType.py | 4 +- tests/test_ADAr.py | 24 +++-- 5 files changed, 195 insertions(+), 74 deletions(-) diff --git a/streaming_data_types/area_detector_ADAr.py b/streaming_data_types/area_detector_ADAr.py index 468b723..8bca7df 100644 --- a/streaming_data_types/area_detector_ADAr.py +++ b/streaming_data_types/area_detector_ADAr.py @@ -12,7 +12,13 @@ class Attribute: - def __init__(self, name: str, description: str, source: str, data: Union[np.ndarray, str, int, float]): + def __init__( + self, + name: str, + description: str, + source: str, + data: Union[np.ndarray, str, int, float], + ): self.name = name self.description = description self.source = source @@ -24,30 +30,36 @@ def __eq__(self, other): data_is_equal = data_is_equal and np.array_equal(self.data, other.data) else: data_is_equal = data_is_equal and self.data == other.data - return self.name == other.name and self.description == other.description and self.source == other.source and data_is_equal + return ( + self.name == other.name + and self.description == other.description + and self.source == other.source + and data_is_equal + ) def serialise_ADAr( - source_name: str, - unique_id: int, - timestamp: datetime, - data: Union[np.ndarray, str], - attributes: List[Attribute] = [] + source_name: str, + unique_id: int, + timestamp: datetime, + data: Union[np.ndarray, str], + attributes: List[Attribute] = [], ) -> bytes: builder = flatbuffers.Builder(1024) builder.ForceDefaults(True) - type_map = {np.dtype("uint8"): DType.uint8, - np.dtype("int8"): DType.int8, - np.dtype("uint16"): DType.uint16, - np.dtype("int16"): DType.int16, 
- np.dtype("uint32"): DType.uint32, - np.dtype("int32"): DType.int32, - np.dtype("uint64"): DType.uint64, - np.dtype("int64"): DType.int64, - np.dtype("float32"): DType.float32, - np.dtype("float64"): DType.float64, - } + type_map = { + np.dtype("uint8"): DType.uint8, + np.dtype("int8"): DType.int8, + np.dtype("uint16"): DType.uint16, + np.dtype("int16"): DType.int16, + np.dtype("uint32"): DType.uint32, + np.dtype("int32"): DType.int32, + np.dtype("uint64"): DType.uint64, + np.dtype("int64"): DType.int64, + np.dtype("float32"): DType.float32, + np.dtype("float64"): DType.float64, + } if type(data) is str: data = np.frombuffer(data.encode(), np.uint8) @@ -117,7 +129,7 @@ def serialise_ADAr( ("unique_id", int), ("timestamp", datetime), ("data", np.ndarray), - ("attributes", List[Attribute]) + ("attributes", List[Attribute]), ), ) @@ -131,17 +143,18 @@ def get_data(fb_arr) -> np.ndarray: Converts the data array into the correct type. """ raw_data = fb_arr.DataAsNumpy() - type_map = {DType.uint8: np.uint8, - DType.int8: np.int8, - DType.uint16: np.uint16, - DType.int16: np.int16, - DType.uint32: np.uint32, - DType.int32: np.int32, - DType.uint64: np.uint64, - DType.int64: np.int64, - DType.float32: np.float32, - DType.float64: np.float64, - } + type_map = { + DType.uint8: np.uint8, + DType.int8: np.int8, + DType.uint16: np.uint16, + DType.int16: np.int16, + DType.uint32: np.uint32, + DType.int32: np.int32, + DType.uint64: np.uint64, + DType.int64: np.int64, + DType.float32: np.float32, + DType.float64: np.float64, + } return raw_data.view(type_map[fb_arr.DataType()]) @@ -150,7 +163,9 @@ def deserialise_ADAr(buffer: Union[bytearray, bytes]) -> ADArray: ad_array = ADArray.ADArray.GetRootAsADArray(buffer, 0) unique_id = ad_array.Id() - max_time = datetime(year=9000, month=1, day=1, hour=0, minute=0, second=0).timestamp() + max_time = datetime( + year=9000, month=1, day=1, hour=0, minute=0, second=0 + ).timestamp() used_timestamp = ad_array.Timestamp() / 1e9 if 
used_timestamp > max_time: used_timestamp = max_time @@ -166,8 +181,12 @@ def deserialise_ADAr(buffer: Union[bytearray, bytes]) -> ADArray: attr_data = attribute_ptr.DataAsNumpy().tobytes().decode() else: attr_data = get_data(attribute_ptr) - temp_attribute = Attribute(name=attribute_ptr.Name().decode(), description=attribute_ptr.Description().decode(), - source=attribute_ptr.Source().decode(), data=attr_data) + temp_attribute = Attribute( + name=attribute_ptr.Name().decode(), + description=attribute_ptr.Description().decode(), + source=attribute_ptr.Source().decode(), + data=attr_data, + ) if type(temp_attribute.data) is np.ndarray and len(temp_attribute.data) == 1: if np.issubdtype(temp_attribute.data.dtype, np.floating): temp_attribute.data = float(temp_attribute.data[0]) @@ -180,5 +199,5 @@ def deserialise_ADAr(buffer: Union[bytearray, bytes]) -> ADArray: unique_id=unique_id, timestamp=datetime.fromtimestamp(used_timestamp), data=data, - attributes=attributes_list + attributes=attributes_list, ) diff --git a/streaming_data_types/fbschemas/ADAr_ADArray_schema/ADArray.py b/streaming_data_types/fbschemas/ADAr_ADArray_schema/ADArray.py index 13acb55..a234658 100644 --- a/streaming_data_types/fbschemas/ADAr_ADArray_schema/ADArray.py +++ b/streaming_data_types/fbschemas/ADAr_ADArray_schema/ADArray.py @@ -1,13 +1,15 @@ # automatically generated by the FlatBuffers compiler, do not modify -# namespace: +# namespace: import flatbuffers from flatbuffers.compat import import_numpy + np = import_numpy() + class ADArray(object): - __slots__ = ['_tab'] + __slots__ = ["_tab"] @classmethod def GetRootAsADArray(cls, buf, offset): @@ -18,7 +20,9 @@ def GetRootAsADArray(cls, buf, offset): @classmethod def ADArrayBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x41\x44\x41\x72", size_prefixed=size_prefixed) + return flatbuffers.util.BufferHasIdentifier( + buf, offset, b"\x41\x44\x41\x72", 
size_prefixed=size_prefixed + ) # ADArray def Init(self, buf, pos): @@ -42,7 +46,9 @@ def Id(self): def Timestamp(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) if o != 0: - return self._tab.Get(flatbuffers.number_types.Uint64Flags, o + self._tab.Pos) + return self._tab.Get( + flatbuffers.number_types.Uint64Flags, o + self._tab.Pos + ) return 0 # ADArray @@ -50,7 +56,10 @@ def Dimensions(self, j): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) if o != 0: a = self._tab.Vector(o) - return self._tab.Get(flatbuffers.number_types.Uint64Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8)) + return self._tab.Get( + flatbuffers.number_types.Uint64Flags, + a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8), + ) return 0 # ADArray @@ -84,7 +93,10 @@ def Data(self, j): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) if o != 0: a = self._tab.Vector(o) - return self._tab.Get(flatbuffers.number_types.Uint8Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1)) + return self._tab.Get( + flatbuffers.number_types.Uint8Flags, + a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1), + ) return 0 # ADArray @@ -114,6 +126,7 @@ def Attributes(self, j): x += flatbuffers.number_types.UOffsetTFlags.py_type(j) * 4 x = self._tab.Indirect(x) from .Attribute import Attribute + obj = Attribute() obj.Init(self._tab.Bytes, x) return obj @@ -131,15 +144,58 @@ def AttributesIsNone(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16)) return o == 0 -def ADArrayStart(builder): builder.StartObject(7) -def ADArrayAddSourceName(builder, sourceName): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(sourceName), 0) -def ADArrayAddId(builder, id): builder.PrependInt32Slot(1, id, 0) -def ADArrayAddTimestamp(builder, timestamp): builder.PrependUint64Slot(2, timestamp, 0) -def ADArrayAddDimensions(builder, dimensions): 
builder.PrependUOffsetTRelativeSlot(3, flatbuffers.number_types.UOffsetTFlags.py_type(dimensions), 0) -def ADArrayStartDimensionsVector(builder, numElems): return builder.StartVector(8, numElems, 8) -def ADArrayAddDataType(builder, dataType): builder.PrependInt8Slot(4, dataType, 0) -def ADArrayAddData(builder, data): builder.PrependUOffsetTRelativeSlot(5, flatbuffers.number_types.UOffsetTFlags.py_type(data), 0) -def ADArrayStartDataVector(builder, numElems): return builder.StartVector(1, numElems, 1) -def ADArrayAddAttributes(builder, attributes): builder.PrependUOffsetTRelativeSlot(6, flatbuffers.number_types.UOffsetTFlags.py_type(attributes), 0) -def ADArrayStartAttributesVector(builder, numElems): return builder.StartVector(4, numElems, 4) -def ADArrayEnd(builder): return builder.EndObject() + +def ADArrayStart(builder): + builder.StartObject(7) + + +def ADArrayAddSourceName(builder, sourceName): + builder.PrependUOffsetTRelativeSlot( + 0, flatbuffers.number_types.UOffsetTFlags.py_type(sourceName), 0 + ) + + +def ADArrayAddId(builder, id): + builder.PrependInt32Slot(1, id, 0) + + +def ADArrayAddTimestamp(builder, timestamp): + builder.PrependUint64Slot(2, timestamp, 0) + + +def ADArrayAddDimensions(builder, dimensions): + builder.PrependUOffsetTRelativeSlot( + 3, flatbuffers.number_types.UOffsetTFlags.py_type(dimensions), 0 + ) + + +def ADArrayStartDimensionsVector(builder, numElems): + return builder.StartVector(8, numElems, 8) + + +def ADArrayAddDataType(builder, dataType): + builder.PrependInt8Slot(4, dataType, 0) + + +def ADArrayAddData(builder, data): + builder.PrependUOffsetTRelativeSlot( + 5, flatbuffers.number_types.UOffsetTFlags.py_type(data), 0 + ) + + +def ADArrayStartDataVector(builder, numElems): + return builder.StartVector(1, numElems, 1) + + +def ADArrayAddAttributes(builder, attributes): + builder.PrependUOffsetTRelativeSlot( + 6, flatbuffers.number_types.UOffsetTFlags.py_type(attributes), 0 + ) + + +def ADArrayStartAttributesVector(builder, 
numElems): + return builder.StartVector(4, numElems, 4) + + +def ADArrayEnd(builder): + return builder.EndObject() diff --git a/streaming_data_types/fbschemas/ADAr_ADArray_schema/Attribute.py b/streaming_data_types/fbschemas/ADAr_ADArray_schema/Attribute.py index 7b12306..434f3f1 100644 --- a/streaming_data_types/fbschemas/ADAr_ADArray_schema/Attribute.py +++ b/streaming_data_types/fbschemas/ADAr_ADArray_schema/Attribute.py @@ -1,13 +1,15 @@ # automatically generated by the FlatBuffers compiler, do not modify -# namespace: +# namespace: import flatbuffers from flatbuffers.compat import import_numpy + np = import_numpy() + class Attribute(object): - __slots__ = ['_tab'] + __slots__ = ["_tab"] @classmethod def GetRootAsAttribute(cls, buf, offset): @@ -18,7 +20,9 @@ def GetRootAsAttribute(cls, buf, offset): @classmethod def AttributeBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x41\x44\x41\x72", size_prefixed=size_prefixed) + return flatbuffers.util.BufferHasIdentifier( + buf, offset, b"\x41\x44\x41\x72", size_prefixed=size_prefixed + ) # Attribute def Init(self, buf, pos): @@ -57,7 +61,10 @@ def Data(self, j): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) if o != 0: a = self._tab.Vector(o) - return self._tab.Get(flatbuffers.number_types.Uint8Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1)) + return self._tab.Get( + flatbuffers.number_types.Uint8Flags, + a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1), + ) return 0 # Attribute @@ -79,11 +86,42 @@ def DataIsNone(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) return o == 0 -def AttributeStart(builder): builder.StartObject(5) -def AttributeAddName(builder, name): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(name), 0) -def AttributeAddDescription(builder, description): builder.PrependUOffsetTRelativeSlot(1, 
flatbuffers.number_types.UOffsetTFlags.py_type(description), 0) -def AttributeAddSource(builder, source): builder.PrependUOffsetTRelativeSlot(2, flatbuffers.number_types.UOffsetTFlags.py_type(source), 0) -def AttributeAddDataType(builder, dataType): builder.PrependInt8Slot(3, dataType, 0) -def AttributeAddData(builder, data): builder.PrependUOffsetTRelativeSlot(4, flatbuffers.number_types.UOffsetTFlags.py_type(data), 0) -def AttributeStartDataVector(builder, numElems): return builder.StartVector(1, numElems, 1) -def AttributeEnd(builder): return builder.EndObject() + +def AttributeStart(builder): + builder.StartObject(5) + + +def AttributeAddName(builder, name): + builder.PrependUOffsetTRelativeSlot( + 0, flatbuffers.number_types.UOffsetTFlags.py_type(name), 0 + ) + + +def AttributeAddDescription(builder, description): + builder.PrependUOffsetTRelativeSlot( + 1, flatbuffers.number_types.UOffsetTFlags.py_type(description), 0 + ) + + +def AttributeAddSource(builder, source): + builder.PrependUOffsetTRelativeSlot( + 2, flatbuffers.number_types.UOffsetTFlags.py_type(source), 0 + ) + + +def AttributeAddDataType(builder, dataType): + builder.PrependInt8Slot(3, dataType, 0) + + +def AttributeAddData(builder, data): + builder.PrependUOffsetTRelativeSlot( + 4, flatbuffers.number_types.UOffsetTFlags.py_type(data), 0 + ) + + +def AttributeStartDataVector(builder, numElems): + return builder.StartVector(1, numElems, 1) + + +def AttributeEnd(builder): + return builder.EndObject() diff --git a/streaming_data_types/fbschemas/ADAr_ADArray_schema/DType.py b/streaming_data_types/fbschemas/ADAr_ADArray_schema/DType.py index c1ee51a..22098af 100644 --- a/streaming_data_types/fbschemas/ADAr_ADArray_schema/DType.py +++ b/streaming_data_types/fbschemas/ADAr_ADArray_schema/DType.py @@ -1,6 +1,7 @@ # automatically generated by the FlatBuffers compiler, do not modify -# namespace: +# namespace: + class DType(object): int8 = 0 @@ -14,4 +15,3 @@ class DType(object): float32 = 8 float64 = 9 
c_string = 10 - diff --git a/tests/test_ADAr.py b/tests/test_ADAr.py index fce4cd2..62787fa 100644 --- a/tests/test_ADAr.py +++ b/tests/test_ADAr.py @@ -1,5 +1,9 @@ import pytest -from streaming_data_types.area_detector_ADAr import serialise_ADAr, deserialise_ADAr, Attribute +from streaming_data_types.area_detector_ADAr import ( + serialise_ADAr, + deserialise_ADAr, + Attribute, +) from streaming_data_types.fbschemas.ADAr_ADArray_schema.DType import DType from streaming_data_types import SERIALISERS, DESERIALISERS import numpy as np @@ -16,10 +20,12 @@ def test_serialises_and_deserialises_ADAr_int_array(self): "unique_id": 754, "data": np.array([[1, 2, 3], [3, 4, 5]], dtype=np.uint64), "timestamp": datetime.now(), - "attributes": [Attribute("name1", "desc1", "src1", "value"), - Attribute("name2", "desc2", "src2", 11), - Attribute("name3", "desc3", "src3", 3.14), - Attribute("name4", "desc4", "src4", np.linspace(0, 10))] + "attributes": [ + Attribute("name1", "desc1", "src1", "value"), + Attribute("name2", "desc2", "src2", 11), + Attribute("name3", "desc3", "src3", 3.14), + Attribute("name4", "desc4", "src4", np.linspace(0, 10)), + ], } buf = serialise_ADAr(**original_entry) @@ -42,7 +48,9 @@ def test_serialises_and_deserialises_ADAr_float_array(self): "source_name": "some other source name", "unique_id": 789679, "data": np.array([[1.1, 2.2, 3.3], [4.4, 5.5, 6.6]], dtype=np.float32), - "timestamp": datetime(year=1992, month=8, day=11, hour=3, minute=34, second=57) + "timestamp": datetime( + year=1992, month=8, day=11, hour=3, minute=34, second=57 + ), } buf = serialise_ADAr(**original_entry) @@ -62,7 +70,7 @@ def test_serialises_and_deserialises_ADAr_string(self): "source_name": "some source name", "unique_id": 754, "data": "hi, this is a string", - "timestamp": datetime.now() + "timestamp": datetime.now(), } buf = serialise_ADAr(**original_entry) @@ -78,7 +86,7 @@ def test_if_buffer_has_wrong_id_then_throws(self): "source_name": "some source name", "unique_id": 
754, "data": np.array([[1, 2, 3], [3, 4, 5]], dtype=np.uint64), - "timestamp": datetime.now() + "timestamp": datetime.now(), } buf = serialise_ADAr(**original_entry) From 0ccc26fa9037a5377307ec756ff9e1d92903dd39 Mon Sep 17 00:00:00 2001 From: Matthew D Jones Date: Wed, 2 Dec 2020 11:41:43 +0000 Subject: [PATCH 204/363] Update README_DEV.md --- README_DEV.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/README_DEV.md b/README_DEV.md index 1bf34b6..6c37fe8 100644 --- a/README_DEV.md +++ b/README_DEV.md @@ -58,6 +58,8 @@ pip install --user -e ./ #### Steps +Increase the version number in setup.py. + Delete any old builds you may have (IMPORTANT!): ``` rm -rf build dist From 7b27ba6df88ba9b39bdba81f0305b5377c688bd8 Mon Sep 17 00:00:00 2001 From: Jonas Nilsson Date: Wed, 2 Dec 2020 12:42:25 +0100 Subject: [PATCH 205/363] More formatting? --- streaming_data_types/action_response_answ.py | 4 +- streaming_data_types/area_detector_NDAr.py | 40 ++++----- .../fbschemas/NDAr_NDArray_schema/DType.py | 2 +- .../fbschemas/NDAr_NDArray_schema/NDArray.py | 86 +++++++++++++++---- .../NDAr_NDArray_schema/NDAttribute.py | 55 +++++++++--- .../NDAr_NDArray_schema/epicsTimeStamp.py | 17 +++- 6 files changed, 151 insertions(+), 53 deletions(-) diff --git a/streaming_data_types/action_response_answ.py b/streaming_data_types/action_response_answ.py index 2b8a526..51a36a0 100644 --- a/streaming_data_types/action_response_answ.py +++ b/streaming_data_types/action_response_answ.py @@ -62,7 +62,9 @@ def serialise_answ( def deserialise_answ(buffer: Union[bytearray, bytes]): check_schema_identifier(buffer, FILE_IDENTIFIER) answ_message = ActionResponse.ActionResponse.GetRootAsActionResponse(buffer, 0) - max_time = datetime(year=9000, month=1, day=1, hour=0, minute=0, second=0).timestamp() + max_time = datetime( + year=9000, month=1, day=1, hour=0, minute=0, second=0 + ).timestamp() used_timestamp = answ_message.StopTime() / 1000 if used_timestamp > max_time: used_timestamp = max_time diff 
--git a/streaming_data_types/area_detector_NDAr.py b/streaming_data_types/area_detector_NDAr.py index 4b5d7ba..7ff2eca 100644 --- a/streaming_data_types/area_detector_NDAr.py +++ b/streaming_data_types/area_detector_NDAr.py @@ -9,12 +9,7 @@ FILE_IDENTIFIER = b"NDAr" -def serialise_ndar( - id: str, - dims: list, - data_type: int, - data: list, -) -> bytes: +def serialise_ndar(id: str, dims: list, data_type: int, data: list) -> bytes: builder = flatbuffers.Builder(1024) builder.ForceDefaults(True) @@ -45,14 +40,7 @@ def serialise_ndar( return bytes(builder.Output()) -nd_Array = namedtuple( - "NDArray", - ( - "id", - "timestamp", - "data", - ), -) +nd_Array = namedtuple("NDArray", ("id", "timestamp", "data")) def get_data(fb_arr): @@ -60,9 +48,21 @@ def get_data(fb_arr): Converts the data array into the correct type. """ raw_data = fb_arr.PDataAsNumpy() - numpy_arr_type = [np.int8, np.uint8, np.int16, np.uint16, np.int32, np.uint32, np.int64, np.uint64, - np.float32, np.float64] - return raw_data.view(numpy_arr_type[fb_arr.DataType()]).reshape(fb_arr.DimsAsNumpy()) + numpy_arr_type = [ + np.int8, + np.uint8, + np.int16, + np.uint16, + np.int32, + np.uint32, + np.int64, + np.uint64, + np.float32, + np.float64, + ] + return raw_data.view(numpy_arr_type[fb_arr.DataType()]).reshape( + fb_arr.DimsAsNumpy() + ) def deserialise_ndar(buffer: Union[bytearray, bytes]) -> NDArray: @@ -73,8 +73,4 @@ def deserialise_ndar(buffer: Union[bytearray, bytes]) -> NDArray: timestamp = nd_array.TimeStamp() data = get_data(nd_array) - return nd_Array( - id=id, - timestamp=timestamp, - data=data, - ) + return nd_Array(id=id, timestamp=timestamp, data=data) diff --git a/streaming_data_types/fbschemas/NDAr_NDArray_schema/DType.py b/streaming_data_types/fbschemas/NDAr_NDArray_schema/DType.py index 8b46fce..48dac73 100644 --- a/streaming_data_types/fbschemas/NDAr_NDArray_schema/DType.py +++ b/streaming_data_types/fbschemas/NDAr_NDArray_schema/DType.py @@ -2,6 +2,7 @@ # namespace: FB_Tables + 
class DType(object): Int8 = 0 Uint8 = 1 @@ -14,4 +15,3 @@ class DType(object): Float32 = 8 Float64 = 9 c_string = 10 - diff --git a/streaming_data_types/fbschemas/NDAr_NDArray_schema/NDArray.py b/streaming_data_types/fbschemas/NDAr_NDArray_schema/NDArray.py index 013dd1a..9264718 100644 --- a/streaming_data_types/fbschemas/NDAr_NDArray_schema/NDArray.py +++ b/streaming_data_types/fbschemas/NDAr_NDArray_schema/NDArray.py @@ -4,8 +4,9 @@ import flatbuffers + class NDArray(object): - __slots__ = ['_tab'] + __slots__ = ["_tab"] @classmethod def GetRootAsNDArray(cls, buf, offset): @@ -29,7 +30,9 @@ def Id(self): def TimeStamp(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) if o != 0: - return self._tab.Get(flatbuffers.number_types.Float64Flags, o + self._tab.Pos) + return self._tab.Get( + flatbuffers.number_types.Float64Flags, o + self._tab.Pos + ) return 0.0 # NDArray @@ -38,6 +41,7 @@ def EpicsTS(self): if o != 0: x = o + self._tab.Pos from .epicsTimeStamp import epicsTimeStamp + obj = epicsTimeStamp() obj.Init(self._tab.Bytes, x) return obj @@ -48,7 +52,10 @@ def Dims(self, j): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) if o != 0: a = self._tab.Vector(o) - return self._tab.Get(flatbuffers.number_types.Uint64Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8)) + return self._tab.Get( + flatbuffers.number_types.Uint64Flags, + a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8), + ) return 0 # NDArray @@ -77,7 +84,10 @@ def PData(self, j): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) if o != 0: a = self._tab.Vector(o) - return self._tab.Get(flatbuffers.number_types.Uint8Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1)) + return self._tab.Get( + flatbuffers.number_types.Uint8Flags, + a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1), + ) return 0 # NDArray @@ -102,6 +112,7 @@ def PAttributeList(self, j): x += 
flatbuffers.number_types.UOffsetTFlags.py_type(j) * 4 x = self._tab.Indirect(x) from .NDAttribute import NDAttribute + obj = NDAttribute() obj.Init(self._tab.Bytes, x) return obj @@ -114,15 +125,58 @@ def PAttributeListLength(self): return self._tab.VectorLen(o) return 0 -def NDArrayStart(builder): builder.StartObject(7) -def NDArrayAddId(builder, id): builder.PrependInt32Slot(0, id, 0) -def NDArrayAddTimeStamp(builder, timeStamp): builder.PrependFloat64Slot(1, timeStamp, 0.0) -def NDArrayAddEpicsTS(builder, epicsTS): builder.PrependStructSlot(2, flatbuffers.number_types.UOffsetTFlags.py_type(epicsTS), 0) -def NDArrayAddDims(builder, dims): builder.PrependUOffsetTRelativeSlot(3, flatbuffers.number_types.UOffsetTFlags.py_type(dims), 0) -def NDArrayStartDimsVector(builder, numElems): return builder.StartVector(8, numElems, 8) -def NDArrayAddDataType(builder, dataType): builder.PrependInt8Slot(4, dataType, 0) -def NDArrayAddPData(builder, pData): builder.PrependUOffsetTRelativeSlot(5, flatbuffers.number_types.UOffsetTFlags.py_type(pData), 0) -def NDArrayStartPDataVector(builder, numElems): return builder.StartVector(1, numElems, 1) -def NDArrayAddPAttributeList(builder, pAttributeList): builder.PrependUOffsetTRelativeSlot(6, flatbuffers.number_types.UOffsetTFlags.py_type(pAttributeList), 0) -def NDArrayStartPAttributeListVector(builder, numElems): return builder.StartVector(4, numElems, 4) -def NDArrayEnd(builder): return builder.EndObject() + +def NDArrayStart(builder): + builder.StartObject(7) + + +def NDArrayAddId(builder, id): + builder.PrependInt32Slot(0, id, 0) + + +def NDArrayAddTimeStamp(builder, timeStamp): + builder.PrependFloat64Slot(1, timeStamp, 0.0) + + +def NDArrayAddEpicsTS(builder, epicsTS): + builder.PrependStructSlot( + 2, flatbuffers.number_types.UOffsetTFlags.py_type(epicsTS), 0 + ) + + +def NDArrayAddDims(builder, dims): + builder.PrependUOffsetTRelativeSlot( + 3, flatbuffers.number_types.UOffsetTFlags.py_type(dims), 0 + ) + + +def 
NDArrayStartDimsVector(builder, numElems): + return builder.StartVector(8, numElems, 8) + + +def NDArrayAddDataType(builder, dataType): + builder.PrependInt8Slot(4, dataType, 0) + + +def NDArrayAddPData(builder, pData): + builder.PrependUOffsetTRelativeSlot( + 5, flatbuffers.number_types.UOffsetTFlags.py_type(pData), 0 + ) + + +def NDArrayStartPDataVector(builder, numElems): + return builder.StartVector(1, numElems, 1) + + +def NDArrayAddPAttributeList(builder, pAttributeList): + builder.PrependUOffsetTRelativeSlot( + 6, flatbuffers.number_types.UOffsetTFlags.py_type(pAttributeList), 0 + ) + + +def NDArrayStartPAttributeListVector(builder, numElems): + return builder.StartVector(4, numElems, 4) + + +def NDArrayEnd(builder): + return builder.EndObject() diff --git a/streaming_data_types/fbschemas/NDAr_NDArray_schema/NDAttribute.py b/streaming_data_types/fbschemas/NDAr_NDArray_schema/NDAttribute.py index 901e520..8448343 100644 --- a/streaming_data_types/fbschemas/NDAr_NDArray_schema/NDAttribute.py +++ b/streaming_data_types/fbschemas/NDAr_NDArray_schema/NDAttribute.py @@ -4,8 +4,9 @@ import flatbuffers + class NDAttribute(object): - __slots__ = ['_tab'] + __slots__ = ["_tab"] @classmethod def GetRootAsNDAttribute(cls, buf, offset): @@ -51,7 +52,10 @@ def PData(self, j): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) if o != 0: a = self._tab.Vector(o) - return self._tab.Get(flatbuffers.number_types.Uint8Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1)) + return self._tab.Get( + flatbuffers.number_types.Uint8Flags, + a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1), + ) return 0 # NDAttribute @@ -68,11 +72,42 @@ def PDataLength(self): return self._tab.VectorLen(o) return 0 -def NDAttributeStart(builder): builder.StartObject(5) -def NDAttributeAddPName(builder, pName): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(pName), 0) -def NDAttributeAddPDescription(builder, pDescription): 
builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(pDescription), 0) -def NDAttributeAddPSource(builder, pSource): builder.PrependUOffsetTRelativeSlot(2, flatbuffers.number_types.UOffsetTFlags.py_type(pSource), 0) -def NDAttributeAddDataType(builder, dataType): builder.PrependInt8Slot(3, dataType, 0) -def NDAttributeAddPData(builder, pData): builder.PrependUOffsetTRelativeSlot(4, flatbuffers.number_types.UOffsetTFlags.py_type(pData), 0) -def NDAttributeStartPDataVector(builder, numElems): return builder.StartVector(1, numElems, 1) -def NDAttributeEnd(builder): return builder.EndObject() + +def NDAttributeStart(builder): + builder.StartObject(5) + + +def NDAttributeAddPName(builder, pName): + builder.PrependUOffsetTRelativeSlot( + 0, flatbuffers.number_types.UOffsetTFlags.py_type(pName), 0 + ) + + +def NDAttributeAddPDescription(builder, pDescription): + builder.PrependUOffsetTRelativeSlot( + 1, flatbuffers.number_types.UOffsetTFlags.py_type(pDescription), 0 + ) + + +def NDAttributeAddPSource(builder, pSource): + builder.PrependUOffsetTRelativeSlot( + 2, flatbuffers.number_types.UOffsetTFlags.py_type(pSource), 0 + ) + + +def NDAttributeAddDataType(builder, dataType): + builder.PrependInt8Slot(3, dataType, 0) + + +def NDAttributeAddPData(builder, pData): + builder.PrependUOffsetTRelativeSlot( + 4, flatbuffers.number_types.UOffsetTFlags.py_type(pData), 0 + ) + + +def NDAttributeStartPDataVector(builder, numElems): + return builder.StartVector(1, numElems, 1) + + +def NDAttributeEnd(builder): + return builder.EndObject() diff --git a/streaming_data_types/fbschemas/NDAr_NDArray_schema/epicsTimeStamp.py b/streaming_data_types/fbschemas/NDAr_NDArray_schema/epicsTimeStamp.py index 1877faf..4afea11 100644 --- a/streaming_data_types/fbschemas/NDAr_NDArray_schema/epicsTimeStamp.py +++ b/streaming_data_types/fbschemas/NDAr_NDArray_schema/epicsTimeStamp.py @@ -4,17 +4,28 @@ import flatbuffers + class epicsTimeStamp(object): - __slots__ = 
['_tab'] + __slots__ = ["_tab"] # epicsTimeStamp def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) # epicsTimeStamp - def SecPastEpoch(self): return self._tab.Get(flatbuffers.number_types.Int32Flags, self._tab.Pos + flatbuffers.number_types.UOffsetTFlags.py_type(0)) + def SecPastEpoch(self): + return self._tab.Get( + flatbuffers.number_types.Int32Flags, + self._tab.Pos + flatbuffers.number_types.UOffsetTFlags.py_type(0), + ) + # epicsTimeStamp - def Nsec(self): return self._tab.Get(flatbuffers.number_types.Int32Flags, self._tab.Pos + flatbuffers.number_types.UOffsetTFlags.py_type(4)) + def Nsec(self): + return self._tab.Get( + flatbuffers.number_types.Int32Flags, + self._tab.Pos + flatbuffers.number_types.UOffsetTFlags.py_type(4), + ) + def CreateepicsTimeStamp(builder, secPastEpoch, nsec): builder.Prep(4, 8) From beed807416c61b0762e603826d73a26025dc42f2 Mon Sep 17 00:00:00 2001 From: Matthew D Jones Date: Wed, 2 Dec 2020 11:44:28 +0000 Subject: [PATCH 206/363] Update README_DEV.md --- README_DEV.md | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/README_DEV.md b/README_DEV.md index 6c37fe8..8749234 100644 --- a/README_DEV.md +++ b/README_DEV.md @@ -48,7 +48,6 @@ pip install --user -e ./ ``` ### Building the package locally and deploying it to PyPI -**First update the version number in setup.py and push the update to the repository.** #### Requirements * A [PyPi](https://pypi.org/) account @@ -58,7 +57,7 @@ pip install --user -e ./ #### Steps -Increase the version number in setup.py. 
+**First update the version number in setup.py and push the update to the repository.** Delete any old builds you may have (IMPORTANT!): ``` From ab9ceffeae6079be936d5a2b2dee6eba65443802 Mon Sep 17 00:00:00 2001 From: Matthew D Jones Date: Wed, 2 Dec 2020 11:45:18 +0000 Subject: [PATCH 207/363] Update README_DEV.md --- README_DEV.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README_DEV.md b/README_DEV.md index 8749234..08693bf 100644 --- a/README_DEV.md +++ b/README_DEV.md @@ -57,7 +57,7 @@ pip install --user -e ./ #### Steps -**First update the version number in setup.py and push the update to the repository.** +***First update the version number in setup.py and push the update to the repository.*** Delete any old builds you may have (IMPORTANT!): ``` From 41d1d08ffd7f11615c5db359d9a0949a8fb0642e Mon Sep 17 00:00:00 2001 From: Jonas Nilsson Date: Wed, 2 Dec 2020 12:49:07 +0100 Subject: [PATCH 208/363] Bump version string. --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 57d9378..76db2f2 100644 --- a/setup.py +++ b/setup.py @@ -15,7 +15,7 @@ setup( name="ess_streaming_data_types", - version="0.9.5", + version="0.9.6", description=DESCRIPTION, long_description=LONG_DESCRIPTION, long_description_content_type="text/markdown", From 584a6993be510c0c66b1c8636bfbd00f6ec72ffa Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Mon, 21 Dec 2020 14:40:06 +0100 Subject: [PATCH 209/363] Make get_schema public --- streaming_data_types/utils.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/streaming_data_types/utils.py b/streaming_data_types/utils.py index 22f8234..d8a4478 100644 --- a/streaming_data_types/utils.py +++ b/streaming_data_types/utils.py @@ -1,7 +1,7 @@ from streaming_data_types.exceptions import ShortBufferException, WrongSchemaException -def _get_schema(buffer) -> str: +def get_schema(buffer) -> str: """ Extract the schema code embedded in the buffer @@ -20,7 
+20,7 @@ def check_schema_identifier(buffer, expected_identifer: bytes): :param buffer: The raw buffer of the FlatBuffers message :param expected_identifer: The expected flatbuffer identifier """ - if _get_schema(buffer) != expected_identifer.decode(): + if get_schema(buffer) != expected_identifer.decode(): raise WrongSchemaException( - f"Incorrect schema: expected {expected_identifer} but got {_get_schema(buffer)}" + f"Incorrect schema: expected {expected_identifer} but got {get_schema(buffer)}" ) From 2ccf3ab1e76eb3da195accf2500486e5d1a0b79d Mon Sep 17 00:00:00 2001 From: Matthew D Jones Date: Tue, 22 Dec 2020 11:10:27 +0000 Subject: [PATCH 210/363] use CreateNumpyVector in ev42 serialisation --- streaming_data_types/eventdata_ev42.py | 14 +++----------- 1 file changed, 3 insertions(+), 11 deletions(-) diff --git a/streaming_data_types/eventdata_ev42.py b/streaming_data_types/eventdata_ev42.py index da00ffe..62ab306 100644 --- a/streaming_data_types/eventdata_ev42.py +++ b/streaming_data_types/eventdata_ev42.py @@ -4,6 +4,7 @@ import streaming_data_types.fbschemas.eventdata_ev42.FacilityData as FacilityData import streaming_data_types.fbschemas.isis_event_info_is84.ISISData as ISISData from streaming_data_types.utils import check_schema_identifier +import numpy as np FILE_IDENTIFIER = b"ev42" @@ -73,17 +74,8 @@ def serialise_ev42( source = builder.CreateString(source_name) - EventMessage.EventMessageStartTimeOfFlightVector(builder, len(time_of_flight)) - # FlatBuffers builds arrays backwards - for x in reversed(time_of_flight): - builder.PrependInt32(x) - tof_data = builder.EndVector(len(time_of_flight)) - - EventMessage.EventMessageStartDetectorIdVector(builder, len(detector_id)) - # FlatBuffers builds arrays backwards - for x in reversed(detector_id): - builder.PrependInt32(x) - det_data = builder.EndVector(len(detector_id)) + tof_data = builder.CreateNumpyVector(np.array(time_of_flight).astype(np.uint32)) + det_data = 
builder.CreateNumpyVector(np.array(detector_id).astype(np.uint32)) isis_data = None if isis_specific: From 563ad1785076c6c8ca6cd8018355ea2472791abc Mon Sep 17 00:00:00 2001 From: Jonas Nilsson Date: Mon, 25 Jan 2021 15:07:29 +0100 Subject: [PATCH 211/363] Add sample environment Python code. --- streaming_data_types/__init__.py | 3 + .../fbschemas/nmx_mo01/__init__.py | 0 .../sample_environment_senv/Location.py | 10 ++ .../SampleEnvironmentData.py | 134 ++++++++++++++++++ .../sample_environment_senv/__init__.py | 0 .../sample_environment_senv.py | 78 ++++++++++ tests/test_senv.py | 39 +++++ 7 files changed, 264 insertions(+) create mode 100644 streaming_data_types/fbschemas/nmx_mo01/__init__.py create mode 100644 streaming_data_types/fbschemas/sample_environment_senv/Location.py create mode 100644 streaming_data_types/fbschemas/sample_environment_senv/SampleEnvironmentData.py create mode 100644 streaming_data_types/fbschemas/sample_environment_senv/__init__.py create mode 100644 streaming_data_types/sample_environment_senv.py create mode 100644 tests/test_senv.py diff --git a/streaming_data_types/__init__.py b/streaming_data_types/__init__.py index 8f0eacd..4ebbc78 100644 --- a/streaming_data_types/__init__.py +++ b/streaming_data_types/__init__.py @@ -17,6 +17,7 @@ serialise_rf5k, ) from streaming_data_types.area_detector_NDAr import deserialise_ndar, serialise_ndar +from streaming_data_types.sample_environment_senv import deserialise_senv, serialise_senv SERIALISERS = { @@ -33,6 +34,7 @@ "answ": serialise_answ, "wrdn": serialise_wrdn, "NDAr": serialise_ndar, + "senv": serialise_senv, } @@ -50,4 +52,5 @@ "answ": deserialise_answ, "wrdn": deserialise_wrdn, "NDAr": deserialise_ndar, + "senv": deserialise_senv, } diff --git a/streaming_data_types/fbschemas/nmx_mo01/__init__.py b/streaming_data_types/fbschemas/nmx_mo01/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/streaming_data_types/fbschemas/sample_environment_senv/Location.py 
b/streaming_data_types/fbschemas/sample_environment_senv/Location.py new file mode 100644 index 0000000..c8c7fb4 --- /dev/null +++ b/streaming_data_types/fbschemas/sample_environment_senv/Location.py @@ -0,0 +1,10 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +class Location(object): + Unknown = 0 + Start = 1 + Middle = 2 + End = 3 + diff --git a/streaming_data_types/fbschemas/sample_environment_senv/SampleEnvironmentData.py b/streaming_data_types/fbschemas/sample_environment_senv/SampleEnvironmentData.py new file mode 100644 index 0000000..2c1dfba --- /dev/null +++ b/streaming_data_types/fbschemas/sample_environment_senv/SampleEnvironmentData.py @@ -0,0 +1,134 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers +from flatbuffers.compat import import_numpy +np = import_numpy() + +class SampleEnvironmentData(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAsSampleEnvironmentData(cls, buf, offset): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = SampleEnvironmentData() + x.Init(buf, n + offset) + return x + + @classmethod + def SampleEnvironmentDataBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x73\x65\x6E\x76", size_prefixed=size_prefixed) + + # SampleEnvironmentData + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # SampleEnvironmentData + def Name(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + # SampleEnvironmentData + def Channel(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) + return 0 + + # SampleEnvironmentData + def PacketTimestamp(self): + o = 
flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Uint64Flags, o + self._tab.Pos) + return 0 + + # SampleEnvironmentData + def TimeDelta(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Float64Flags, o + self._tab.Pos) + return 0.0 + + # SampleEnvironmentData + def TimestampLocation(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) + return 0 + + # SampleEnvironmentData + def Values(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get(flatbuffers.number_types.Uint16Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 2)) + return 0 + + # SampleEnvironmentData + def ValuesAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint16Flags, o) + return 0 + + # SampleEnvironmentData + def ValuesLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # SampleEnvironmentData + def ValuesIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) + return o == 0 + + # SampleEnvironmentData + def Timestamps(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get(flatbuffers.number_types.Uint64Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8)) + return 0 + + # SampleEnvironmentData + def TimestampsAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16)) + if o != 0: + return 
self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint64Flags, o) + return 0 + + # SampleEnvironmentData + def TimestampsLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # SampleEnvironmentData + def TimestampsIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16)) + return o == 0 + + # SampleEnvironmentData + def MessageCounter(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(18)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Uint64Flags, o + self._tab.Pos) + return 0 + +def SampleEnvironmentDataStart(builder): builder.StartObject(8) +def SampleEnvironmentDataAddName(builder, Name): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(Name), 0) +def SampleEnvironmentDataAddChannel(builder, Channel): builder.PrependInt32Slot(1, Channel, 0) +def SampleEnvironmentDataAddPacketTimestamp(builder, PacketTimestamp): builder.PrependUint64Slot(2, PacketTimestamp, 0) +def SampleEnvironmentDataAddTimeDelta(builder, TimeDelta): builder.PrependFloat64Slot(3, TimeDelta, 0.0) +def SampleEnvironmentDataAddTimestampLocation(builder, TimestampLocation): builder.PrependInt8Slot(4, TimestampLocation, 0) +def SampleEnvironmentDataAddValues(builder, Values): builder.PrependUOffsetTRelativeSlot(5, flatbuffers.number_types.UOffsetTFlags.py_type(Values), 0) +def SampleEnvironmentDataStartValuesVector(builder, numElems): return builder.StartVector(2, numElems, 2) +def SampleEnvironmentDataAddTimestamps(builder, Timestamps): builder.PrependUOffsetTRelativeSlot(6, flatbuffers.number_types.UOffsetTFlags.py_type(Timestamps), 0) +def SampleEnvironmentDataStartTimestampsVector(builder, numElems): return builder.StartVector(8, numElems, 8) +def SampleEnvironmentDataAddMessageCounter(builder, MessageCounter): builder.PrependUint64Slot(7, MessageCounter, 0) +def 
SampleEnvironmentDataEnd(builder): return builder.EndObject() diff --git a/streaming_data_types/fbschemas/sample_environment_senv/__init__.py b/streaming_data_types/fbschemas/sample_environment_senv/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/streaming_data_types/sample_environment_senv.py b/streaming_data_types/sample_environment_senv.py new file mode 100644 index 0000000..195ecfb --- /dev/null +++ b/streaming_data_types/sample_environment_senv.py @@ -0,0 +1,78 @@ +from streaming_data_types.fbschemas.sample_environment_senv.SampleEnvironmentData import SampleEnvironmentData, SampleEnvironmentDataStart, SampleEnvironmentDataEnd, SampleEnvironmentDataAddName, SampleEnvironmentDataAddChannel, SampleEnvironmentDataAddMessageCounter, SampleEnvironmentDataAddTimeDelta, SampleEnvironmentDataAddTimestampLocation, SampleEnvironmentDataAddValues, SampleEnvironmentDataAddTimestamps, SampleEnvironmentDataAddPacketTimestamp +from streaming_data_types.fbschemas.sample_environment_senv.Location import Location +import flatbuffers +import numpy as np +from collections import namedtuple +from typing import Optional, Union, List, NamedTuple +from streaming_data_types.utils import check_schema_identifier +from datetime import datetime + +FILE_IDENTIFIER = b"senv" + + +def serialise_senv( + name: str, + channel: int, + timestamp: datetime, + sample_ts_delta: int, + message_counter: int, + values: Union[np.ndarray, List], + ts_location: Location = Location.Middle, + value_timestamps: Union[np.ndarray, List, None] = None, +) -> bytes: + builder = flatbuffers.Builder(1024) + + if value_timestamps is not None: + used_timestamps = np.atleast_1d(np.array(value_timestamps)).astype(np.uint64) + + temp_values = np.atleast_1d(np.array(values)).astype(np.uint16) + value_offset = builder.CreateNumpyVector(temp_values) + + name_offset = builder.CreateString(name) + + + SampleEnvironmentDataStart(builder) + SampleEnvironmentDataAddName(builder, name_offset) + 
SampleEnvironmentDataAddTimeDelta(builder, sample_ts_delta) + SampleEnvironmentDataAddTimestampLocation(builder, ts_location) + SampleEnvironmentDataAddMessageCounter(builder, message_counter) + SampleEnvironmentDataAddChannel(builder, channel) + SampleEnvironmentDataAddPacketTimestamp(builder, int(timestamp.timestamp() * 1e9)) + SampleEnvironmentDataAddValues(builder, value_offset) + if value_timestamps is not None: + SampleEnvironmentDataAddTimestamps(builder, used_timestamps) + + SE_Message = SampleEnvironmentDataEnd(builder) + + builder.Finish(SE_Message, file_identifier=FILE_IDENTIFIER) + return bytes(builder.Output()) + + +Response = NamedTuple( + "SampleEnvironmentData", + ( + ("name", str), + ("channel", int), + ("timestamp", datetime), + ("sample_ts_delta", int), + ("ts_location", Location), + ("message_counter", int), + ("values", np.ndarray), + ("value_ts", Optional[np.ndarray]), + ), +) + + +def deserialise_senv(buffer: Union[bytearray, bytes]) -> Response: + check_schema_identifier(buffer, FILE_IDENTIFIER) + + SE_data = SampleEnvironmentData.GetRootAsSampleEnvironmentData(buffer, 0) + + max_time = datetime( + year=9000, month=1, day=1, hour=0, minute=0, second=0 + ).timestamp() + used_timestamp = SE_data.PacketTimestamp() / 1e9 + if used_timestamp > max_time: + used_timestamp = max_time + + return Response(name=SE_data.Name().decode(), channel=SE_data.Channel(), timestamp=datetime.fromtimestamp(used_timestamp), sample_ts_delta=SE_data.TimeDelta(), ts_location=SE_data.TimestampLocation(), message_counter=SE_data.MessageCounter(), values=SE_data.ValuesAsNumpy(), value_ts=None) diff --git a/tests/test_senv.py b/tests/test_senv.py new file mode 100644 index 0000000..7d9a7c3 --- /dev/null +++ b/tests/test_senv.py @@ -0,0 +1,39 @@ +import pytest +import numpy as np + +from streaming_data_types.exceptions import WrongSchemaException +from streaming_data_types.sample_environment_senv import serialise_senv, deserialise_senv +from streaming_data_types import 
SERIALISERS, DESERIALISERS +from datetime import datetime +from streaming_data_types.fbschemas.sample_environment_senv.Location import Location + + +class TestSerialisationSenv: + original_entry = { + "name": "some_name", + "timestamp": datetime.now(), + "channel": 42, + "message_counter": 123456, + "sample_ts_delta": 0.005, + "values": np.arange(100), + "value_timestamps": None, + "ts_location": Location.End + } + + def test_serialises_and_deserialises_senv(self): + buf = serialise_senv(**self.original_entry) + deserialised_tuple = deserialise_senv(buf) + + assert self.original_entry["name"] == deserialised_tuple.name + assert self.original_entry["timestamp"] == deserialised_tuple.timestamp + assert self.original_entry["channel"] == deserialised_tuple.channel + assert self.original_entry["message_counter"] == deserialised_tuple.message_counter + assert self.original_entry["sample_ts_delta"] == deserialised_tuple.sample_ts_delta + assert np.array_equal(self.original_entry["values"], deserialised_tuple.values) + assert self.original_entry["value_timestamps"] == deserialised_tuple.value_ts + assert self.original_entry["ts_location"] == deserialised_tuple.ts_location + + def test_schema_type_is_in_global_serialisers_list(self): + assert "senv" in SERIALISERS + assert "senv" in DESERIALISERS + From 39de846048231450b0c4f62a28c2daa77174f4bf Mon Sep 17 00:00:00 2001 From: Jonas Nilsson Date: Mon, 25 Jan 2021 15:08:17 +0100 Subject: [PATCH 212/363] black --- .../sample_environment_senv.py | 26 ++++++++++++++++--- 1 file changed, 23 insertions(+), 3 deletions(-) diff --git a/streaming_data_types/sample_environment_senv.py b/streaming_data_types/sample_environment_senv.py index 195ecfb..e49d393 100644 --- a/streaming_data_types/sample_environment_senv.py +++ b/streaming_data_types/sample_environment_senv.py @@ -1,4 +1,16 @@ -from streaming_data_types.fbschemas.sample_environment_senv.SampleEnvironmentData import SampleEnvironmentData, SampleEnvironmentDataStart, 
SampleEnvironmentDataEnd, SampleEnvironmentDataAddName, SampleEnvironmentDataAddChannel, SampleEnvironmentDataAddMessageCounter, SampleEnvironmentDataAddTimeDelta, SampleEnvironmentDataAddTimestampLocation, SampleEnvironmentDataAddValues, SampleEnvironmentDataAddTimestamps, SampleEnvironmentDataAddPacketTimestamp +from streaming_data_types.fbschemas.sample_environment_senv.SampleEnvironmentData import ( + SampleEnvironmentData, + SampleEnvironmentDataStart, + SampleEnvironmentDataEnd, + SampleEnvironmentDataAddName, + SampleEnvironmentDataAddChannel, + SampleEnvironmentDataAddMessageCounter, + SampleEnvironmentDataAddTimeDelta, + SampleEnvironmentDataAddTimestampLocation, + SampleEnvironmentDataAddValues, + SampleEnvironmentDataAddTimestamps, + SampleEnvironmentDataAddPacketTimestamp, +) from streaming_data_types.fbschemas.sample_environment_senv.Location import Location import flatbuffers import numpy as np @@ -30,7 +42,6 @@ def serialise_senv( name_offset = builder.CreateString(name) - SampleEnvironmentDataStart(builder) SampleEnvironmentDataAddName(builder, name_offset) SampleEnvironmentDataAddTimeDelta(builder, sample_ts_delta) @@ -75,4 +86,13 @@ def deserialise_senv(buffer: Union[bytearray, bytes]) -> Response: if used_timestamp > max_time: used_timestamp = max_time - return Response(name=SE_data.Name().decode(), channel=SE_data.Channel(), timestamp=datetime.fromtimestamp(used_timestamp), sample_ts_delta=SE_data.TimeDelta(), ts_location=SE_data.TimestampLocation(), message_counter=SE_data.MessageCounter(), values=SE_data.ValuesAsNumpy(), value_ts=None) + return Response( + name=SE_data.Name().decode(), + channel=SE_data.Channel(), + timestamp=datetime.fromtimestamp(used_timestamp), + sample_ts_delta=SE_data.TimeDelta(), + ts_location=SE_data.TimestampLocation(), + message_counter=SE_data.MessageCounter(), + values=SE_data.ValuesAsNumpy(), + value_ts=None, + ) From 77ff7503e3bdf24511d190d1e272386aa7386de1 Mon Sep 17 00:00:00 2001 From: Jonas Nilsson Date: Mon, 
25 Jan 2021 16:22:01 +0100 Subject: [PATCH 213/363] Minor fixes. --- streaming_data_types/sample_environment_senv.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/streaming_data_types/sample_environment_senv.py b/streaming_data_types/sample_environment_senv.py index e49d393..8776aee 100644 --- a/streaming_data_types/sample_environment_senv.py +++ b/streaming_data_types/sample_environment_senv.py @@ -14,7 +14,6 @@ from streaming_data_types.fbschemas.sample_environment_senv.Location import Location import flatbuffers import numpy as np -from collections import namedtuple from typing import Optional, Union, List, NamedTuple from streaming_data_types.utils import check_schema_identifier from datetime import datetime @@ -86,6 +85,10 @@ def deserialise_senv(buffer: Union[bytearray, bytes]) -> Response: if used_timestamp > max_time: used_timestamp = max_time + value_timestamps = None + if not SE_data.TimestampsIsNone(): + value_timestamps = SE_data.TimestampsAsNumpy() + return Response( name=SE_data.Name().decode(), channel=SE_data.Channel(), @@ -94,5 +97,5 @@ def deserialise_senv(buffer: Union[bytearray, bytes]) -> Response: ts_location=SE_data.TimestampLocation(), message_counter=SE_data.MessageCounter(), values=SE_data.ValuesAsNumpy(), - value_ts=None, + value_ts=value_timestamps, ) From 77f0e978de84cce50f24910c5ea6df0a2b33a571 Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Fri, 29 Jan 2021 08:30:09 +0100 Subject: [PATCH 214/363] Added version number for library --- README_DEV.md | 7 ++++++- setup.py | 5 ++++- streaming_data_types/__init__.py | 1 + 3 files changed, 11 insertions(+), 2 deletions(-) diff --git a/README_DEV.md b/README_DEV.md index 08693bf..5ad3fdc 100644 --- a/README_DEV.md +++ b/README_DEV.md @@ -57,7 +57,12 @@ pip install --user -e ./ #### Steps -***First update the version number in setup.py and push the update to the repository.*** +***First update the __version__ number in streaming_data_types/__init__.py and push the 
update to the repository.*** + +Uninstall streaming_data_types if you have previous installed it from PyPi: +``` +pip uninstall ess_streaming_data_types +``` Delete any old builds you may have (IMPORTANT!): ``` diff --git a/setup.py b/setup.py index 76db2f2..a16c947 100644 --- a/setup.py +++ b/setup.py @@ -13,9 +13,12 @@ print("COULD NOT GET LONG DESC: {}".format(error)) LONG_DESCRIPTION = DESCRIPTION +# Import version number +from streaming_data_types.__init__ import __version__ as version + setup( name="ess_streaming_data_types", - version="0.9.6", + version=version, description=DESCRIPTION, long_description=LONG_DESCRIPTION, long_description_content_type="text/markdown", diff --git a/streaming_data_types/__init__.py b/streaming_data_types/__init__.py index 8f0eacd..d2b3fbf 100644 --- a/streaming_data_types/__init__.py +++ b/streaming_data_types/__init__.py @@ -18,6 +18,7 @@ ) from streaming_data_types.area_detector_NDAr import deserialise_ndar, serialise_ndar +__version__ = "0.9.7" SERIALISERS = { "ev42": serialise_ev42, From 8485dbc7904adf1c050fcc18fa769b1fdca8dee4 Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Fri, 29 Jan 2021 07:44:55 +0000 Subject: [PATCH 215/363] Not a release so reverted the version --- streaming_data_types/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/streaming_data_types/__init__.py b/streaming_data_types/__init__.py index d2b3fbf..56267c3 100644 --- a/streaming_data_types/__init__.py +++ b/streaming_data_types/__init__.py @@ -18,7 +18,7 @@ ) from streaming_data_types.area_detector_NDAr import deserialise_ndar, serialise_ndar -__version__ = "0.9.7" +__version__ = "0.9.6" SERIALISERS = { "ev42": serialise_ev42, From 97cba8f5e73afc3e4a7eb854d1b6e4acfe0a4efd Mon Sep 17 00:00:00 2001 From: Jonas Nilsson Date: Fri, 29 Jan 2021 10:22:39 +0100 Subject: [PATCH 216/363] svnjdflkxnvdfkls --- tests/test_senv.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/tests/test_senv.py b/tests/test_senv.py index 
7d9a7c3..cca593c 100644 --- a/tests/test_senv.py +++ b/tests/test_senv.py @@ -1,7 +1,4 @@ -import pytest import numpy as np - -from streaming_data_types.exceptions import WrongSchemaException from streaming_data_types.sample_environment_senv import serialise_senv, deserialise_senv from streaming_data_types import SERIALISERS, DESERIALISERS from datetime import datetime @@ -36,4 +33,3 @@ def test_serialises_and_deserialises_senv(self): def test_schema_type_is_in_global_serialisers_list(self): assert "senv" in SERIALISERS assert "senv" in DESERIALISERS - From 73cbc34d2fd4b95526ba8c1952636a028d2632db Mon Sep 17 00:00:00 2001 From: Jonas Nilsson Date: Fri, 5 Feb 2021 12:15:46 +0100 Subject: [PATCH 217/363] Updated documentation. --- README.md | 33 ++++++++++++++++----------------- 1 file changed, 16 insertions(+), 17 deletions(-) diff --git a/README.md b/README.md index 08886bd..420bad7 100644 --- a/README.md +++ b/README.md @@ -6,23 +6,22 @@ https://github.com/ess-dmsc/streaming-data-types ## FlatBuffer Schemas -|name|description|verifiable*| -|----|-----------|----------| -|hs00|Histogram schema|Y| -|ns10|NICOS cache entry schema|Y| -|pl72|Run start|N| -|6s4t|Run stop|N| -|f142|Log data|Y| -|ev42|Event data|Y| -|x5f2|Status messages|N| -|tdct|Timestamps|Y| -|ep00|EPICS connection info|Y| -|rf5k|Forwarder configuration update|Y| -|answ|File-writer command response|n/a| -|wrdn|File-writer finished writing|n/a| -|NDAr|NDArray schema for area detector images|n/a| - -\* whether it passes verification via the C++ FlatBuffers library. 
+|name|description| +|----|-----------| +|hs00|Histogram schema| +|ns10|NICOS cache entry schema| +|pl72|Run start| +|6s4t|Run stop| +|f142|Log data| +|ev42|Event data| +|x5f2|Status messages| +|tdct|Timestamps| +|ep00|EPICS connection info| +|rf5k|Forwarder configuration update| +|answ|File-writer command response| +|wrdn|File-writer finished writing| +|NDAr|**Deprecated**| +|ADAr|For storing EPICS areaDetector data| ### hs00 Schema for histogram data. It is one of the more complicated to use schemas. From cbf47d910cdf4cd252215d15317852545f339b1c Mon Sep 17 00:00:00 2001 From: Jonas Nilsson Date: Fri, 5 Feb 2021 12:23:13 +0100 Subject: [PATCH 218/363] nrfg --- tests/test_ADAr.py | 1 - 1 file changed, 1 deletion(-) diff --git a/tests/test_ADAr.py b/tests/test_ADAr.py index 62787fa..e522212 100644 --- a/tests/test_ADAr.py +++ b/tests/test_ADAr.py @@ -4,7 +4,6 @@ deserialise_ADAr, Attribute, ) -from streaming_data_types.fbschemas.ADAr_ADArray_schema.DType import DType from streaming_data_types import SERIALISERS, DESERIALISERS import numpy as np from datetime import datetime From 129ae4b686b4b9f9cfa43f85fea3a90c9981b588 Mon Sep 17 00:00:00 2001 From: Jonas Nilsson Date: Fri, 5 Feb 2021 12:41:13 +0100 Subject: [PATCH 219/363] Disable warning. 
--- streaming_data_types/area_detector_ADAr.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/streaming_data_types/area_detector_ADAr.py b/streaming_data_types/area_detector_ADAr.py index 8bca7df..de0bc86 100644 --- a/streaming_data_types/area_detector_ADAr.py +++ b/streaming_data_types/area_detector_ADAr.py @@ -25,7 +25,7 @@ def __init__( self.data = data def __eq__(self, other): - data_is_equal = type(self.data) == type(other.data) + data_is_equal = type(self.data) == type(other.data) # noqa: E721 if type(self.data) is np.ndarray: data_is_equal = data_is_equal and np.array_equal(self.data, other.data) else: From 36f3b6f66571d6b04498a03673a125b18f04e507 Mon Sep 17 00:00:00 2001 From: Jonas Nilsson Date: Fri, 5 Feb 2021 12:56:56 +0100 Subject: [PATCH 220/363] Fix exceptions. --- tests/test_ADAr.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/tests/test_ADAr.py b/tests/test_ADAr.py index e522212..72fb41b 100644 --- a/tests/test_ADAr.py +++ b/tests/test_ADAr.py @@ -7,6 +7,7 @@ from streaming_data_types import SERIALISERS, DESERIALISERS import numpy as np from datetime import datetime +from streaming_data_types.exceptions import WrongSchemaException class TestSerialisationNDAr: @@ -94,7 +95,7 @@ def test_if_buffer_has_wrong_id_then_throws(self): buf = bytearray(buf) buf[4:8] = b"1234" - with pytest.raises(RuntimeError): + with pytest.raises(WrongSchemaException): deserialise_ADAr(buf) def test_schema_type_is_in_global_serialisers_list(self): From d796272879eafcd5be1f8f11a0014ff61d6b76d8 Mon Sep 17 00:00:00 2001 From: Jonas Nilsson Date: Fri, 5 Feb 2021 13:06:30 +0100 Subject: [PATCH 221/363] Revert black. 
--- .../fbschemas/NDAr_NDArray_schema/DType.py | 2 +- .../fbschemas/NDAr_NDArray_schema/NDArray.py | 86 ++++--------------- .../NDAr_NDArray_schema/NDAttribute.py | 55 +++--------- .../NDAr_NDArray_schema/epicsTimeStamp.py | 17 +--- 4 files changed, 30 insertions(+), 130 deletions(-) diff --git a/streaming_data_types/fbschemas/NDAr_NDArray_schema/DType.py b/streaming_data_types/fbschemas/NDAr_NDArray_schema/DType.py index 48dac73..8b46fce 100644 --- a/streaming_data_types/fbschemas/NDAr_NDArray_schema/DType.py +++ b/streaming_data_types/fbschemas/NDAr_NDArray_schema/DType.py @@ -2,7 +2,6 @@ # namespace: FB_Tables - class DType(object): Int8 = 0 Uint8 = 1 @@ -15,3 +14,4 @@ class DType(object): Float32 = 8 Float64 = 9 c_string = 10 + diff --git a/streaming_data_types/fbschemas/NDAr_NDArray_schema/NDArray.py b/streaming_data_types/fbschemas/NDAr_NDArray_schema/NDArray.py index 9264718..013dd1a 100644 --- a/streaming_data_types/fbschemas/NDAr_NDArray_schema/NDArray.py +++ b/streaming_data_types/fbschemas/NDAr_NDArray_schema/NDArray.py @@ -4,9 +4,8 @@ import flatbuffers - class NDArray(object): - __slots__ = ["_tab"] + __slots__ = ['_tab'] @classmethod def GetRootAsNDArray(cls, buf, offset): @@ -30,9 +29,7 @@ def Id(self): def TimeStamp(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) if o != 0: - return self._tab.Get( - flatbuffers.number_types.Float64Flags, o + self._tab.Pos - ) + return self._tab.Get(flatbuffers.number_types.Float64Flags, o + self._tab.Pos) return 0.0 # NDArray @@ -41,7 +38,6 @@ def EpicsTS(self): if o != 0: x = o + self._tab.Pos from .epicsTimeStamp import epicsTimeStamp - obj = epicsTimeStamp() obj.Init(self._tab.Bytes, x) return obj @@ -52,10 +48,7 @@ def Dims(self, j): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) if o != 0: a = self._tab.Vector(o) - return self._tab.Get( - flatbuffers.number_types.Uint64Flags, - a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8), - ) + return 
self._tab.Get(flatbuffers.number_types.Uint64Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8)) return 0 # NDArray @@ -84,10 +77,7 @@ def PData(self, j): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) if o != 0: a = self._tab.Vector(o) - return self._tab.Get( - flatbuffers.number_types.Uint8Flags, - a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1), - ) + return self._tab.Get(flatbuffers.number_types.Uint8Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1)) return 0 # NDArray @@ -112,7 +102,6 @@ def PAttributeList(self, j): x += flatbuffers.number_types.UOffsetTFlags.py_type(j) * 4 x = self._tab.Indirect(x) from .NDAttribute import NDAttribute - obj = NDAttribute() obj.Init(self._tab.Bytes, x) return obj @@ -125,58 +114,15 @@ def PAttributeListLength(self): return self._tab.VectorLen(o) return 0 - -def NDArrayStart(builder): - builder.StartObject(7) - - -def NDArrayAddId(builder, id): - builder.PrependInt32Slot(0, id, 0) - - -def NDArrayAddTimeStamp(builder, timeStamp): - builder.PrependFloat64Slot(1, timeStamp, 0.0) - - -def NDArrayAddEpicsTS(builder, epicsTS): - builder.PrependStructSlot( - 2, flatbuffers.number_types.UOffsetTFlags.py_type(epicsTS), 0 - ) - - -def NDArrayAddDims(builder, dims): - builder.PrependUOffsetTRelativeSlot( - 3, flatbuffers.number_types.UOffsetTFlags.py_type(dims), 0 - ) - - -def NDArrayStartDimsVector(builder, numElems): - return builder.StartVector(8, numElems, 8) - - -def NDArrayAddDataType(builder, dataType): - builder.PrependInt8Slot(4, dataType, 0) - - -def NDArrayAddPData(builder, pData): - builder.PrependUOffsetTRelativeSlot( - 5, flatbuffers.number_types.UOffsetTFlags.py_type(pData), 0 - ) - - -def NDArrayStartPDataVector(builder, numElems): - return builder.StartVector(1, numElems, 1) - - -def NDArrayAddPAttributeList(builder, pAttributeList): - builder.PrependUOffsetTRelativeSlot( - 6, flatbuffers.number_types.UOffsetTFlags.py_type(pAttributeList), 0 - ) - - -def 
NDArrayStartPAttributeListVector(builder, numElems): - return builder.StartVector(4, numElems, 4) - - -def NDArrayEnd(builder): - return builder.EndObject() +def NDArrayStart(builder): builder.StartObject(7) +def NDArrayAddId(builder, id): builder.PrependInt32Slot(0, id, 0) +def NDArrayAddTimeStamp(builder, timeStamp): builder.PrependFloat64Slot(1, timeStamp, 0.0) +def NDArrayAddEpicsTS(builder, epicsTS): builder.PrependStructSlot(2, flatbuffers.number_types.UOffsetTFlags.py_type(epicsTS), 0) +def NDArrayAddDims(builder, dims): builder.PrependUOffsetTRelativeSlot(3, flatbuffers.number_types.UOffsetTFlags.py_type(dims), 0) +def NDArrayStartDimsVector(builder, numElems): return builder.StartVector(8, numElems, 8) +def NDArrayAddDataType(builder, dataType): builder.PrependInt8Slot(4, dataType, 0) +def NDArrayAddPData(builder, pData): builder.PrependUOffsetTRelativeSlot(5, flatbuffers.number_types.UOffsetTFlags.py_type(pData), 0) +def NDArrayStartPDataVector(builder, numElems): return builder.StartVector(1, numElems, 1) +def NDArrayAddPAttributeList(builder, pAttributeList): builder.PrependUOffsetTRelativeSlot(6, flatbuffers.number_types.UOffsetTFlags.py_type(pAttributeList), 0) +def NDArrayStartPAttributeListVector(builder, numElems): return builder.StartVector(4, numElems, 4) +def NDArrayEnd(builder): return builder.EndObject() diff --git a/streaming_data_types/fbschemas/NDAr_NDArray_schema/NDAttribute.py b/streaming_data_types/fbschemas/NDAr_NDArray_schema/NDAttribute.py index 8448343..901e520 100644 --- a/streaming_data_types/fbschemas/NDAr_NDArray_schema/NDAttribute.py +++ b/streaming_data_types/fbschemas/NDAr_NDArray_schema/NDAttribute.py @@ -4,9 +4,8 @@ import flatbuffers - class NDAttribute(object): - __slots__ = ["_tab"] + __slots__ = ['_tab'] @classmethod def GetRootAsNDAttribute(cls, buf, offset): @@ -52,10 +51,7 @@ def PData(self, j): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) if o != 0: a = self._tab.Vector(o) - return 
self._tab.Get( - flatbuffers.number_types.Uint8Flags, - a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1), - ) + return self._tab.Get(flatbuffers.number_types.Uint8Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1)) return 0 # NDAttribute @@ -72,42 +68,11 @@ def PDataLength(self): return self._tab.VectorLen(o) return 0 - -def NDAttributeStart(builder): - builder.StartObject(5) - - -def NDAttributeAddPName(builder, pName): - builder.PrependUOffsetTRelativeSlot( - 0, flatbuffers.number_types.UOffsetTFlags.py_type(pName), 0 - ) - - -def NDAttributeAddPDescription(builder, pDescription): - builder.PrependUOffsetTRelativeSlot( - 1, flatbuffers.number_types.UOffsetTFlags.py_type(pDescription), 0 - ) - - -def NDAttributeAddPSource(builder, pSource): - builder.PrependUOffsetTRelativeSlot( - 2, flatbuffers.number_types.UOffsetTFlags.py_type(pSource), 0 - ) - - -def NDAttributeAddDataType(builder, dataType): - builder.PrependInt8Slot(3, dataType, 0) - - -def NDAttributeAddPData(builder, pData): - builder.PrependUOffsetTRelativeSlot( - 4, flatbuffers.number_types.UOffsetTFlags.py_type(pData), 0 - ) - - -def NDAttributeStartPDataVector(builder, numElems): - return builder.StartVector(1, numElems, 1) - - -def NDAttributeEnd(builder): - return builder.EndObject() +def NDAttributeStart(builder): builder.StartObject(5) +def NDAttributeAddPName(builder, pName): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(pName), 0) +def NDAttributeAddPDescription(builder, pDescription): builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(pDescription), 0) +def NDAttributeAddPSource(builder, pSource): builder.PrependUOffsetTRelativeSlot(2, flatbuffers.number_types.UOffsetTFlags.py_type(pSource), 0) +def NDAttributeAddDataType(builder, dataType): builder.PrependInt8Slot(3, dataType, 0) +def NDAttributeAddPData(builder, pData): builder.PrependUOffsetTRelativeSlot(4, 
flatbuffers.number_types.UOffsetTFlags.py_type(pData), 0) +def NDAttributeStartPDataVector(builder, numElems): return builder.StartVector(1, numElems, 1) +def NDAttributeEnd(builder): return builder.EndObject() diff --git a/streaming_data_types/fbschemas/NDAr_NDArray_schema/epicsTimeStamp.py b/streaming_data_types/fbschemas/NDAr_NDArray_schema/epicsTimeStamp.py index 4afea11..1877faf 100644 --- a/streaming_data_types/fbschemas/NDAr_NDArray_schema/epicsTimeStamp.py +++ b/streaming_data_types/fbschemas/NDAr_NDArray_schema/epicsTimeStamp.py @@ -4,28 +4,17 @@ import flatbuffers - class epicsTimeStamp(object): - __slots__ = ["_tab"] + __slots__ = ['_tab'] # epicsTimeStamp def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) # epicsTimeStamp - def SecPastEpoch(self): - return self._tab.Get( - flatbuffers.number_types.Int32Flags, - self._tab.Pos + flatbuffers.number_types.UOffsetTFlags.py_type(0), - ) - + def SecPastEpoch(self): return self._tab.Get(flatbuffers.number_types.Int32Flags, self._tab.Pos + flatbuffers.number_types.UOffsetTFlags.py_type(0)) # epicsTimeStamp - def Nsec(self): - return self._tab.Get( - flatbuffers.number_types.Int32Flags, - self._tab.Pos + flatbuffers.number_types.UOffsetTFlags.py_type(4), - ) - + def Nsec(self): return self._tab.Get(flatbuffers.number_types.Int32Flags, self._tab.Pos + flatbuffers.number_types.UOffsetTFlags.py_type(4)) def CreateepicsTimeStamp(builder, secPastEpoch, nsec): builder.Prep(4, 8) From 678de696251cbc2bd2eea27799d8d0b080f6aa19 Mon Sep 17 00:00:00 2001 From: Jonas Nilsson Date: Fri, 5 Feb 2021 13:08:12 +0100 Subject: [PATCH 222/363] More revert black. 
--- streaming_data_types/action_response_answ.py | 4 +- streaming_data_types/area_detector_NDAr.py | 40 +++++++++++--------- 2 files changed, 23 insertions(+), 21 deletions(-) diff --git a/streaming_data_types/action_response_answ.py b/streaming_data_types/action_response_answ.py index 51a36a0..2b8a526 100644 --- a/streaming_data_types/action_response_answ.py +++ b/streaming_data_types/action_response_answ.py @@ -62,9 +62,7 @@ def serialise_answ( def deserialise_answ(buffer: Union[bytearray, bytes]): check_schema_identifier(buffer, FILE_IDENTIFIER) answ_message = ActionResponse.ActionResponse.GetRootAsActionResponse(buffer, 0) - max_time = datetime( - year=9000, month=1, day=1, hour=0, minute=0, second=0 - ).timestamp() + max_time = datetime(year=9000, month=1, day=1, hour=0, minute=0, second=0).timestamp() used_timestamp = answ_message.StopTime() / 1000 if used_timestamp > max_time: used_timestamp = max_time diff --git a/streaming_data_types/area_detector_NDAr.py b/streaming_data_types/area_detector_NDAr.py index 7ff2eca..4b5d7ba 100644 --- a/streaming_data_types/area_detector_NDAr.py +++ b/streaming_data_types/area_detector_NDAr.py @@ -9,7 +9,12 @@ FILE_IDENTIFIER = b"NDAr" -def serialise_ndar(id: str, dims: list, data_type: int, data: list) -> bytes: +def serialise_ndar( + id: str, + dims: list, + data_type: int, + data: list, +) -> bytes: builder = flatbuffers.Builder(1024) builder.ForceDefaults(True) @@ -40,7 +45,14 @@ def serialise_ndar(id: str, dims: list, data_type: int, data: list) -> bytes: return bytes(builder.Output()) -nd_Array = namedtuple("NDArray", ("id", "timestamp", "data")) +nd_Array = namedtuple( + "NDArray", + ( + "id", + "timestamp", + "data", + ), +) def get_data(fb_arr): @@ -48,21 +60,9 @@ def get_data(fb_arr): Converts the data array into the correct type. 
""" raw_data = fb_arr.PDataAsNumpy() - numpy_arr_type = [ - np.int8, - np.uint8, - np.int16, - np.uint16, - np.int32, - np.uint32, - np.int64, - np.uint64, - np.float32, - np.float64, - ] - return raw_data.view(numpy_arr_type[fb_arr.DataType()]).reshape( - fb_arr.DimsAsNumpy() - ) + numpy_arr_type = [np.int8, np.uint8, np.int16, np.uint16, np.int32, np.uint32, np.int64, np.uint64, + np.float32, np.float64] + return raw_data.view(numpy_arr_type[fb_arr.DataType()]).reshape(fb_arr.DimsAsNumpy()) def deserialise_ndar(buffer: Union[bytearray, bytes]) -> NDArray: @@ -73,4 +73,8 @@ def deserialise_ndar(buffer: Union[bytearray, bytes]) -> NDArray: timestamp = nd_array.TimeStamp() data = get_data(nd_array) - return nd_Array(id=id, timestamp=timestamp, data=data) + return nd_Array( + id=id, + timestamp=timestamp, + data=data, + ) From fbd7e6bc7cf3aed41e9bcaadfddfa7ed38f7af47 Mon Sep 17 00:00:00 2001 From: Jonas Nilsson Date: Fri, 5 Feb 2021 15:34:27 +0100 Subject: [PATCH 223/363] Version bump. --- streaming_data_types/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/streaming_data_types/__init__.py b/streaming_data_types/__init__.py index 56267c3..a1fac77 100644 --- a/streaming_data_types/__init__.py +++ b/streaming_data_types/__init__.py @@ -18,7 +18,7 @@ ) from streaming_data_types.area_detector_NDAr import deserialise_ndar, serialise_ndar -__version__ = "0.9.6" +__version__ = "0.10.0" SERIALISERS = { "ev42": serialise_ev42, From f960b7b033c6282b591fa5bd738128152f53f79d Mon Sep 17 00:00:00 2001 From: Jonas Nilsson Date: Thu, 11 Mar 2021 21:27:30 +0100 Subject: [PATCH 224/363] Updated to latest version of the senv schema. 
--- .../sample_environment_senv/Int16Array.py | 57 +++++++++++++++++++ .../sample_environment_senv/Int32Array.py | 57 +++++++++++++++++++ .../sample_environment_senv/Int64Array.py | 57 +++++++++++++++++++ .../sample_environment_senv/Int8Array.py | 57 +++++++++++++++++++ .../SampleEnvironmentData.py | 48 +++++++--------- .../sample_environment_senv/UInt16Array.py | 57 +++++++++++++++++++ .../sample_environment_senv/UInt32Array.py | 57 +++++++++++++++++++ .../sample_environment_senv/UInt64Array.py | 57 +++++++++++++++++++ .../sample_environment_senv/UInt8Array.py | 57 +++++++++++++++++++ .../sample_environment_senv/ValueUnion.py | 15 +++++ .../sample_environment_senv.py | 43 ++++++++++++-- tests/test_senv.py | 44 +++++++++----- 12 files changed, 559 insertions(+), 47 deletions(-) create mode 100644 streaming_data_types/fbschemas/sample_environment_senv/Int16Array.py create mode 100644 streaming_data_types/fbschemas/sample_environment_senv/Int32Array.py create mode 100644 streaming_data_types/fbschemas/sample_environment_senv/Int64Array.py create mode 100644 streaming_data_types/fbschemas/sample_environment_senv/Int8Array.py create mode 100644 streaming_data_types/fbschemas/sample_environment_senv/UInt16Array.py create mode 100644 streaming_data_types/fbschemas/sample_environment_senv/UInt32Array.py create mode 100644 streaming_data_types/fbschemas/sample_environment_senv/UInt64Array.py create mode 100644 streaming_data_types/fbschemas/sample_environment_senv/UInt8Array.py create mode 100644 streaming_data_types/fbschemas/sample_environment_senv/ValueUnion.py diff --git a/streaming_data_types/fbschemas/sample_environment_senv/Int16Array.py b/streaming_data_types/fbschemas/sample_environment_senv/Int16Array.py new file mode 100644 index 0000000..3b9fbdf --- /dev/null +++ b/streaming_data_types/fbschemas/sample_environment_senv/Int16Array.py @@ -0,0 +1,57 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers +from 
flatbuffers.compat import import_numpy +np = import_numpy() + +class Int16Array(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAsInt16Array(cls, buf, offset): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = Int16Array() + x.Init(buf, n + offset) + return x + + @classmethod + def Int16ArrayBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x73\x65\x6E\x76", size_prefixed=size_prefixed) + + # Int16Array + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # Int16Array + def Value(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get(flatbuffers.number_types.Int16Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 2)) + return 0 + + # Int16Array + def ValueAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int16Flags, o) + return 0 + + # Int16Array + def ValueLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # Int16Array + def ValueIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + return o == 0 + +def Int16ArrayStart(builder): builder.StartObject(1) +def Int16ArrayAddValue(builder, value): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0) +def Int16ArrayStartValueVector(builder, numElems): return builder.StartVector(2, numElems, 2) +def Int16ArrayEnd(builder): return builder.EndObject() diff --git a/streaming_data_types/fbschemas/sample_environment_senv/Int32Array.py b/streaming_data_types/fbschemas/sample_environment_senv/Int32Array.py new file mode 100644 index 0000000..35c747b --- /dev/null +++ 
b/streaming_data_types/fbschemas/sample_environment_senv/Int32Array.py @@ -0,0 +1,57 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers +from flatbuffers.compat import import_numpy +np = import_numpy() + +class Int32Array(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAsInt32Array(cls, buf, offset): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = Int32Array() + x.Init(buf, n + offset) + return x + + @classmethod + def Int32ArrayBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x73\x65\x6E\x76", size_prefixed=size_prefixed) + + # Int32Array + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # Int32Array + def Value(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get(flatbuffers.number_types.Int32Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4)) + return 0 + + # Int32Array + def ValueAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int32Flags, o) + return 0 + + # Int32Array + def ValueLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # Int32Array + def ValueIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + return o == 0 + +def Int32ArrayStart(builder): builder.StartObject(1) +def Int32ArrayAddValue(builder, value): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0) +def Int32ArrayStartValueVector(builder, numElems): return builder.StartVector(4, numElems, 4) +def Int32ArrayEnd(builder): return builder.EndObject() diff --git 
a/streaming_data_types/fbschemas/sample_environment_senv/Int64Array.py b/streaming_data_types/fbschemas/sample_environment_senv/Int64Array.py new file mode 100644 index 0000000..50dfd4c --- /dev/null +++ b/streaming_data_types/fbschemas/sample_environment_senv/Int64Array.py @@ -0,0 +1,57 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers +from flatbuffers.compat import import_numpy +np = import_numpy() + +class Int64Array(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAsInt64Array(cls, buf, offset): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = Int64Array() + x.Init(buf, n + offset) + return x + + @classmethod + def Int64ArrayBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x73\x65\x6E\x76", size_prefixed=size_prefixed) + + # Int64Array + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # Int64Array + def Value(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get(flatbuffers.number_types.Int64Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8)) + return 0 + + # Int64Array + def ValueAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int64Flags, o) + return 0 + + # Int64Array + def ValueLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # Int64Array + def ValueIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + return o == 0 + +def Int64ArrayStart(builder): builder.StartObject(1) +def Int64ArrayAddValue(builder, value): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0) 
+def Int64ArrayStartValueVector(builder, numElems): return builder.StartVector(8, numElems, 8) +def Int64ArrayEnd(builder): return builder.EndObject() diff --git a/streaming_data_types/fbschemas/sample_environment_senv/Int8Array.py b/streaming_data_types/fbschemas/sample_environment_senv/Int8Array.py new file mode 100644 index 0000000..783a606 --- /dev/null +++ b/streaming_data_types/fbschemas/sample_environment_senv/Int8Array.py @@ -0,0 +1,57 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers +from flatbuffers.compat import import_numpy +np = import_numpy() + +class Int8Array(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAsInt8Array(cls, buf, offset): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = Int8Array() + x.Init(buf, n + offset) + return x + + @classmethod + def Int8ArrayBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x73\x65\x6E\x76", size_prefixed=size_prefixed) + + # Int8Array + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # Int8Array + def Value(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get(flatbuffers.number_types.Int8Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1)) + return 0 + + # Int8Array + def ValueAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int8Flags, o) + return 0 + + # Int8Array + def ValueLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # Int8Array + def ValueIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + return o == 0 + +def Int8ArrayStart(builder): 
builder.StartObject(1) +def Int8ArrayAddValue(builder, value): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0) +def Int8ArrayStartValueVector(builder, numElems): return builder.StartVector(1, numElems, 1) +def Int8ArrayEnd(builder): return builder.EndObject() diff --git a/streaming_data_types/fbschemas/sample_environment_senv/SampleEnvironmentData.py b/streaming_data_types/fbschemas/sample_environment_senv/SampleEnvironmentData.py index 2c1dfba..7351b43 100644 --- a/streaming_data_types/fbschemas/sample_environment_senv/SampleEnvironmentData.py +++ b/streaming_data_types/fbschemas/sample_environment_senv/SampleEnvironmentData.py @@ -60,35 +60,25 @@ def TimestampLocation(self): return 0 # SampleEnvironmentData - def Values(self, j): + def ValuesType(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) if o != 0: - a = self._tab.Vector(o) - return self._tab.Get(flatbuffers.number_types.Uint16Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 2)) - return 0 - - # SampleEnvironmentData - def ValuesAsNumpy(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) - if o != 0: - return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint16Flags, o) + return self._tab.Get(flatbuffers.number_types.Uint8Flags, o + self._tab.Pos) return 0 # SampleEnvironmentData - def ValuesLength(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) + def Values(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16)) if o != 0: - return self._tab.VectorLen(o) - return 0 - - # SampleEnvironmentData - def ValuesIsNone(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) - return o == 0 + from flatbuffers.table import Table + obj = Table(bytearray(), 0) + self._tab.Union(obj, o) + return obj + return None # SampleEnvironmentData def Timestamps(self, j): - o = 
flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16)) + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(18)) if o != 0: a = self._tab.Vector(o) return self._tab.Get(flatbuffers.number_types.Uint64Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8)) @@ -96,39 +86,39 @@ def Timestamps(self, j): # SampleEnvironmentData def TimestampsAsNumpy(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16)) + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(18)) if o != 0: return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint64Flags, o) return 0 # SampleEnvironmentData def TimestampsLength(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16)) + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(18)) if o != 0: return self._tab.VectorLen(o) return 0 # SampleEnvironmentData def TimestampsIsNone(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16)) + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(18)) return o == 0 # SampleEnvironmentData def MessageCounter(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(18)) + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(20)) if o != 0: return self._tab.Get(flatbuffers.number_types.Uint64Flags, o + self._tab.Pos) return 0 -def SampleEnvironmentDataStart(builder): builder.StartObject(8) +def SampleEnvironmentDataStart(builder): builder.StartObject(9) def SampleEnvironmentDataAddName(builder, Name): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(Name), 0) def SampleEnvironmentDataAddChannel(builder, Channel): builder.PrependInt32Slot(1, Channel, 0) def SampleEnvironmentDataAddPacketTimestamp(builder, PacketTimestamp): builder.PrependUint64Slot(2, PacketTimestamp, 0) def SampleEnvironmentDataAddTimeDelta(builder, TimeDelta): builder.PrependFloat64Slot(3, TimeDelta, 
0.0) def SampleEnvironmentDataAddTimestampLocation(builder, TimestampLocation): builder.PrependInt8Slot(4, TimestampLocation, 0) -def SampleEnvironmentDataAddValues(builder, Values): builder.PrependUOffsetTRelativeSlot(5, flatbuffers.number_types.UOffsetTFlags.py_type(Values), 0) -def SampleEnvironmentDataStartValuesVector(builder, numElems): return builder.StartVector(2, numElems, 2) -def SampleEnvironmentDataAddTimestamps(builder, Timestamps): builder.PrependUOffsetTRelativeSlot(6, flatbuffers.number_types.UOffsetTFlags.py_type(Timestamps), 0) +def SampleEnvironmentDataAddValuesType(builder, ValuesType): builder.PrependUint8Slot(5, ValuesType, 0) +def SampleEnvironmentDataAddValues(builder, Values): builder.PrependUOffsetTRelativeSlot(6, flatbuffers.number_types.UOffsetTFlags.py_type(Values), 0) +def SampleEnvironmentDataAddTimestamps(builder, Timestamps): builder.PrependUOffsetTRelativeSlot(7, flatbuffers.number_types.UOffsetTFlags.py_type(Timestamps), 0) def SampleEnvironmentDataStartTimestampsVector(builder, numElems): return builder.StartVector(8, numElems, 8) -def SampleEnvironmentDataAddMessageCounter(builder, MessageCounter): builder.PrependUint64Slot(7, MessageCounter, 0) +def SampleEnvironmentDataAddMessageCounter(builder, MessageCounter): builder.PrependUint64Slot(8, MessageCounter, 0) def SampleEnvironmentDataEnd(builder): return builder.EndObject() diff --git a/streaming_data_types/fbschemas/sample_environment_senv/UInt16Array.py b/streaming_data_types/fbschemas/sample_environment_senv/UInt16Array.py new file mode 100644 index 0000000..2664579 --- /dev/null +++ b/streaming_data_types/fbschemas/sample_environment_senv/UInt16Array.py @@ -0,0 +1,57 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers +from flatbuffers.compat import import_numpy +np = import_numpy() + +class UInt16Array(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAsUInt16Array(cls, buf, offset): + n = 
flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = UInt16Array() + x.Init(buf, n + offset) + return x + + @classmethod + def UInt16ArrayBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x73\x65\x6E\x76", size_prefixed=size_prefixed) + + # UInt16Array + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # UInt16Array + def Value(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get(flatbuffers.number_types.Uint16Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 2)) + return 0 + + # UInt16Array + def ValueAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint16Flags, o) + return 0 + + # UInt16Array + def ValueLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # UInt16Array + def ValueIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + return o == 0 + +def UInt16ArrayStart(builder): builder.StartObject(1) +def UInt16ArrayAddValue(builder, value): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0) +def UInt16ArrayStartValueVector(builder, numElems): return builder.StartVector(2, numElems, 2) +def UInt16ArrayEnd(builder): return builder.EndObject() diff --git a/streaming_data_types/fbschemas/sample_environment_senv/UInt32Array.py b/streaming_data_types/fbschemas/sample_environment_senv/UInt32Array.py new file mode 100644 index 0000000..18116ae --- /dev/null +++ b/streaming_data_types/fbschemas/sample_environment_senv/UInt32Array.py @@ -0,0 +1,57 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers +from 
flatbuffers.compat import import_numpy +np = import_numpy() + +class UInt32Array(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAsUInt32Array(cls, buf, offset): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = UInt32Array() + x.Init(buf, n + offset) + return x + + @classmethod + def UInt32ArrayBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x73\x65\x6E\x76", size_prefixed=size_prefixed) + + # UInt32Array + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # UInt32Array + def Value(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get(flatbuffers.number_types.Uint32Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4)) + return 0 + + # UInt32Array + def ValueAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint32Flags, o) + return 0 + + # UInt32Array + def ValueLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # UInt32Array + def ValueIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + return o == 0 + +def UInt32ArrayStart(builder): builder.StartObject(1) +def UInt32ArrayAddValue(builder, value): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0) +def UInt32ArrayStartValueVector(builder, numElems): return builder.StartVector(4, numElems, 4) +def UInt32ArrayEnd(builder): return builder.EndObject() diff --git a/streaming_data_types/fbschemas/sample_environment_senv/UInt64Array.py b/streaming_data_types/fbschemas/sample_environment_senv/UInt64Array.py new file mode 100644 index 0000000..e49674a --- /dev/null +++ 
b/streaming_data_types/fbschemas/sample_environment_senv/UInt64Array.py @@ -0,0 +1,57 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers +from flatbuffers.compat import import_numpy +np = import_numpy() + +class UInt64Array(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAsUInt64Array(cls, buf, offset): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = UInt64Array() + x.Init(buf, n + offset) + return x + + @classmethod + def UInt64ArrayBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x73\x65\x6E\x76", size_prefixed=size_prefixed) + + # UInt64Array + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # UInt64Array + def Value(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get(flatbuffers.number_types.Uint64Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8)) + return 0 + + # UInt64Array + def ValueAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint64Flags, o) + return 0 + + # UInt64Array + def ValueLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # UInt64Array + def ValueIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + return o == 0 + +def UInt64ArrayStart(builder): builder.StartObject(1) +def UInt64ArrayAddValue(builder, value): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0) +def UInt64ArrayStartValueVector(builder, numElems): return builder.StartVector(8, numElems, 8) +def UInt64ArrayEnd(builder): return builder.EndObject() diff --git 
a/streaming_data_types/fbschemas/sample_environment_senv/UInt8Array.py b/streaming_data_types/fbschemas/sample_environment_senv/UInt8Array.py new file mode 100644 index 0000000..8631654 --- /dev/null +++ b/streaming_data_types/fbschemas/sample_environment_senv/UInt8Array.py @@ -0,0 +1,57 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers +from flatbuffers.compat import import_numpy +np = import_numpy() + +class UInt8Array(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAsUInt8Array(cls, buf, offset): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = UInt8Array() + x.Init(buf, n + offset) + return x + + @classmethod + def UInt8ArrayBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x73\x65\x6E\x76", size_prefixed=size_prefixed) + + # UInt8Array + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # UInt8Array + def Value(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get(flatbuffers.number_types.Uint8Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1)) + return 0 + + # UInt8Array + def ValueAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint8Flags, o) + return 0 + + # UInt8Array + def ValueLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # UInt8Array + def ValueIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + return o == 0 + +def UInt8ArrayStart(builder): builder.StartObject(1) +def UInt8ArrayAddValue(builder, value): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0) 
+def UInt8ArrayStartValueVector(builder, numElems): return builder.StartVector(1, numElems, 1) +def UInt8ArrayEnd(builder): return builder.EndObject() diff --git a/streaming_data_types/fbschemas/sample_environment_senv/ValueUnion.py b/streaming_data_types/fbschemas/sample_environment_senv/ValueUnion.py new file mode 100644 index 0000000..069b7bc --- /dev/null +++ b/streaming_data_types/fbschemas/sample_environment_senv/ValueUnion.py @@ -0,0 +1,15 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +class ValueUnion(object): + NONE = 0 + Int8Array = 1 + UInt8Array = 2 + Int16Array = 3 + UInt16Array = 4 + Int32Array = 5 + UInt32Array = 6 + Int64Array = 7 + UInt64Array = 8 + diff --git a/streaming_data_types/sample_environment_senv.py b/streaming_data_types/sample_environment_senv.py index 8776aee..8e5953a 100644 --- a/streaming_data_types/sample_environment_senv.py +++ b/streaming_data_types/sample_environment_senv.py @@ -8,10 +8,12 @@ SampleEnvironmentDataAddTimeDelta, SampleEnvironmentDataAddTimestampLocation, SampleEnvironmentDataAddValues, + SampleEnvironmentDataAddValuesType, SampleEnvironmentDataAddTimestamps, SampleEnvironmentDataAddPacketTimestamp, ) from streaming_data_types.fbschemas.sample_environment_senv.Location import Location +from streaming_data_types.fbschemas.sample_environment_senv.ValueUnion import ValueUnion import flatbuffers import numpy as np from typing import Optional, Union, List, NamedTuple @@ -35,9 +37,25 @@ def serialise_senv( if value_timestamps is not None: used_timestamps = np.atleast_1d(np.array(value_timestamps)).astype(np.uint64) + timestamps_offset = builder.CreateNumpyVector(used_timestamps) - temp_values = np.atleast_1d(np.array(values)).astype(np.uint16) - value_offset = builder.CreateNumpyVector(temp_values) + numpy_type_map = {np.dtype("int8"): ValueUnion.Int8Array, + np.dtype("uint8"): ValueUnion.UInt8Array, + np.dtype("int16"): ValueUnion.Int16Array, + np.dtype("uint16"): 
ValueUnion.UInt16Array, + np.dtype("int32"): ValueUnion.Int32Array, + np.dtype("uint32"): ValueUnion.UInt32Array, + np.dtype("int64"): ValueUnion.Int64Array, + np.dtype("uint64"): ValueUnion.UInt64Array} + + temp_values = np.atleast_1d(np.array(values)) + + value_array_offset = builder.CreateNumpyVector(temp_values) + + # Some flatbuffer fu in order to avoid >200 lines of code + builder.StartObject(1) + builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(value_array_offset), 0) + value_offset = builder.EndObject() name_offset = builder.CreateString(name) @@ -49,8 +67,9 @@ def serialise_senv( SampleEnvironmentDataAddChannel(builder, channel) SampleEnvironmentDataAddPacketTimestamp(builder, int(timestamp.timestamp() * 1e9)) SampleEnvironmentDataAddValues(builder, value_offset) + SampleEnvironmentDataAddValuesType(builder, numpy_type_map[temp_values.dtype]) if value_timestamps is not None: - SampleEnvironmentDataAddTimestamps(builder, used_timestamps) + SampleEnvironmentDataAddTimestamps(builder, timestamps_offset) SE_Message = SampleEnvironmentDataEnd(builder) @@ -89,6 +108,22 @@ def deserialise_senv(buffer: Union[bytearray, bytes]) -> Response: if not SE_data.TimestampsIsNone(): value_timestamps = SE_data.TimestampsAsNumpy() + from flatbuffers.number_types import Int8Flags, Uint8Flags, Int16Flags, Uint16Flags, Int32Flags, Uint32Flags, Int64Flags, Uint64Flags + + flag_map = {ValueUnion.Int8Array: Int8Flags, + ValueUnion.UInt8Array: Uint8Flags, + ValueUnion.Int16Array: Int16Flags, + ValueUnion.UInt16Array: Uint16Flags, + ValueUnion.Int32Array: Int32Flags, + ValueUnion.UInt32Array: Uint32Flags, + ValueUnion.Int64Array: Int64Flags, + ValueUnion.UInt64Array: Uint64Flags} + + # Some flatbuffers fu in order to avoid >200 lines of code + value_offset = SE_data.Values() + value_type = SE_data.ValuesType() + values = value_offset.GetVectorAsNumpy(flag_map[value_type], 4) + return Response( name=SE_data.Name().decode(), 
channel=SE_data.Channel(), @@ -96,6 +131,6 @@ def deserialise_senv(buffer: Union[bytearray, bytes]) -> Response: sample_ts_delta=SE_data.TimeDelta(), ts_location=SE_data.TimestampLocation(), message_counter=SE_data.MessageCounter(), - values=SE_data.ValuesAsNumpy(), + values=values, value_ts=value_timestamps, ) diff --git a/tests/test_senv.py b/tests/test_senv.py index cca593c..41fb857 100644 --- a/tests/test_senv.py +++ b/tests/test_senv.py @@ -3,32 +3,48 @@ from streaming_data_types import SERIALISERS, DESERIALISERS from datetime import datetime from streaming_data_types.fbschemas.sample_environment_senv.Location import Location +import pytest -class TestSerialisationSenv: - original_entry = { +entry_1 = { "name": "some_name", "timestamp": datetime.now(), "channel": 42, "message_counter": 123456, "sample_ts_delta": 0.005, - "values": np.arange(100), - "value_timestamps": None, + "values": np.arange(100, dtype=np.uint16), + "value_timestamps": np.arange(50) + 1111, "ts_location": Location.End } - def test_serialises_and_deserialises_senv(self): - buf = serialise_senv(**self.original_entry) +entry_2 = { + "name": "some_name_other_name", + "timestamp": datetime.now(), + "channel": 11, + "message_counter": 654321, + "sample_ts_delta": 1.666, + "values": np.arange(1000, dtype=np.int64), + "value_timestamps": None, + "ts_location": Location.Middle +} + + +class TestSerialisationSenv: + @pytest.mark.parametrize("input_entry", [entry_1, entry_2]) + def test_serialises_and_deserialises_senv(self, input_entry): + original_entry = input_entry + buf = serialise_senv(**original_entry) deserialised_tuple = deserialise_senv(buf) - assert self.original_entry["name"] == deserialised_tuple.name - assert self.original_entry["timestamp"] == deserialised_tuple.timestamp - assert self.original_entry["channel"] == deserialised_tuple.channel - assert self.original_entry["message_counter"] == deserialised_tuple.message_counter - assert self.original_entry["sample_ts_delta"] == 
deserialised_tuple.sample_ts_delta - assert np.array_equal(self.original_entry["values"], deserialised_tuple.values) - assert self.original_entry["value_timestamps"] == deserialised_tuple.value_ts - assert self.original_entry["ts_location"] == deserialised_tuple.ts_location + assert original_entry["name"] == deserialised_tuple.name + assert original_entry["timestamp"] == deserialised_tuple.timestamp + assert original_entry["channel"] == deserialised_tuple.channel + assert original_entry["message_counter"] == deserialised_tuple.message_counter + assert original_entry["sample_ts_delta"] == deserialised_tuple.sample_ts_delta + assert np.array_equal(original_entry["values"], deserialised_tuple.values) + assert np.array_equal(original_entry["value_timestamps"], deserialised_tuple.value_ts) + assert original_entry["values"].dtype == deserialised_tuple.values.dtype + assert original_entry["ts_location"] == deserialised_tuple.ts_location def test_schema_type_is_in_global_serialisers_list(self): assert "senv" in SERIALISERS From 44e9b1bd5ea788067d44459e47d3644d72c9c131 Mon Sep 17 00:00:00 2001 From: Jonas Nilsson Date: Thu, 11 Mar 2021 21:35:24 +0100 Subject: [PATCH 225/363] Formatting. 
--- .../sample_environment_senv.py | 51 ++++++++++++------- tests/test_senv.py | 18 +++---- 2 files changed, 42 insertions(+), 27 deletions(-) diff --git a/streaming_data_types/sample_environment_senv.py b/streaming_data_types/sample_environment_senv.py index 8e5953a..3422e6d 100644 --- a/streaming_data_types/sample_environment_senv.py +++ b/streaming_data_types/sample_environment_senv.py @@ -39,14 +39,16 @@ def serialise_senv( used_timestamps = np.atleast_1d(np.array(value_timestamps)).astype(np.uint64) timestamps_offset = builder.CreateNumpyVector(used_timestamps) - numpy_type_map = {np.dtype("int8"): ValueUnion.Int8Array, - np.dtype("uint8"): ValueUnion.UInt8Array, - np.dtype("int16"): ValueUnion.Int16Array, - np.dtype("uint16"): ValueUnion.UInt16Array, - np.dtype("int32"): ValueUnion.Int32Array, - np.dtype("uint32"): ValueUnion.UInt32Array, - np.dtype("int64"): ValueUnion.Int64Array, - np.dtype("uint64"): ValueUnion.UInt64Array} + numpy_type_map = { + np.dtype("int8"): ValueUnion.Int8Array, + np.dtype("uint8"): ValueUnion.UInt8Array, + np.dtype("int16"): ValueUnion.Int16Array, + np.dtype("uint16"): ValueUnion.UInt16Array, + np.dtype("int32"): ValueUnion.Int32Array, + np.dtype("uint32"): ValueUnion.UInt32Array, + np.dtype("int64"): ValueUnion.Int64Array, + np.dtype("uint64"): ValueUnion.UInt64Array, + } temp_values = np.atleast_1d(np.array(values)) @@ -54,7 +56,9 @@ def serialise_senv( # Some flatbuffer fu in order to avoid >200 lines of code builder.StartObject(1) - builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(value_array_offset), 0) + builder.PrependUOffsetTRelativeSlot( + 0, flatbuffers.number_types.UOffsetTFlags.py_type(value_array_offset), 0 + ) value_offset = builder.EndObject() name_offset = builder.CreateString(name) @@ -108,16 +112,27 @@ def deserialise_senv(buffer: Union[bytearray, bytes]) -> Response: if not SE_data.TimestampsIsNone(): value_timestamps = SE_data.TimestampsAsNumpy() - from 
flatbuffers.number_types import Int8Flags, Uint8Flags, Int16Flags, Uint16Flags, Int32Flags, Uint32Flags, Int64Flags, Uint64Flags + from flatbuffers.number_types import ( + Int8Flags, + Uint8Flags, + Int16Flags, + Uint16Flags, + Int32Flags, + Uint32Flags, + Int64Flags, + Uint64Flags, + ) - flag_map = {ValueUnion.Int8Array: Int8Flags, - ValueUnion.UInt8Array: Uint8Flags, - ValueUnion.Int16Array: Int16Flags, - ValueUnion.UInt16Array: Uint16Flags, - ValueUnion.Int32Array: Int32Flags, - ValueUnion.UInt32Array: Uint32Flags, - ValueUnion.Int64Array: Int64Flags, - ValueUnion.UInt64Array: Uint64Flags} + flag_map = { + ValueUnion.Int8Array: Int8Flags, + ValueUnion.UInt8Array: Uint8Flags, + ValueUnion.Int16Array: Int16Flags, + ValueUnion.UInt16Array: Uint16Flags, + ValueUnion.Int32Array: Int32Flags, + ValueUnion.UInt32Array: Uint32Flags, + ValueUnion.Int64Array: Int64Flags, + ValueUnion.UInt64Array: Uint64Flags, + } # Some flatbuffers fu in order to avoid >200 lines of code value_offset = SE_data.Values() diff --git a/tests/test_senv.py b/tests/test_senv.py index 41fb857..43097f3 100644 --- a/tests/test_senv.py +++ b/tests/test_senv.py @@ -7,15 +7,15 @@ entry_1 = { - "name": "some_name", - "timestamp": datetime.now(), - "channel": 42, - "message_counter": 123456, - "sample_ts_delta": 0.005, - "values": np.arange(100, dtype=np.uint16), - "value_timestamps": np.arange(50) + 1111, - "ts_location": Location.End - } + "name": "some_name", + "timestamp": datetime.now(), + "channel": 42, + "message_counter": 123456, + "sample_ts_delta": 0.005, + "values": np.arange(100, dtype=np.uint16), + "value_timestamps": np.arange(50) + 1111, + "ts_location": Location.End +} entry_2 = { "name": "some_name_other_name", From fdddc65a610753d3aa439db39f8ea6dff06d35cd Mon Sep 17 00:00:00 2001 From: Jonas Nilsson Date: Wed, 31 Mar 2021 01:27:59 +0200 Subject: [PATCH 226/363] Add tz to datetime. 
--- streaming_data_types/action_response_answ.py | 8 +++++--- streaming_data_types/area_detector_ADAr.py | 4 ++-- .../sample_environment_senv.py | 4 ++-- streaming_data_types/timestamps_tdct.py | 8 +++++--- tests/test_ADAr.py | 14 ++++++++++---- tests/test_answ.py | 18 +++++++++++++++--- tests/test_senv.py | 19 ++++++++++++------- 7 files changed, 51 insertions(+), 24 deletions(-) diff --git a/streaming_data_types/action_response_answ.py b/streaming_data_types/action_response_answ.py index 2b8a526..64bcb15 100644 --- a/streaming_data_types/action_response_answ.py +++ b/streaming_data_types/action_response_answ.py @@ -7,7 +7,7 @@ from streaming_data_types.utils import check_schema_identifier from typing import Union from typing import NamedTuple -from datetime import datetime +from datetime import datetime, timezone FILE_IDENTIFIER = b"answ" @@ -62,7 +62,9 @@ def serialise_answ( def deserialise_answ(buffer: Union[bytearray, bytes]): check_schema_identifier(buffer, FILE_IDENTIFIER) answ_message = ActionResponse.ActionResponse.GetRootAsActionResponse(buffer, 0) - max_time = datetime(year=9000, month=1, day=1, hour=0, minute=0, second=0).timestamp() + max_time = datetime( + year=9000, month=1, day=1, hour=0, minute=0, second=0 + ).timestamp() used_timestamp = answ_message.StopTime() / 1000 if used_timestamp > max_time: used_timestamp = max_time @@ -74,5 +76,5 @@ def deserialise_answ(buffer: Union[bytearray, bytes]): outcome=answ_message.Outcome(), message=answ_message.Message().decode("utf-8"), status_code=answ_message.StatusCode(), - stop_time=datetime.fromtimestamp(used_timestamp), + stop_time=datetime.fromtimestamp(used_timestamp, tz=timezone.utc), ) diff --git a/streaming_data_types/area_detector_ADAr.py b/streaming_data_types/area_detector_ADAr.py index de0bc86..6fdbb09 100644 --- a/streaming_data_types/area_detector_ADAr.py +++ b/streaming_data_types/area_detector_ADAr.py @@ -5,7 +5,7 @@ import streaming_data_types.fbschemas.ADAr_ADArray_schema.Attribute as 
ADArAttribute from streaming_data_types.utils import check_schema_identifier import numpy as np -from datetime import datetime +from datetime import datetime, timezone from struct import pack FILE_IDENTIFIER = b"ADAr" @@ -197,7 +197,7 @@ def deserialise_ADAr(buffer: Union[bytearray, bytes]) -> ADArray: return ADArray_t( source_name=ad_array.SourceName().decode(), unique_id=unique_id, - timestamp=datetime.fromtimestamp(used_timestamp), + timestamp=datetime.fromtimestamp(used_timestamp, tz=timezone.utc), data=data, attributes=attributes_list, ) diff --git a/streaming_data_types/sample_environment_senv.py b/streaming_data_types/sample_environment_senv.py index 3422e6d..1084756 100644 --- a/streaming_data_types/sample_environment_senv.py +++ b/streaming_data_types/sample_environment_senv.py @@ -18,7 +18,7 @@ import numpy as np from typing import Optional, Union, List, NamedTuple from streaming_data_types.utils import check_schema_identifier -from datetime import datetime +from datetime import datetime, timezone FILE_IDENTIFIER = b"senv" @@ -142,7 +142,7 @@ def deserialise_senv(buffer: Union[bytearray, bytes]) -> Response: return Response( name=SE_data.Name().decode(), channel=SE_data.Channel(), - timestamp=datetime.fromtimestamp(used_timestamp), + timestamp=datetime.fromtimestamp(used_timestamp, tz=timezone.utc), sample_ts_delta=SE_data.TimeDelta(), ts_location=SE_data.TimestampLocation(), message_counter=SE_data.MessageCounter(), diff --git a/streaming_data_types/timestamps_tdct.py b/streaming_data_types/timestamps_tdct.py index 6360542..d16d036 100644 --- a/streaming_data_types/timestamps_tdct.py +++ b/streaming_data_types/timestamps_tdct.py @@ -8,8 +8,7 @@ ) import flatbuffers import numpy as np -from collections import namedtuple -from typing import Optional, Union, List +from typing import Optional, Union, List, NamedTuple from streaming_data_types.utils import check_schema_identifier FILE_IDENTIFIER = b"tdct" @@ -40,7 +39,10 @@ def serialise_tdct( return 
bytes(builder.Output()) -Timestamps = namedtuple("Timestamps", ("name", "timestamps", "sequence_counter")) +Timestamps = NamedTuple( + "Timestamps", + (("name", str), ("timestamps", np.ndarray), ("sequence_counter", int)), +) def deserialise_tdct(buffer: Union[bytearray, bytes]) -> Timestamps: diff --git a/tests/test_ADAr.py b/tests/test_ADAr.py index 72fb41b..25e5ddc 100644 --- a/tests/test_ADAr.py +++ b/tests/test_ADAr.py @@ -6,7 +6,7 @@ ) from streaming_data_types import SERIALISERS, DESERIALISERS import numpy as np -from datetime import datetime +from datetime import datetime, timezone from streaming_data_types.exceptions import WrongSchemaException @@ -19,7 +19,7 @@ def test_serialises_and_deserialises_ADAr_int_array(self): "source_name": "some source name", "unique_id": 754, "data": np.array([[1, 2, 3], [3, 4, 5]], dtype=np.uint64), - "timestamp": datetime.now(), + "timestamp": datetime.now(tz=timezone.utc), "attributes": [ Attribute("name1", "desc1", "src1", "value"), Attribute("name2", "desc2", "src2", 11), @@ -49,7 +49,13 @@ def test_serialises_and_deserialises_ADAr_float_array(self): "unique_id": 789679, "data": np.array([[1.1, 2.2, 3.3], [4.4, 5.5, 6.6]], dtype=np.float32), "timestamp": datetime( - year=1992, month=8, day=11, hour=3, minute=34, second=57 + year=1992, + month=8, + day=11, + hour=3, + minute=34, + second=57, + tzinfo=timezone.utc, ), } @@ -70,7 +76,7 @@ def test_serialises_and_deserialises_ADAr_string(self): "source_name": "some source name", "unique_id": 754, "data": "hi, this is a string", - "timestamp": datetime.now(), + "timestamp": datetime.now(tz=timezone.utc), } buf = serialise_ADAr(**original_entry) diff --git a/tests/test_answ.py b/tests/test_answ.py index 3c19695..7738058 100644 --- a/tests/test_answ.py +++ b/tests/test_answ.py @@ -1,4 +1,4 @@ -from datetime import datetime +from datetime import datetime, timezone import pytest from streaming_data_types.action_response_answ import ( serialise_answ, @@ -24,7 +24,13 @@ def 
test_serialise_and_deserialise_answ_message(self): "message": "some random error message", "status_code": 123456789, "stop_time": datetime( - year=2021, month=2, day=12, hour=2, minute=12, second=12 + year=2021, + month=2, + day=12, + hour=2, + minute=12, + second=12, + tzinfo=timezone.utc, ), } @@ -50,7 +56,13 @@ def test_if_buffer_has_wrong_id_then_throws(self): "message": "some random error message", "status_code": 123456789, "stop_time": datetime( - year=2021, month=2, day=12, hour=2, minute=12, second=12 + year=2021, + month=2, + day=12, + hour=2, + minute=12, + second=12, + tzinfo=timezone.utc, ), } diff --git a/tests/test_senv.py b/tests/test_senv.py index 43097f3..e90c1d6 100644 --- a/tests/test_senv.py +++ b/tests/test_senv.py @@ -1,31 +1,34 @@ import numpy as np -from streaming_data_types.sample_environment_senv import serialise_senv, deserialise_senv +from streaming_data_types.sample_environment_senv import ( + serialise_senv, + deserialise_senv, +) from streaming_data_types import SERIALISERS, DESERIALISERS -from datetime import datetime +from datetime import datetime, timezone from streaming_data_types.fbschemas.sample_environment_senv.Location import Location import pytest entry_1 = { "name": "some_name", - "timestamp": datetime.now(), + "timestamp": datetime.now(tz=timezone.utc), "channel": 42, "message_counter": 123456, "sample_ts_delta": 0.005, "values": np.arange(100, dtype=np.uint16), "value_timestamps": np.arange(50) + 1111, - "ts_location": Location.End + "ts_location": Location.End, } entry_2 = { "name": "some_name_other_name", - "timestamp": datetime.now(), + "timestamp": datetime.now(tz=timezone.utc), "channel": 11, "message_counter": 654321, "sample_ts_delta": 1.666, "values": np.arange(1000, dtype=np.int64), "value_timestamps": None, - "ts_location": Location.Middle + "ts_location": Location.Middle, } @@ -42,7 +45,9 @@ def test_serialises_and_deserialises_senv(self, input_entry): assert original_entry["message_counter"] == 
deserialised_tuple.message_counter assert original_entry["sample_ts_delta"] == deserialised_tuple.sample_ts_delta assert np.array_equal(original_entry["values"], deserialised_tuple.values) - assert np.array_equal(original_entry["value_timestamps"], deserialised_tuple.value_ts) + assert np.array_equal( + original_entry["value_timestamps"], deserialised_tuple.value_ts + ) assert original_entry["values"].dtype == deserialised_tuple.values.dtype assert original_entry["ts_location"] == deserialised_tuple.ts_location From 955703cbda64f794a02e805f2d60c9735f17dccd Mon Sep 17 00:00:00 2001 From: Jonas Nilsson Date: Wed, 31 Mar 2021 01:35:51 +0200 Subject: [PATCH 227/363] Version bump. --- streaming_data_types/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/streaming_data_types/__init__.py b/streaming_data_types/__init__.py index de2688f..0e0538e 100644 --- a/streaming_data_types/__init__.py +++ b/streaming_data_types/__init__.py @@ -20,7 +20,7 @@ from streaming_data_types.sample_environment_senv import deserialise_senv, serialise_senv from streaming_data_types.area_detector_ADAr import deserialise_ADAr, serialise_ADAr -__version__ = "0.10.0" +__version__ = "0.11.0" SERIALISERS = { "ev42": serialise_ev42, From 6e89f906a0adc710c25ea2b82bbb46b9901ebaf4 Mon Sep 17 00:00:00 2001 From: Matthew D Jones Date: Fri, 9 Apr 2021 16:03:36 +0100 Subject: [PATCH 228/363] Move version number out of __init__ --- setup.py | 4 +--- streaming_data_types/__init__.py | 8 ++++++-- streaming_data_types/_version.py | 4 ++++ 3 files changed, 11 insertions(+), 5 deletions(-) create mode 100644 streaming_data_types/_version.py diff --git a/setup.py b/setup.py index a16c947..442c099 100644 --- a/setup.py +++ b/setup.py @@ -1,5 +1,6 @@ import os from setuptools import setup, find_packages +from streaming_data_types._version import version DESCRIPTION = "Python utilities for handling ESS streamed data" @@ -13,9 +14,6 @@ print("COULD NOT GET LONG DESC: {}".format(error)) 
LONG_DESCRIPTION = DESCRIPTION -# Import version number -from streaming_data_types.__init__ import __version__ as version - setup( name="ess_streaming_data_types", version=version, diff --git a/streaming_data_types/__init__.py b/streaming_data_types/__init__.py index 0e0538e..2ff37c1 100644 --- a/streaming_data_types/__init__.py +++ b/streaming_data_types/__init__.py @@ -17,10 +17,14 @@ serialise_rf5k, ) from streaming_data_types.area_detector_NDAr import deserialise_ndar, serialise_ndar -from streaming_data_types.sample_environment_senv import deserialise_senv, serialise_senv +from streaming_data_types.sample_environment_senv import ( + deserialise_senv, + serialise_senv, +) from streaming_data_types.area_detector_ADAr import deserialise_ADAr, serialise_ADAr +from streaming_data_types._version import version -__version__ = "0.11.0" +__version__ = version SERIALISERS = { "ev42": serialise_ev42, diff --git a/streaming_data_types/_version.py b/streaming_data_types/_version.py new file mode 100644 index 0000000..ce2fecd --- /dev/null +++ b/streaming_data_types/_version.py @@ -0,0 +1,4 @@ +# Version is not directly defined in __init__ because that causes all +# run time dependencies to become build-time dependencies when it is +# imported in setup.py +version = "0.11.0" From 5ef143ccaca89733c7104e5c0264f8760ef2c4f9 Mon Sep 17 00:00:00 2001 From: Matthew D Jones Date: Sat, 10 Apr 2021 10:08:21 +0100 Subject: [PATCH 229/363] Add conda package recipe --- conda/meta.yaml | 22 ++++++++++++++++++++++ 1 file changed, 22 insertions(+) create mode 100644 conda/meta.yaml diff --git a/conda/meta.yaml b/conda/meta.yaml new file mode 100644 index 0000000..70697ae --- /dev/null +++ b/conda/meta.yaml @@ -0,0 +1,22 @@ +package: + name: ess-streaming-data-types + version: {{ GIT_DESCRIBE_TAG }} + +source: + path: .. + +build: + number: {{ environ.get('GIT_DESCRIBE_NUMBER', 0) }} + noarch: python + script: python -m pip install --ignore-installed -vv . 
+ +requirements: + run: + - python-flatbuffers >=1.12 + - numpy + - python >=3.6 + +about: + home: https://github.com/ess-dmsc/python-streaming-data-types + summary: Python utilities for handling ESS streamed data + license: BSD-2-Clause From 6a8c8a743f9d00a1d2ec32a97523717b29260669 Mon Sep 17 00:00:00 2001 From: Matthew D Jones Date: Sat, 10 Apr 2021 10:10:55 +0100 Subject: [PATCH 230/363] Document conda package build and upload --- README_DEV.md | 39 +++++++++++++++++++++++++++++++++++++++ 1 file changed, 39 insertions(+) diff --git a/README_DEV.md b/README_DEV.md index 5ad3fdc..c9b4c73 100644 --- a/README_DEV.md +++ b/README_DEV.md @@ -107,3 +107,42 @@ After testing installing from test.pypi.org works, push to PyPI: twine upload dist/* ``` Finally, create a tag on the GitHub repository with the appropriate name, e.g. `v0.7.0`. + +### Build and upload conda package + +The conda package is used by ESS DMSC DRAM group for the Scipp library. + +#### Steps + +You must first have a conda installation, for example `conda` via pip, or [miniconda](https://docs.conda.io/en/latest/miniconda.html). + +From the directory of the ess-streaming-data-types repository, build the package with +```sh +conda create -c conda-forge -n build_pysdt_package python=3.7 +conda activate build_pysdt_package +conda install -c conda-forge conda-build +conda build -c conda-forge ./conda +``` + +If you already have an environment to build in then instead of creating a new one you can run +```sh +conda activate build_pysdt_package +conda env update +conda build -c conda-forge ./conda +``` + +To upload the package, first login +```sh +anaconda login +``` +use the ESS-DMSC-ECDC account or personal account linked to ESS-DMSC organisation. 
+ +Find the path for the built package using +```sh +conda build ./conda --output +``` + +Then upload +```sh +anaconda upload --user ESS-DMSC /path/to/package +``` From d48e39bd5c9bcde9033adc7c571a838e536d19c2 Mon Sep 17 00:00:00 2001 From: Matthew D Jones Date: Sat, 10 Apr 2021 10:15:36 +0100 Subject: [PATCH 231/363] Document create tag before conda package --- README_DEV.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README_DEV.md b/README_DEV.md index c9b4c73..e6eb8d2 100644 --- a/README_DEV.md +++ b/README_DEV.md @@ -111,6 +111,7 @@ Finally, create a tag on the GitHub repository with the appropriate name, e.g. ` ### Build and upload conda package The conda package is used by ESS DMSC DRAM group for the Scipp library. +Please create the release version tag on github before creating the conda package as it gets the version number from the tag. #### Steps From f09ebef0af324246e80c6d8aa5cddbddebb7789f Mon Sep 17 00:00:00 2001 From: Matthew D Jones Date: Fri, 30 Apr 2021 17:37:41 +0100 Subject: [PATCH 232/363] Fix install location and ensure that import is tested during conda build --- conda/meta.yaml | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/conda/meta.yaml b/conda/meta.yaml index 70697ae..b1180fb 100644 --- a/conda/meta.yaml +++ b/conda/meta.yaml @@ -8,7 +8,7 @@ source: build: number: {{ environ.get('GIT_DESCRIBE_NUMBER', 0) }} noarch: python - script: python -m pip install --ignore-installed -vv . + script: python -m pip install --target={{ environ.get('PREFIX') }}/lib/python/site-packages --ignore-installed --no-deps -vv . 
requirements: run: @@ -16,6 +16,10 @@ requirements: - numpy - python >=3.6 +test: + imports: + - streaming_data_types + about: home: https://github.com/ess-dmsc/python-streaming-data-types summary: Python utilities for handling ESS streamed data From ce0c1bbe7fbd5d939e3801a46453bf865f05e54f Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Tue, 11 May 2021 11:58:48 +0000 Subject: [PATCH 233/363] Update requirements.txt Pin until we can move to 2.0.0 --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 9b4366a..2c75ce0 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,2 +1,2 @@ -flatbuffers>=1.12 +flatbuffers==1.12 numpy From a011d911e3671c316c7ffe65ee39adf306911410 Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Tue, 11 May 2021 12:14:39 +0000 Subject: [PATCH 234/363] Update _version.py --- streaming_data_types/_version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/streaming_data_types/_version.py b/streaming_data_types/_version.py index ce2fecd..e0cabf3 100644 --- a/streaming_data_types/_version.py +++ b/streaming_data_types/_version.py @@ -1,4 +1,4 @@ # Version is not directly defined in __init__ because that causes all # run time dependencies to become build-time dependencies when it is # imported in setup.py -version = "0.11.0" +version = "0.12.0" From 6581166c6fb2b73d4cf1b2a27f0169c03deeb7cf Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Tue, 11 May 2021 12:15:03 +0000 Subject: [PATCH 235/363] Update setup.py --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 442c099..71d3d67 100644 --- a/setup.py +++ b/setup.py @@ -25,6 +25,6 @@ license="BSD 2-Clause License", packages=find_packages(exclude=["tests", "tests.*"]), python_requires=">=3.6.0", - install_requires=["flatbuffers>=1.12", "numpy"], + install_requires=["flatbuffers==1.12", "numpy"], extras_require={"dev": ["flake8", "pre-commit", "pytest", 
"tox"]}, ) From 1aba466ff5feb5a7998d0055bcbe9247c35f560f Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Tue, 11 May 2021 12:15:43 +0000 Subject: [PATCH 236/363] Update meta.yaml --- conda/meta.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/conda/meta.yaml b/conda/meta.yaml index b1180fb..e320107 100644 --- a/conda/meta.yaml +++ b/conda/meta.yaml @@ -12,7 +12,7 @@ build: requirements: run: - - python-flatbuffers >=1.12 + - python-flatbuffers ==1.12 - numpy - python >=3.6 From 3f3ea998224b347605eece78b6caa630e60c10df Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Tue, 11 May 2021 12:19:26 +0000 Subject: [PATCH 237/363] Update README_DEV.md --- README_DEV.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README_DEV.md b/README_DEV.md index e6eb8d2..8f09212 100644 --- a/README_DEV.md +++ b/README_DEV.md @@ -99,7 +99,7 @@ Test the module using the existing test-suite (from project root): ``` rm -rf streaming_data_types # Rename the local source directory pytest # The tests will be run against the pip installed module -git reset --hard origin/master # Put everything back to before +git reset --hard origin/main # Put everything back to before ``` After testing installing from test.pypi.org works, push to PyPI: From 7cbfc8248c7c34a893e553e25565061b9c76650e Mon Sep 17 00:00:00 2001 From: Matthew D Jones Date: Tue, 11 May 2021 14:21:13 +0100 Subject: [PATCH 238/363] Update README_DEV.md --- README_DEV.md | 12 +++++------- 1 file changed, 5 insertions(+), 7 deletions(-) diff --git a/README_DEV.md b/README_DEV.md index 8f09212..90b33a8 100644 --- a/README_DEV.md +++ b/README_DEV.md @@ -117,18 +117,16 @@ Please create the release version tag on github before creating the conda packag You must first have a conda installation, for example `conda` via pip, or [miniconda](https://docs.conda.io/en/latest/miniconda.html). 
-From the directory of the ess-streaming-data-types repository, build the package with +If you already have a conda environment from previously building the package then remove it first with ```sh -conda create -c conda-forge -n build_pysdt_package python=3.7 -conda activate build_pysdt_package -conda install -c conda-forge conda-build -conda build -c conda-forge ./conda +conda env remove -n build_pysdt_package ``` -If you already have an environment to build in then instead of creating a new one you can run +From the directory of the ess-streaming-data-types repository, build the package with ```sh +conda create -c conda-forge -n build_pysdt_package python=3.7 conda activate build_pysdt_package -conda env update +conda install -c conda-forge conda-build anaconda-client conda build -c conda-forge ./conda ``` From b48c71c743800705dd3d594da7686e7d567545e3 Mon Sep 17 00:00:00 2001 From: Matthew D Jones Date: Tue, 18 May 2021 09:31:34 +0100 Subject: [PATCH 239/363] Simplify conda package build instructions --- README_DEV.md | 7 ------- 1 file changed, 7 deletions(-) diff --git a/README_DEV.md b/README_DEV.md index 90b33a8..69966d5 100644 --- a/README_DEV.md +++ b/README_DEV.md @@ -117,15 +117,8 @@ Please create the release version tag on github before creating the conda packag You must first have a conda installation, for example `conda` via pip, or [miniconda](https://docs.conda.io/en/latest/miniconda.html). 
-If you already have a conda environment from previously building the package then remove it first with -```sh -conda env remove -n build_pysdt_package -``` - From the directory of the ess-streaming-data-types repository, build the package with ```sh -conda create -c conda-forge -n build_pysdt_package python=3.7 -conda activate build_pysdt_package conda install -c conda-forge conda-build anaconda-client conda build -c conda-forge ./conda ``` From b2c5f6e4bd8398ccda9685d3562613f32f727218 Mon Sep 17 00:00:00 2001 From: Matthew D Jones Date: Wed, 19 May 2021 20:53:29 +0100 Subject: [PATCH 240/363] Add flatc generated SpectraDetectorMapping.py --- .../run_start_pl72/SpectraDetectorMapping.py | 109 ++++++++++++++++++ 1 file changed, 109 insertions(+) create mode 100644 streaming_data_types/fbschemas/run_start_pl72/SpectraDetectorMapping.py diff --git a/streaming_data_types/fbschemas/run_start_pl72/SpectraDetectorMapping.py b/streaming_data_types/fbschemas/run_start_pl72/SpectraDetectorMapping.py new file mode 100644 index 0000000..37a2ae6 --- /dev/null +++ b/streaming_data_types/fbschemas/run_start_pl72/SpectraDetectorMapping.py @@ -0,0 +1,109 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers + + +class SpectraDetectorMapping(object): + __slots__ = ["_tab"] + + @classmethod + def GetRootAsSpectraDetectorMapping(cls, buf, offset): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = SpectraDetectorMapping() + x.Init(buf, n + offset) + return x + + # SpectraDetectorMapping + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # SpectraDetectorMapping + def Spectrum(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get( + flatbuffers.number_types.Int32Flags, + a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4), + ) + return 0 + + # SpectraDetectorMapping + def 
SpectrumAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int32Flags, o) + return 0 + + # SpectraDetectorMapping + def SpectrumLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # SpectraDetectorMapping + def DetectorId(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get( + flatbuffers.number_types.Int32Flags, + a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4), + ) + return 0 + + # SpectraDetectorMapping + def DetectorIdAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int32Flags, o) + return 0 + + # SpectraDetectorMapping + def DetectorIdLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # SpectraDetectorMapping + def NSpectra(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) + return 0 + + +def SpectraDetectorMappingStart(builder): + builder.StartObject(3) + + +def SpectraDetectorMappingAddSpectrum(builder, spectrum): + builder.PrependUOffsetTRelativeSlot( + 0, flatbuffers.number_types.UOffsetTFlags.py_type(spectrum), 0 + ) + + +def SpectraDetectorMappingStartSpectrumVector(builder, numElems): + return builder.StartVector(4, numElems, 4) + + +def SpectraDetectorMappingAddDetectorId(builder, detectorId): + builder.PrependUOffsetTRelativeSlot( + 1, flatbuffers.number_types.UOffsetTFlags.py_type(detectorId), 0 + ) + + +def SpectraDetectorMappingStartDetectorIdVector(builder, numElems): + return builder.StartVector(4, numElems, 4) + 
+ +def SpectraDetectorMappingAddNSpectra(builder, nSpectra): + builder.PrependInt32Slot(2, nSpectra, 0) + + +def SpectraDetectorMappingEnd(builder): + return builder.EndObject() From 865499e3a65649fcf52ee86965ad6f39d850d8a1 Mon Sep 17 00:00:00 2001 From: Matthew D Jones Date: Thu, 20 May 2021 12:27:54 +0100 Subject: [PATCH 241/363] Test serialisation of det-spec map --- tests/test_pl72.py | 14 +++++++++++++- 1 file changed, 13 insertions(+), 1 deletion(-) diff --git a/tests/test_pl72.py b/tests/test_pl72.py index 543c875..4fe4ab4 100644 --- a/tests/test_pl72.py +++ b/tests/test_pl72.py @@ -1,7 +1,12 @@ import pytest from streaming_data_types.exceptions import WrongSchemaException -from streaming_data_types.run_start_pl72 import serialise_pl72, deserialise_pl72 +from streaming_data_types.run_start_pl72 import ( + serialise_pl72, + deserialise_pl72, + DetectorSpectrumMap, +) from streaming_data_types import SERIALISERS, DESERIALISERS +import numpy as np class TestSerialisationPl72: @@ -16,6 +21,9 @@ class TestSerialisationPl72: "instrument_name": "LOKI", "broker": "localhost:9092", "metadata": "{3:1}", + "detector_spectrum_map": DetectorSpectrumMap( + np.array([4, 5, 6]), np.array([0, 1, 2]), 3 + ), } def test_serialises_and_deserialises_pl72_message_correctly(self): @@ -36,6 +44,10 @@ def test_serialises_and_deserialises_pl72_message_correctly(self): ) assert deserialised_tuple.broker == self.original_entry["broker"] assert deserialised_tuple.metadata == self.original_entry["metadata"] + assert ( + deserialised_tuple.detector_spectrum_map + == self.original_entry["detector_spectrum_map"] + ) def test_if_buffer_has_wrong_id_then_throws(self): buf = serialise_pl72(**self.original_entry) From 41ae749f9d8b799cdf91e7b39da3509a1eabf8c0 Mon Sep 17 00:00:00 2001 From: Matthew D Jones Date: Thu, 20 May 2021 12:43:41 +0100 Subject: [PATCH 242/363] Impl serialisation of det-spec map --- .../fbschemas/run_start_pl72/RunStart.py | 2 +- streaming_data_types/run_start_pl72.py | 55 
++++++++++++++++++- tests/test_pl72.py | 13 ++++- 3 files changed, 65 insertions(+), 5 deletions(-) diff --git a/streaming_data_types/fbschemas/run_start_pl72/RunStart.py b/streaming_data_types/fbschemas/run_start_pl72/RunStart.py index 9cf7231..1e76ce9 100644 --- a/streaming_data_types/fbschemas/run_start_pl72/RunStart.py +++ b/streaming_data_types/fbschemas/run_start_pl72/RunStart.py @@ -109,7 +109,7 @@ def DetectorSpectrumMap(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(24)) if o != 0: x = self._tab.Indirect(o + self._tab.Pos) - from SpectraDetectorMapping import SpectraDetectorMapping + from .SpectraDetectorMapping import SpectraDetectorMapping obj = SpectraDetectorMapping() obj.Init(self._tab.Bytes, x) diff --git a/streaming_data_types/run_start_pl72.py b/streaming_data_types/run_start_pl72.py index 6763028..9295e17 100644 --- a/streaming_data_types/run_start_pl72.py +++ b/streaming_data_types/run_start_pl72.py @@ -1,14 +1,29 @@ import time -from typing import Union +from typing import Union, Optional import flatbuffers -from streaming_data_types.fbschemas.run_start_pl72 import RunStart +from streaming_data_types.fbschemas.run_start_pl72 import ( + RunStart, + SpectraDetectorMapping, +) from streaming_data_types.utils import check_schema_identifier from typing import NamedTuple from datetime import datetime +import numpy as np +from collections import namedtuple FILE_IDENTIFIER = b"pl72" +DetectorSpectrumMap = namedtuple( + "DetectorSpectrumMap", + ( + "spectrum_numbers", # numpy ndarray of int + "detector_ids", # numpy ndarray of int + "n_spectra", # int + ), +) + + def serialise_pl72( job_id: str, filename: str, @@ -20,6 +35,7 @@ def serialise_pl72( instrument_name: str = "TEST", broker: str = "localhost:9092", metadata: str = "{}", + detector_spectrum_map: Optional[DetectorSpectrumMap] = None, ) -> bytes: builder = flatbuffers.Builder(512) builder.ForceDefaults(True) @@ -44,6 +60,28 @@ def serialise_pl72( filename_offset = 
builder.CreateString(filename) metadata_offset = builder.CreateString(metadata) + # Build detector-spectrum map + if detector_spectrum_map is not None: + spectrum_map_offset = builder.CreateNumpyVector( + np.array(detector_spectrum_map.spectrum_numbers).astype(np.int32) + ) + det_id_map_offset = builder.CreateNumpyVector( + np.array(detector_spectrum_map.detector_ids).astype(np.int32) + ) + SpectraDetectorMapping.SpectraDetectorMappingStart(builder) + SpectraDetectorMapping.SpectraDetectorMappingAddSpectrum( + builder, spectrum_map_offset + ) + SpectraDetectorMapping.SpectraDetectorMappingAddDetectorId( + builder, det_id_map_offset + ) + SpectraDetectorMapping.SpectraDetectorMappingAddNSpectra( + builder, detector_spectrum_map.n_spectra + ) + detector_spectrum_map_offset = SpectraDetectorMapping.SpectraDetectorMappingEnd( + builder + ) + # Build the actual buffer RunStart.RunStartStart(builder) RunStart.RunStartAddServiceId(builder, service_id_offset) @@ -57,6 +95,8 @@ def serialise_pl72( RunStart.RunStartAddFilename(builder, filename_offset) RunStart.RunStartAddNPeriods(builder, 1) RunStart.RunStartAddMetadata(builder, metadata_offset) + if detector_spectrum_map is not None: + RunStart.RunStartAddDetectorSpectrumMap(builder, detector_spectrum_map_offset) run_start_message = RunStart.RunStartEnd(builder) @@ -77,6 +117,7 @@ def serialise_pl72( ("instrument_name", str), ("broker", str), ("metadata", str), + ("detector_spectrum_map", Optional[DetectorSpectrumMap]), ), ) @@ -94,6 +135,15 @@ def deserialise_pl72(buffer: Union[bytearray, bytes]) -> RunStartInfo: run_name = run_start.RunName() if run_start.RunName() else b"" metadata = run_start.Metadata() if run_start.Metadata() else b"" + detector_spectrum_map = None + det_spec_map_buf = run_start.DetectorSpectrumMap() + if det_spec_map_buf is not None: + detector_spectrum_map = DetectorSpectrumMap( + det_spec_map_buf.SpectrumAsNumpy(), + det_spec_map_buf.DetectorIdAsNumpy(), + det_spec_map_buf.NSpectra(), + ) + return 
RunStartInfo( job_id=job_id.decode(), filename=filename.decode(), @@ -105,4 +155,5 @@ def deserialise_pl72(buffer: Union[bytearray, bytes]) -> RunStartInfo: instrument_name=instrument_name.decode(), broker=broker.decode(), metadata=metadata.decode(), + detector_spectrum_map=detector_spectrum_map, ) diff --git a/tests/test_pl72.py b/tests/test_pl72.py index 4fe4ab4..0c935c5 100644 --- a/tests/test_pl72.py +++ b/tests/test_pl72.py @@ -44,9 +44,18 @@ def test_serialises_and_deserialises_pl72_message_correctly(self): ) assert deserialised_tuple.broker == self.original_entry["broker"] assert deserialised_tuple.metadata == self.original_entry["metadata"] + assert ( - deserialised_tuple.detector_spectrum_map - == self.original_entry["detector_spectrum_map"] + deserialised_tuple.detector_spectrum_map.n_spectra + == self.original_entry["detector_spectrum_map"].n_spectra + ) + assert np.array_equal( + deserialised_tuple.detector_spectrum_map.spectrum_numbers, + self.original_entry["detector_spectrum_map"].spectrum_numbers, + ) + assert np.array_equal( + deserialised_tuple.detector_spectrum_map.detector_ids, + self.original_entry["detector_spectrum_map"].detector_ids, ) def test_if_buffer_has_wrong_id_then_throws(self): From 70516b7f12c47f173bf4d6cfaa21a1abfc6f3717 Mon Sep 17 00:00:00 2001 From: Matthew D Jones Date: Fri, 21 May 2021 14:50:56 +0100 Subject: [PATCH 243/363] Update _version.py --- streaming_data_types/_version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/streaming_data_types/_version.py b/streaming_data_types/_version.py index e0cabf3..6110f58 100644 --- a/streaming_data_types/_version.py +++ b/streaming_data_types/_version.py @@ -1,4 +1,4 @@ # Version is not directly defined in __init__ because that causes all # run time dependencies to become build-time dependencies when it is # imported in setup.py -version = "0.12.0" +version = "0.13.0" From c5304dfb77a4a22ea920c818dccbf4f046675572 Mon Sep 17 00:00:00 2001 From: Jonas Nilsson Date: 
Thu, 1 Jul 2021 01:38:15 +0200 Subject: [PATCH 244/363] Fix compatibility issue. --- streaming_data_types/_version.py | 2 +- streaming_data_types/run_start_pl72.py | 28 +++++++++++--------------- 2 files changed, 13 insertions(+), 17 deletions(-) diff --git a/streaming_data_types/_version.py b/streaming_data_types/_version.py index 6110f58..2f496a1 100644 --- a/streaming_data_types/_version.py +++ b/streaming_data_types/_version.py @@ -1,4 +1,4 @@ # Version is not directly defined in __init__ because that causes all # run time dependencies to become build-time dependencies when it is # imported in setup.py -version = "0.13.0" +version = "0.13.1" diff --git a/streaming_data_types/run_start_pl72.py b/streaming_data_types/run_start_pl72.py index 9295e17..5dc816a 100644 --- a/streaming_data_types/run_start_pl72.py +++ b/streaming_data_types/run_start_pl72.py @@ -104,22 +104,18 @@ def serialise_pl72( return bytes(builder.Output()) -RunStartInfo = NamedTuple( - "RunStartInfo", - ( - ("job_id", str), - ("filename", str), - ("start_time", int), - ("stop_time", int), - ("run_name", str), - ("nexus_structure", str), - ("service_id", str), - ("instrument_name", str), - ("broker", str), - ("metadata", str), - ("detector_spectrum_map", Optional[DetectorSpectrumMap]), - ), -) +class RunStartInfo(NamedTuple): + job_id: str + filename: str + start_time: int + stop_time: int + nexus_structure: str + service_id: str + broker: str + run_name: str = "" + instrument_name: str = "" + metadata: str = "" + detector_spectrum_map: Optional[DetectorSpectrumMap] = None def deserialise_pl72(buffer: Union[bytearray, bytes]) -> RunStartInfo: From 624a96021968f1841b50e11aefbfb9860c34b959 Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Mon, 5 Jul 2021 15:51:47 +0200 Subject: [PATCH 245/363] Update streaming_data_types/_version.py --- streaming_data_types/_version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/streaming_data_types/_version.py 
b/streaming_data_types/_version.py index 2f496a1..3ea9ff4 100644 --- a/streaming_data_types/_version.py +++ b/streaming_data_types/_version.py @@ -1,4 +1,4 @@ # Version is not directly defined in __init__ because that causes all # run time dependencies to become build-time dependencies when it is # imported in setup.py -version = "0.13.1" +version = "0.14.0" From d5f86d7256097c05edf06fdb2bb72292363d52d7 Mon Sep 17 00:00:00 2001 From: Matthew D Jones Date: Sun, 11 Jul 2021 16:58:49 +0100 Subject: [PATCH 246/363] Use np asarray to avoid copies --- streaming_data_types/area_detector_ADAr.py | 2 +- streaming_data_types/eventdata_ev42.py | 4 ++-- streaming_data_types/run_start_pl72.py | 4 ++-- streaming_data_types/sample_environment_senv.py | 4 ++-- streaming_data_types/timestamps_tdct.py | 2 +- 5 files changed, 8 insertions(+), 8 deletions(-) diff --git a/streaming_data_types/area_detector_ADAr.py b/streaming_data_types/area_detector_ADAr.py index 6fdbb09..d416a74 100644 --- a/streaming_data_types/area_detector_ADAr.py +++ b/streaming_data_types/area_detector_ADAr.py @@ -68,7 +68,7 @@ def serialise_ADAr( data_type = type_map[data.dtype] # Build dims - dims_offset = builder.CreateNumpyVector(np.array(data.shape)) + dims_offset = builder.CreateNumpyVector(np.asarray(data.shape)) # Build data data_offset = builder.CreateNumpyVector(data.flatten().view(np.uint8)) diff --git a/streaming_data_types/eventdata_ev42.py b/streaming_data_types/eventdata_ev42.py index 62ab306..73b4f67 100644 --- a/streaming_data_types/eventdata_ev42.py +++ b/streaming_data_types/eventdata_ev42.py @@ -74,8 +74,8 @@ def serialise_ev42( source = builder.CreateString(source_name) - tof_data = builder.CreateNumpyVector(np.array(time_of_flight).astype(np.uint32)) - det_data = builder.CreateNumpyVector(np.array(detector_id).astype(np.uint32)) + tof_data = builder.CreateNumpyVector(np.asarray(time_of_flight).astype(np.uint32)) + det_data = 
builder.CreateNumpyVector(np.asarray(detector_id).astype(np.uint32)) isis_data = None if isis_specific: diff --git a/streaming_data_types/run_start_pl72.py b/streaming_data_types/run_start_pl72.py index 5dc816a..accd66a 100644 --- a/streaming_data_types/run_start_pl72.py +++ b/streaming_data_types/run_start_pl72.py @@ -63,10 +63,10 @@ def serialise_pl72( # Build detector-spectrum map if detector_spectrum_map is not None: spectrum_map_offset = builder.CreateNumpyVector( - np.array(detector_spectrum_map.spectrum_numbers).astype(np.int32) + np.asarray(detector_spectrum_map.spectrum_numbers).astype(np.int32) ) det_id_map_offset = builder.CreateNumpyVector( - np.array(detector_spectrum_map.detector_ids).astype(np.int32) + np.asarray(detector_spectrum_map.detector_ids).astype(np.int32) ) SpectraDetectorMapping.SpectraDetectorMappingStart(builder) SpectraDetectorMapping.SpectraDetectorMappingAddSpectrum( diff --git a/streaming_data_types/sample_environment_senv.py b/streaming_data_types/sample_environment_senv.py index 1084756..933be58 100644 --- a/streaming_data_types/sample_environment_senv.py +++ b/streaming_data_types/sample_environment_senv.py @@ -36,7 +36,7 @@ def serialise_senv( builder = flatbuffers.Builder(1024) if value_timestamps is not None: - used_timestamps = np.atleast_1d(np.array(value_timestamps)).astype(np.uint64) + used_timestamps = np.atleast_1d(np.asarray(value_timestamps)).astype(np.uint64) timestamps_offset = builder.CreateNumpyVector(used_timestamps) numpy_type_map = { @@ -50,7 +50,7 @@ def serialise_senv( np.dtype("uint64"): ValueUnion.UInt64Array, } - temp_values = np.atleast_1d(np.array(values)) + temp_values = np.atleast_1d(np.asarray(values)) value_array_offset = builder.CreateNumpyVector(temp_values) diff --git a/streaming_data_types/timestamps_tdct.py b/streaming_data_types/timestamps_tdct.py index d16d036..82105ed 100644 --- a/streaming_data_types/timestamps_tdct.py +++ b/streaming_data_types/timestamps_tdct.py @@ -22,7 +22,7 @@ def 
serialise_tdct( builder = flatbuffers.Builder(1024) builder.ForceDefaults(True) - timestamps = np.atleast_1d(np.array(timestamps)).astype(np.uint64) + timestamps = np.atleast_1d(np.asarray(timestamps)).astype(np.uint64) name_offset = builder.CreateString(name) From c5e10e741962efef08efd2db89edbdf30ecbfd57 Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Mon, 19 Jul 2021 12:49:20 +0200 Subject: [PATCH 247/363] Added control_topic to pl72 run start --- .../fbschemas/run_start_pl72/RunStart.py | 15 ++++++++++++++- streaming_data_types/run_start_pl72.py | 6 ++++++ tests/test_pl72.py | 2 ++ 3 files changed, 22 insertions(+), 1 deletion(-) diff --git a/streaming_data_types/fbschemas/run_start_pl72/RunStart.py b/streaming_data_types/fbschemas/run_start_pl72/RunStart.py index 1e76ce9..d6e86fd 100644 --- a/streaming_data_types/fbschemas/run_start_pl72/RunStart.py +++ b/streaming_data_types/fbschemas/run_start_pl72/RunStart.py @@ -123,9 +123,16 @@ def Metadata(self): return self._tab.String(o + self._tab.Pos) return None + # RunStart + def ControlTopic(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(28)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + def RunStartStart(builder): - builder.StartObject(12) + builder.StartObject(13) def RunStartAddStartTime(builder, startTime): @@ -194,5 +201,11 @@ def RunStartAddMetadata(builder, metadata): ) +def RunStartAddControlTopic(builder, controlTopic): + builder.PrependUOffsetTRelativeSlot( + 12, flatbuffers.number_types.UOffsetTFlags.py_type(controlTopic), 0 + ) + + def RunStartEnd(builder): return builder.EndObject() diff --git a/streaming_data_types/run_start_pl72.py b/streaming_data_types/run_start_pl72.py index accd66a..dbb90c2 100644 --- a/streaming_data_types/run_start_pl72.py +++ b/streaming_data_types/run_start_pl72.py @@ -36,6 +36,7 @@ def serialise_pl72( broker: str = "localhost:9092", metadata: str = "{}", detector_spectrum_map: Optional[DetectorSpectrumMap] = None, + 
control_topic: str = "", ) -> bytes: builder = flatbuffers.Builder(512) builder.ForceDefaults(True) @@ -59,6 +60,7 @@ def serialise_pl72( run_name_offset = builder.CreateString(run_name) filename_offset = builder.CreateString(filename) metadata_offset = builder.CreateString(metadata) + control_topic_offset = builder.CreateString(control_topic) # Build detector-spectrum map if detector_spectrum_map is not None: @@ -97,6 +99,7 @@ def serialise_pl72( RunStart.RunStartAddMetadata(builder, metadata_offset) if detector_spectrum_map is not None: RunStart.RunStartAddDetectorSpectrumMap(builder, detector_spectrum_map_offset) + RunStart.RunStartAddControlTopic(builder, control_topic_offset) run_start_message = RunStart.RunStartEnd(builder) @@ -116,6 +119,7 @@ class RunStartInfo(NamedTuple): instrument_name: str = "" metadata: str = "" detector_spectrum_map: Optional[DetectorSpectrumMap] = None + control_topic: str = "" def deserialise_pl72(buffer: Union[bytearray, bytes]) -> RunStartInfo: @@ -130,6 +134,7 @@ def deserialise_pl72(buffer: Union[bytearray, bytes]) -> RunStartInfo: instrument_name = run_start.InstrumentName() if run_start.InstrumentName() else b"" run_name = run_start.RunName() if run_start.RunName() else b"" metadata = run_start.Metadata() if run_start.Metadata() else b"" + control_topic = run_start.ControlTopic() if run_start.ControlTopic() else b"" detector_spectrum_map = None det_spec_map_buf = run_start.DetectorSpectrumMap() @@ -152,4 +157,5 @@ def deserialise_pl72(buffer: Union[bytearray, bytes]) -> RunStartInfo: broker=broker.decode(), metadata=metadata.decode(), detector_spectrum_map=detector_spectrum_map, + control_topic=control_topic.decode(), ) diff --git a/tests/test_pl72.py b/tests/test_pl72.py index 0c935c5..0d7bf77 100644 --- a/tests/test_pl72.py +++ b/tests/test_pl72.py @@ -24,6 +24,7 @@ class TestSerialisationPl72: "detector_spectrum_map": DetectorSpectrumMap( np.array([4, 5, 6]), np.array([0, 1, 2]), 3 ), + "control_topic": "some_topic_name", } 
def test_serialises_and_deserialises_pl72_message_correctly(self): @@ -57,6 +58,7 @@ def test_serialises_and_deserialises_pl72_message_correctly(self): deserialised_tuple.detector_spectrum_map.detector_ids, self.original_entry["detector_spectrum_map"].detector_ids, ) + assert deserialised_tuple.control_topic == self.original_entry["control_topic"] def test_if_buffer_has_wrong_id_then_throws(self): buf = serialise_pl72(**self.original_entry) From 33e1b798805711a4c5be1b84a1b4e77592b4d9db Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Mon, 19 Jul 2021 12:56:47 +0200 Subject: [PATCH 248/363] Grammar corrections --- README_DEV.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README_DEV.md b/README_DEV.md index 69966d5..57e904c 100644 --- a/README_DEV.md +++ b/README_DEV.md @@ -59,7 +59,7 @@ pip install --user -e ./ ***First update the __version__ number in streaming_data_types/__init__.py and push the update to the repository.*** -Uninstall streaming_data_types if you have previous installed it from PyPi: +Uninstall streaming_data_types if you have previously installed it from PyPi: ``` pip uninstall ess_streaming_data_types ``` @@ -93,7 +93,7 @@ Unfortunately, flatbuffers is not on test.pypi.org so the following error may oc ``` ERROR: Could not find a version that satisfies the requirement flatbuffers ``` -The workaround is install flatbuffers manually first using `pip install flatbuffers` and then rerun the previous command. +The workaround is to install flatbuffers manually first using `pip install flatbuffers` and then rerun the previous command. 
Test the module using the existing test-suite (from project root): ``` From 6eaf710111828395c9f7e9a57d41552a99f7e7ad Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Mon, 30 Aug 2021 08:19:55 +0200 Subject: [PATCH 249/363] Made the location of the requirements for tox explicit --- README_DEV.md | 2 ++ tox.ini | 4 ++-- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/README_DEV.md b/README_DEV.md index 57e904c..2b99dca 100644 --- a/README_DEV.md +++ b/README_DEV.md @@ -113,6 +113,8 @@ Finally, create a tag on the GitHub repository with the appropriate name, e.g. ` The conda package is used by ESS DMSC DRAM group for the Scipp library. Please create the release version tag on github before creating the conda package as it gets the version number from the tag. +Note: anecdotal evidence suggests that this works better on Linux than on MacOS. + #### Steps You must first have a conda installation, for example `conda` via pip, or [miniconda](https://docs.conda.io/en/latest/miniconda.html). 
diff --git a/tox.ini b/tox.ini index ea5302e..04457fc 100644 --- a/tox.ini +++ b/tox.ini @@ -6,8 +6,8 @@ skipsdist=true [testenv] deps = - -r requirements.txt - -r requirements-dev.txt + -r{toxinidir}/requirements.txt + -r{toxinidir}/requirements-dev.txt commands = python -m pytest {posargs} From 6c2c7717fd10711593ebd4a6efdf121a460d2b4e Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Mon, 30 Aug 2021 08:21:12 +0200 Subject: [PATCH 250/363] Updated gitignore --- .gitignore | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.gitignore b/.gitignore index b6e4761..50af712 100644 --- a/.gitignore +++ b/.gitignore @@ -127,3 +127,6 @@ dmypy.json # Pyre type checker .pyre/ + +# PyCharm +.idea From b8606e20e266af025cca204fc277f5e3d6cbadbe Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Mon, 30 Aug 2021 08:25:42 +0200 Subject: [PATCH 251/363] Show which python being run --- Jenkinsfile | 1 + 1 file changed, 1 insertion(+) diff --git a/Jenkinsfile b/Jenkinsfile index e1b5721..e83e56f 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -39,6 +39,7 @@ builders = pipeline_builder.createBuilders { container -> /opt/miniconda/bin/conda init bash export PATH=/opt/miniconda/bin:$PATH python --version + which python python -m pip install --user -r ${project}/requirements.txt python -m pip install --user -r ${project}/requirements-dev.txt """ From bf9a4c4f7e0d0a6e8b07305a9e25397d001bd32d Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Mon, 30 Aug 2021 08:27:56 +0200 Subject: [PATCH 252/363] Debugging. 
--- Jenkinsfile | 1 + 1 file changed, 1 insertion(+) diff --git a/Jenkinsfile b/Jenkinsfile index e83e56f..dabe286 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -40,6 +40,7 @@ builders = pipeline_builder.createBuilders { container -> export PATH=/opt/miniconda/bin:$PATH python --version which python + ls /opt/miniconda/bin/ python -m pip install --user -r ${project}/requirements.txt python -m pip install --user -r ${project}/requirements-dev.txt """ From c51851e46a8f59a9cf0a1da9e6f603df04e917cd Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Mon, 30 Aug 2021 08:29:29 +0200 Subject: [PATCH 253/363] Explicitly install pip for conda --- Jenkinsfile | 1 + 1 file changed, 1 insertion(+) diff --git a/Jenkinsfile b/Jenkinsfile index dabe286..e2362dc 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -37,6 +37,7 @@ builders = pipeline_builder.createBuilders { container -> pipeline_builder.stage("${container.key}: Dependencies") { container.sh """ /opt/miniconda/bin/conda init bash + conda install -c anaconda pip export PATH=/opt/miniconda/bin:$PATH python --version which python From 221e83e422a009945864b04a3ec0074f24b70edd Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Mon, 30 Aug 2021 08:31:21 +0200 Subject: [PATCH 254/363] Changed order --- Jenkinsfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Jenkinsfile b/Jenkinsfile index e2362dc..a36a138 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -37,8 +37,8 @@ builders = pipeline_builder.createBuilders { container -> pipeline_builder.stage("${container.key}: Dependencies") { container.sh """ /opt/miniconda/bin/conda init bash - conda install -c anaconda pip export PATH=/opt/miniconda/bin:$PATH + conda install -c anaconda pip python --version which python ls /opt/miniconda/bin/ From 7f502d74e6f9247786dff9200d6fe7581b3edfc1 Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Mon, 30 Aug 2021 08:35:17 +0200 Subject: [PATCH 255/363] Removed build debugging bits --- Jenkinsfile | 2 -- 1 file changed, 2 deletions(-) 
diff --git a/Jenkinsfile b/Jenkinsfile index a36a138..e83e56f 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -38,10 +38,8 @@ builders = pipeline_builder.createBuilders { container -> container.sh """ /opt/miniconda/bin/conda init bash export PATH=/opt/miniconda/bin:$PATH - conda install -c anaconda pip python --version which python - ls /opt/miniconda/bin/ python -m pip install --user -r ${project}/requirements.txt python -m pip install --user -r ${project}/requirements-dev.txt """ From 0972e1a83444120f017448d73f1f940e1b9fc670 Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Mon, 30 Aug 2021 08:42:16 +0200 Subject: [PATCH 256/363] Temporary workaround to install pip --- Jenkinsfile | 2 ++ 1 file changed, 2 insertions(+) diff --git a/Jenkinsfile b/Jenkinsfile index e83e56f..dd336af 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -40,6 +40,8 @@ builders = pipeline_builder.createBuilders { container -> export PATH=/opt/miniconda/bin:$PATH python --version which python + curl -sSL https://bootstrap.pypa.io/get-pip.py -o get-pip.py + python get-pip.py python -m pip install --user -r ${project}/requirements.txt python -m pip install --user -r ${project}/requirements-dev.txt """ From 82e016168c22b301b6370467c2ece7210f41f6af Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Wed, 1 Sep 2021 04:59:55 +0000 Subject: [PATCH 257/363] Remove workaround --- Jenkinsfile | 2 -- 1 file changed, 2 deletions(-) diff --git a/Jenkinsfile b/Jenkinsfile index dd336af..e83e56f 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -40,8 +40,6 @@ builders = pipeline_builder.createBuilders { container -> export PATH=/opt/miniconda/bin:$PATH python --version which python - curl -sSL https://bootstrap.pypa.io/get-pip.py -o get-pip.py - python get-pip.py python -m pip install --user -r ${project}/requirements.txt python -m pip install --user -r ${project}/requirements-dev.txt """ From 0018bec680b31130a559c3113d959f611d6dc6c3 Mon Sep 17 00:00:00 2001 From: Geish Miladinovic Date: Fri, 22 Oct 2021 14:41:30 
+1100 Subject: [PATCH 258/363] added ev43 and fix int8 f142 serialise --- .pre-commit-config.yaml | 2 +- README.md | 1 + streaming_data_types/__init__.py | 3 + streaming_data_types/eventdata_ev43.py | 81 +++++++ .../eventdata_ev43/Event43Message.py | 223 ++++++++++++++++++ .../fbschemas/eventdata_ev43/__init__.py | 0 streaming_data_types/logdata_f142.py | 8 +- tests/test_ev43.py | 75 ++++++ tests/test_f142.py | 27 +++ 9 files changed, 415 insertions(+), 5 deletions(-) create mode 100644 streaming_data_types/eventdata_ev43.py create mode 100644 streaming_data_types/fbschemas/eventdata_ev43/Event43Message.py create mode 100644 streaming_data_types/fbschemas/eventdata_ev43/__init__.py create mode 100644 tests/test_ev43.py diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 038f378..a2b82a1 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -3,7 +3,7 @@ repos: rev: stable hooks: - id: black - language_version: python3.7 + language_version: python3.6 - repo: https://github.com/pycqa/flake8 rev: 3.8.3 hooks: diff --git a/README.md b/README.md index 420bad7..aecdb09 100644 --- a/README.md +++ b/README.md @@ -14,6 +14,7 @@ https://github.com/ess-dmsc/streaming-data-types |6s4t|Run stop| |f142|Log data| |ev42|Event data| +|ev43|Event data from multiple pulses| |x5f2|Status messages| |tdct|Timestamps| |ep00|EPICS connection info| diff --git a/streaming_data_types/__init__.py b/streaming_data_types/__init__.py index 2ff37c1..d5e11f0 100644 --- a/streaming_data_types/__init__.py +++ b/streaming_data_types/__init__.py @@ -1,4 +1,5 @@ from streaming_data_types.eventdata_ev42 import deserialise_ev42, serialise_ev42 +from streaming_data_types.eventdata_ev43 import deserialise_ev43, serialise_ev43 from streaming_data_types.histogram_hs00 import deserialise_hs00, serialise_hs00 from streaming_data_types.logdata_f142 import deserialise_f142, serialise_f142 from streaming_data_types.nicos_cache_ns10 import deserialise_ns10, serialise_ns10 @@ -28,6 
+29,7 @@ SERIALISERS = { "ev42": serialise_ev42, + "ev43": serialise_ev43, "hs00": serialise_hs00, "f142": serialise_f142, "ns10": serialise_ns10, @@ -47,6 +49,7 @@ DESERIALISERS = { "ev42": deserialise_ev42, + "ev43": deserialise_ev43, "hs00": deserialise_hs00, "f142": deserialise_f142, "ns10": deserialise_ns10, diff --git a/streaming_data_types/eventdata_ev43.py b/streaming_data_types/eventdata_ev43.py new file mode 100644 index 0000000..08b5e3b --- /dev/null +++ b/streaming_data_types/eventdata_ev43.py @@ -0,0 +1,81 @@ +from collections import namedtuple +import flatbuffers +import streaming_data_types.fbschemas.eventdata_ev43.Event43Message as Event43Message +from streaming_data_types.utils import check_schema_identifier +import numpy as np + + +FILE_IDENTIFIER = b"ev43" + + +EventData = namedtuple( + "EventData", + ( + "source_name", + "message_id", + "pulse_time", + "pulse_index", + "time_of_flight", + "detector_id", + ), +) + + +def deserialise_ev43(buffer): + """ + Deserialise FlatBuffer ev43. + + :param buffer: The FlatBuffers buffer. + :return: The deserialised data. + """ + check_schema_identifier(buffer, FILE_IDENTIFIER) + + event = Event43Message.Event43Message.GetRootAsEvent43Message(buffer, 0) + + return EventData( + event.SourceName().decode("utf-8"), + event.MessageId(), + event.PulseTimeAsNumpy(), + event.PulseIndexAsNumpy(), + event.TimeOfFlightAsNumpy(), + event.DetectorIdAsNumpy(), + ) + + +def serialise_ev43( + source_name, message_id, pulse_time, pulse_index, time_of_flight, detector_id +): + """ + Serialise event data as an ev43 FlatBuffers message. 
+ + :param source_name: + :param message_id: + :param pulse_time: + :param pulse_index: + :param time_of_flight: + :param detector_id: + :return: + """ + builder = flatbuffers.Builder(1024) + builder.ForceDefaults(True) + + source = builder.CreateString(source_name) + + pulse_ts_data = builder.CreateNumpyVector(np.asarray(pulse_time).astype(np.uint64)) + pulse_ix_data = builder.CreateNumpyVector(np.asarray(pulse_index).astype(np.uint32)) + tof_data = builder.CreateNumpyVector(np.asarray(time_of_flight).astype(np.uint32)) + det_data = builder.CreateNumpyVector(np.asarray(detector_id).astype(np.uint32)) + + # Build the actual buffer + Event43Message.Event43MessageStart(builder) + Event43Message.Event43MessageAddPulseTime(builder, pulse_ts_data) + Event43Message.Event43MessageAddPulseIndex(builder, pulse_ix_data) + Event43Message.Event43MessageAddDetectorId(builder, det_data) + Event43Message.Event43MessageAddTimeOfFlight(builder, tof_data) + Event43Message.Event43MessageAddMessageId(builder, message_id) + Event43Message.Event43MessageAddSourceName(builder, source) + + data = Event43Message.Event43MessageEnd(builder) + + builder.Finish(data, file_identifier=FILE_IDENTIFIER) + return bytes(builder.Output()) diff --git a/streaming_data_types/fbschemas/eventdata_ev43/Event43Message.py b/streaming_data_types/fbschemas/eventdata_ev43/Event43Message.py new file mode 100644 index 0000000..ad0b58a --- /dev/null +++ b/streaming_data_types/fbschemas/eventdata_ev43/Event43Message.py @@ -0,0 +1,223 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers +from flatbuffers.compat import import_numpy + +np = import_numpy() + + +class Event43Message(object): + __slots__ = ["_tab"] + + @classmethod + def GetRootAsEvent43Message(cls, buf, offset): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = Event43Message() + x.Init(buf, n + offset) + return x + + @classmethod + def 
Event43MessageBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier( + buf, offset, b"\x65\x76\x34\x33", size_prefixed=size_prefixed + ) + + # Event43Message + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # Event43Message + def SourceName(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + # Event43Message + def MessageId(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.Get( + flatbuffers.number_types.Uint64Flags, o + self._tab.Pos + ) + return 0 + + # Event43Message + def PulseTime(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get( + flatbuffers.number_types.Uint64Flags, + a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8), + ) + return 0 + + # Event43Message + def PulseTimeAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint64Flags, o) + return 0 + + # Event43Message + def PulseTimeLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # Event43Message + def PulseTimeIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + return o == 0 + + # Event43Message + def PulseIndex(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get( + flatbuffers.number_types.Uint32Flags, + a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4), + ) + return 0 + + # Event43Message + def PulseIndexAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + if o != 0: + 
return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint32Flags, o) + return 0 + + # Event43Message + def PulseIndexLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # Event43Message + def PulseIndexIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + return o == 0 + + # Event43Message + def TimeOfFlight(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get( + flatbuffers.number_types.Uint32Flags, + a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4), + ) + return 0 + + # Event43Message + def TimeOfFlightAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint32Flags, o) + return 0 + + # Event43Message + def TimeOfFlightLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # Event43Message + def TimeOfFlightIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + return o == 0 + + # Event43Message + def DetectorId(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get( + flatbuffers.number_types.Uint32Flags, + a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4), + ) + return 0 + + # Event43Message + def DetectorIdAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint32Flags, o) + return 0 + + # Event43Message + def DetectorIdLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # 
Event43Message + def DetectorIdIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) + return o == 0 + + +def Event43MessageStart(builder): + builder.StartObject(6) + + +def Event43MessageAddSourceName(builder, sourceName): + builder.PrependUOffsetTRelativeSlot( + 0, flatbuffers.number_types.UOffsetTFlags.py_type(sourceName), 0 + ) + + +def Event43MessageAddMessageId(builder, messageId): + builder.PrependUint64Slot(1, messageId, 0) + + +def Event43MessageAddPulseTime(builder, pulseTime): + builder.PrependUOffsetTRelativeSlot( + 2, flatbuffers.number_types.UOffsetTFlags.py_type(pulseTime), 0 + ) + + +def Event43MessageStartPulseTimeVector(builder, numElems): + return builder.StartVector(8, numElems, 8) + + +def Event43MessageAddPulseIndex(builder, pulseIndex): + builder.PrependUOffsetTRelativeSlot( + 3, flatbuffers.number_types.UOffsetTFlags.py_type(pulseIndex), 0 + ) + + +def Event43MessageStartPulseIndexVector(builder, numElems): + return builder.StartVector(4, numElems, 4) + + +def Event43MessageAddTimeOfFlight(builder, timeOfFlight): + builder.PrependUOffsetTRelativeSlot( + 4, flatbuffers.number_types.UOffsetTFlags.py_type(timeOfFlight), 0 + ) + + +def Event43MessageStartTimeOfFlightVector(builder, numElems): + return builder.StartVector(4, numElems, 4) + + +def Event43MessageAddDetectorId(builder, detectorId): + builder.PrependUOffsetTRelativeSlot( + 5, flatbuffers.number_types.UOffsetTFlags.py_type(detectorId), 0 + ) + + +def Event43MessageStartDetectorIdVector(builder, numElems): + return builder.StartVector(4, numElems, 4) + + +def Event43MessageEnd(builder): + return builder.EndObject() diff --git a/streaming_data_types/fbschemas/eventdata_ev43/__init__.py b/streaming_data_types/fbschemas/eventdata_ev43/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/streaming_data_types/logdata_f142.py b/streaming_data_types/logdata_f142.py index 993693c..94a3774 100644 --- a/streaming_data_types/logdata_f142.py +++ 
b/streaming_data_types/logdata_f142.py @@ -451,11 +451,11 @@ def _serialise_stringarray(builder: flatbuffers.Builder, data: np.ndarray, sourc _map_scalar_type_to_serialiser = { np.dtype("byte"): _serialise_byte, np.dtype("ubyte"): _serialise_ubyte, - np.dtype("int8"): _serialise_short, + np.dtype("int8"): _serialise_byte, np.dtype("int16"): _serialise_short, np.dtype("int32"): _serialise_int, np.dtype("int64"): _serialise_long, - np.dtype("uint8"): _serialise_ushort, + np.dtype("uint8"): _serialise_ubyte, np.dtype("uint16"): _serialise_ushort, np.dtype("uint32"): _serialise_uint, np.dtype("uint64"): _serialise_ulong, @@ -466,11 +466,11 @@ def _serialise_stringarray(builder: flatbuffers.Builder, data: np.ndarray, sourc _map_array_type_to_serialiser = { np.dtype("byte"): _serialise_bytearray, np.dtype("ubyte"): _serialise_ubytearray, - np.dtype("int8"): _serialise_shortarray, + np.dtype("int8"): _serialise_bytearray, np.dtype("int16"): _serialise_shortarray, np.dtype("int32"): _serialise_intarray, np.dtype("int64"): _serialise_longarray, - np.dtype("uint8"): _serialise_ushortarray, + np.dtype("uint8"): _serialise_ubytearray, np.dtype("uint16"): _serialise_ushortarray, np.dtype("uint32"): _serialise_uintarray, np.dtype("uint64"): _serialise_ulongarray, diff --git a/tests/test_ev43.py b/tests/test_ev43.py new file mode 100644 index 0000000..946d7ed --- /dev/null +++ b/tests/test_ev43.py @@ -0,0 +1,75 @@ +import numpy as np +import pytest +from streaming_data_types.eventdata_ev43 import serialise_ev43, deserialise_ev43 +from streaming_data_types import SERIALISERS, DESERIALISERS +from streaming_data_types.exceptions import WrongSchemaException + + +class TestSerialisationEv42: + def test_serialises_and_deserialises_ev43_message_correctly(self): + """ + Round-trip to check what we serialise is what we get back. 
+ """ + original_entry = { + "source_name": "some_source", + "message_id": 123456, + "pulse_time": [567890, 568890], + "pulse_index": [0, 4], + "time_of_flight": [1, 2, 3, 4, 5, 6, 7, 8, 9], + "detector_id": [10, 20, 30, 40, 50, 60, 70, 80, 90], + } + + buf = serialise_ev43(**original_entry) + entry = deserialise_ev43(buf) + + assert entry.source_name == original_entry["source_name"] + assert entry.message_id == original_entry["message_id"] + assert np.array_equal(entry.pulse_time, original_entry["pulse_time"]) + assert np.array_equal(entry.pulse_index, original_entry["pulse_index"]) + assert np.array_equal(entry.time_of_flight, original_entry["time_of_flight"]) + assert np.array_equal(entry.detector_id, original_entry["detector_id"]) + + def test_serialises_and_deserialises_ev43_message_correctly_for_numpy_arrays(self): + """ + Round-trip to check what we serialise is what we get back. + """ + original_entry = { + "source_name": "some_source", + "message_id": 123456, + "pulse_time": np.array([567890, 568890]), + "pulse_index": np.array([0, 4]), + "time_of_flight": np.array([1, 2, 3, 4, 5, 6, 7, 8, 9]), + "detector_id": np.array([10, 20, 30, 40, 50, 60, 70, 80, 90]), + } + + buf = serialise_ev43(**original_entry) + entry = deserialise_ev43(buf) + + assert entry.source_name == original_entry["source_name"] + assert entry.message_id == original_entry["message_id"] + assert np.array_equal(entry.pulse_time, original_entry["pulse_time"]) + assert np.array_equal(entry.pulse_index, original_entry["pulse_index"]) + assert np.array_equal(entry.time_of_flight, original_entry["time_of_flight"]) + assert np.array_equal(entry.detector_id, original_entry["detector_id"]) + + def test_if_buffer_has_wrong_id_then_throws(self): + original_entry = { + "source_name": "some_source", + "message_id": 123456, + "pulse_time": [567890, 568890], + "pulse_index": [0, 4], + "time_of_flight": [1, 2, 3, 4, 5, 6, 7, 8, 9], + "detector_id": [10, 20, 30, 40, 50, 60, 70, 80, 90], + } + buf = 
serialise_ev43(**original_entry) + + # Manually hack the id + buf = bytearray(buf) + buf[4:8] = b"1234" + + with pytest.raises(WrongSchemaException): + deserialise_ev43(buf) + + def test_schema_type_is_in_global_serialisers_list(self): + assert "ev43" in SERIALISERS + assert "ev43" in DESERIALISERS diff --git a/tests/test_f142.py b/tests/test_f142.py index e6221e5..b0d9796 100644 --- a/tests/test_f142.py +++ b/tests/test_f142.py @@ -25,6 +25,19 @@ def test_serialises_and_deserialises_integer_f142_message_correctly(self): == self.original_entry["timestamp_unix_ns"] ) + def test_serialises_and_deserialises_byte_f142_message_correctly(self): + byte_log = { + "source_name": "some_source", + "value": 0x7F, + "timestamp_unix_ns": 1585332414000000000, + } + buf = serialise_f142(**byte_log) + deserialised_tuple = deserialise_f142(buf) + + assert deserialised_tuple.source_name == byte_log["source_name"] + assert deserialised_tuple.value == byte_log["value"] + assert deserialised_tuple.timestamp_unix_ns == byte_log["timestamp_unix_ns"] + def test_serialises_and_deserialises_float_f142_message_correctly(self): float_log = { "source_name": "some_source", @@ -91,6 +104,20 @@ def test_serialises_and_deserialises_numpy_array_integers_correctly(self): assert np.array_equal(deserialised_tuple.value, array_log["value"]) assert deserialised_tuple.timestamp_unix_ns == array_log["timestamp_unix_ns"] + def test_serialises_and_deserialises_numpy_array_preserves_byte_type_correctly( + self, + ): + array_log = { + "source_name": "some_source", + "value": np.array([1, 2, 3], dtype=np.uint8), + "timestamp_unix_ns": 1585332414000000000, + } + buf = serialise_f142(**array_log) + deserialised_tuple = deserialise_f142(buf) + + assert np.array_equal(deserialised_tuple.value, array_log["value"]) + assert deserialised_tuple.value.dtype == array_log["value"].dtype + def test_serialises_and_deserialises_numpy_array_preserves_integer_type_correctly( self, ): From 
13a992595716bc5f8a986b620d4830bd2ec106dd Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Fri, 22 Oct 2021 08:57:10 +0200 Subject: [PATCH 259/363] Bumped version number --- streaming_data_types/_version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/streaming_data_types/_version.py b/streaming_data_types/_version.py index 3ea9ff4..c9f3b37 100644 --- a/streaming_data_types/_version.py +++ b/streaming_data_types/_version.py @@ -1,4 +1,4 @@ # Version is not directly defined in __init__ because that causes all # run time dependencies to become build-time dependencies when it is # imported in setup.py -version = "0.14.0" +version = "0.15.0" From 958779bbc3b65ea683c753034a43c0513abdd090 Mon Sep 17 00:00:00 2001 From: Jonas Nilsson Date: Wed, 29 Dec 2021 22:55:31 +0100 Subject: [PATCH 260/363] Massively improve the performance of serialisation of arrays in the f142 serialiser module. --- streaming_data_types/_version.py | 2 +- streaming_data_types/logdata_f142.py | 50 ++++++---------------------- 2 files changed, 11 insertions(+), 41 deletions(-) diff --git a/streaming_data_types/_version.py b/streaming_data_types/_version.py index c9f3b37..d8abffc 100644 --- a/streaming_data_types/_version.py +++ b/streaming_data_types/_version.py @@ -1,4 +1,4 @@ # Version is not directly defined in __init__ because that causes all # run time dependencies to become build-time dependencies when it is # imported in setup.py -version = "0.15.0" +version = "0.15.1" diff --git a/streaming_data_types/logdata_f142.py b/streaming_data_types/logdata_f142.py index 94a3774..a64238f 100644 --- a/streaming_data_types/logdata_f142.py +++ b/streaming_data_types/logdata_f142.py @@ -191,10 +191,7 @@ def _serialise_byte(builder: flatbuffers.Builder, data: np.ndarray, source: int) def _serialise_bytearray(builder: flatbuffers.Builder, data: np.ndarray, source: int): - ArrayByteStartValueVector(builder, len(data)) - for single_value in reversed(data): - 
builder.PrependInt8(single_value) - array_offset = builder.EndVector(len(data)) + array_offset = builder.CreateNumpyVector(data) ArrayByteStart(builder) ArrayByteAddValue(builder, array_offset) value_position = ArrayByteEnd(builder) @@ -215,10 +212,7 @@ def _serialise_ubyte(builder: flatbuffers.Builder, data: np.ndarray, source: int def _serialise_ubytearray(builder: flatbuffers.Builder, data: np.ndarray, source: int): - ArrayUByteStartValueVector(builder, len(data)) - for single_value in reversed(data): - builder.PrependUint8(single_value) - array_offset = builder.EndVector(len(data)) + array_offset = builder.CreateNumpyVector(data) ArrayUByteStart(builder) ArrayUByteAddValue(builder, array_offset) value_position = ArrayUByteEnd(builder) @@ -239,10 +233,7 @@ def _serialise_short(builder: flatbuffers.Builder, data: np.ndarray, source: int def _serialise_shortarray(builder: flatbuffers.Builder, data: np.ndarray, source: int): - ArrayShortStartValueVector(builder, len(data)) - for single_value in reversed(data): - builder.PrependInt16(single_value) - array_offset = builder.EndVector(len(data)) + array_offset = builder.CreateNumpyVector(data) ArrayShortStart(builder) ArrayShortAddValue(builder, array_offset) value_position = ArrayShortEnd(builder) @@ -263,10 +254,7 @@ def _serialise_ushort(builder: flatbuffers.Builder, data: np.ndarray, source: in def _serialise_ushortarray(builder: flatbuffers.Builder, data: np.ndarray, source: int): - ArrayUShortStartValueVector(builder, len(data)) - for single_value in reversed(data): - builder.PrependUint16(single_value) - array_offset = builder.EndVector(len(data)) + array_offset = builder.CreateNumpyVector(data) ArrayUShortStart(builder) ArrayUShortAddValue(builder, array_offset) value_position = ArrayUShortEnd(builder) @@ -287,10 +275,7 @@ def _serialise_int(builder: flatbuffers.Builder, data: np.ndarray, source: int): def _serialise_intarray(builder: flatbuffers.Builder, data: np.ndarray, source: int): - 
ArrayIntStartValueVector(builder, len(data)) - for single_value in reversed(data): - builder.PrependInt32(single_value) - array_offset = builder.EndVector(len(data)) + array_offset = builder.CreateNumpyVector(data) ArrayIntStart(builder) ArrayIntAddValue(builder, array_offset) value_position = ArrayIntEnd(builder) @@ -311,10 +296,7 @@ def _serialise_uint(builder: flatbuffers.Builder, data: np.ndarray, source: int) def _serialise_uintarray(builder: flatbuffers.Builder, data: np.ndarray, source: int): - ArrayUIntStartValueVector(builder, len(data)) - for single_value in reversed(data): - builder.PrependUint32(single_value) - array_offset = builder.EndVector(len(data)) + array_offset = builder.CreateNumpyVector(data) ArrayUIntStart(builder) ArrayUIntAddValue(builder, array_offset) value_position = ArrayUIntEnd(builder) @@ -335,10 +317,7 @@ def _serialise_long(builder: flatbuffers.Builder, data: np.ndarray, source: int) def _serialise_longarray(builder: flatbuffers.Builder, data: np.ndarray, source: int): - ArrayLongStartValueVector(builder, len(data)) - for single_value in reversed(data): - builder.PrependInt64(single_value) - array_offset = builder.EndVector(len(data)) + array_offset = builder.CreateNumpyVector(data) ArrayLongStart(builder) ArrayLongAddValue(builder, array_offset) value_position = ArrayLongEnd(builder) @@ -359,10 +338,7 @@ def _serialise_ulong(builder: flatbuffers.Builder, data: np.ndarray, source: int def _serialise_ulongarray(builder: flatbuffers.Builder, data: np.ndarray, source: int): - ArrayULongStartValueVector(builder, len(data)) - for single_value in reversed(data): - builder.PrependUint64(single_value) - array_offset = builder.EndVector(len(data)) + array_offset = builder.CreateNumpyVector(data) ArrayULongStart(builder) ArrayULongAddValue(builder, array_offset) value_position = ArrayULongEnd(builder) @@ -383,10 +359,7 @@ def _serialise_float(builder: flatbuffers.Builder, data: np.ndarray, source: int def _serialise_floatarray(builder: 
flatbuffers.Builder, data: np.ndarray, source: int): - ArrayFloatStartValueVector(builder, len(data)) - for single_value in reversed(data): - builder.PrependFloat32(single_value) - array_offset = builder.EndVector(len(data)) + array_offset = builder.CreateNumpyVector(data) ArrayFloatStart(builder) ArrayFloatAddValue(builder, array_offset) value_position = ArrayFloatEnd(builder) @@ -407,10 +380,7 @@ def _serialise_double(builder: flatbuffers.Builder, data: np.ndarray, source: in def _serialise_doublearray(builder: flatbuffers.Builder, data: np.ndarray, source: int): - ArrayDoubleStartValueVector(builder, len(data)) - for single_value in reversed(data): - builder.PrependFloat64(single_value) - array_offset = builder.EndVector(len(data)) + array_offset = builder.CreateNumpyVector(data) ArrayDoubleStart(builder) ArrayDoubleAddValue(builder, array_offset) value_position = ArrayDoubleEnd(builder) From fb8ce5bc24549aa34f7228bf4b7def4350557b1b Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Mon, 3 Jan 2022 08:06:18 +0100 Subject: [PATCH 261/363] Removed unused imports --- streaming_data_types/logdata_f142.py | 10 ---------- 1 file changed, 10 deletions(-) diff --git a/streaming_data_types/logdata_f142.py b/streaming_data_types/logdata_f142.py index a64238f..3c1aa1b 100644 --- a/streaming_data_types/logdata_f142.py +++ b/streaming_data_types/logdata_f142.py @@ -12,7 +12,6 @@ ArrayUByteStart, ArrayUByteAddValue, ArrayUByteEnd, - ArrayUByteStartValueVector, ) from streaming_data_types.fbschemas.logdata_f142.Byte import ( Byte, @@ -25,7 +24,6 @@ ArrayByteStart, ArrayByteAddValue, ArrayByteEnd, - ArrayByteStartValueVector, ) from streaming_data_types.fbschemas.logdata_f142.UShort import ( UShort, @@ -38,7 +36,6 @@ ArrayUShortStart, ArrayUShortAddValue, ArrayUShortEnd, - ArrayUShortStartValueVector, ) from streaming_data_types.fbschemas.logdata_f142.Short import ( Short, @@ -51,7 +48,6 @@ ArrayShortStart, ArrayShortAddValue, ArrayShortEnd, - ArrayShortStartValueVector, ) from 
streaming_data_types.fbschemas.logdata_f142.UInt import ( UInt, @@ -64,7 +60,6 @@ ArrayUIntStart, ArrayUIntAddValue, ArrayUIntEnd, - ArrayUIntStartValueVector, ) from streaming_data_types.fbschemas.logdata_f142.Int import ( Int, @@ -77,7 +72,6 @@ ArrayIntStart, ArrayIntAddValue, ArrayIntEnd, - ArrayIntStartValueVector, ) from streaming_data_types.fbschemas.logdata_f142.ULong import ( ULong, @@ -90,7 +84,6 @@ ArrayULongStart, ArrayULongAddValue, ArrayULongEnd, - ArrayULongStartValueVector, ) from streaming_data_types.fbschemas.logdata_f142.Long import ( Long, @@ -103,7 +96,6 @@ ArrayLongStart, ArrayLongAddValue, ArrayLongEnd, - ArrayLongStartValueVector, ) from streaming_data_types.fbschemas.logdata_f142.Float import ( Float, @@ -116,7 +108,6 @@ ArrayFloatStart, ArrayFloatAddValue, ArrayFloatEnd, - ArrayFloatStartValueVector, ) from streaming_data_types.fbschemas.logdata_f142.Double import ( Double, @@ -129,7 +120,6 @@ ArrayDoubleStart, ArrayDoubleAddValue, ArrayDoubleEnd, - ArrayDoubleStartValueVector, ) from streaming_data_types.fbschemas.logdata_f142.String import ( String, From 86e60aa2fce7f7c39adc4ec0fde53d096b4cc00e Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Wed, 16 Feb 2022 14:14:59 +0100 Subject: [PATCH 262/363] Add support for hs01 --- README.md | 5 +- streaming_data_types/__init__.py | 3 + .../fbschemas/histogram_hs01/Array.py | 11 + .../fbschemas/histogram_hs01/ArrayDouble.py | 57 +++ .../fbschemas/histogram_hs01/ArrayFloat.py | 57 +++ .../fbschemas/histogram_hs01/ArrayInt.py | 57 +++ .../fbschemas/histogram_hs01/ArrayLong.py | 57 +++ .../histogram_hs01/DimensionMetaData.py | 71 ++++ .../histogram_hs01/EventHistogram.py | 183 +++++++++ .../fbschemas/histogram_hs01/__init__.py | 0 streaming_data_types/histogram_hs01.py | 231 +++++++++++ tests/test_hs01.py | 364 ++++++++++++++++++ 12 files changed, 1094 insertions(+), 2 deletions(-) create mode 100644 streaming_data_types/fbschemas/histogram_hs01/Array.py create mode 100644 
streaming_data_types/fbschemas/histogram_hs01/ArrayDouble.py create mode 100644 streaming_data_types/fbschemas/histogram_hs01/ArrayFloat.py create mode 100644 streaming_data_types/fbschemas/histogram_hs01/ArrayInt.py create mode 100644 streaming_data_types/fbschemas/histogram_hs01/ArrayLong.py create mode 100644 streaming_data_types/fbschemas/histogram_hs01/DimensionMetaData.py create mode 100644 streaming_data_types/fbschemas/histogram_hs01/EventHistogram.py create mode 100644 streaming_data_types/fbschemas/histogram_hs01/__init__.py create mode 100644 streaming_data_types/histogram_hs01.py create mode 100644 tests/test_hs01.py diff --git a/README.md b/README.md index aecdb09..4e08284 100644 --- a/README.md +++ b/README.md @@ -8,7 +8,8 @@ https://github.com/ess-dmsc/streaming-data-types |name|description| |----|-----------| -|hs00|Histogram schema| +|hs00|Histogram schema (deprecated in favour of hs01)| +|hs01|Histogram schema| |ns10|NICOS cache entry schema| |pl72|Run start| |6s4t|Run stop| @@ -24,7 +25,7 @@ https://github.com/ess-dmsc/streaming-data-types |NDAr|**Deprecated**| |ADAr|For storing EPICS areaDetector data| -### hs00 +### hs00 and hs01 Schema for histogram data. It is one of the more complicated to use schemas. It takes a Python dictionary as its input; this dictionary needs to have correctly named fields. 
diff --git a/streaming_data_types/__init__.py b/streaming_data_types/__init__.py index d5e11f0..f0e41a0 100644 --- a/streaming_data_types/__init__.py +++ b/streaming_data_types/__init__.py @@ -1,6 +1,7 @@ from streaming_data_types.eventdata_ev42 import deserialise_ev42, serialise_ev42 from streaming_data_types.eventdata_ev43 import deserialise_ev43, serialise_ev43 from streaming_data_types.histogram_hs00 import deserialise_hs00, serialise_hs00 +from streaming_data_types.histogram_hs01 import serialise_hs01, deserialise_hs01 from streaming_data_types.logdata_f142 import deserialise_f142, serialise_f142 from streaming_data_types.nicos_cache_ns10 import deserialise_ns10, serialise_ns10 from streaming_data_types.run_start_pl72 import deserialise_pl72, serialise_pl72 @@ -31,6 +32,7 @@ "ev42": serialise_ev42, "ev43": serialise_ev43, "hs00": serialise_hs00, + "hs01": serialise_hs01, "f142": serialise_f142, "ns10": serialise_ns10, "pl72": serialise_pl72, @@ -51,6 +53,7 @@ "ev42": deserialise_ev42, "ev43": deserialise_ev43, "hs00": deserialise_hs00, + "hs01": deserialise_hs01, "f142": deserialise_f142, "ns10": deserialise_ns10, "pl72": deserialise_pl72, diff --git a/streaming_data_types/fbschemas/histogram_hs01/Array.py b/streaming_data_types/fbschemas/histogram_hs01/Array.py new file mode 100644 index 0000000..f386136 --- /dev/null +++ b/streaming_data_types/fbschemas/histogram_hs01/Array.py @@ -0,0 +1,11 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +class Array(object): + NONE = 0 + ArrayInt = 1 + ArrayLong = 2 + ArrayDouble = 3 + ArrayFloat = 4 + diff --git a/streaming_data_types/fbschemas/histogram_hs01/ArrayDouble.py b/streaming_data_types/fbschemas/histogram_hs01/ArrayDouble.py new file mode 100644 index 0000000..9bf9497 --- /dev/null +++ b/streaming_data_types/fbschemas/histogram_hs01/ArrayDouble.py @@ -0,0 +1,57 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers 
+from flatbuffers.compat import import_numpy +np = import_numpy() + +class ArrayDouble(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAsArrayDouble(cls, buf, offset): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = ArrayDouble() + x.Init(buf, n + offset) + return x + + @classmethod + def ArrayDoubleBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x68\x73\x30\x31", size_prefixed=size_prefixed) + + # ArrayDouble + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # ArrayDouble + def Value(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get(flatbuffers.number_types.Float64Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8)) + return 0 + + # ArrayDouble + def ValueAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Float64Flags, o) + return 0 + + # ArrayDouble + def ValueLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # ArrayDouble + def ValueIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + return o == 0 + +def ArrayDoubleStart(builder): builder.StartObject(1) +def ArrayDoubleAddValue(builder, value): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0) +def ArrayDoubleStartValueVector(builder, numElems): return builder.StartVector(8, numElems, 8) +def ArrayDoubleEnd(builder): return builder.EndObject() diff --git a/streaming_data_types/fbschemas/histogram_hs01/ArrayFloat.py b/streaming_data_types/fbschemas/histogram_hs01/ArrayFloat.py new file mode 100644 index 0000000..fe27916 --- /dev/null +++ 
b/streaming_data_types/fbschemas/histogram_hs01/ArrayFloat.py @@ -0,0 +1,57 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers +from flatbuffers.compat import import_numpy +np = import_numpy() + +class ArrayFloat(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAsArrayFloat(cls, buf, offset): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = ArrayFloat() + x.Init(buf, n + offset) + return x + + @classmethod + def ArrayFloatBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x68\x73\x30\x31", size_prefixed=size_prefixed) + + # ArrayFloat + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # ArrayFloat + def Value(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get(flatbuffers.number_types.Float32Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4)) + return 0 + + # ArrayFloat + def ValueAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Float32Flags, o) + return 0 + + # ArrayFloat + def ValueLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # ArrayFloat + def ValueIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + return o == 0 + +def ArrayFloatStart(builder): builder.StartObject(1) +def ArrayFloatAddValue(builder, value): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0) +def ArrayFloatStartValueVector(builder, numElems): return builder.StartVector(4, numElems, 4) +def ArrayFloatEnd(builder): return builder.EndObject() diff --git 
a/streaming_data_types/fbschemas/histogram_hs01/ArrayInt.py b/streaming_data_types/fbschemas/histogram_hs01/ArrayInt.py new file mode 100644 index 0000000..3c9fdc9 --- /dev/null +++ b/streaming_data_types/fbschemas/histogram_hs01/ArrayInt.py @@ -0,0 +1,57 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers +from flatbuffers.compat import import_numpy +np = import_numpy() + +class ArrayInt(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAsArrayInt(cls, buf, offset): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = ArrayInt() + x.Init(buf, n + offset) + return x + + @classmethod + def ArrayIntBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x68\x73\x30\x31", size_prefixed=size_prefixed) + + # ArrayInt + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # ArrayInt + def Value(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get(flatbuffers.number_types.Int32Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4)) + return 0 + + # ArrayInt + def ValueAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int32Flags, o) + return 0 + + # ArrayInt + def ValueLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # ArrayInt + def ValueIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + return o == 0 + +def ArrayIntStart(builder): builder.StartObject(1) +def ArrayIntAddValue(builder, value): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0) +def ArrayIntStartValueVector(builder, numElems): return 
builder.StartVector(4, numElems, 4) +def ArrayIntEnd(builder): return builder.EndObject() diff --git a/streaming_data_types/fbschemas/histogram_hs01/ArrayLong.py b/streaming_data_types/fbschemas/histogram_hs01/ArrayLong.py new file mode 100644 index 0000000..68e0d4f --- /dev/null +++ b/streaming_data_types/fbschemas/histogram_hs01/ArrayLong.py @@ -0,0 +1,57 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers +from flatbuffers.compat import import_numpy +np = import_numpy() + +class ArrayLong(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAsArrayLong(cls, buf, offset): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = ArrayLong() + x.Init(buf, n + offset) + return x + + @classmethod + def ArrayLongBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x68\x73\x30\x31", size_prefixed=size_prefixed) + + # ArrayLong + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # ArrayLong + def Value(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get(flatbuffers.number_types.Int64Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8)) + return 0 + + # ArrayLong + def ValueAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int64Flags, o) + return 0 + + # ArrayLong + def ValueLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # ArrayLong + def ValueIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + return o == 0 + +def ArrayLongStart(builder): builder.StartObject(1) +def ArrayLongAddValue(builder, value): builder.PrependUOffsetTRelativeSlot(0, 
flatbuffers.number_types.UOffsetTFlags.py_type(value), 0) +def ArrayLongStartValueVector(builder, numElems): return builder.StartVector(8, numElems, 8) +def ArrayLongEnd(builder): return builder.EndObject() diff --git a/streaming_data_types/fbschemas/histogram_hs01/DimensionMetaData.py b/streaming_data_types/fbschemas/histogram_hs01/DimensionMetaData.py new file mode 100644 index 0000000..eea5fe6 --- /dev/null +++ b/streaming_data_types/fbschemas/histogram_hs01/DimensionMetaData.py @@ -0,0 +1,71 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers +from flatbuffers.compat import import_numpy +np = import_numpy() + +class DimensionMetaData(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAsDimensionMetaData(cls, buf, offset): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = DimensionMetaData() + x.Init(buf, n + offset) + return x + + @classmethod + def DimensionMetaDataBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x68\x73\x30\x31", size_prefixed=size_prefixed) + + # DimensionMetaData + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # DimensionMetaData + def Length(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) + return 0 + + # DimensionMetaData + def Unit(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + # DimensionMetaData + def Label(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + # DimensionMetaData + def BinBoundariesType(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + if o != 0: + return 
self._tab.Get(flatbuffers.number_types.Uint8Flags, o + self._tab.Pos) + return 0 + + # DimensionMetaData + def BinBoundaries(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + if o != 0: + from flatbuffers.table import Table + obj = Table(bytearray(), 0) + self._tab.Union(obj, o) + return obj + return None + +def DimensionMetaDataStart(builder): builder.StartObject(5) +def DimensionMetaDataAddLength(builder, length): builder.PrependInt32Slot(0, length, 0) +def DimensionMetaDataAddUnit(builder, unit): builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(unit), 0) +def DimensionMetaDataAddLabel(builder, label): builder.PrependUOffsetTRelativeSlot(2, flatbuffers.number_types.UOffsetTFlags.py_type(label), 0) +def DimensionMetaDataAddBinBoundariesType(builder, binBoundariesType): builder.PrependUint8Slot(3, binBoundariesType, 0) +def DimensionMetaDataAddBinBoundaries(builder, binBoundaries): builder.PrependUOffsetTRelativeSlot(4, flatbuffers.number_types.UOffsetTFlags.py_type(binBoundaries), 0) +def DimensionMetaDataEnd(builder): return builder.EndObject() diff --git a/streaming_data_types/fbschemas/histogram_hs01/EventHistogram.py b/streaming_data_types/fbschemas/histogram_hs01/EventHistogram.py new file mode 100644 index 0000000..f82d198 --- /dev/null +++ b/streaming_data_types/fbschemas/histogram_hs01/EventHistogram.py @@ -0,0 +1,183 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers +from flatbuffers.compat import import_numpy +np = import_numpy() + +class EventHistogram(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAsEventHistogram(cls, buf, offset): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = EventHistogram() + x.Init(buf, n + offset) + return x + + @classmethod + def EventHistogramBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, 
offset, b"\x68\x73\x30\x31", size_prefixed=size_prefixed) + + # EventHistogram + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # EventHistogram + def Source(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + # EventHistogram + def Timestamp(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int64Flags, o + self._tab.Pos) + return 0 + + # EventHistogram + def DimMetadata(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + x = self._tab.Vector(o) + x += flatbuffers.number_types.UOffsetTFlags.py_type(j) * 4 + x = self._tab.Indirect(x) + from .DimensionMetaData import DimensionMetaData + obj = DimensionMetaData() + obj.Init(self._tab.Bytes, x) + return obj + return None + + # EventHistogram + def DimMetadataLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # EventHistogram + def DimMetadataIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + return o == 0 + + # EventHistogram + def LastMetadataTimestamp(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int64Flags, o + self._tab.Pos) + return 0 + + # EventHistogram + def CurrentShape(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get(flatbuffers.number_types.Int32Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4)) + return 0 + + # EventHistogram + def CurrentShapeAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + if o != 0: + return 
self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int32Flags, o) + return 0 + + # EventHistogram + def CurrentShapeLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # EventHistogram + def CurrentShapeIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + return o == 0 + + # EventHistogram + def Offset(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get(flatbuffers.number_types.Int32Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4)) + return 0 + + # EventHistogram + def OffsetAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int32Flags, o) + return 0 + + # EventHistogram + def OffsetLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # EventHistogram + def OffsetIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) + return o == 0 + + # EventHistogram + def DataType(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Uint8Flags, o + self._tab.Pos) + return 0 + + # EventHistogram + def Data(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(18)) + if o != 0: + from flatbuffers.table import Table + obj = Table(bytearray(), 0) + self._tab.Union(obj, o) + return obj + return None + + # EventHistogram + def ErrorsType(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(20)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Uint8Flags, o + self._tab.Pos) + return 0 + + # EventHistogram + def Errors(self): + o = 
flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(22)) + if o != 0: + from flatbuffers.table import Table + obj = Table(bytearray(), 0) + self._tab.Union(obj, o) + return obj + return None + + # EventHistogram + def Info(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(24)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + +def EventHistogramStart(builder): builder.StartObject(11) +def EventHistogramAddSource(builder, source): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(source), 0) +def EventHistogramAddTimestamp(builder, timestamp): builder.PrependInt64Slot(1, timestamp, 0) +def EventHistogramAddDimMetadata(builder, dimMetadata): builder.PrependUOffsetTRelativeSlot(2, flatbuffers.number_types.UOffsetTFlags.py_type(dimMetadata), 0) +def EventHistogramStartDimMetadataVector(builder, numElems): return builder.StartVector(4, numElems, 4) +def EventHistogramAddLastMetadataTimestamp(builder, lastMetadataTimestamp): builder.PrependInt64Slot(3, lastMetadataTimestamp, 0) +def EventHistogramAddCurrentShape(builder, currentShape): builder.PrependUOffsetTRelativeSlot(4, flatbuffers.number_types.UOffsetTFlags.py_type(currentShape), 0) +def EventHistogramStartCurrentShapeVector(builder, numElems): return builder.StartVector(4, numElems, 4) +def EventHistogramAddOffset(builder, offset): builder.PrependUOffsetTRelativeSlot(5, flatbuffers.number_types.UOffsetTFlags.py_type(offset), 0) +def EventHistogramStartOffsetVector(builder, numElems): return builder.StartVector(4, numElems, 4) +def EventHistogramAddDataType(builder, dataType): builder.PrependUint8Slot(6, dataType, 0) +def EventHistogramAddData(builder, data): builder.PrependUOffsetTRelativeSlot(7, flatbuffers.number_types.UOffsetTFlags.py_type(data), 0) +def EventHistogramAddErrorsType(builder, errorsType): builder.PrependUint8Slot(8, errorsType, 0) +def EventHistogramAddErrors(builder, errors): 
builder.PrependUOffsetTRelativeSlot(9, flatbuffers.number_types.UOffsetTFlags.py_type(errors), 0) +def EventHistogramAddInfo(builder, info): builder.PrependUOffsetTRelativeSlot(10, flatbuffers.number_types.UOffsetTFlags.py_type(info), 0) +def EventHistogramEnd(builder): return builder.EndObject() diff --git a/streaming_data_types/fbschemas/histogram_hs01/__init__.py b/streaming_data_types/fbschemas/histogram_hs01/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/streaming_data_types/histogram_hs01.py b/streaming_data_types/histogram_hs01.py new file mode 100644 index 0000000..62fc1b3 --- /dev/null +++ b/streaming_data_types/histogram_hs01.py @@ -0,0 +1,231 @@ +import flatbuffers +import numpy +import streaming_data_types.fbschemas.histogram_hs01.ArrayFloat as ArrayFloat +import streaming_data_types.fbschemas.histogram_hs01.ArrayDouble as ArrayDouble +import streaming_data_types.fbschemas.histogram_hs01.ArrayInt as ArrayInt +import streaming_data_types.fbschemas.histogram_hs01.ArrayLong as ArrayLong +import streaming_data_types.fbschemas.histogram_hs01.DimensionMetaData as DimensionMetaData +import streaming_data_types.fbschemas.histogram_hs01.EventHistogram as EventHistogram +from streaming_data_types.fbschemas.histogram_hs01.Array import Array +from streaming_data_types.utils import check_schema_identifier + + +FILE_IDENTIFIER = b"hs01" + + +_array_for_type = { + Array.ArrayInt: ArrayInt.ArrayInt(), + Array.ArrayLong: ArrayLong.ArrayLong(), + Array.ArrayFloat: ArrayFloat.ArrayFloat(), +} + + +def _create_array_object_for_type(array_type): + return _array_for_type.get(array_type, ArrayDouble.ArrayDouble()) + + +def deserialise_hs01(buffer): + """ + Deserialise flatbuffer hs10 into a histogram. 
+ + :param buffer: + :return: dict of histogram information + """ + check_schema_identifier(buffer, FILE_IDENTIFIER) + event_hist = EventHistogram.EventHistogram.GetRootAsEventHistogram(buffer, 0) + + dims = [] + for i in range(event_hist.DimMetadataLength()): + bins_fb = _create_array_object_for_type( + event_hist.DimMetadata(i).BinBoundariesType() + ) + + # Get bins + bins_offset = event_hist.DimMetadata(i).BinBoundaries() + bins_fb.Init(bins_offset.Bytes, bins_offset.Pos) + bin_boundaries = bins_fb.ValueAsNumpy() + + hist_info = { + "length": event_hist.DimMetadata(i).Length(), + "bin_boundaries": bin_boundaries, + "unit": event_hist.DimMetadata(i).Unit().decode("utf-8") + if event_hist.DimMetadata(i).Unit() + else "", + "label": event_hist.DimMetadata(i).Label().decode("utf-8") + if event_hist.DimMetadata(i).Label() + else "", + } + dims.append(hist_info) + + metadata_timestamp = event_hist.LastMetadataTimestamp() + + data_fb = _create_array_object_for_type(event_hist.DataType()) + data_offset = event_hist.Data() + data_fb.Init(data_offset.Bytes, data_offset.Pos) + shape = event_hist.CurrentShapeAsNumpy().tolist() + data = data_fb.ValueAsNumpy().reshape(shape) + + # Get the errors + errors_offset = event_hist.Errors() + if errors_offset: + errors_fb = _create_array_object_for_type(event_hist.ErrorsType()) + errors_fb.Init(errors_offset.Bytes, errors_offset.Pos) + errors = errors_fb.ValueAsNumpy().reshape(shape) + else: + errors = [] + + hist = { + "source": event_hist.Source().decode("utf-8") if event_hist.Source() else "", + "timestamp": event_hist.Timestamp(), + "current_shape": shape, + "dim_metadata": dims, + "data": data, + "errors": errors, + "last_metadata_timestamp": metadata_timestamp, + "info": event_hist.Info().decode("utf-8") if event_hist.Info() else "", + } + return hist + + +def _serialise_metadata(builder, length, edges, unit, label): + unit_offset = builder.CreateString(unit) + label_offset = builder.CreateString(label) + + bins_offset, 
bin_type = _serialise_array(builder, edges) + + DimensionMetaData.DimensionMetaDataStart(builder) + DimensionMetaData.DimensionMetaDataAddLength(builder, length) + DimensionMetaData.DimensionMetaDataAddBinBoundaries(builder, bins_offset) + DimensionMetaData.DimensionMetaDataAddBinBoundariesType(builder, bin_type) + DimensionMetaData.DimensionMetaDataAddLabel(builder, label_offset) + DimensionMetaData.DimensionMetaDataAddUnit(builder, unit_offset) + return DimensionMetaData.DimensionMetaDataEnd(builder) + + +def serialise_hs01(histogram): + """ + Serialise a histogram as an hs01 FlatBuffers message. + + If arrays are provided as numpy arrays with type np.int32, np.int64, np.float32 + or np.float64 then type is preserved in output buffer. + + :param histogram: A dictionary containing the histogram to serialise. + """ + source_offset = None + info_offset = None + + builder = flatbuffers.Builder(1024) + builder.ForceDefaults(True) + if "source" in histogram: + source_offset = builder.CreateString(histogram["source"]) + if "info" in histogram: + info_offset = builder.CreateString(histogram["info"]) + + # Build shape array + shape_offset = builder.CreateNumpyVector( + numpy.array(histogram["current_shape"]).astype(numpy.int32) + ) + + # Build dimensions metadata + metadata = [] + for meta in histogram["dim_metadata"]: + unit = "" if "unit" not in meta else meta["unit"] + label = "" if "label" not in meta else meta["label"] + metadata.append( + _serialise_metadata( + builder, meta["length"], meta["bin_boundaries"], unit, label + ) + ) + + rank = len(histogram["current_shape"]) + EventHistogram.EventHistogramStartDimMetadataVector(builder, rank) + # FlatBuffers builds arrays backwards + for m in reversed(metadata): + builder.PrependUOffsetTRelative(m) + metadata_vector = builder.EndVector(rank) + + # Build the data + data_offset, data_type = _serialise_array(builder, histogram["data"]) + + errors_offset = None + if "errors" in histogram: + errors_offset, error_type = 
_serialise_array(builder, histogram["errors"]) + + # Build the actual buffer + EventHistogram.EventHistogramStart(builder) + if info_offset: + EventHistogram.EventHistogramAddInfo(builder, info_offset) + EventHistogram.EventHistogramAddData(builder, data_offset) + EventHistogram.EventHistogramAddCurrentShape(builder, shape_offset) + EventHistogram.EventHistogramAddDimMetadata(builder, metadata_vector) + EventHistogram.EventHistogramAddTimestamp(builder, histogram["timestamp"]) + if source_offset: + EventHistogram.EventHistogramAddSource(builder, source_offset) + EventHistogram.EventHistogramAddDataType(builder, data_type) + if errors_offset: + EventHistogram.EventHistogramAddErrors(builder, errors_offset) + EventHistogram.EventHistogramAddErrorsType(builder, error_type) + if "last_metadata_timestamp" in histogram: + EventHistogram.EventHistogramAddLastMetadataTimestamp( + builder, histogram["last_metadata_timestamp"] + ) + hist_message = EventHistogram.EventHistogramEnd(builder) + + builder.Finish(hist_message, file_identifier=FILE_IDENTIFIER) + return bytes(builder.Output()) + + +def _serialise_array(builder, data): + flattened_data = numpy.asarray(data).flatten() + + # Carefully preserve explicitly supported types + if numpy.issubdtype(flattened_data.dtype, numpy.int32): + return _serialise_int32(builder, flattened_data) + if numpy.issubdtype(flattened_data.dtype, numpy.int64): + return _serialise_int64(builder, flattened_data) + if numpy.issubdtype(flattened_data.dtype, numpy.float32): + return _serialise_float(builder, flattened_data) + if numpy.issubdtype(flattened_data.dtype, numpy.float64): + return _serialise_double(builder, flattened_data) + + # Otherwise if it looks like an int then use int64, or use double as last resort + if numpy.issubdtype(flattened_data.dtype, numpy.int64): + return _serialise_int64(builder, flattened_data) + + return _serialise_double(builder, flattened_data) + + +def _serialise_float(builder, flattened_data): + data_type = 
Array.ArrayFloat + data_vector = builder.CreateNumpyVector(flattened_data) + ArrayFloat.ArrayFloatStart(builder) + ArrayFloat.ArrayFloatAddValue(builder, data_vector) + data_offset = ArrayFloat.ArrayFloatEnd(builder) + return data_offset, data_type + + +def _serialise_double(builder, flattened_data): + data_type = Array.ArrayDouble + data_vector = builder.CreateNumpyVector(flattened_data) + ArrayDouble.ArrayDoubleStart(builder) + ArrayDouble.ArrayDoubleAddValue(builder, data_vector) + data_offset = ArrayDouble.ArrayDoubleEnd(builder) + return data_offset, data_type + + +def _serialise_int32(builder, flattened_data): + data_type = Array.ArrayInt + data_vector = builder.CreateNumpyVector(flattened_data) + ArrayInt.ArrayIntStart(builder) + ArrayInt.ArrayIntAddValue(builder, data_vector) + data_offset = ArrayInt.ArrayIntEnd(builder) + return data_offset, data_type + + +def _serialise_int64(builder, flattened_data): + data_type = Array.ArrayLong + data_vector = builder.CreateNumpyVector(flattened_data) + ArrayLong.ArrayLongStart(builder) + ArrayLong.ArrayLongAddValue(builder, data_vector) + data_offset = ArrayLong.ArrayLongEnd(builder) + return data_offset, data_type diff --git a/tests/test_hs01.py b/tests/test_hs01.py new file mode 100644 index 0000000..40bcf7e --- /dev/null +++ b/tests/test_hs01.py @@ -0,0 +1,364 @@ +import numpy as np +import pytest +from streaming_data_types.exceptions import WrongSchemaException +from streaming_data_types.histogram_hs01 import serialise_hs01, deserialise_hs01 +from streaming_data_types import SERIALISERS, DESERIALISERS + + +def create_test_data_with_type(numpy_type): + return { + "source": "some_source", + "timestamp": 123456, + "current_shape": [5], + "dim_metadata": [ + { + "length": 5, + "unit": "m", + "label": "some_label", + "bin_boundaries": np.array([0, 1, 2, 3, 4, 5]).astype(numpy_type), + } + ], + "last_metadata_timestamp": 123456, + "data": np.array([1, 2, 3, 4, 5]).astype(numpy_type), + "errors": np.array([5, 4, 3, 2, 
1]).astype(numpy_type), + "info": "info_string", + } + + +class TestSerialisationHs01: + def _check_metadata_for_one_dimension(self, data, original_data): + assert np.array_equal(data["bin_boundaries"], original_data["bin_boundaries"]) + assert data["length"] == original_data["length"] + assert data["unit"] == original_data["unit"] + assert data["label"] == original_data["label"] + + def test_serialises_and_deserialises_hs01_message_correctly_for_full_1d_data(self): + """ + Round-trip to check what we serialise is what we get back. + """ + original_hist = { + "source": "some_source", + "timestamp": 123456, + "current_shape": [5], + "dim_metadata": [ + { + "length": 5, + "unit": "m", + "label": "some_label", + "bin_boundaries": np.array([0.0, 1.0, 2.0, 3.0, 4.0, 5.0]), + } + ], + "last_metadata_timestamp": 123456, + "data": np.array([1.0, 2.0, 3.0, 4.0, 5.0]), + "errors": np.array([5.0, 4.0, 3.0, 2.0, 1.0]), + "info": "info_string", + } + + buf = serialise_hs01(original_hist) + hist = deserialise_hs01(buf) + + assert hist["source"] == original_hist["source"] + assert hist["timestamp"] == original_hist["timestamp"] + assert hist["current_shape"] == original_hist["current_shape"] + self._check_metadata_for_one_dimension( + hist["dim_metadata"][0], original_hist["dim_metadata"][0] + ) + assert np.array_equal(hist["data"], original_hist["data"]) + assert np.array_equal(hist["errors"], original_hist["errors"]) + assert hist["info"] == original_hist["info"] + assert ( + hist["last_metadata_timestamp"] == original_hist["last_metadata_timestamp"] + ) + + def test_serialises_and_deserialises_hs01_message_correctly_for_minimal_1d_data( + self, + ): + """ + Round-trip to check what we serialise is what we get back. 
+ """ + original_hist = { + "timestamp": 123456, + "current_shape": [5], + "dim_metadata": [ + { + "length": 5, + "unit": "m", + "label": "some_label", + "bin_boundaries": np.array([0.0, 1.0, 2.0, 3.0, 4.0, 5.0]), + } + ], + "data": np.array([1.0, 2.0, 3.0, 4.0, 5.0]), + } + buf = serialise_hs01(original_hist) + + hist = deserialise_hs01(buf) + assert hist["source"] == "" + assert hist["timestamp"] == original_hist["timestamp"] + assert hist["current_shape"] == original_hist["current_shape"] + self._check_metadata_for_one_dimension( + hist["dim_metadata"][0], original_hist["dim_metadata"][0] + ) + assert np.array_equal(hist["data"], original_hist["data"]) + assert len(hist["errors"]) == 0 + assert hist["info"] == "" + + def test_serialises_and_deserialises_hs01_message_correctly_for_full_2d_data(self): + """ + Round-trip to check what we serialise is what we get back. + """ + original_hist = { + "source": "some_source", + "timestamp": 123456, + "current_shape": [2, 5], + "dim_metadata": [ + { + "length": 2, + "unit": "b", + "label": "y", + "bin_boundaries": np.array([10.0, 11.0, 12.0]), + }, + { + "length": 5, + "unit": "m", + "label": "x", + "bin_boundaries": np.array([0.0, 1.0, 2.0, 3.0, 4.0, 5.0]), + }, + ], + "last_metadata_timestamp": 123456, + "data": np.array([[1.0, 2.0, 3.0, 4.0, 5.0], [6.0, 7.0, 8.0, 9.0, 10.0]]), + "errors": np.array([[5.0, 4.0, 3.0, 2.0, 1.0], [10.0, 9.0, 8.0, 7.0, 6.0]]), + "info": "info_string", + } + buf = serialise_hs01(original_hist) + + hist = deserialise_hs01(buf) + assert hist["source"] == original_hist["source"] + assert hist["timestamp"] == original_hist["timestamp"] + assert hist["current_shape"] == original_hist["current_shape"] + self._check_metadata_for_one_dimension( + hist["dim_metadata"][0], original_hist["dim_metadata"][0] + ) + self._check_metadata_for_one_dimension( + hist["dim_metadata"][1], original_hist["dim_metadata"][1] + ) + assert np.array_equal(hist["data"], original_hist["data"]) + assert 
np.array_equal(hist["errors"], original_hist["errors"]) + assert hist["info"] == original_hist["info"] + assert ( + hist["last_metadata_timestamp"] == original_hist["last_metadata_timestamp"] + ) + + def test_if_buffer_has_wrong_id_then_throws(self): + original_hist = { + "timestamp": 123456, + "current_shape": [5], + "dim_metadata": [ + { + "length": 5, + "unit": "m", + "label": "some_label", + "bin_boundaries": np.array([0.0, 1.0, 2.0, 3.0, 4.0, 5.0]), + } + ], + "data": np.array([1.0, 2.0, 3.0, 4.0, 5.0]), + } + buf = serialise_hs01(original_hist) + + # Manually hack the id + buf = bytearray(buf) + buf[4:8] = b"1234" + + with pytest.raises(WrongSchemaException): + deserialise_hs01(buf) + + def test_serialises_and_deserialises_hs01_message_correctly_for_int_array_data( + self, + ): + """ + Round-trip to check what we serialise is what we get back. + """ + original_hist = { + "source": "some_source", + "timestamp": 123456, + "current_shape": [5], + "dim_metadata": [ + { + "length": 5, + "unit": "m", + "label": "some_label", + "bin_boundaries": np.array([0, 1, 2, 3, 4, 5]), + } + ], + "last_metadata_timestamp": 123456, + "data": np.array([1, 2, 3, 4, 5]), + "errors": np.array([5, 4, 3, 2, 1]), + "info": "info_string", + } + + buf = serialise_hs01(original_hist) + hist = deserialise_hs01(buf) + + assert hist["source"] == original_hist["source"] + assert hist["timestamp"] == original_hist["timestamp"] + assert hist["current_shape"] == original_hist["current_shape"] + self._check_metadata_for_one_dimension( + hist["dim_metadata"][0], original_hist["dim_metadata"][0] + ) + assert np.array_equal(hist["data"], original_hist["data"]) + assert np.array_equal(hist["errors"], original_hist["errors"]) + assert hist["info"] == original_hist["info"] + assert ( + hist["last_metadata_timestamp"] == original_hist["last_metadata_timestamp"] + ) + + def test_serialise_and_deserialise_hs01_message_returns_int32_type(self): + original_hist = create_test_data_with_type(np.int32) + + 
buf = serialise_hs01(original_hist) + hist = deserialise_hs01(buf) + + assert np.issubdtype( + hist["dim_metadata"][0]["bin_boundaries"].dtype, + original_hist["dim_metadata"][0]["bin_boundaries"].dtype, + ) + assert np.issubdtype(hist["data"].dtype, original_hist["data"].dtype) + assert np.issubdtype(hist["errors"].dtype, original_hist["errors"].dtype) + + def test_serialise_and_deserialise_hs01_message_returns_int64_type(self): + original_hist = create_test_data_with_type(np.int64) + + buf = serialise_hs01(original_hist) + hist = deserialise_hs01(buf) + + assert np.issubdtype( + hist["dim_metadata"][0]["bin_boundaries"].dtype, + original_hist["dim_metadata"][0]["bin_boundaries"].dtype, + ) + assert np.issubdtype(hist["data"].dtype, original_hist["data"].dtype) + assert np.issubdtype(hist["errors"].dtype, original_hist["errors"].dtype) + + def test_serialise_and_deserialise_hs01_message_returns_float32_type(self): + original_hist = create_test_data_with_type(np.float32) + + buf = serialise_hs01(original_hist) + hist = deserialise_hs01(buf) + + assert np.issubdtype( + hist["dim_metadata"][0]["bin_boundaries"].dtype, + original_hist["dim_metadata"][0]["bin_boundaries"].dtype, + ) + assert np.issubdtype(hist["data"].dtype, original_hist["data"].dtype) + assert np.issubdtype(hist["errors"].dtype, original_hist["errors"].dtype) + + def test_serialise_and_deserialise_hs01_message_returns_float64_type(self): + original_hist = create_test_data_with_type(np.float64) + + buf = serialise_hs01(original_hist) + hist = deserialise_hs01(buf) + + assert np.issubdtype( + hist["dim_metadata"][0]["bin_boundaries"].dtype, + original_hist["dim_metadata"][0]["bin_boundaries"].dtype, + ) + assert np.issubdtype(hist["data"].dtype, original_hist["data"].dtype) + assert np.issubdtype(hist["errors"].dtype, original_hist["errors"].dtype) + + def test_serialises_and_deserialises_hs01_message_correctly_when_float_input_is_not_ndarray( + self, + ): + """ + Round-trip to check what we serialise 
is what we get back. + """ + original_hist = { + "source": "some_source", + "timestamp": 123456, + "current_shape": [2, 5], + "dim_metadata": [ + { + "length": 2, + "unit": "b", + "label": "y", + "bin_boundaries": [10.0, 11.0, 12.0], + }, + { + "length": 5, + "unit": "m", + "label": "x", + "bin_boundaries": [0.0, 1.0, 2.0, 3.0, 4.0, 5.0], + }, + ], + "last_metadata_timestamp": 123456, + "data": [[1.0, 2.0, 3.0, 4.0, 5.0], [6.0, 7.0, 8.0, 9.0, 10.0]], + "errors": [[5.0, 4.0, 3.0, 2.0, 1.0], [10.0, 9.0, 8.0, 7.0, 6.0]], + "info": "info_string", + } + buf = serialise_hs01(original_hist) + + hist = deserialise_hs01(buf) + assert hist["source"] == original_hist["source"] + assert hist["timestamp"] == original_hist["timestamp"] + assert hist["current_shape"] == original_hist["current_shape"] + self._check_metadata_for_one_dimension( + hist["dim_metadata"][0], original_hist["dim_metadata"][0] + ) + self._check_metadata_for_one_dimension( + hist["dim_metadata"][1], original_hist["dim_metadata"][1] + ) + assert np.array_equal(hist["data"], original_hist["data"]) + assert np.array_equal(hist["errors"], original_hist["errors"]) + assert hist["info"] == original_hist["info"] + assert ( + hist["last_metadata_timestamp"] == original_hist["last_metadata_timestamp"] + ) + + def test_serialises_and_deserialises_hs01_message_correctly_when_int_input_is_not_ndarray( + self, + ): + """ + Round-trip to check what we serialise is what we get back. 
+ """ + original_hist = { + "source": "some_source", + "timestamp": 123456, + "current_shape": [2, 5], + "dim_metadata": [ + { + "length": 2, + "unit": "b", + "label": "y", + "bin_boundaries": [10, 11, 12], + }, + { + "length": 5, + "unit": "m", + "label": "x", + "bin_boundaries": [0, 1, 2, 3, 4, 5], + }, + ], + "last_metadata_timestamp": 123456, + "data": [[1, 2, 3, 4, 5], [6, 7, 8, 9, 10]], + "errors": [[5, 4, 3, 2, 1], [10, 9, 8, 7, 6]], + "info": "info_string", + } + buf = serialise_hs01(original_hist) + + hist = deserialise_hs01(buf) + assert hist["source"] == original_hist["source"] + assert hist["timestamp"] == original_hist["timestamp"] + assert hist["current_shape"] == original_hist["current_shape"] + self._check_metadata_for_one_dimension( + hist["dim_metadata"][0], original_hist["dim_metadata"][0] + ) + self._check_metadata_for_one_dimension( + hist["dim_metadata"][1], original_hist["dim_metadata"][1] + ) + assert np.array_equal(hist["data"], original_hist["data"]) + assert np.array_equal(hist["errors"], original_hist["errors"]) + assert hist["info"] == original_hist["info"] + assert ( + hist["last_metadata_timestamp"] == original_hist["last_metadata_timestamp"] + ) + + def test_schema_type_is_in_global_serialisers_list(self): + assert "hs01" in SERIALISERS + assert "hs01" in DESERIALISERS From dd13bd8c06ad998062e676f896fc8a539f6ebc7b Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Wed, 16 Feb 2022 14:53:02 +0100 Subject: [PATCH 263/363] Pin flake8 and black so they are consistent with the pre-commit settings --- .pre-commit-config.yaml | 8 ++++---- requirements-dev.txt | 4 +++- 2 files changed, 7 insertions(+), 5 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index a2b82a1..a46fa98 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,10 +1,10 @@ repos: -- repo: https://github.com/ambv/black - rev: stable +- repo: https://github.com/psf/black + rev: 21.9b0 hooks: - id: black - language_version: python3.6 
+ language_version: python3 - repo: https://github.com/pycqa/flake8 - rev: 3.8.3 + rev: 4.0.1 hooks: - id: flake8 diff --git a/requirements-dev.txt b/requirements-dev.txt index fdda646..b05031e 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -1,5 +1,7 @@ -r requirements.txt -black +black==21.9b0 # Pinned to match pre-commit config +flake8==4.0.1 # Pinned to match pre-commit config +isort==5.10.1 # Pinned to match pre-commit configblack flake8 pre-commit pytest From 22b1cbcad262cfd8f5af8c557f8642173b194916 Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Wed, 16 Feb 2022 14:55:06 +0100 Subject: [PATCH 264/363] add isort --- .pre-commit-config.yaml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index a46fa98..8affa1b 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -8,3 +8,9 @@ repos: rev: 4.0.1 hooks: - id: flake8 +- repo: https://github.com/pycqa/isort + rev: 5.10.1 + hooks: + - id: isort + args: ["--profile", "black"] + From e1ea92db03351cc5bc487de38d6d44a7b82a296f Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Wed, 16 Feb 2022 14:56:19 +0100 Subject: [PATCH 265/363] apply isort --- setup.py | 4 +- streaming_data_types/__init__.py | 26 +-- streaming_data_types/action_response_answ.py | 9 +- streaming_data_types/area_detector_ADAr.py | 12 +- streaming_data_types/area_detector_NDAr.py | 26 ++- .../epics_connection_info_ep00.py | 10 +- streaming_data_types/eventdata_ev42.py | 5 +- streaming_data_types/eventdata_ev43.py | 5 +- .../fbschemas/NDAr_NDArray_schema/DType.py | 2 +- .../fbschemas/NDAr_NDArray_schema/NDArray.py | 86 ++++++-- .../NDAr_NDArray_schema/NDAttribute.py | 55 ++++- .../NDAr_NDArray_schema/epicsTimeStamp.py | 17 +- .../sample_environment_senv/Int16Array.py | 36 +++- .../sample_environment_senv/Int32Array.py | 36 +++- .../sample_environment_senv/Int64Array.py | 36 +++- .../sample_environment_senv/Int8Array.py | 36 +++- 
.../sample_environment_senv/Location.py | 4 +- .../SampleEnvironmentData.py | 93 +++++++-- .../sample_environment_senv/UInt16Array.py | 36 +++- .../sample_environment_senv/UInt32Array.py | 36 +++- .../sample_environment_senv/UInt64Array.py | 36 +++- .../sample_environment_senv/UInt8Array.py | 36 +++- .../sample_environment_senv/ValueUnion.py | 4 +- streaming_data_types/finished_writing_wrdn.py | 6 +- .../forwarder_config_update_rf5k.py | 10 +- streaming_data_types/histogram_hs00.py | 4 +- streaming_data_types/logdata_f142.py | 197 +++++++++--------- streaming_data_types/nicos_cache_ns10.py | 3 +- streaming_data_types/run_start_pl72.py | 11 +- streaming_data_types/run_stop_6s4t.py | 7 +- .../sample_environment_senv.py | 28 +-- streaming_data_types/status_x5f2.py | 3 +- streaming_data_types/timestamps_tdct.py | 12 +- tests/test_6s4t.py | 4 +- tests/test_ADAr.py | 12 +- tests/test_NDAr.py | 7 +- tests/test_answ.py | 10 +- tests/test_ep00.py | 9 +- tests/test_ev42.py | 5 +- tests/test_ev43.py | 5 +- tests/test_f142.py | 7 +- tests/test_hs00.py | 5 +- tests/test_ns10.py | 5 +- tests/test_pl72.py | 9 +- tests/test_rf5k.py | 15 +- tests/test_senv.py | 13 +- tests/test_tdct.py | 6 +- tests/test_utils.py | 1 + tests/test_wrdn.py | 5 +- tests/test_x52f.py | 6 +- 50 files changed, 708 insertions(+), 343 deletions(-) diff --git a/setup.py b/setup.py index 71d3d67..15c3afb 100644 --- a/setup.py +++ b/setup.py @@ -1,5 +1,7 @@ import os -from setuptools import setup, find_packages + +from setuptools import find_packages, setup + from streaming_data_types._version import version DESCRIPTION = "Python utilities for handling ESS streamed data" diff --git a/streaming_data_types/__init__.py b/streaming_data_types/__init__.py index d5e11f0..c5ec060 100644 --- a/streaming_data_types/__init__.py +++ b/streaming_data_types/__init__.py @@ -1,29 +1,29 @@ -from streaming_data_types.eventdata_ev42 import deserialise_ev42, serialise_ev42 -from streaming_data_types.eventdata_ev43 import 
deserialise_ev43, serialise_ev43 -from streaming_data_types.histogram_hs00 import deserialise_hs00, serialise_hs00 -from streaming_data_types.logdata_f142 import deserialise_f142, serialise_f142 -from streaming_data_types.nicos_cache_ns10 import deserialise_ns10, serialise_ns10 -from streaming_data_types.run_start_pl72 import deserialise_pl72, serialise_pl72 -from streaming_data_types.run_stop_6s4t import deserialise_6s4t, serialise_6s4t -from streaming_data_types.status_x5f2 import deserialise_x5f2, serialise_x5f2 +from streaming_data_types._version import version from streaming_data_types.action_response_answ import deserialise_answ, serialise_answ -from streaming_data_types.finished_writing_wrdn import deserialise_wrdn, serialise_wrdn +from streaming_data_types.area_detector_ADAr import deserialise_ADAr, serialise_ADAr +from streaming_data_types.area_detector_NDAr import deserialise_ndar, serialise_ndar from streaming_data_types.epics_connection_info_ep00 import ( deserialise_ep00, serialise_ep00, ) -from streaming_data_types.timestamps_tdct import deserialise_tdct, serialise_tdct +from streaming_data_types.eventdata_ev42 import deserialise_ev42, serialise_ev42 +from streaming_data_types.eventdata_ev43 import deserialise_ev43, serialise_ev43 +from streaming_data_types.finished_writing_wrdn import deserialise_wrdn, serialise_wrdn from streaming_data_types.forwarder_config_update_rf5k import ( deserialise_rf5k, serialise_rf5k, ) -from streaming_data_types.area_detector_NDAr import deserialise_ndar, serialise_ndar +from streaming_data_types.histogram_hs00 import deserialise_hs00, serialise_hs00 +from streaming_data_types.logdata_f142 import deserialise_f142, serialise_f142 +from streaming_data_types.nicos_cache_ns10 import deserialise_ns10, serialise_ns10 +from streaming_data_types.run_start_pl72 import deserialise_pl72, serialise_pl72 +from streaming_data_types.run_stop_6s4t import deserialise_6s4t, serialise_6s4t from streaming_data_types.sample_environment_senv 
import ( deserialise_senv, serialise_senv, ) -from streaming_data_types.area_detector_ADAr import deserialise_ADAr, serialise_ADAr -from streaming_data_types._version import version +from streaming_data_types.status_x5f2 import deserialise_x5f2, serialise_x5f2 +from streaming_data_types.timestamps_tdct import deserialise_tdct, serialise_tdct __version__ = version diff --git a/streaming_data_types/action_response_answ.py b/streaming_data_types/action_response_answ.py index 64bcb15..c5e6546 100644 --- a/streaming_data_types/action_response_answ.py +++ b/streaming_data_types/action_response_answ.py @@ -1,13 +1,14 @@ +from datetime import datetime, timezone +from typing import NamedTuple, Union + import flatbuffers + +import streaming_data_types.fbschemas.action_response_answ.ActionResponse as ActionResponse from streaming_data_types.fbschemas.action_response_answ.ActionOutcome import ( ActionOutcome, ) -import streaming_data_types.fbschemas.action_response_answ.ActionResponse as ActionResponse from streaming_data_types.fbschemas.action_response_answ.ActionType import ActionType from streaming_data_types.utils import check_schema_identifier -from typing import Union -from typing import NamedTuple -from datetime import datetime, timezone FILE_IDENTIFIER = b"answ" diff --git a/streaming_data_types/area_detector_ADAr.py b/streaming_data_types/area_detector_ADAr.py index d416a74..9ad1aad 100644 --- a/streaming_data_types/area_detector_ADAr.py +++ b/streaming_data_types/area_detector_ADAr.py @@ -1,12 +1,14 @@ -from typing import Union, NamedTuple, List +from datetime import datetime, timezone +from struct import pack +from typing import List, NamedTuple, Union + import flatbuffers +import numpy as np + +import streaming_data_types.fbschemas.ADAr_ADArray_schema.Attribute as ADArAttribute from streaming_data_types.fbschemas.ADAr_ADArray_schema import ADArray from streaming_data_types.fbschemas.ADAr_ADArray_schema.DType import DType -import 
streaming_data_types.fbschemas.ADAr_ADArray_schema.Attribute as ADArAttribute from streaming_data_types.utils import check_schema_identifier -import numpy as np -from datetime import datetime, timezone -from struct import pack FILE_IDENTIFIER = b"ADAr" diff --git a/streaming_data_types/area_detector_NDAr.py b/streaming_data_types/area_detector_NDAr.py index 4b5d7ba..8e3a048 100644 --- a/streaming_data_types/area_detector_NDAr.py +++ b/streaming_data_types/area_detector_NDAr.py @@ -1,10 +1,12 @@ +import time +from collections import namedtuple from typing import Union + import flatbuffers +import numpy as np + from streaming_data_types.fbschemas.NDAr_NDArray_schema import NDArray from streaming_data_types.utils import check_schema_identifier -from collections import namedtuple -import time -import numpy as np FILE_IDENTIFIER = b"NDAr" @@ -60,9 +62,21 @@ def get_data(fb_arr): Converts the data array into the correct type. """ raw_data = fb_arr.PDataAsNumpy() - numpy_arr_type = [np.int8, np.uint8, np.int16, np.uint16, np.int32, np.uint32, np.int64, np.uint64, - np.float32, np.float64] - return raw_data.view(numpy_arr_type[fb_arr.DataType()]).reshape(fb_arr.DimsAsNumpy()) + numpy_arr_type = [ + np.int8, + np.uint8, + np.int16, + np.uint16, + np.int32, + np.uint32, + np.int64, + np.uint64, + np.float32, + np.float64, + ] + return raw_data.view(numpy_arr_type[fb_arr.DataType()]).reshape( + fb_arr.DimsAsNumpy() + ) def deserialise_ndar(buffer: Union[bytearray, bytes]) -> NDArray: diff --git a/streaming_data_types/epics_connection_info_ep00.py b/streaming_data_types/epics_connection_info_ep00.py index 590fc54..99718b5 100644 --- a/streaming_data_types/epics_connection_info_ep00.py +++ b/streaming_data_types/epics_connection_info_ep00.py @@ -1,11 +1,13 @@ -from typing import Union, Optional +from collections import namedtuple +from typing import Optional, Union + import flatbuffers + from streaming_data_types.fbschemas.epics_connection_info_ep00 import ( 
EpicsConnectionInfo, EventType, ) from streaming_data_types.utils import check_schema_identifier -from collections import namedtuple FILE_IDENTIFIER = b"ep00" @@ -43,8 +45,8 @@ def serialise_ep00( def deserialise_ep00(buffer: Union[bytearray, bytes]) -> EpicsConnection: check_schema_identifier(buffer, FILE_IDENTIFIER) - epics_connection = EpicsConnectionInfo.EpicsConnectionInfo.GetRootAsEpicsConnectionInfo( - buffer, 0 + epics_connection = ( + EpicsConnectionInfo.EpicsConnectionInfo.GetRootAsEpicsConnectionInfo(buffer, 0) ) source_name = ( diff --git a/streaming_data_types/eventdata_ev42.py b/streaming_data_types/eventdata_ev42.py index 73b4f67..5315841 100644 --- a/streaming_data_types/eventdata_ev42.py +++ b/streaming_data_types/eventdata_ev42.py @@ -1,11 +1,12 @@ from collections import namedtuple + import flatbuffers +import numpy as np + import streaming_data_types.fbschemas.eventdata_ev42.EventMessage as EventMessage import streaming_data_types.fbschemas.eventdata_ev42.FacilityData as FacilityData import streaming_data_types.fbschemas.isis_event_info_is84.ISISData as ISISData from streaming_data_types.utils import check_schema_identifier -import numpy as np - FILE_IDENTIFIER = b"ev42" diff --git a/streaming_data_types/eventdata_ev43.py b/streaming_data_types/eventdata_ev43.py index 08b5e3b..3bf5274 100644 --- a/streaming_data_types/eventdata_ev43.py +++ b/streaming_data_types/eventdata_ev43.py @@ -1,9 +1,10 @@ from collections import namedtuple + import flatbuffers -import streaming_data_types.fbschemas.eventdata_ev43.Event43Message as Event43Message -from streaming_data_types.utils import check_schema_identifier import numpy as np +import streaming_data_types.fbschemas.eventdata_ev43.Event43Message as Event43Message +from streaming_data_types.utils import check_schema_identifier FILE_IDENTIFIER = b"ev43" diff --git a/streaming_data_types/fbschemas/NDAr_NDArray_schema/DType.py b/streaming_data_types/fbschemas/NDAr_NDArray_schema/DType.py index 
8b46fce..48dac73 100644 --- a/streaming_data_types/fbschemas/NDAr_NDArray_schema/DType.py +++ b/streaming_data_types/fbschemas/NDAr_NDArray_schema/DType.py @@ -2,6 +2,7 @@ # namespace: FB_Tables + class DType(object): Int8 = 0 Uint8 = 1 @@ -14,4 +15,3 @@ class DType(object): Float32 = 8 Float64 = 9 c_string = 10 - diff --git a/streaming_data_types/fbschemas/NDAr_NDArray_schema/NDArray.py b/streaming_data_types/fbschemas/NDAr_NDArray_schema/NDArray.py index 013dd1a..9264718 100644 --- a/streaming_data_types/fbschemas/NDAr_NDArray_schema/NDArray.py +++ b/streaming_data_types/fbschemas/NDAr_NDArray_schema/NDArray.py @@ -4,8 +4,9 @@ import flatbuffers + class NDArray(object): - __slots__ = ['_tab'] + __slots__ = ["_tab"] @classmethod def GetRootAsNDArray(cls, buf, offset): @@ -29,7 +30,9 @@ def Id(self): def TimeStamp(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) if o != 0: - return self._tab.Get(flatbuffers.number_types.Float64Flags, o + self._tab.Pos) + return self._tab.Get( + flatbuffers.number_types.Float64Flags, o + self._tab.Pos + ) return 0.0 # NDArray @@ -38,6 +41,7 @@ def EpicsTS(self): if o != 0: x = o + self._tab.Pos from .epicsTimeStamp import epicsTimeStamp + obj = epicsTimeStamp() obj.Init(self._tab.Bytes, x) return obj @@ -48,7 +52,10 @@ def Dims(self, j): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) if o != 0: a = self._tab.Vector(o) - return self._tab.Get(flatbuffers.number_types.Uint64Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8)) + return self._tab.Get( + flatbuffers.number_types.Uint64Flags, + a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8), + ) return 0 # NDArray @@ -77,7 +84,10 @@ def PData(self, j): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) if o != 0: a = self._tab.Vector(o) - return self._tab.Get(flatbuffers.number_types.Uint8Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1)) + return self._tab.Get( + 
flatbuffers.number_types.Uint8Flags, + a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1), + ) return 0 # NDArray @@ -102,6 +112,7 @@ def PAttributeList(self, j): x += flatbuffers.number_types.UOffsetTFlags.py_type(j) * 4 x = self._tab.Indirect(x) from .NDAttribute import NDAttribute + obj = NDAttribute() obj.Init(self._tab.Bytes, x) return obj @@ -114,15 +125,58 @@ def PAttributeListLength(self): return self._tab.VectorLen(o) return 0 -def NDArrayStart(builder): builder.StartObject(7) -def NDArrayAddId(builder, id): builder.PrependInt32Slot(0, id, 0) -def NDArrayAddTimeStamp(builder, timeStamp): builder.PrependFloat64Slot(1, timeStamp, 0.0) -def NDArrayAddEpicsTS(builder, epicsTS): builder.PrependStructSlot(2, flatbuffers.number_types.UOffsetTFlags.py_type(epicsTS), 0) -def NDArrayAddDims(builder, dims): builder.PrependUOffsetTRelativeSlot(3, flatbuffers.number_types.UOffsetTFlags.py_type(dims), 0) -def NDArrayStartDimsVector(builder, numElems): return builder.StartVector(8, numElems, 8) -def NDArrayAddDataType(builder, dataType): builder.PrependInt8Slot(4, dataType, 0) -def NDArrayAddPData(builder, pData): builder.PrependUOffsetTRelativeSlot(5, flatbuffers.number_types.UOffsetTFlags.py_type(pData), 0) -def NDArrayStartPDataVector(builder, numElems): return builder.StartVector(1, numElems, 1) -def NDArrayAddPAttributeList(builder, pAttributeList): builder.PrependUOffsetTRelativeSlot(6, flatbuffers.number_types.UOffsetTFlags.py_type(pAttributeList), 0) -def NDArrayStartPAttributeListVector(builder, numElems): return builder.StartVector(4, numElems, 4) -def NDArrayEnd(builder): return builder.EndObject() + +def NDArrayStart(builder): + builder.StartObject(7) + + +def NDArrayAddId(builder, id): + builder.PrependInt32Slot(0, id, 0) + + +def NDArrayAddTimeStamp(builder, timeStamp): + builder.PrependFloat64Slot(1, timeStamp, 0.0) + + +def NDArrayAddEpicsTS(builder, epicsTS): + builder.PrependStructSlot( + 2, 
flatbuffers.number_types.UOffsetTFlags.py_type(epicsTS), 0 + ) + + +def NDArrayAddDims(builder, dims): + builder.PrependUOffsetTRelativeSlot( + 3, flatbuffers.number_types.UOffsetTFlags.py_type(dims), 0 + ) + + +def NDArrayStartDimsVector(builder, numElems): + return builder.StartVector(8, numElems, 8) + + +def NDArrayAddDataType(builder, dataType): + builder.PrependInt8Slot(4, dataType, 0) + + +def NDArrayAddPData(builder, pData): + builder.PrependUOffsetTRelativeSlot( + 5, flatbuffers.number_types.UOffsetTFlags.py_type(pData), 0 + ) + + +def NDArrayStartPDataVector(builder, numElems): + return builder.StartVector(1, numElems, 1) + + +def NDArrayAddPAttributeList(builder, pAttributeList): + builder.PrependUOffsetTRelativeSlot( + 6, flatbuffers.number_types.UOffsetTFlags.py_type(pAttributeList), 0 + ) + + +def NDArrayStartPAttributeListVector(builder, numElems): + return builder.StartVector(4, numElems, 4) + + +def NDArrayEnd(builder): + return builder.EndObject() diff --git a/streaming_data_types/fbschemas/NDAr_NDArray_schema/NDAttribute.py b/streaming_data_types/fbschemas/NDAr_NDArray_schema/NDAttribute.py index 901e520..8448343 100644 --- a/streaming_data_types/fbschemas/NDAr_NDArray_schema/NDAttribute.py +++ b/streaming_data_types/fbschemas/NDAr_NDArray_schema/NDAttribute.py @@ -4,8 +4,9 @@ import flatbuffers + class NDAttribute(object): - __slots__ = ['_tab'] + __slots__ = ["_tab"] @classmethod def GetRootAsNDAttribute(cls, buf, offset): @@ -51,7 +52,10 @@ def PData(self, j): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) if o != 0: a = self._tab.Vector(o) - return self._tab.Get(flatbuffers.number_types.Uint8Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1)) + return self._tab.Get( + flatbuffers.number_types.Uint8Flags, + a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1), + ) return 0 # NDAttribute @@ -68,11 +72,42 @@ def PDataLength(self): return self._tab.VectorLen(o) return 0 -def NDAttributeStart(builder): 
builder.StartObject(5) -def NDAttributeAddPName(builder, pName): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(pName), 0) -def NDAttributeAddPDescription(builder, pDescription): builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(pDescription), 0) -def NDAttributeAddPSource(builder, pSource): builder.PrependUOffsetTRelativeSlot(2, flatbuffers.number_types.UOffsetTFlags.py_type(pSource), 0) -def NDAttributeAddDataType(builder, dataType): builder.PrependInt8Slot(3, dataType, 0) -def NDAttributeAddPData(builder, pData): builder.PrependUOffsetTRelativeSlot(4, flatbuffers.number_types.UOffsetTFlags.py_type(pData), 0) -def NDAttributeStartPDataVector(builder, numElems): return builder.StartVector(1, numElems, 1) -def NDAttributeEnd(builder): return builder.EndObject() + +def NDAttributeStart(builder): + builder.StartObject(5) + + +def NDAttributeAddPName(builder, pName): + builder.PrependUOffsetTRelativeSlot( + 0, flatbuffers.number_types.UOffsetTFlags.py_type(pName), 0 + ) + + +def NDAttributeAddPDescription(builder, pDescription): + builder.PrependUOffsetTRelativeSlot( + 1, flatbuffers.number_types.UOffsetTFlags.py_type(pDescription), 0 + ) + + +def NDAttributeAddPSource(builder, pSource): + builder.PrependUOffsetTRelativeSlot( + 2, flatbuffers.number_types.UOffsetTFlags.py_type(pSource), 0 + ) + + +def NDAttributeAddDataType(builder, dataType): + builder.PrependInt8Slot(3, dataType, 0) + + +def NDAttributeAddPData(builder, pData): + builder.PrependUOffsetTRelativeSlot( + 4, flatbuffers.number_types.UOffsetTFlags.py_type(pData), 0 + ) + + +def NDAttributeStartPDataVector(builder, numElems): + return builder.StartVector(1, numElems, 1) + + +def NDAttributeEnd(builder): + return builder.EndObject() diff --git a/streaming_data_types/fbschemas/NDAr_NDArray_schema/epicsTimeStamp.py b/streaming_data_types/fbschemas/NDAr_NDArray_schema/epicsTimeStamp.py index 1877faf..4afea11 100644 --- 
a/streaming_data_types/fbschemas/NDAr_NDArray_schema/epicsTimeStamp.py +++ b/streaming_data_types/fbschemas/NDAr_NDArray_schema/epicsTimeStamp.py @@ -4,17 +4,28 @@ import flatbuffers + class epicsTimeStamp(object): - __slots__ = ['_tab'] + __slots__ = ["_tab"] # epicsTimeStamp def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) # epicsTimeStamp - def SecPastEpoch(self): return self._tab.Get(flatbuffers.number_types.Int32Flags, self._tab.Pos + flatbuffers.number_types.UOffsetTFlags.py_type(0)) + def SecPastEpoch(self): + return self._tab.Get( + flatbuffers.number_types.Int32Flags, + self._tab.Pos + flatbuffers.number_types.UOffsetTFlags.py_type(0), + ) + # epicsTimeStamp - def Nsec(self): return self._tab.Get(flatbuffers.number_types.Int32Flags, self._tab.Pos + flatbuffers.number_types.UOffsetTFlags.py_type(4)) + def Nsec(self): + return self._tab.Get( + flatbuffers.number_types.Int32Flags, + self._tab.Pos + flatbuffers.number_types.UOffsetTFlags.py_type(4), + ) + def CreateepicsTimeStamp(builder, secPastEpoch, nsec): builder.Prep(4, 8) diff --git a/streaming_data_types/fbschemas/sample_environment_senv/Int16Array.py b/streaming_data_types/fbschemas/sample_environment_senv/Int16Array.py index 3b9fbdf..9edffdd 100644 --- a/streaming_data_types/fbschemas/sample_environment_senv/Int16Array.py +++ b/streaming_data_types/fbschemas/sample_environment_senv/Int16Array.py @@ -1,13 +1,15 @@ # automatically generated by the FlatBuffers compiler, do not modify -# namespace: +# namespace: import flatbuffers from flatbuffers.compat import import_numpy + np = import_numpy() + class Int16Array(object): - __slots__ = ['_tab'] + __slots__ = ["_tab"] @classmethod def GetRootAsInt16Array(cls, buf, offset): @@ -18,7 +20,9 @@ def GetRootAsInt16Array(cls, buf, offset): @classmethod def Int16ArrayBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x73\x65\x6E\x76", size_prefixed=size_prefixed) + 
return flatbuffers.util.BufferHasIdentifier( + buf, offset, b"\x73\x65\x6E\x76", size_prefixed=size_prefixed + ) # Int16Array def Init(self, buf, pos): @@ -29,7 +33,10 @@ def Value(self, j): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) if o != 0: a = self._tab.Vector(o) - return self._tab.Get(flatbuffers.number_types.Int16Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 2)) + return self._tab.Get( + flatbuffers.number_types.Int16Flags, + a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 2), + ) return 0 # Int16Array @@ -51,7 +58,20 @@ def ValueIsNone(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) return o == 0 -def Int16ArrayStart(builder): builder.StartObject(1) -def Int16ArrayAddValue(builder, value): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0) -def Int16ArrayStartValueVector(builder, numElems): return builder.StartVector(2, numElems, 2) -def Int16ArrayEnd(builder): return builder.EndObject() + +def Int16ArrayStart(builder): + builder.StartObject(1) + + +def Int16ArrayAddValue(builder, value): + builder.PrependUOffsetTRelativeSlot( + 0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0 + ) + + +def Int16ArrayStartValueVector(builder, numElems): + return builder.StartVector(2, numElems, 2) + + +def Int16ArrayEnd(builder): + return builder.EndObject() diff --git a/streaming_data_types/fbschemas/sample_environment_senv/Int32Array.py b/streaming_data_types/fbschemas/sample_environment_senv/Int32Array.py index 35c747b..1cfa5a9 100644 --- a/streaming_data_types/fbschemas/sample_environment_senv/Int32Array.py +++ b/streaming_data_types/fbschemas/sample_environment_senv/Int32Array.py @@ -1,13 +1,15 @@ # automatically generated by the FlatBuffers compiler, do not modify -# namespace: +# namespace: import flatbuffers from flatbuffers.compat import import_numpy + np = import_numpy() + class Int32Array(object): - __slots__ = ['_tab'] + 
__slots__ = ["_tab"] @classmethod def GetRootAsInt32Array(cls, buf, offset): @@ -18,7 +20,9 @@ def GetRootAsInt32Array(cls, buf, offset): @classmethod def Int32ArrayBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x73\x65\x6E\x76", size_prefixed=size_prefixed) + return flatbuffers.util.BufferHasIdentifier( + buf, offset, b"\x73\x65\x6E\x76", size_prefixed=size_prefixed + ) # Int32Array def Init(self, buf, pos): @@ -29,7 +33,10 @@ def Value(self, j): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) if o != 0: a = self._tab.Vector(o) - return self._tab.Get(flatbuffers.number_types.Int32Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4)) + return self._tab.Get( + flatbuffers.number_types.Int32Flags, + a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4), + ) return 0 # Int32Array @@ -51,7 +58,20 @@ def ValueIsNone(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) return o == 0 -def Int32ArrayStart(builder): builder.StartObject(1) -def Int32ArrayAddValue(builder, value): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0) -def Int32ArrayStartValueVector(builder, numElems): return builder.StartVector(4, numElems, 4) -def Int32ArrayEnd(builder): return builder.EndObject() + +def Int32ArrayStart(builder): + builder.StartObject(1) + + +def Int32ArrayAddValue(builder, value): + builder.PrependUOffsetTRelativeSlot( + 0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0 + ) + + +def Int32ArrayStartValueVector(builder, numElems): + return builder.StartVector(4, numElems, 4) + + +def Int32ArrayEnd(builder): + return builder.EndObject() diff --git a/streaming_data_types/fbschemas/sample_environment_senv/Int64Array.py b/streaming_data_types/fbschemas/sample_environment_senv/Int64Array.py index 50dfd4c..9647205 100644 --- 
a/streaming_data_types/fbschemas/sample_environment_senv/Int64Array.py +++ b/streaming_data_types/fbschemas/sample_environment_senv/Int64Array.py @@ -1,13 +1,15 @@ # automatically generated by the FlatBuffers compiler, do not modify -# namespace: +# namespace: import flatbuffers from flatbuffers.compat import import_numpy + np = import_numpy() + class Int64Array(object): - __slots__ = ['_tab'] + __slots__ = ["_tab"] @classmethod def GetRootAsInt64Array(cls, buf, offset): @@ -18,7 +20,9 @@ def GetRootAsInt64Array(cls, buf, offset): @classmethod def Int64ArrayBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x73\x65\x6E\x76", size_prefixed=size_prefixed) + return flatbuffers.util.BufferHasIdentifier( + buf, offset, b"\x73\x65\x6E\x76", size_prefixed=size_prefixed + ) # Int64Array def Init(self, buf, pos): @@ -29,7 +33,10 @@ def Value(self, j): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) if o != 0: a = self._tab.Vector(o) - return self._tab.Get(flatbuffers.number_types.Int64Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8)) + return self._tab.Get( + flatbuffers.number_types.Int64Flags, + a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8), + ) return 0 # Int64Array @@ -51,7 +58,20 @@ def ValueIsNone(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) return o == 0 -def Int64ArrayStart(builder): builder.StartObject(1) -def Int64ArrayAddValue(builder, value): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0) -def Int64ArrayStartValueVector(builder, numElems): return builder.StartVector(8, numElems, 8) -def Int64ArrayEnd(builder): return builder.EndObject() + +def Int64ArrayStart(builder): + builder.StartObject(1) + + +def Int64ArrayAddValue(builder, value): + builder.PrependUOffsetTRelativeSlot( + 0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0 + ) + + +def 
Int64ArrayStartValueVector(builder, numElems): + return builder.StartVector(8, numElems, 8) + + +def Int64ArrayEnd(builder): + return builder.EndObject() diff --git a/streaming_data_types/fbschemas/sample_environment_senv/Int8Array.py b/streaming_data_types/fbschemas/sample_environment_senv/Int8Array.py index 783a606..ec4a41f 100644 --- a/streaming_data_types/fbschemas/sample_environment_senv/Int8Array.py +++ b/streaming_data_types/fbschemas/sample_environment_senv/Int8Array.py @@ -1,13 +1,15 @@ # automatically generated by the FlatBuffers compiler, do not modify -# namespace: +# namespace: import flatbuffers from flatbuffers.compat import import_numpy + np = import_numpy() + class Int8Array(object): - __slots__ = ['_tab'] + __slots__ = ["_tab"] @classmethod def GetRootAsInt8Array(cls, buf, offset): @@ -18,7 +20,9 @@ def GetRootAsInt8Array(cls, buf, offset): @classmethod def Int8ArrayBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x73\x65\x6E\x76", size_prefixed=size_prefixed) + return flatbuffers.util.BufferHasIdentifier( + buf, offset, b"\x73\x65\x6E\x76", size_prefixed=size_prefixed + ) # Int8Array def Init(self, buf, pos): @@ -29,7 +33,10 @@ def Value(self, j): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) if o != 0: a = self._tab.Vector(o) - return self._tab.Get(flatbuffers.number_types.Int8Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1)) + return self._tab.Get( + flatbuffers.number_types.Int8Flags, + a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1), + ) return 0 # Int8Array @@ -51,7 +58,20 @@ def ValueIsNone(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) return o == 0 -def Int8ArrayStart(builder): builder.StartObject(1) -def Int8ArrayAddValue(builder, value): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0) -def Int8ArrayStartValueVector(builder, numElems): 
return builder.StartVector(1, numElems, 1) -def Int8ArrayEnd(builder): return builder.EndObject() + +def Int8ArrayStart(builder): + builder.StartObject(1) + + +def Int8ArrayAddValue(builder, value): + builder.PrependUOffsetTRelativeSlot( + 0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0 + ) + + +def Int8ArrayStartValueVector(builder, numElems): + return builder.StartVector(1, numElems, 1) + + +def Int8ArrayEnd(builder): + return builder.EndObject() diff --git a/streaming_data_types/fbschemas/sample_environment_senv/Location.py b/streaming_data_types/fbschemas/sample_environment_senv/Location.py index c8c7fb4..7af8fcf 100644 --- a/streaming_data_types/fbschemas/sample_environment_senv/Location.py +++ b/streaming_data_types/fbschemas/sample_environment_senv/Location.py @@ -1,10 +1,10 @@ # automatically generated by the FlatBuffers compiler, do not modify -# namespace: +# namespace: + class Location(object): Unknown = 0 Start = 1 Middle = 2 End = 3 - diff --git a/streaming_data_types/fbschemas/sample_environment_senv/SampleEnvironmentData.py b/streaming_data_types/fbschemas/sample_environment_senv/SampleEnvironmentData.py index 7351b43..f67e817 100644 --- a/streaming_data_types/fbschemas/sample_environment_senv/SampleEnvironmentData.py +++ b/streaming_data_types/fbschemas/sample_environment_senv/SampleEnvironmentData.py @@ -1,13 +1,15 @@ # automatically generated by the FlatBuffers compiler, do not modify -# namespace: +# namespace: import flatbuffers from flatbuffers.compat import import_numpy + np = import_numpy() + class SampleEnvironmentData(object): - __slots__ = ['_tab'] + __slots__ = ["_tab"] @classmethod def GetRootAsSampleEnvironmentData(cls, buf, offset): @@ -18,7 +20,9 @@ def GetRootAsSampleEnvironmentData(cls, buf, offset): @classmethod def SampleEnvironmentDataBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x73\x65\x6E\x76", size_prefixed=size_prefixed) + return 
flatbuffers.util.BufferHasIdentifier( + buf, offset, b"\x73\x65\x6E\x76", size_prefixed=size_prefixed + ) # SampleEnvironmentData def Init(self, buf, pos): @@ -42,14 +46,18 @@ def Channel(self): def PacketTimestamp(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) if o != 0: - return self._tab.Get(flatbuffers.number_types.Uint64Flags, o + self._tab.Pos) + return self._tab.Get( + flatbuffers.number_types.Uint64Flags, o + self._tab.Pos + ) return 0 # SampleEnvironmentData def TimeDelta(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) if o != 0: - return self._tab.Get(flatbuffers.number_types.Float64Flags, o + self._tab.Pos) + return self._tab.Get( + flatbuffers.number_types.Float64Flags, o + self._tab.Pos + ) return 0.0 # SampleEnvironmentData @@ -71,6 +79,7 @@ def Values(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16)) if o != 0: from flatbuffers.table import Table + obj = Table(bytearray(), 0) self._tab.Union(obj, o) return obj @@ -81,7 +90,10 @@ def Timestamps(self, j): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(18)) if o != 0: a = self._tab.Vector(o) - return self._tab.Get(flatbuffers.number_types.Uint64Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8)) + return self._tab.Get( + flatbuffers.number_types.Uint64Flags, + a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8), + ) return 0 # SampleEnvironmentData @@ -107,18 +119,61 @@ def TimestampsIsNone(self): def MessageCounter(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(20)) if o != 0: - return self._tab.Get(flatbuffers.number_types.Uint64Flags, o + self._tab.Pos) + return self._tab.Get( + flatbuffers.number_types.Uint64Flags, o + self._tab.Pos + ) return 0 -def SampleEnvironmentDataStart(builder): builder.StartObject(9) -def SampleEnvironmentDataAddName(builder, Name): builder.PrependUOffsetTRelativeSlot(0, 
flatbuffers.number_types.UOffsetTFlags.py_type(Name), 0) -def SampleEnvironmentDataAddChannel(builder, Channel): builder.PrependInt32Slot(1, Channel, 0) -def SampleEnvironmentDataAddPacketTimestamp(builder, PacketTimestamp): builder.PrependUint64Slot(2, PacketTimestamp, 0) -def SampleEnvironmentDataAddTimeDelta(builder, TimeDelta): builder.PrependFloat64Slot(3, TimeDelta, 0.0) -def SampleEnvironmentDataAddTimestampLocation(builder, TimestampLocation): builder.PrependInt8Slot(4, TimestampLocation, 0) -def SampleEnvironmentDataAddValuesType(builder, ValuesType): builder.PrependUint8Slot(5, ValuesType, 0) -def SampleEnvironmentDataAddValues(builder, Values): builder.PrependUOffsetTRelativeSlot(6, flatbuffers.number_types.UOffsetTFlags.py_type(Values), 0) -def SampleEnvironmentDataAddTimestamps(builder, Timestamps): builder.PrependUOffsetTRelativeSlot(7, flatbuffers.number_types.UOffsetTFlags.py_type(Timestamps), 0) -def SampleEnvironmentDataStartTimestampsVector(builder, numElems): return builder.StartVector(8, numElems, 8) -def SampleEnvironmentDataAddMessageCounter(builder, MessageCounter): builder.PrependUint64Slot(8, MessageCounter, 0) -def SampleEnvironmentDataEnd(builder): return builder.EndObject() + +def SampleEnvironmentDataStart(builder): + builder.StartObject(9) + + +def SampleEnvironmentDataAddName(builder, Name): + builder.PrependUOffsetTRelativeSlot( + 0, flatbuffers.number_types.UOffsetTFlags.py_type(Name), 0 + ) + + +def SampleEnvironmentDataAddChannel(builder, Channel): + builder.PrependInt32Slot(1, Channel, 0) + + +def SampleEnvironmentDataAddPacketTimestamp(builder, PacketTimestamp): + builder.PrependUint64Slot(2, PacketTimestamp, 0) + + +def SampleEnvironmentDataAddTimeDelta(builder, TimeDelta): + builder.PrependFloat64Slot(3, TimeDelta, 0.0) + + +def SampleEnvironmentDataAddTimestampLocation(builder, TimestampLocation): + builder.PrependInt8Slot(4, TimestampLocation, 0) + + +def SampleEnvironmentDataAddValuesType(builder, ValuesType): + 
builder.PrependUint8Slot(5, ValuesType, 0) + + +def SampleEnvironmentDataAddValues(builder, Values): + builder.PrependUOffsetTRelativeSlot( + 6, flatbuffers.number_types.UOffsetTFlags.py_type(Values), 0 + ) + + +def SampleEnvironmentDataAddTimestamps(builder, Timestamps): + builder.PrependUOffsetTRelativeSlot( + 7, flatbuffers.number_types.UOffsetTFlags.py_type(Timestamps), 0 + ) + + +def SampleEnvironmentDataStartTimestampsVector(builder, numElems): + return builder.StartVector(8, numElems, 8) + + +def SampleEnvironmentDataAddMessageCounter(builder, MessageCounter): + builder.PrependUint64Slot(8, MessageCounter, 0) + + +def SampleEnvironmentDataEnd(builder): + return builder.EndObject() diff --git a/streaming_data_types/fbschemas/sample_environment_senv/UInt16Array.py b/streaming_data_types/fbschemas/sample_environment_senv/UInt16Array.py index 2664579..633ed7c 100644 --- a/streaming_data_types/fbschemas/sample_environment_senv/UInt16Array.py +++ b/streaming_data_types/fbschemas/sample_environment_senv/UInt16Array.py @@ -1,13 +1,15 @@ # automatically generated by the FlatBuffers compiler, do not modify -# namespace: +# namespace: import flatbuffers from flatbuffers.compat import import_numpy + np = import_numpy() + class UInt16Array(object): - __slots__ = ['_tab'] + __slots__ = ["_tab"] @classmethod def GetRootAsUInt16Array(cls, buf, offset): @@ -18,7 +20,9 @@ def GetRootAsUInt16Array(cls, buf, offset): @classmethod def UInt16ArrayBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x73\x65\x6E\x76", size_prefixed=size_prefixed) + return flatbuffers.util.BufferHasIdentifier( + buf, offset, b"\x73\x65\x6E\x76", size_prefixed=size_prefixed + ) # UInt16Array def Init(self, buf, pos): @@ -29,7 +33,10 @@ def Value(self, j): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) if o != 0: a = self._tab.Vector(o) - return self._tab.Get(flatbuffers.number_types.Uint16Flags, a + 
flatbuffers.number_types.UOffsetTFlags.py_type(j * 2)) + return self._tab.Get( + flatbuffers.number_types.Uint16Flags, + a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 2), + ) return 0 # UInt16Array @@ -51,7 +58,20 @@ def ValueIsNone(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) return o == 0 -def UInt16ArrayStart(builder): builder.StartObject(1) -def UInt16ArrayAddValue(builder, value): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0) -def UInt16ArrayStartValueVector(builder, numElems): return builder.StartVector(2, numElems, 2) -def UInt16ArrayEnd(builder): return builder.EndObject() + +def UInt16ArrayStart(builder): + builder.StartObject(1) + + +def UInt16ArrayAddValue(builder, value): + builder.PrependUOffsetTRelativeSlot( + 0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0 + ) + + +def UInt16ArrayStartValueVector(builder, numElems): + return builder.StartVector(2, numElems, 2) + + +def UInt16ArrayEnd(builder): + return builder.EndObject() diff --git a/streaming_data_types/fbschemas/sample_environment_senv/UInt32Array.py b/streaming_data_types/fbschemas/sample_environment_senv/UInt32Array.py index 18116ae..d62876f 100644 --- a/streaming_data_types/fbschemas/sample_environment_senv/UInt32Array.py +++ b/streaming_data_types/fbschemas/sample_environment_senv/UInt32Array.py @@ -1,13 +1,15 @@ # automatically generated by the FlatBuffers compiler, do not modify -# namespace: +# namespace: import flatbuffers from flatbuffers.compat import import_numpy + np = import_numpy() + class UInt32Array(object): - __slots__ = ['_tab'] + __slots__ = ["_tab"] @classmethod def GetRootAsUInt32Array(cls, buf, offset): @@ -18,7 +20,9 @@ def GetRootAsUInt32Array(cls, buf, offset): @classmethod def UInt32ArrayBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x73\x65\x6E\x76", size_prefixed=size_prefixed) + return 
flatbuffers.util.BufferHasIdentifier( + buf, offset, b"\x73\x65\x6E\x76", size_prefixed=size_prefixed + ) # UInt32Array def Init(self, buf, pos): @@ -29,7 +33,10 @@ def Value(self, j): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) if o != 0: a = self._tab.Vector(o) - return self._tab.Get(flatbuffers.number_types.Uint32Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4)) + return self._tab.Get( + flatbuffers.number_types.Uint32Flags, + a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4), + ) return 0 # UInt32Array @@ -51,7 +58,20 @@ def ValueIsNone(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) return o == 0 -def UInt32ArrayStart(builder): builder.StartObject(1) -def UInt32ArrayAddValue(builder, value): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0) -def UInt32ArrayStartValueVector(builder, numElems): return builder.StartVector(4, numElems, 4) -def UInt32ArrayEnd(builder): return builder.EndObject() + +def UInt32ArrayStart(builder): + builder.StartObject(1) + + +def UInt32ArrayAddValue(builder, value): + builder.PrependUOffsetTRelativeSlot( + 0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0 + ) + + +def UInt32ArrayStartValueVector(builder, numElems): + return builder.StartVector(4, numElems, 4) + + +def UInt32ArrayEnd(builder): + return builder.EndObject() diff --git a/streaming_data_types/fbschemas/sample_environment_senv/UInt64Array.py b/streaming_data_types/fbschemas/sample_environment_senv/UInt64Array.py index e49674a..7fc6498 100644 --- a/streaming_data_types/fbschemas/sample_environment_senv/UInt64Array.py +++ b/streaming_data_types/fbschemas/sample_environment_senv/UInt64Array.py @@ -1,13 +1,15 @@ # automatically generated by the FlatBuffers compiler, do not modify -# namespace: +# namespace: import flatbuffers from flatbuffers.compat import import_numpy + np = import_numpy() + class UInt64Array(object): - __slots__ = 
['_tab'] + __slots__ = ["_tab"] @classmethod def GetRootAsUInt64Array(cls, buf, offset): @@ -18,7 +20,9 @@ def GetRootAsUInt64Array(cls, buf, offset): @classmethod def UInt64ArrayBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x73\x65\x6E\x76", size_prefixed=size_prefixed) + return flatbuffers.util.BufferHasIdentifier( + buf, offset, b"\x73\x65\x6E\x76", size_prefixed=size_prefixed + ) # UInt64Array def Init(self, buf, pos): @@ -29,7 +33,10 @@ def Value(self, j): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) if o != 0: a = self._tab.Vector(o) - return self._tab.Get(flatbuffers.number_types.Uint64Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8)) + return self._tab.Get( + flatbuffers.number_types.Uint64Flags, + a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8), + ) return 0 # UInt64Array @@ -51,7 +58,20 @@ def ValueIsNone(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) return o == 0 -def UInt64ArrayStart(builder): builder.StartObject(1) -def UInt64ArrayAddValue(builder, value): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0) -def UInt64ArrayStartValueVector(builder, numElems): return builder.StartVector(8, numElems, 8) -def UInt64ArrayEnd(builder): return builder.EndObject() + +def UInt64ArrayStart(builder): + builder.StartObject(1) + + +def UInt64ArrayAddValue(builder, value): + builder.PrependUOffsetTRelativeSlot( + 0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0 + ) + + +def UInt64ArrayStartValueVector(builder, numElems): + return builder.StartVector(8, numElems, 8) + + +def UInt64ArrayEnd(builder): + return builder.EndObject() diff --git a/streaming_data_types/fbschemas/sample_environment_senv/UInt8Array.py b/streaming_data_types/fbschemas/sample_environment_senv/UInt8Array.py index 8631654..702a7d7 100644 --- 
a/streaming_data_types/fbschemas/sample_environment_senv/UInt8Array.py +++ b/streaming_data_types/fbschemas/sample_environment_senv/UInt8Array.py @@ -1,13 +1,15 @@ # automatically generated by the FlatBuffers compiler, do not modify -# namespace: +# namespace: import flatbuffers from flatbuffers.compat import import_numpy + np = import_numpy() + class UInt8Array(object): - __slots__ = ['_tab'] + __slots__ = ["_tab"] @classmethod def GetRootAsUInt8Array(cls, buf, offset): @@ -18,7 +20,9 @@ def GetRootAsUInt8Array(cls, buf, offset): @classmethod def UInt8ArrayBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x73\x65\x6E\x76", size_prefixed=size_prefixed) + return flatbuffers.util.BufferHasIdentifier( + buf, offset, b"\x73\x65\x6E\x76", size_prefixed=size_prefixed + ) # UInt8Array def Init(self, buf, pos): @@ -29,7 +33,10 @@ def Value(self, j): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) if o != 0: a = self._tab.Vector(o) - return self._tab.Get(flatbuffers.number_types.Uint8Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1)) + return self._tab.Get( + flatbuffers.number_types.Uint8Flags, + a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1), + ) return 0 # UInt8Array @@ -51,7 +58,20 @@ def ValueIsNone(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) return o == 0 -def UInt8ArrayStart(builder): builder.StartObject(1) -def UInt8ArrayAddValue(builder, value): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0) -def UInt8ArrayStartValueVector(builder, numElems): return builder.StartVector(1, numElems, 1) -def UInt8ArrayEnd(builder): return builder.EndObject() + +def UInt8ArrayStart(builder): + builder.StartObject(1) + + +def UInt8ArrayAddValue(builder, value): + builder.PrependUOffsetTRelativeSlot( + 0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0 + ) + + +def 
UInt8ArrayStartValueVector(builder, numElems): + return builder.StartVector(1, numElems, 1) + + +def UInt8ArrayEnd(builder): + return builder.EndObject() diff --git a/streaming_data_types/fbschemas/sample_environment_senv/ValueUnion.py b/streaming_data_types/fbschemas/sample_environment_senv/ValueUnion.py index 069b7bc..50e6a2b 100644 --- a/streaming_data_types/fbschemas/sample_environment_senv/ValueUnion.py +++ b/streaming_data_types/fbschemas/sample_environment_senv/ValueUnion.py @@ -1,6 +1,7 @@ # automatically generated by the FlatBuffers compiler, do not modify -# namespace: +# namespace: + class ValueUnion(object): NONE = 0 @@ -12,4 +13,3 @@ class ValueUnion(object): UInt32Array = 6 Int64Array = 7 UInt64Array = 8 - diff --git a/streaming_data_types/finished_writing_wrdn.py b/streaming_data_types/finished_writing_wrdn.py index 0887893..e17737c 100644 --- a/streaming_data_types/finished_writing_wrdn.py +++ b/streaming_data_types/finished_writing_wrdn.py @@ -1,9 +1,9 @@ -from typing import Union +from typing import NamedTuple, Optional, Union + import flatbuffers + from streaming_data_types.fbschemas.finished_writing_wrdn import FinishedWriting from streaming_data_types.utils import check_schema_identifier -from typing import NamedTuple -from typing import Optional FILE_IDENTIFIER = b"wrdn" diff --git a/streaming_data_types/forwarder_config_update_rf5k.py b/streaming_data_types/forwarder_config_update_rf5k.py index 598d0da..5cc3b87 100644 --- a/streaming_data_types/forwarder_config_update_rf5k.py +++ b/streaming_data_types/forwarder_config_update_rf5k.py @@ -1,14 +1,16 @@ from collections import namedtuple +from typing import List, Union + import flatbuffers from flatbuffers.packer import struct as flatbuffer_struct -from streaming_data_types.utils import check_schema_identifier + from streaming_data_types.fbschemas.forwarder_config_update_rf5k import ( - UpdateType, ConfigUpdate, - Stream, Protocol, + Stream, + UpdateType, ) -from typing import List, Union +from 
streaming_data_types.utils import check_schema_identifier FILE_IDENTIFIER = b"rf5k" diff --git a/streaming_data_types/histogram_hs00.py b/streaming_data_types/histogram_hs00.py index 2ba6312..4c8700a 100644 --- a/streaming_data_types/histogram_hs00.py +++ b/streaming_data_types/histogram_hs00.py @@ -1,7 +1,8 @@ import flatbuffers import numpy -import streaming_data_types.fbschemas.histogram_hs00.ArrayFloat as ArrayFloat + import streaming_data_types.fbschemas.histogram_hs00.ArrayDouble as ArrayDouble +import streaming_data_types.fbschemas.histogram_hs00.ArrayFloat as ArrayFloat import streaming_data_types.fbschemas.histogram_hs00.ArrayUInt as ArrayUInt import streaming_data_types.fbschemas.histogram_hs00.ArrayULong as ArrayULong import streaming_data_types.fbschemas.histogram_hs00.DimensionMetaData as DimensionMetaData @@ -9,7 +10,6 @@ from streaming_data_types.fbschemas.histogram_hs00.Array import Array from streaming_data_types.utils import check_schema_identifier - FILE_IDENTIFIER = b"hs00" diff --git a/streaming_data_types/logdata_f142.py b/streaming_data_types/logdata_f142.py index 3c1aa1b..68de89f 100644 --- a/streaming_data_types/logdata_f142.py +++ b/streaming_data_types/logdata_f142.py @@ -1,144 +1,145 @@ +from collections import namedtuple +from typing import Any, Callable, Dict, Tuple, Union + import flatbuffers +import numpy as np + from streaming_data_types.fbschemas.logdata_f142 import LogData -from streaming_data_types.fbschemas.logdata_f142.Value import Value -from streaming_data_types.fbschemas.logdata_f142.UByte import ( - UByte, - UByteStart, - UByteAddValue, - UByteEnd, -) -from streaming_data_types.fbschemas.logdata_f142.ArrayUByte import ( - ArrayUByte, - ArrayUByteStart, - ArrayUByteAddValue, - ArrayUByteEnd, -) -from streaming_data_types.fbschemas.logdata_f142.Byte import ( - Byte, - ByteStart, - ByteAddValue, - ByteEnd, -) from streaming_data_types.fbschemas.logdata_f142.ArrayByte import ( ArrayByte, - ArrayByteStart, ArrayByteAddValue, 
ArrayByteEnd, + ArrayByteStart, ) -from streaming_data_types.fbschemas.logdata_f142.UShort import ( - UShort, - UShortStart, - UShortAddValue, - UShortEnd, +from streaming_data_types.fbschemas.logdata_f142.ArrayDouble import ( + ArrayDouble, + ArrayDoubleAddValue, + ArrayDoubleEnd, + ArrayDoubleStart, ) -from streaming_data_types.fbschemas.logdata_f142.ArrayUShort import ( - ArrayUShort, - ArrayUShortStart, - ArrayUShortAddValue, - ArrayUShortEnd, +from streaming_data_types.fbschemas.logdata_f142.ArrayFloat import ( + ArrayFloat, + ArrayFloatAddValue, + ArrayFloatEnd, + ArrayFloatStart, ) -from streaming_data_types.fbschemas.logdata_f142.Short import ( - Short, - ShortStart, - ShortAddValue, - ShortEnd, +from streaming_data_types.fbschemas.logdata_f142.ArrayInt import ( + ArrayInt, + ArrayIntAddValue, + ArrayIntEnd, + ArrayIntStart, +) +from streaming_data_types.fbschemas.logdata_f142.ArrayLong import ( + ArrayLong, + ArrayLongAddValue, + ArrayLongEnd, + ArrayLongStart, ) from streaming_data_types.fbschemas.logdata_f142.ArrayShort import ( ArrayShort, - ArrayShortStart, ArrayShortAddValue, ArrayShortEnd, + ArrayShortStart, ) -from streaming_data_types.fbschemas.logdata_f142.UInt import ( - UInt, - UIntStart, - UIntAddValue, - UIntEnd, +from streaming_data_types.fbschemas.logdata_f142.ArrayString import ( + ArrayString, + ArrayStringAddValue, + ArrayStringEnd, + ArrayStringStart, + ArrayStringStartValueVector, +) +from streaming_data_types.fbschemas.logdata_f142.ArrayUByte import ( + ArrayUByte, + ArrayUByteAddValue, + ArrayUByteEnd, + ArrayUByteStart, ) from streaming_data_types.fbschemas.logdata_f142.ArrayUInt import ( ArrayUInt, - ArrayUIntStart, ArrayUIntAddValue, ArrayUIntEnd, -) -from streaming_data_types.fbschemas.logdata_f142.Int import ( - Int, - IntStart, - IntAddValue, - IntEnd, -) -from streaming_data_types.fbschemas.logdata_f142.ArrayInt import ( - ArrayInt, - ArrayIntStart, - ArrayIntAddValue, - ArrayIntEnd, -) -from 
streaming_data_types.fbschemas.logdata_f142.ULong import ( - ULong, - ULongStart, - ULongAddValue, - ULongEnd, + ArrayUIntStart, ) from streaming_data_types.fbschemas.logdata_f142.ArrayULong import ( ArrayULong, - ArrayULongStart, ArrayULongAddValue, ArrayULongEnd, + ArrayULongStart, ) -from streaming_data_types.fbschemas.logdata_f142.Long import ( - Long, - LongStart, - LongAddValue, - LongEnd, +from streaming_data_types.fbschemas.logdata_f142.ArrayUShort import ( + ArrayUShort, + ArrayUShortAddValue, + ArrayUShortEnd, + ArrayUShortStart, ) -from streaming_data_types.fbschemas.logdata_f142.ArrayLong import ( - ArrayLong, - ArrayLongStart, - ArrayLongAddValue, - ArrayLongEnd, +from streaming_data_types.fbschemas.logdata_f142.Byte import ( + Byte, + ByteAddValue, + ByteEnd, + ByteStart, +) +from streaming_data_types.fbschemas.logdata_f142.Double import ( + Double, + DoubleAddValue, + DoubleEnd, + DoubleStart, ) from streaming_data_types.fbschemas.logdata_f142.Float import ( Float, - FloatStart, FloatAddValue, FloatEnd, + FloatStart, ) -from streaming_data_types.fbschemas.logdata_f142.ArrayFloat import ( - ArrayFloat, - ArrayFloatStart, - ArrayFloatAddValue, - ArrayFloatEnd, +from streaming_data_types.fbschemas.logdata_f142.Int import ( + Int, + IntAddValue, + IntEnd, + IntStart, ) -from streaming_data_types.fbschemas.logdata_f142.Double import ( - Double, - DoubleStart, - DoubleAddValue, - DoubleEnd, +from streaming_data_types.fbschemas.logdata_f142.Long import ( + Long, + LongAddValue, + LongEnd, + LongStart, ) -from streaming_data_types.fbschemas.logdata_f142.ArrayDouble import ( - ArrayDouble, - ArrayDoubleStart, - ArrayDoubleAddValue, - ArrayDoubleEnd, +from streaming_data_types.fbschemas.logdata_f142.Short import ( + Short, + ShortAddValue, + ShortEnd, + ShortStart, ) from streaming_data_types.fbschemas.logdata_f142.String import ( String, - StringStart, StringAddValue, StringEnd, + StringStart, ) -from streaming_data_types.fbschemas.logdata_f142.ArrayString 
import ( - ArrayString, - ArrayStringStart, - ArrayStringAddValue, - ArrayStringEnd, - ArrayStringStartValueVector, +from streaming_data_types.fbschemas.logdata_f142.UByte import ( + UByte, + UByteAddValue, + UByteEnd, + UByteStart, +) +from streaming_data_types.fbschemas.logdata_f142.UInt import ( + UInt, + UIntAddValue, + UIntEnd, + UIntStart, +) +from streaming_data_types.fbschemas.logdata_f142.ULong import ( + ULong, + ULongAddValue, + ULongEnd, + ULongStart, +) +from streaming_data_types.fbschemas.logdata_f142.UShort import ( + UShort, + UShortAddValue, + UShortEnd, + UShortStart, ) +from streaming_data_types.fbschemas.logdata_f142.Value import Value from streaming_data_types.utils import check_schema_identifier -import numpy as np -from typing import Any, Tuple, Callable, Dict, Union -from collections import namedtuple - FILE_IDENTIFIER = b"f142" diff --git a/streaming_data_types/nicos_cache_ns10.py b/streaming_data_types/nicos_cache_ns10.py index 7e7f237..c40ef19 100644 --- a/streaming_data_types/nicos_cache_ns10.py +++ b/streaming_data_types/nicos_cache_ns10.py @@ -1,9 +1,10 @@ from collections import namedtuple + import flatbuffers + from streaming_data_types.fbschemas.nicos_cache_ns10 import CacheEntry from streaming_data_types.utils import check_schema_identifier - FILE_IDENTIFIER = b"ns10" diff --git a/streaming_data_types/run_start_pl72.py b/streaming_data_types/run_start_pl72.py index dbb90c2..cbb2d8a 100644 --- a/streaming_data_types/run_start_pl72.py +++ b/streaming_data_types/run_start_pl72.py @@ -1,15 +1,16 @@ import time -from typing import Union, Optional +from collections import namedtuple +from datetime import datetime +from typing import NamedTuple, Optional, Union + import flatbuffers +import numpy as np + from streaming_data_types.fbschemas.run_start_pl72 import ( RunStart, SpectraDetectorMapping, ) from streaming_data_types.utils import check_schema_identifier -from typing import NamedTuple -from datetime import datetime -import numpy as 
np -from collections import namedtuple FILE_IDENTIFIER = b"pl72" diff --git a/streaming_data_types/run_stop_6s4t.py b/streaming_data_types/run_stop_6s4t.py index a957f10..fadf4ef 100644 --- a/streaming_data_types/run_stop_6s4t.py +++ b/streaming_data_types/run_stop_6s4t.py @@ -1,9 +1,10 @@ -from typing import Union +from datetime import datetime +from typing import NamedTuple, Union + import flatbuffers + from streaming_data_types.fbschemas.run_stop_6s4t import RunStop from streaming_data_types.utils import check_schema_identifier -from typing import NamedTuple -from datetime import datetime FILE_IDENTIFIER = b"6s4t" diff --git a/streaming_data_types/sample_environment_senv.py b/streaming_data_types/sample_environment_senv.py index 933be58..5635cc2 100644 --- a/streaming_data_types/sample_environment_senv.py +++ b/streaming_data_types/sample_environment_senv.py @@ -1,24 +1,26 @@ +from datetime import datetime, timezone +from typing import List, NamedTuple, Optional, Union + +import flatbuffers +import numpy as np + +from streaming_data_types.fbschemas.sample_environment_senv.Location import Location from streaming_data_types.fbschemas.sample_environment_senv.SampleEnvironmentData import ( SampleEnvironmentData, - SampleEnvironmentDataStart, - SampleEnvironmentDataEnd, - SampleEnvironmentDataAddName, SampleEnvironmentDataAddChannel, SampleEnvironmentDataAddMessageCounter, + SampleEnvironmentDataAddName, + SampleEnvironmentDataAddPacketTimestamp, SampleEnvironmentDataAddTimeDelta, SampleEnvironmentDataAddTimestampLocation, + SampleEnvironmentDataAddTimestamps, SampleEnvironmentDataAddValues, SampleEnvironmentDataAddValuesType, - SampleEnvironmentDataAddTimestamps, - SampleEnvironmentDataAddPacketTimestamp, + SampleEnvironmentDataEnd, + SampleEnvironmentDataStart, ) -from streaming_data_types.fbschemas.sample_environment_senv.Location import Location from streaming_data_types.fbschemas.sample_environment_senv.ValueUnion import ValueUnion -import flatbuffers -import 
numpy as np -from typing import Optional, Union, List, NamedTuple from streaming_data_types.utils import check_schema_identifier -from datetime import datetime, timezone FILE_IDENTIFIER = b"senv" @@ -114,12 +116,12 @@ def deserialise_senv(buffer: Union[bytearray, bytes]) -> Response: from flatbuffers.number_types import ( Int8Flags, - Uint8Flags, Int16Flags, - Uint16Flags, Int32Flags, - Uint32Flags, Int64Flags, + Uint8Flags, + Uint16Flags, + Uint32Flags, Uint64Flags, ) diff --git a/streaming_data_types/status_x5f2.py b/streaming_data_types/status_x5f2.py index a25d16e..6d43684 100644 --- a/streaming_data_types/status_x5f2.py +++ b/streaming_data_types/status_x5f2.py @@ -1,8 +1,9 @@ from collections import namedtuple + import flatbuffers -from streaming_data_types.utils import check_schema_identifier from streaming_data_types.fbschemas.status_x5f2 import Status +from streaming_data_types.utils import check_schema_identifier FILE_IDENTIFIER = b"x5f2" diff --git a/streaming_data_types/timestamps_tdct.py b/streaming_data_types/timestamps_tdct.py index 82105ed..7820b85 100644 --- a/streaming_data_types/timestamps_tdct.py +++ b/streaming_data_types/timestamps_tdct.py @@ -1,14 +1,16 @@ +from typing import List, NamedTuple, Optional, Union + +import flatbuffers +import numpy as np + from streaming_data_types.fbschemas.timestamps_tdct.timestamp import ( timestamp, - timestampStart, timestampAddName, - timestampAddTimestamps, timestampAddSequenceCounter, + timestampAddTimestamps, timestampEnd, + timestampStart, ) -import flatbuffers -import numpy as np -from typing import Optional, Union, List, NamedTuple from streaming_data_types.utils import check_schema_identifier FILE_IDENTIFIER = b"tdct" diff --git a/tests/test_6s4t.py b/tests/test_6s4t.py index 7ec16f9..772bb61 100644 --- a/tests/test_6s4t.py +++ b/tests/test_6s4t.py @@ -1,8 +1,8 @@ import pytest +from streaming_data_types import DESERIALISERS, SERIALISERS from streaming_data_types.exceptions import 
WrongSchemaException -from streaming_data_types.run_stop_6s4t import serialise_6s4t, deserialise_6s4t -from streaming_data_types import SERIALISERS, DESERIALISERS +from streaming_data_types.run_stop_6s4t import deserialise_6s4t, serialise_6s4t class TestSerialisation6s4t: diff --git a/tests/test_ADAr.py b/tests/test_ADAr.py index 25e5ddc..1840dc0 100644 --- a/tests/test_ADAr.py +++ b/tests/test_ADAr.py @@ -1,12 +1,14 @@ +from datetime import datetime, timezone + +import numpy as np import pytest + +from streaming_data_types import DESERIALISERS, SERIALISERS from streaming_data_types.area_detector_ADAr import ( - serialise_ADAr, - deserialise_ADAr, Attribute, + deserialise_ADAr, + serialise_ADAr, ) -from streaming_data_types import SERIALISERS, DESERIALISERS -import numpy as np -from datetime import datetime, timezone from streaming_data_types.exceptions import WrongSchemaException diff --git a/tests/test_NDAr.py b/tests/test_NDAr.py index 8c914cf..369997c 100644 --- a/tests/test_NDAr.py +++ b/tests/test_NDAr.py @@ -1,9 +1,10 @@ +import numpy as np import pytest -from streaming_data_types.area_detector_NDAr import serialise_ndar, deserialise_ndar + +from streaming_data_types import DESERIALISERS, SERIALISERS +from streaming_data_types.area_detector_NDAr import deserialise_ndar, serialise_ndar from streaming_data_types.exceptions import WrongSchemaException from streaming_data_types.fbschemas.NDAr_NDArray_schema.DType import DType -from streaming_data_types import SERIALISERS, DESERIALISERS -import numpy as np class TestSerialisationNDAr: diff --git a/tests/test_answ.py b/tests/test_answ.py index 7738058..b911770 100644 --- a/tests/test_answ.py +++ b/tests/test_answ.py @@ -1,12 +1,14 @@ from datetime import datetime, timezone + import pytest + +from streaming_data_types import DESERIALISERS, SERIALISERS from streaming_data_types.action_response_answ import ( - serialise_answ, - deserialise_answ, - ActionType, ActionOutcome, + ActionType, + deserialise_answ, + 
serialise_answ, ) -from streaming_data_types import SERIALISERS, DESERIALISERS from streaming_data_types.exceptions import WrongSchemaException diff --git a/tests/test_ep00.py b/tests/test_ep00.py index 2abfa31..1c716db 100644 --- a/tests/test_ep00.py +++ b/tests/test_ep00.py @@ -1,11 +1,12 @@ import pytest -from streaming_data_types.exceptions import WrongSchemaException -from streaming_data_types.fbschemas.epics_connection_info_ep00 import EventType + +from streaming_data_types import DESERIALISERS, SERIALISERS from streaming_data_types.epics_connection_info_ep00 import ( - serialise_ep00, deserialise_ep00, + serialise_ep00, ) -from streaming_data_types import SERIALISERS, DESERIALISERS +from streaming_data_types.exceptions import WrongSchemaException +from streaming_data_types.fbschemas.epics_connection_info_ep00 import EventType class TestSerialisationEp00: diff --git a/tests/test_ev42.py b/tests/test_ev42.py index 545f8dd..27f30b8 100644 --- a/tests/test_ev42.py +++ b/tests/test_ev42.py @@ -1,7 +1,8 @@ import numpy as np import pytest -from streaming_data_types.eventdata_ev42 import serialise_ev42, deserialise_ev42 -from streaming_data_types import SERIALISERS, DESERIALISERS + +from streaming_data_types import DESERIALISERS, SERIALISERS +from streaming_data_types.eventdata_ev42 import deserialise_ev42, serialise_ev42 from streaming_data_types.exceptions import WrongSchemaException diff --git a/tests/test_ev43.py b/tests/test_ev43.py index 946d7ed..2a46332 100644 --- a/tests/test_ev43.py +++ b/tests/test_ev43.py @@ -1,7 +1,8 @@ import numpy as np import pytest -from streaming_data_types.eventdata_ev43 import serialise_ev43, deserialise_ev43 -from streaming_data_types import SERIALISERS, DESERIALISERS + +from streaming_data_types import DESERIALISERS, SERIALISERS +from streaming_data_types.eventdata_ev43 import deserialise_ev43, serialise_ev43 from streaming_data_types.exceptions import WrongSchemaException diff --git a/tests/test_f142.py b/tests/test_f142.py 
index b0d9796..e216c3e 100644 --- a/tests/test_f142.py +++ b/tests/test_f142.py @@ -1,10 +1,11 @@ -import pytest import numpy as np +import pytest + +from streaming_data_types import DESERIALISERS, SERIALISERS from streaming_data_types.exceptions import WrongSchemaException -from streaming_data_types.logdata_f142 import serialise_f142, deserialise_f142 from streaming_data_types.fbschemas.logdata_f142.AlarmSeverity import AlarmSeverity from streaming_data_types.fbschemas.logdata_f142.AlarmStatus import AlarmStatus -from streaming_data_types import SERIALISERS, DESERIALISERS +from streaming_data_types.logdata_f142 import deserialise_f142, serialise_f142 class TestSerialisationF142: diff --git a/tests/test_hs00.py b/tests/test_hs00.py index f552869..cfca5a0 100644 --- a/tests/test_hs00.py +++ b/tests/test_hs00.py @@ -1,8 +1,9 @@ import numpy as np import pytest + +from streaming_data_types import DESERIALISERS, SERIALISERS from streaming_data_types.exceptions import WrongSchemaException -from streaming_data_types.histogram_hs00 import serialise_hs00, deserialise_hs00 -from streaming_data_types import SERIALISERS, DESERIALISERS +from streaming_data_types.histogram_hs00 import deserialise_hs00, serialise_hs00 def create_test_data_with_type(numpy_type): diff --git a/tests/test_ns10.py b/tests/test_ns10.py index 7d90174..346c66e 100644 --- a/tests/test_ns10.py +++ b/tests/test_ns10.py @@ -1,7 +1,8 @@ import pytest + +from streaming_data_types import DESERIALISERS, SERIALISERS from streaming_data_types.exceptions import WrongSchemaException -from streaming_data_types.nicos_cache_ns10 import serialise_ns10, deserialise_ns10 -from streaming_data_types import SERIALISERS, DESERIALISERS +from streaming_data_types.nicos_cache_ns10 import deserialise_ns10, serialise_ns10 class TestSerialisationNs10: diff --git a/tests/test_pl72.py b/tests/test_pl72.py index 0d7bf77..eab7d78 100644 --- a/tests/test_pl72.py +++ b/tests/test_pl72.py @@ -1,12 +1,13 @@ +import numpy as np import 
pytest + +from streaming_data_types import DESERIALISERS, SERIALISERS from streaming_data_types.exceptions import WrongSchemaException from streaming_data_types.run_start_pl72 import ( - serialise_pl72, - deserialise_pl72, DetectorSpectrumMap, + deserialise_pl72, + serialise_pl72, ) -from streaming_data_types import SERIALISERS, DESERIALISERS -import numpy as np class TestSerialisationPl72: diff --git a/tests/test_rf5k.py b/tests/test_rf5k.py index 74c21f9..079b4c9 100644 --- a/tests/test_rf5k.py +++ b/tests/test_rf5k.py @@ -1,15 +1,16 @@ import pytest + +from streaming_data_types import DESERIALISERS, SERIALISERS from streaming_data_types.exceptions import WrongSchemaException -from streaming_data_types.forwarder_config_update_rf5k import ( - serialise_rf5k, - deserialise_rf5k, - StreamInfo, - Protocol, -) -from streaming_data_types import SERIALISERS, DESERIALISERS from streaming_data_types.fbschemas.forwarder_config_update_rf5k.UpdateType import ( UpdateType, ) +from streaming_data_types.forwarder_config_update_rf5k import ( + Protocol, + StreamInfo, + deserialise_rf5k, + serialise_rf5k, +) class TestSerialisationRf5k: diff --git a/tests/test_senv.py b/tests/test_senv.py index e90c1d6..e7d5ff1 100644 --- a/tests/test_senv.py +++ b/tests/test_senv.py @@ -1,13 +1,14 @@ +from datetime import datetime, timezone + import numpy as np +import pytest + +from streaming_data_types import DESERIALISERS, SERIALISERS +from streaming_data_types.fbschemas.sample_environment_senv.Location import Location from streaming_data_types.sample_environment_senv import ( - serialise_senv, deserialise_senv, + serialise_senv, ) -from streaming_data_types import SERIALISERS, DESERIALISERS -from datetime import datetime, timezone -from streaming_data_types.fbschemas.sample_environment_senv.Location import Location -import pytest - entry_1 = { "name": "some_name", diff --git a/tests/test_tdct.py b/tests/test_tdct.py index 0858695..10092b7 100644 --- a/tests/test_tdct.py +++ 
b/tests/test_tdct.py @@ -1,9 +1,9 @@ -import pytest import numpy as np +import pytest +from streaming_data_types import DESERIALISERS, SERIALISERS from streaming_data_types.exceptions import WrongSchemaException -from streaming_data_types.timestamps_tdct import serialise_tdct, deserialise_tdct -from streaming_data_types import SERIALISERS, DESERIALISERS +from streaming_data_types.timestamps_tdct import deserialise_tdct, serialise_tdct class TestSerialisationTdct: diff --git a/tests/test_utils.py b/tests/test_utils.py index 93f62d2..6c33224 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -1,4 +1,5 @@ import pytest + from streaming_data_types.exceptions import ShortBufferException from streaming_data_types.utils import check_schema_identifier diff --git a/tests/test_wrdn.py b/tests/test_wrdn.py index 7a99bca..d85bc56 100644 --- a/tests/test_wrdn.py +++ b/tests/test_wrdn.py @@ -1,7 +1,8 @@ import pytest + +from streaming_data_types import DESERIALISERS, SERIALISERS from streaming_data_types.exceptions import WrongSchemaException -from streaming_data_types.finished_writing_wrdn import serialise_wrdn, deserialise_wrdn -from streaming_data_types import SERIALISERS, DESERIALISERS +from streaming_data_types.finished_writing_wrdn import deserialise_wrdn, serialise_wrdn class TestEncoder(object): diff --git a/tests/test_x52f.py b/tests/test_x52f.py index 9e4c03f..cbb3fc3 100644 --- a/tests/test_x52f.py +++ b/tests/test_x52f.py @@ -1,8 +1,8 @@ import pytest -from streaming_data_types.exceptions import WrongSchemaException -from streaming_data_types.status_x5f2 import serialise_x5f2, deserialise_x5f2 -from streaming_data_types import SERIALISERS, DESERIALISERS +from streaming_data_types import DESERIALISERS, SERIALISERS +from streaming_data_types.exceptions import WrongSchemaException +from streaming_data_types.status_x5f2 import deserialise_x5f2, serialise_x5f2 original_entry = { "software_name": "nicos/test", From 7369e5557aa4390cfaf306e6a21fea3d126a13a2 Mon 
Sep 17 00:00:00 2001 From: Matt Clarke Date: Thu, 17 Feb 2022 08:04:21 +0100 Subject: [PATCH 266/363] Fixed merge conflict issue --- streaming_data_types/__init__.py | 1 + .../fbschemas/histogram_hs01/Array.py | 4 +- .../fbschemas/histogram_hs01/ArrayDouble.py | 36 ++++-- .../fbschemas/histogram_hs01/ArrayFloat.py | 36 ++++-- .../fbschemas/histogram_hs01/ArrayInt.py | 36 ++++-- .../fbschemas/histogram_hs01/ArrayLong.py | 36 ++++-- .../histogram_hs01/DimensionMetaData.py | 51 ++++++-- .../histogram_hs01/EventHistogram.py | 114 +++++++++++++++--- streaming_data_types/histogram_hs01.py | 4 +- tests/test_hs01.py | 5 +- 10 files changed, 255 insertions(+), 68 deletions(-) diff --git a/streaming_data_types/__init__.py b/streaming_data_types/__init__.py index 6486621..40649ab 100644 --- a/streaming_data_types/__init__.py +++ b/streaming_data_types/__init__.py @@ -14,6 +14,7 @@ serialise_rf5k, ) from streaming_data_types.histogram_hs00 import deserialise_hs00, serialise_hs00 +from streaming_data_types.histogram_hs01 import deserialise_hs01, serialise_hs01 from streaming_data_types.logdata_f142 import deserialise_f142, serialise_f142 from streaming_data_types.nicos_cache_ns10 import deserialise_ns10, serialise_ns10 from streaming_data_types.run_start_pl72 import deserialise_pl72, serialise_pl72 diff --git a/streaming_data_types/fbschemas/histogram_hs01/Array.py b/streaming_data_types/fbschemas/histogram_hs01/Array.py index f386136..c1ac39d 100644 --- a/streaming_data_types/fbschemas/histogram_hs01/Array.py +++ b/streaming_data_types/fbschemas/histogram_hs01/Array.py @@ -1,6 +1,7 @@ # automatically generated by the FlatBuffers compiler, do not modify -# namespace: +# namespace: + class Array(object): NONE = 0 @@ -8,4 +9,3 @@ class Array(object): ArrayLong = 2 ArrayDouble = 3 ArrayFloat = 4 - diff --git a/streaming_data_types/fbschemas/histogram_hs01/ArrayDouble.py b/streaming_data_types/fbschemas/histogram_hs01/ArrayDouble.py index 9bf9497..47a10d5 100644 --- 
a/streaming_data_types/fbschemas/histogram_hs01/ArrayDouble.py +++ b/streaming_data_types/fbschemas/histogram_hs01/ArrayDouble.py @@ -1,13 +1,15 @@ # automatically generated by the FlatBuffers compiler, do not modify -# namespace: +# namespace: import flatbuffers from flatbuffers.compat import import_numpy + np = import_numpy() + class ArrayDouble(object): - __slots__ = ['_tab'] + __slots__ = ["_tab"] @classmethod def GetRootAsArrayDouble(cls, buf, offset): @@ -18,7 +20,9 @@ def GetRootAsArrayDouble(cls, buf, offset): @classmethod def ArrayDoubleBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x68\x73\x30\x31", size_prefixed=size_prefixed) + return flatbuffers.util.BufferHasIdentifier( + buf, offset, b"\x68\x73\x30\x31", size_prefixed=size_prefixed + ) # ArrayDouble def Init(self, buf, pos): @@ -29,7 +33,10 @@ def Value(self, j): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) if o != 0: a = self._tab.Vector(o) - return self._tab.Get(flatbuffers.number_types.Float64Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8)) + return self._tab.Get( + flatbuffers.number_types.Float64Flags, + a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8), + ) return 0 # ArrayDouble @@ -51,7 +58,20 @@ def ValueIsNone(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) return o == 0 -def ArrayDoubleStart(builder): builder.StartObject(1) -def ArrayDoubleAddValue(builder, value): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0) -def ArrayDoubleStartValueVector(builder, numElems): return builder.StartVector(8, numElems, 8) -def ArrayDoubleEnd(builder): return builder.EndObject() + +def ArrayDoubleStart(builder): + builder.StartObject(1) + + +def ArrayDoubleAddValue(builder, value): + builder.PrependUOffsetTRelativeSlot( + 0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0 + ) + + +def 
ArrayDoubleStartValueVector(builder, numElems): + return builder.StartVector(8, numElems, 8) + + +def ArrayDoubleEnd(builder): + return builder.EndObject() diff --git a/streaming_data_types/fbschemas/histogram_hs01/ArrayFloat.py b/streaming_data_types/fbschemas/histogram_hs01/ArrayFloat.py index fe27916..c1e79de 100644 --- a/streaming_data_types/fbschemas/histogram_hs01/ArrayFloat.py +++ b/streaming_data_types/fbschemas/histogram_hs01/ArrayFloat.py @@ -1,13 +1,15 @@ # automatically generated by the FlatBuffers compiler, do not modify -# namespace: +# namespace: import flatbuffers from flatbuffers.compat import import_numpy + np = import_numpy() + class ArrayFloat(object): - __slots__ = ['_tab'] + __slots__ = ["_tab"] @classmethod def GetRootAsArrayFloat(cls, buf, offset): @@ -18,7 +20,9 @@ def GetRootAsArrayFloat(cls, buf, offset): @classmethod def ArrayFloatBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x68\x73\x30\x31", size_prefixed=size_prefixed) + return flatbuffers.util.BufferHasIdentifier( + buf, offset, b"\x68\x73\x30\x31", size_prefixed=size_prefixed + ) # ArrayFloat def Init(self, buf, pos): @@ -29,7 +33,10 @@ def Value(self, j): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) if o != 0: a = self._tab.Vector(o) - return self._tab.Get(flatbuffers.number_types.Float32Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4)) + return self._tab.Get( + flatbuffers.number_types.Float32Flags, + a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4), + ) return 0 # ArrayFloat @@ -51,7 +58,20 @@ def ValueIsNone(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) return o == 0 -def ArrayFloatStart(builder): builder.StartObject(1) -def ArrayFloatAddValue(builder, value): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0) -def ArrayFloatStartValueVector(builder, numElems): return 
builder.StartVector(4, numElems, 4) -def ArrayFloatEnd(builder): return builder.EndObject() + +def ArrayFloatStart(builder): + builder.StartObject(1) + + +def ArrayFloatAddValue(builder, value): + builder.PrependUOffsetTRelativeSlot( + 0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0 + ) + + +def ArrayFloatStartValueVector(builder, numElems): + return builder.StartVector(4, numElems, 4) + + +def ArrayFloatEnd(builder): + return builder.EndObject() diff --git a/streaming_data_types/fbschemas/histogram_hs01/ArrayInt.py b/streaming_data_types/fbschemas/histogram_hs01/ArrayInt.py index 3c9fdc9..9ff65ae 100644 --- a/streaming_data_types/fbschemas/histogram_hs01/ArrayInt.py +++ b/streaming_data_types/fbschemas/histogram_hs01/ArrayInt.py @@ -1,13 +1,15 @@ # automatically generated by the FlatBuffers compiler, do not modify -# namespace: +# namespace: import flatbuffers from flatbuffers.compat import import_numpy + np = import_numpy() + class ArrayInt(object): - __slots__ = ['_tab'] + __slots__ = ["_tab"] @classmethod def GetRootAsArrayInt(cls, buf, offset): @@ -18,7 +20,9 @@ def GetRootAsArrayInt(cls, buf, offset): @classmethod def ArrayIntBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x68\x73\x30\x31", size_prefixed=size_prefixed) + return flatbuffers.util.BufferHasIdentifier( + buf, offset, b"\x68\x73\x30\x31", size_prefixed=size_prefixed + ) # ArrayInt def Init(self, buf, pos): @@ -29,7 +33,10 @@ def Value(self, j): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) if o != 0: a = self._tab.Vector(o) - return self._tab.Get(flatbuffers.number_types.Int32Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4)) + return self._tab.Get( + flatbuffers.number_types.Int32Flags, + a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4), + ) return 0 # ArrayInt @@ -51,7 +58,20 @@ def ValueIsNone(self): o = 
flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) return o == 0 -def ArrayIntStart(builder): builder.StartObject(1) -def ArrayIntAddValue(builder, value): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0) -def ArrayIntStartValueVector(builder, numElems): return builder.StartVector(4, numElems, 4) -def ArrayIntEnd(builder): return builder.EndObject() + +def ArrayIntStart(builder): + builder.StartObject(1) + + +def ArrayIntAddValue(builder, value): + builder.PrependUOffsetTRelativeSlot( + 0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0 + ) + + +def ArrayIntStartValueVector(builder, numElems): + return builder.StartVector(4, numElems, 4) + + +def ArrayIntEnd(builder): + return builder.EndObject() diff --git a/streaming_data_types/fbschemas/histogram_hs01/ArrayLong.py b/streaming_data_types/fbschemas/histogram_hs01/ArrayLong.py index 68e0d4f..9d96446 100644 --- a/streaming_data_types/fbschemas/histogram_hs01/ArrayLong.py +++ b/streaming_data_types/fbschemas/histogram_hs01/ArrayLong.py @@ -1,13 +1,15 @@ # automatically generated by the FlatBuffers compiler, do not modify -# namespace: +# namespace: import flatbuffers from flatbuffers.compat import import_numpy + np = import_numpy() + class ArrayLong(object): - __slots__ = ['_tab'] + __slots__ = ["_tab"] @classmethod def GetRootAsArrayLong(cls, buf, offset): @@ -18,7 +20,9 @@ def GetRootAsArrayLong(cls, buf, offset): @classmethod def ArrayLongBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x68\x73\x30\x31", size_prefixed=size_prefixed) + return flatbuffers.util.BufferHasIdentifier( + buf, offset, b"\x68\x73\x30\x31", size_prefixed=size_prefixed + ) # ArrayLong def Init(self, buf, pos): @@ -29,7 +33,10 @@ def Value(self, j): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) if o != 0: a = self._tab.Vector(o) - return 
self._tab.Get(flatbuffers.number_types.Int64Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8)) + return self._tab.Get( + flatbuffers.number_types.Int64Flags, + a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8), + ) return 0 # ArrayLong @@ -51,7 +58,20 @@ def ValueIsNone(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) return o == 0 -def ArrayLongStart(builder): builder.StartObject(1) -def ArrayLongAddValue(builder, value): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0) -def ArrayLongStartValueVector(builder, numElems): return builder.StartVector(8, numElems, 8) -def ArrayLongEnd(builder): return builder.EndObject() + +def ArrayLongStart(builder): + builder.StartObject(1) + + +def ArrayLongAddValue(builder, value): + builder.PrependUOffsetTRelativeSlot( + 0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0 + ) + + +def ArrayLongStartValueVector(builder, numElems): + return builder.StartVector(8, numElems, 8) + + +def ArrayLongEnd(builder): + return builder.EndObject() diff --git a/streaming_data_types/fbschemas/histogram_hs01/DimensionMetaData.py b/streaming_data_types/fbschemas/histogram_hs01/DimensionMetaData.py index eea5fe6..9c5631d 100644 --- a/streaming_data_types/fbschemas/histogram_hs01/DimensionMetaData.py +++ b/streaming_data_types/fbschemas/histogram_hs01/DimensionMetaData.py @@ -1,13 +1,15 @@ # automatically generated by the FlatBuffers compiler, do not modify -# namespace: +# namespace: import flatbuffers from flatbuffers.compat import import_numpy + np = import_numpy() + class DimensionMetaData(object): - __slots__ = ['_tab'] + __slots__ = ["_tab"] @classmethod def GetRootAsDimensionMetaData(cls, buf, offset): @@ -18,7 +20,9 @@ def GetRootAsDimensionMetaData(cls, buf, offset): @classmethod def DimensionMetaDataBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier(buf, offset, 
b"\x68\x73\x30\x31", size_prefixed=size_prefixed) + return flatbuffers.util.BufferHasIdentifier( + buf, offset, b"\x68\x73\x30\x31", size_prefixed=size_prefixed + ) # DimensionMetaData def Init(self, buf, pos): @@ -57,15 +61,42 @@ def BinBoundaries(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) if o != 0: from flatbuffers.table import Table + obj = Table(bytearray(), 0) self._tab.Union(obj, o) return obj return None -def DimensionMetaDataStart(builder): builder.StartObject(5) -def DimensionMetaDataAddLength(builder, length): builder.PrependInt32Slot(0, length, 0) -def DimensionMetaDataAddUnit(builder, unit): builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(unit), 0) -def DimensionMetaDataAddLabel(builder, label): builder.PrependUOffsetTRelativeSlot(2, flatbuffers.number_types.UOffsetTFlags.py_type(label), 0) -def DimensionMetaDataAddBinBoundariesType(builder, binBoundariesType): builder.PrependUint8Slot(3, binBoundariesType, 0) -def DimensionMetaDataAddBinBoundaries(builder, binBoundaries): builder.PrependUOffsetTRelativeSlot(4, flatbuffers.number_types.UOffsetTFlags.py_type(binBoundaries), 0) -def DimensionMetaDataEnd(builder): return builder.EndObject() + +def DimensionMetaDataStart(builder): + builder.StartObject(5) + + +def DimensionMetaDataAddLength(builder, length): + builder.PrependInt32Slot(0, length, 0) + + +def DimensionMetaDataAddUnit(builder, unit): + builder.PrependUOffsetTRelativeSlot( + 1, flatbuffers.number_types.UOffsetTFlags.py_type(unit), 0 + ) + + +def DimensionMetaDataAddLabel(builder, label): + builder.PrependUOffsetTRelativeSlot( + 2, flatbuffers.number_types.UOffsetTFlags.py_type(label), 0 + ) + + +def DimensionMetaDataAddBinBoundariesType(builder, binBoundariesType): + builder.PrependUint8Slot(3, binBoundariesType, 0) + + +def DimensionMetaDataAddBinBoundaries(builder, binBoundaries): + builder.PrependUOffsetTRelativeSlot( + 4, 
flatbuffers.number_types.UOffsetTFlags.py_type(binBoundaries), 0 + ) + + +def DimensionMetaDataEnd(builder): + return builder.EndObject() diff --git a/streaming_data_types/fbschemas/histogram_hs01/EventHistogram.py b/streaming_data_types/fbschemas/histogram_hs01/EventHistogram.py index f82d198..78117ab 100644 --- a/streaming_data_types/fbschemas/histogram_hs01/EventHistogram.py +++ b/streaming_data_types/fbschemas/histogram_hs01/EventHistogram.py @@ -4,10 +4,12 @@ import flatbuffers from flatbuffers.compat import import_numpy + np = import_numpy() + class EventHistogram(object): - __slots__ = ['_tab'] + __slots__ = ["_tab"] @classmethod def GetRootAsEventHistogram(cls, buf, offset): @@ -18,7 +20,9 @@ def GetRootAsEventHistogram(cls, buf, offset): @classmethod def EventHistogramBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x68\x73\x30\x31", size_prefixed=size_prefixed) + return flatbuffers.util.BufferHasIdentifier( + buf, offset, b"\x68\x73\x30\x31", size_prefixed=size_prefixed + ) # EventHistogram def Init(self, buf, pos): @@ -46,6 +50,7 @@ def DimMetadata(self, j): x += flatbuffers.number_types.UOffsetTFlags.py_type(j) * 4 x = self._tab.Indirect(x) from .DimensionMetaData import DimensionMetaData + obj = DimensionMetaData() obj.Init(self._tab.Bytes, x) return obj @@ -75,7 +80,10 @@ def CurrentShape(self, j): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) if o != 0: a = self._tab.Vector(o) - return self._tab.Get(flatbuffers.number_types.Int32Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4)) + return self._tab.Get( + flatbuffers.number_types.Int32Flags, + a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4), + ) return 0 # EventHistogram @@ -102,7 +110,10 @@ def Offset(self, j): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) if o != 0: a = self._tab.Vector(o) - return self._tab.Get(flatbuffers.number_types.Int32Flags, a 
+ flatbuffers.number_types.UOffsetTFlags.py_type(j * 4)) + return self._tab.Get( + flatbuffers.number_types.Int32Flags, + a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4), + ) return 0 # EventHistogram @@ -136,6 +147,7 @@ def Data(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(18)) if o != 0: from flatbuffers.table import Table + obj = Table(bytearray(), 0) self._tab.Union(obj, o) return obj @@ -153,6 +165,7 @@ def Errors(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(22)) if o != 0: from flatbuffers.table import Table + obj = Table(bytearray(), 0) self._tab.Union(obj, o) return obj @@ -165,19 +178,80 @@ def Info(self): return self._tab.String(o + self._tab.Pos) return None -def EventHistogramStart(builder): builder.StartObject(11) -def EventHistogramAddSource(builder, source): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(source), 0) -def EventHistogramAddTimestamp(builder, timestamp): builder.PrependInt64Slot(1, timestamp, 0) -def EventHistogramAddDimMetadata(builder, dimMetadata): builder.PrependUOffsetTRelativeSlot(2, flatbuffers.number_types.UOffsetTFlags.py_type(dimMetadata), 0) -def EventHistogramStartDimMetadataVector(builder, numElems): return builder.StartVector(4, numElems, 4) -def EventHistogramAddLastMetadataTimestamp(builder, lastMetadataTimestamp): builder.PrependInt64Slot(3, lastMetadataTimestamp, 0) -def EventHistogramAddCurrentShape(builder, currentShape): builder.PrependUOffsetTRelativeSlot(4, flatbuffers.number_types.UOffsetTFlags.py_type(currentShape), 0) -def EventHistogramStartCurrentShapeVector(builder, numElems): return builder.StartVector(4, numElems, 4) -def EventHistogramAddOffset(builder, offset): builder.PrependUOffsetTRelativeSlot(5, flatbuffers.number_types.UOffsetTFlags.py_type(offset), 0) -def EventHistogramStartOffsetVector(builder, numElems): return builder.StartVector(4, numElems, 4) -def EventHistogramAddDataType(builder, 
dataType): builder.PrependUint8Slot(6, dataType, 0) -def EventHistogramAddData(builder, data): builder.PrependUOffsetTRelativeSlot(7, flatbuffers.number_types.UOffsetTFlags.py_type(data), 0) -def EventHistogramAddErrorsType(builder, errorsType): builder.PrependUint8Slot(8, errorsType, 0) -def EventHistogramAddErrors(builder, errors): builder.PrependUOffsetTRelativeSlot(9, flatbuffers.number_types.UOffsetTFlags.py_type(errors), 0) -def EventHistogramAddInfo(builder, info): builder.PrependUOffsetTRelativeSlot(10, flatbuffers.number_types.UOffsetTFlags.py_type(info), 0) -def EventHistogramEnd(builder): return builder.EndObject() + +def EventHistogramStart(builder): + builder.StartObject(11) + + +def EventHistogramAddSource(builder, source): + builder.PrependUOffsetTRelativeSlot( + 0, flatbuffers.number_types.UOffsetTFlags.py_type(source), 0 + ) + + +def EventHistogramAddTimestamp(builder, timestamp): + builder.PrependInt64Slot(1, timestamp, 0) + + +def EventHistogramAddDimMetadata(builder, dimMetadata): + builder.PrependUOffsetTRelativeSlot( + 2, flatbuffers.number_types.UOffsetTFlags.py_type(dimMetadata), 0 + ) + + +def EventHistogramStartDimMetadataVector(builder, numElems): + return builder.StartVector(4, numElems, 4) + + +def EventHistogramAddLastMetadataTimestamp(builder, lastMetadataTimestamp): + builder.PrependInt64Slot(3, lastMetadataTimestamp, 0) + + +def EventHistogramAddCurrentShape(builder, currentShape): + builder.PrependUOffsetTRelativeSlot( + 4, flatbuffers.number_types.UOffsetTFlags.py_type(currentShape), 0 + ) + + +def EventHistogramStartCurrentShapeVector(builder, numElems): + return builder.StartVector(4, numElems, 4) + + +def EventHistogramAddOffset(builder, offset): + builder.PrependUOffsetTRelativeSlot( + 5, flatbuffers.number_types.UOffsetTFlags.py_type(offset), 0 + ) + + +def EventHistogramStartOffsetVector(builder, numElems): + return builder.StartVector(4, numElems, 4) + + +def EventHistogramAddDataType(builder, dataType): + 
builder.PrependUint8Slot(6, dataType, 0) + + +def EventHistogramAddData(builder, data): + builder.PrependUOffsetTRelativeSlot( + 7, flatbuffers.number_types.UOffsetTFlags.py_type(data), 0 + ) + + +def EventHistogramAddErrorsType(builder, errorsType): + builder.PrependUint8Slot(8, errorsType, 0) + + +def EventHistogramAddErrors(builder, errors): + builder.PrependUOffsetTRelativeSlot( + 9, flatbuffers.number_types.UOffsetTFlags.py_type(errors), 0 + ) + + +def EventHistogramAddInfo(builder, info): + builder.PrependUOffsetTRelativeSlot( + 10, flatbuffers.number_types.UOffsetTFlags.py_type(info), 0 + ) + + +def EventHistogramEnd(builder): + return builder.EndObject() diff --git a/streaming_data_types/histogram_hs01.py b/streaming_data_types/histogram_hs01.py index 62fc1b3..09d7faf 100644 --- a/streaming_data_types/histogram_hs01.py +++ b/streaming_data_types/histogram_hs01.py @@ -1,7 +1,8 @@ import flatbuffers import numpy -import streaming_data_types.fbschemas.histogram_hs01.ArrayFloat as ArrayFloat + import streaming_data_types.fbschemas.histogram_hs01.ArrayDouble as ArrayDouble +import streaming_data_types.fbschemas.histogram_hs01.ArrayFloat as ArrayFloat import streaming_data_types.fbschemas.histogram_hs01.ArrayInt as ArrayInt import streaming_data_types.fbschemas.histogram_hs01.ArrayLong as ArrayLong import streaming_data_types.fbschemas.histogram_hs01.DimensionMetaData as DimensionMetaData @@ -9,7 +10,6 @@ from streaming_data_types.fbschemas.histogram_hs01.Array import Array from streaming_data_types.utils import check_schema_identifier - FILE_IDENTIFIER = b"hs01" diff --git a/tests/test_hs01.py b/tests/test_hs01.py index 40bcf7e..f27b83a 100644 --- a/tests/test_hs01.py +++ b/tests/test_hs01.py @@ -1,8 +1,9 @@ import numpy as np import pytest + +from streaming_data_types import DESERIALISERS, SERIALISERS from streaming_data_types.exceptions import WrongSchemaException -from streaming_data_types.histogram_hs01 import serialise_hs01, deserialise_hs01 -from 
streaming_data_types import SERIALISERS, DESERIALISERS +from streaming_data_types.histogram_hs01 import deserialise_hs01, serialise_hs01 def create_test_data_with_type(numpy_type): From c7faf42fa081b05d67b8a1b369765e01d8459f14 Mon Sep 17 00:00:00 2001 From: Michele Brambilla Date: Mon, 4 Apr 2022 11:03:49 +0200 Subject: [PATCH 267/363] Update _version.py after hs01 support has been added --- streaming_data_types/_version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/streaming_data_types/_version.py b/streaming_data_types/_version.py index d8abffc..241e44f 100644 --- a/streaming_data_types/_version.py +++ b/streaming_data_types/_version.py @@ -1,4 +1,4 @@ # Version is not directly defined in __init__ because that causes all # run time dependencies to become build-time dependencies when it is # imported in setup.py -version = "0.15.1" +version = "0.15.2" From 67e685487c8460bd4647a1003828990d45933e8b Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Mon, 4 Apr 2022 09:19:53 +0000 Subject: [PATCH 268/363] Update streaming_data_types/_version.py --- streaming_data_types/_version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/streaming_data_types/_version.py b/streaming_data_types/_version.py index 241e44f..378fd35 100644 --- a/streaming_data_types/_version.py +++ b/streaming_data_types/_version.py @@ -1,4 +1,4 @@ # Version is not directly defined in __init__ because that causes all # run time dependencies to become build-time dependencies when it is # imported in setup.py -version = "0.15.2" +version = "0.16.0" From 3278785a5267afffea3bef006a7c6d9217f191f2 Mon Sep 17 00:00:00 2001 From: Kenan Muric Date: Wed, 22 Jun 2022 15:57:05 +0200 Subject: [PATCH 269/363] new ev44 streaming data type implementation --- streaming_data_types/eventdata_ev44.py | 78 ++++++ .../eventdata_ev44/Event44Message.py | 229 ++++++++++++++++++ .../fbschemas/eventdata_ev44/__init__.py | 0 3 files changed, 307 insertions(+) create mode 100644 
streaming_data_types/eventdata_ev44.py create mode 100644 streaming_data_types/fbschemas/eventdata_ev44/Event44Message.py create mode 100644 streaming_data_types/fbschemas/eventdata_ev44/__init__.py diff --git a/streaming_data_types/eventdata_ev44.py b/streaming_data_types/eventdata_ev44.py new file mode 100644 index 0000000..4249f94 --- /dev/null +++ b/streaming_data_types/eventdata_ev44.py @@ -0,0 +1,78 @@ +from collections import namedtuple + +import flatbuffers +import numpy as np + +import streaming_data_types.fbschemas.eventdata_ev44.Event44Message as Event44Message +from streaming_data_types.utils import check_schema_identifier + +FILE_IDENTIFIER = b"ev44" + + +EventData = namedtuple( + "EventData", + ( + "source_name", + "reference_time", + "reference_time_index", + "time_of_flight", + "pixel_id", + ), +) + + +def deserialise_ev44(buffer): + """ + Deserialise FlatBuffer ev44. + + :param buffer: The FlatBuffers buffer. + :return: The deserialised data. + """ + check_schema_identifier(buffer, FILE_IDENTIFIER) + + event = Event44Message.Event44Message.GetRootAs(buffer, 0) + + return EventData( + event.SourceName().decode("utf-8"), + event.ReferenceTimeAsNumpy(), + event.ReferenceTimeIndexAsNumpy(), + event.TimeOfFlightAsNumpy(), + event.PixelIdAsNumpy(), + ) + + +def serialize_ev44(source_name, + reference_time, + reference_time_index, + time_of_flight, + pixel_id): + """ + Serialise event data as an ev44 FlatBuffers message. 
+ + :param source_name: + :param reference_time: + :param reference_time_index: + :param time_of_flight: + :param pixel_id: + :return: + """ + builder = flatbuffers.Builder(1024) + builder.ForceDefaults(True) + + source = builder.CreateString(source_name) + ref_time_data = builder.CreateNumpyVector(np.asarray(reference_time).astype(np.longlong)) + ref_time_index_data = builder.CreateNumpyVector(np.asarray(reference_time_index).astype(np.int32)) + tof_data = builder.CreateNumpyVector(np.asarray(time_of_flight).astype(np.int64)) + pixel_id_data = builder.CreateNumpyVector(np.asarray(pixel_id).astype(np.int32)) + + Event44Message.Event44MessageStart(builder) + Event44Message.Event44MessageAddReferenceTime(builder, ref_time_data) + Event44Message.Event44MessageAddReferenceTimeIndex(builder, ref_time_index_data) + Event44Message.Event44MessageAddTimeOfFlight(builder, tof_data) + Event44Message.Event44MessageAddPixelId(builder, pixel_id_data) + Event44Message.Event44MessageAddSourceName(builder, source) + + data = Event44Message.Event44MessageEnd(builder) + builder.Finish(data, file_identifier=FILE_IDENTIFIER) + + return bytes(builder.Output()) diff --git a/streaming_data_types/fbschemas/eventdata_ev44/Event44Message.py b/streaming_data_types/fbschemas/eventdata_ev44/Event44Message.py new file mode 100644 index 0000000..437edc5 --- /dev/null +++ b/streaming_data_types/fbschemas/eventdata_ev44/Event44Message.py @@ -0,0 +1,229 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers +from flatbuffers.compat import import_numpy +np = import_numpy() + + +class Event44Message(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = Event44Message() + x.Init(buf, n + offset) + return x + + @classmethod + def Event44MessageBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return 
flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x65\x76\x34\x34", size_prefixed=size_prefixed) + + # Event44Message + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # Event44Message + def SourceName(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + # Event44Message + def ReferenceTime(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get(flatbuffers.number_types.Int64Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8)) + return 0 + + # Event44Message + def ReferenceTimeAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int64Flags, o) + return 0 + + # Event44Message + def ReferenceTimeLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # Event44Message + def ReferenceTimeIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + return o == 0 + + # Event44Message + def ReferenceTimeIndex(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get(flatbuffers.number_types.Int32Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4)) + return 0 + + # Event44Message + def ReferenceTimeIndexAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int32Flags, o) + return 0 + + # Event44Message + def ReferenceTimeIndexLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # Event44Message + def 
ReferenceTimeIndexIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + return o == 0 + + # Event44Message + def TimeOfFlight(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get(flatbuffers.number_types.Int32Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4)) + return 0 + + # Event44Message + def TimeOfFlightAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int32Flags, o) + return 0 + + # Event44Message + def TimeOfFlightLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # Event44Message + def TimeOfFlightIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + return o == 0 + + # Event44Message + def PixelId(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get(flatbuffers.number_types.Int32Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4)) + return 0 + + # Event44Message + def PixelIdAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int32Flags, o) + return 0 + + # Event44Message + def PixelIdLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # Event44Message + def PixelIdIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + return o == 0 + + +def Event44MessageStart(builder): + builder.StartObject(5) + + +def Start(builder): + return Event44MessageStart(builder) + + +def Event44MessageAddSourceName(builder, sourceName): + 
builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(sourceName), 0) + + +def AddSourceName(builder, sourceName): + return Event44MessageAddSourceName(builder, sourceName) + + +def Event44MessageAddReferenceTime(builder, referenceTime): + builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(referenceTime), 0) + + +def AddReferenceTime(builder, referenceTime): + return Event44MessageAddReferenceTime(builder, referenceTime) + + +def Event44MessageStartReferenceTimeVector(builder, numElems): + return builder.StartVector(8, numElems, 8) + + +def StartReferenceTimeVector(builder, numElems): + return Event44MessageStartReferenceTimeVector(builder, numElems) + + +def Event44MessageAddReferenceTimeIndex(builder, referenceTimeIndex): + builder.PrependUOffsetTRelativeSlot(2, flatbuffers.number_types.UOffsetTFlags.py_type(referenceTimeIndex), 0) + + +def AddReferenceTimeIndex(builder, referenceTimeIndex): + return Event44MessageAddReferenceTimeIndex(builder, referenceTimeIndex) + + +def Event44MessageStartReferenceTimeIndexVector(builder, numElems): + return builder.StartVector(4, numElems, 4) + + +def StartReferenceTimeIndexVector(builder, numElems): + return Event44MessageStartReferenceTimeIndexVector(builder, numElems) + + +def Event44MessageAddTimeOfFlight(builder, timeOfFlight): + builder.PrependUOffsetTRelativeSlot(3, flatbuffers.number_types.UOffsetTFlags.py_type(timeOfFlight), 0) + + +def AddTimeOfFlight(builder, timeOfFlight): + return Event44MessageAddTimeOfFlight(builder, timeOfFlight) + + +def Event44MessageStartTimeOfFlightVector(builder, numElems): + return builder.StartVector(4, numElems, 4) + + +def StartTimeOfFlightVector(builder, numElems): + return Event44MessageStartTimeOfFlightVector(builder, numElems) + + +def Event44MessageAddPixelId(builder, pixelId): + builder.PrependUOffsetTRelativeSlot(4, flatbuffers.number_types.UOffsetTFlags.py_type(pixelId), 0) + + +def AddPixelId(builder, 
pixelId): + return Event44MessageAddPixelId(builder, pixelId) + + +def Event44MessageStartPixelIdVector(builder, numElems): + return builder.StartVector(4, numElems, 4) + + +def StartPixelIdVector(builder, numElems): + return Event44MessageStartPixelIdVector(builder, numElems) + + +def Event44MessageEnd(builder): + return builder.EndObject() + + +def End(builder): + return Event44MessageEnd(builder) diff --git a/streaming_data_types/fbschemas/eventdata_ev44/__init__.py b/streaming_data_types/fbschemas/eventdata_ev44/__init__.py new file mode 100644 index 0000000..e69de29 From 2f8c3d2a9eac9fe3b944af6a24541680a51880d1 Mon Sep 17 00:00:00 2001 From: Kenan Muric Date: Wed, 22 Jun 2022 17:34:21 +0200 Subject: [PATCH 270/363] minor fix and adding unit tests to ev44 streaming data type --- streaming_data_types/__init__.py | 3 ++ streaming_data_types/eventdata_ev44.py | 6 +-- tests/test_ev44.py | 58 ++++++++++++++++++++++++++ 3 files changed, 64 insertions(+), 3 deletions(-) create mode 100644 tests/test_ev44.py diff --git a/streaming_data_types/__init__.py b/streaming_data_types/__init__.py index 40649ab..36a89e5 100644 --- a/streaming_data_types/__init__.py +++ b/streaming_data_types/__init__.py @@ -8,6 +8,7 @@ ) from streaming_data_types.eventdata_ev42 import deserialise_ev42, serialise_ev42 from streaming_data_types.eventdata_ev43 import deserialise_ev43, serialise_ev43 +from streaming_data_types.eventdata_ev44 import deserialise_ev44, serialise_ev44 from streaming_data_types.finished_writing_wrdn import deserialise_wrdn, serialise_wrdn from streaming_data_types.forwarder_config_update_rf5k import ( deserialise_rf5k, @@ -31,6 +32,7 @@ SERIALISERS = { "ev42": serialise_ev42, "ev43": serialise_ev43, + "ev44": serialise_ev44, "hs00": serialise_hs00, "hs01": serialise_hs01, "f142": serialise_f142, @@ -52,6 +54,7 @@ DESERIALISERS = { "ev42": deserialise_ev42, "ev43": deserialise_ev43, + "ev44": deserialise_ev44, "hs00": deserialise_hs00, "hs01": deserialise_hs01, "f142": 
deserialise_f142, diff --git a/streaming_data_types/eventdata_ev44.py b/streaming_data_types/eventdata_ev44.py index 4249f94..be595e7 100644 --- a/streaming_data_types/eventdata_ev44.py +++ b/streaming_data_types/eventdata_ev44.py @@ -41,7 +41,7 @@ def deserialise_ev44(buffer): ) -def serialize_ev44(source_name, +def serialise_ev44(source_name, reference_time, reference_time_index, time_of_flight, @@ -62,9 +62,9 @@ def serialize_ev44(source_name, source = builder.CreateString(source_name) ref_time_data = builder.CreateNumpyVector(np.asarray(reference_time).astype(np.longlong)) ref_time_index_data = builder.CreateNumpyVector(np.asarray(reference_time_index).astype(np.int32)) - tof_data = builder.CreateNumpyVector(np.asarray(time_of_flight).astype(np.int64)) + tof_data = builder.CreateNumpyVector(np.asarray(time_of_flight).astype(np.int32)) pixel_id_data = builder.CreateNumpyVector(np.asarray(pixel_id).astype(np.int32)) - + Event44Message.Event44MessageStart(builder) Event44Message.Event44MessageAddReferenceTime(builder, ref_time_data) Event44Message.Event44MessageAddReferenceTimeIndex(builder, ref_time_index_data) diff --git a/tests/test_ev44.py b/tests/test_ev44.py new file mode 100644 index 0000000..2cce57b --- /dev/null +++ b/tests/test_ev44.py @@ -0,0 +1,58 @@ +import numpy as np + +from streaming_data_types import DESERIALISERS, SERIALISERS +from streaming_data_types.eventdata_ev44 import deserialise_ev44, serialise_ev44 + + +class TestSerialisationEv44: + def test_serialises_and_deserialises_ev44_message_correctly(self): + """ + Round-trip to check what we serialise is what we get back. 
+ """ + original_entry = { + "source_name": "some_source", + "reference_time": [1618573589123781958, 1618573590133830371, 1618573593677164112, + 1618573594185190549, 1618573596217316066, 1618573596725363109, + 1618573601295720976, 1618573601799761445, 1618573607354064836], + "reference_time_index": [2, 4, 5, 7], + "time_of_flight": [100, 200, 300, 400, 500, 600, 700, 800, 900], + "pixel_id": [10, 20, 30, 40, 50, 60, 70, 80, 90], + } + + buf = serialise_ev44(**original_entry) + entry = deserialise_ev44(buf) + + assert entry.source_name == original_entry["source_name"] + assert np.array_equal(entry.reference_time, original_entry["reference_time"]) + assert np.array_equal(entry.reference_time_index, original_entry["reference_time_index"]) + assert np.array_equal(entry.time_of_flight, original_entry["time_of_flight"]) + assert np.array_equal(entry.pixel_id, original_entry["pixel_id"]) + + def test_serialises_and_deserialises_ev44_message_correctly_for_numpy_arrays(self): + """ + Round-trip to check what we serialise is what we get back. 
+ """ + original_entry = { + "source_name": "some_source", + "reference_time": np.array([1618573589123781958, 1618573590133830371, + 1618573593677164112, 1618573594185190549, + 1618573596217316066, 1618573596725363109, + 1618573601295720976, 1618573601799761445, + 1618573607354064836]), + "reference_time_index": np.array([2, 4, 5, 7]), + "time_of_flight": np.array([100, 200, 300, 400, 500, 600, 700, 800, 900]), + "pixel_id": np.array([10, 20, 30, 40, 50, 60, 70, 80, 90]), + } + + buf = serialise_ev44(**original_entry) + entry = deserialise_ev44(buf) + + assert entry.source_name == original_entry["source_name"] + assert np.array_equal(entry.reference_time, original_entry["reference_time"]) + assert np.array_equal(entry.reference_time_index, original_entry["reference_time_index"]) + assert np.array_equal(entry.time_of_flight, original_entry["time_of_flight"]) + assert np.array_equal(entry.pixel_id, original_entry["pixel_id"]) + + def test_schema_type_is_in_global_serialisers_list(self): + assert "ev44" in SERIALISERS + assert "ev44" in DESERIALISERS \ No newline at end of file From 749b91fc965412d4b0cb2580d8765058f241efac Mon Sep 17 00:00:00 2001 From: Kenan Muric Date: Wed, 22 Jun 2022 17:38:28 +0200 Subject: [PATCH 271/363] adding new line --- tests/test_ev44.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_ev44.py b/tests/test_ev44.py index 2cce57b..f1aa72c 100644 --- a/tests/test_ev44.py +++ b/tests/test_ev44.py @@ -55,4 +55,4 @@ def test_serialises_and_deserialises_ev44_message_correctly_for_numpy_arrays(sel def test_schema_type_is_in_global_serialisers_list(self): assert "ev44" in SERIALISERS - assert "ev44" in DESERIALISERS \ No newline at end of file + assert "ev44" in DESERIALISERS From 25282455adba5eab353ec212b4859684252f0b8b Mon Sep 17 00:00:00 2001 From: Kenan Muric Date: Wed, 22 Jun 2022 21:00:38 +0200 Subject: [PATCH 272/363] adding ev44 to readme --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md 
b/README.md index 4e08284..69b42ba 100644 --- a/README.md +++ b/README.md @@ -16,6 +16,7 @@ https://github.com/ess-dmsc/streaming-data-types |f142|Log data| |ev42|Event data| |ev43|Event data from multiple pulses| +|ev44|Event data with signed data types| |x5f2|Status messages| |tdct|Timestamps| |ep00|EPICS connection info| From 57cba3bfa5101089d5d91a02af47011d08423195 Mon Sep 17 00:00:00 2001 From: Kenan Muric Date: Thu, 23 Jun 2022 13:31:10 +0200 Subject: [PATCH 273/363] changing from longlong to int64 --- streaming_data_types/eventdata_ev44.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/streaming_data_types/eventdata_ev44.py b/streaming_data_types/eventdata_ev44.py index be595e7..cbb2176 100644 --- a/streaming_data_types/eventdata_ev44.py +++ b/streaming_data_types/eventdata_ev44.py @@ -60,7 +60,7 @@ def serialise_ev44(source_name, builder.ForceDefaults(True) source = builder.CreateString(source_name) - ref_time_data = builder.CreateNumpyVector(np.asarray(reference_time).astype(np.longlong)) + ref_time_data = builder.CreateNumpyVector(np.asarray(reference_time).astype(np.int64)) ref_time_index_data = builder.CreateNumpyVector(np.asarray(reference_time_index).astype(np.int32)) tof_data = builder.CreateNumpyVector(np.asarray(time_of_flight).astype(np.int32)) pixel_id_data = builder.CreateNumpyVector(np.asarray(pixel_id).astype(np.int32)) From 39a9ebbdc515d6afa0f45a62dcbd203ae6986364 Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Wed, 29 Jun 2022 09:32:51 +0000 Subject: [PATCH 274/363] Update README.md --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 69b42ba..9e24fa8 100644 --- a/README.md +++ b/README.md @@ -14,7 +14,7 @@ https://github.com/ess-dmsc/streaming-data-types |pl72|Run start| |6s4t|Run stop| |f142|Log data| -|ev42|Event data| +|ev42|Event data (deprecated in favour of ev44)| |ev43|Event data from multiple pulses| |ev44|Event data with signed data types| |x5f2|Status 
messages| From 5d0a08ccbbb4b7dfb432adf218251d97750ad487 Mon Sep 17 00:00:00 2001 From: Kenan Muric Date: Fri, 11 Nov 2022 14:30:51 +0100 Subject: [PATCH 275/363] patching ev44 schema --- streaming_data_types/eventdata_ev44.py | 5 + .../eventdata_ev44/Event44Message.py | 125 ++++++------------ tests/test_ev44.py | 28 ++++ 3 files changed, 75 insertions(+), 83 deletions(-) diff --git a/streaming_data_types/eventdata_ev44.py b/streaming_data_types/eventdata_ev44.py index cbb2176..77d54a1 100644 --- a/streaming_data_types/eventdata_ev44.py +++ b/streaming_data_types/eventdata_ev44.py @@ -13,6 +13,7 @@ "EventData", ( "source_name", + "message_id", "reference_time", "reference_time_index", "time_of_flight", @@ -34,6 +35,7 @@ def deserialise_ev44(buffer): return EventData( event.SourceName().decode("utf-8"), + event.MessageId(), event.ReferenceTimeAsNumpy(), event.ReferenceTimeIndexAsNumpy(), event.TimeOfFlightAsNumpy(), @@ -42,6 +44,7 @@ def deserialise_ev44(buffer): def serialise_ev44(source_name, + message_id, reference_time, reference_time_index, time_of_flight, @@ -50,6 +53,7 @@ def serialise_ev44(source_name, Serialise event data as an ev44 FlatBuffers message. 
:param source_name: + :param message_id: :param reference_time: :param reference_time_index: :param time_of_flight: @@ -70,6 +74,7 @@ def serialise_ev44(source_name, Event44Message.Event44MessageAddReferenceTimeIndex(builder, ref_time_index_data) Event44Message.Event44MessageAddTimeOfFlight(builder, tof_data) Event44Message.Event44MessageAddPixelId(builder, pixel_id_data) + Event44Message.Event44MessageAddMessageId(builder, message_id) Event44Message.Event44MessageAddSourceName(builder, source) data = Event44Message.Event44MessageEnd(builder) diff --git a/streaming_data_types/fbschemas/eventdata_ev44/Event44Message.py b/streaming_data_types/fbschemas/eventdata_ev44/Event44Message.py index 437edc5..28e463f 100644 --- a/streaming_data_types/fbschemas/eventdata_ev44/Event44Message.py +++ b/streaming_data_types/fbschemas/eventdata_ev44/Event44Message.py @@ -6,7 +6,6 @@ from flatbuffers.compat import import_numpy np = import_numpy() - class Event44Message(object): __slots__ = ['_tab'] @@ -18,6 +17,10 @@ def GetRootAs(cls, buf, offset=0): return x @classmethod + def GetRootAsEvent44Message(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod def Event44MessageBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x65\x76\x34\x34", size_prefixed=size_prefixed) @@ -33,8 +36,15 @@ def SourceName(self): return None # Event44Message - def ReferenceTime(self, j): + def MessageId(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int64Flags, o + self._tab.Pos) + return 0 + + # Event44Message + def ReferenceTime(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) if o != 0: a = self._tab.Vector(o) return self._tab.Get(flatbuffers.number_types.Int64Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8)) @@ -42,26 +52,26 @@ def ReferenceTime(self, j): # Event44Message def ReferenceTimeAsNumpy(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) if o != 0: return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int64Flags, o) return 0 # Event44Message def ReferenceTimeLength(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) if o != 0: return self._tab.VectorLen(o) return 0 # Event44Message def ReferenceTimeIsNone(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) return o == 0 # Event44Message def ReferenceTimeIndex(self, j): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) if o != 0: a = self._tab.Vector(o) return self._tab.Get(flatbuffers.number_types.Int32Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4)) @@ -69,26 
+79,26 @@ def ReferenceTimeIndex(self, j): # Event44Message def ReferenceTimeIndexAsNumpy(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) if o != 0: return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int32Flags, o) return 0 # Event44Message def ReferenceTimeIndexLength(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) if o != 0: return self._tab.VectorLen(o) return 0 # Event44Message def ReferenceTimeIndexIsNone(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) return o == 0 # Event44Message def TimeOfFlight(self, j): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) if o != 0: a = self._tab.Vector(o) return self._tab.Get(flatbuffers.number_types.Int32Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4)) @@ -96,26 +106,26 @@ def TimeOfFlight(self, j): # Event44Message def TimeOfFlightAsNumpy(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) if o != 0: return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int32Flags, o) return 0 # Event44Message def TimeOfFlightLength(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) if o != 0: return self._tab.VectorLen(o) return 0 # Event44Message def TimeOfFlightIsNone(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) return o == 0 # Event44Message def PixelId(self, j): - o = 
flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) if o != 0: a = self._tab.Vector(o) return self._tab.Get(flatbuffers.number_types.Int32Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4)) @@ -123,107 +133,56 @@ def PixelId(self, j): # Event44Message def PixelIdAsNumpy(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) if o != 0: return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int32Flags, o) return 0 # Event44Message def PixelIdLength(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) if o != 0: return self._tab.VectorLen(o) return 0 # Event44Message def PixelIdIsNone(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) return o == 0 - -def Event44MessageStart(builder): - builder.StartObject(5) - - +def Event44MessageStart(builder): builder.StartObject(6) def Start(builder): return Event44MessageStart(builder) - - -def Event44MessageAddSourceName(builder, sourceName): - builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(sourceName), 0) - - +def Event44MessageAddSourceName(builder, sourceName): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(sourceName), 0) def AddSourceName(builder, sourceName): return Event44MessageAddSourceName(builder, sourceName) - - -def Event44MessageAddReferenceTime(builder, referenceTime): - builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(referenceTime), 0) - - +def Event44MessageAddMessageId(builder, messageId): builder.PrependInt64Slot(1, messageId, 0) +def AddMessageId(builder, messageId): + return 
Event44MessageAddMessageId(builder, messageId) +def Event44MessageAddReferenceTime(builder, referenceTime): builder.PrependUOffsetTRelativeSlot(2, flatbuffers.number_types.UOffsetTFlags.py_type(referenceTime), 0) def AddReferenceTime(builder, referenceTime): return Event44MessageAddReferenceTime(builder, referenceTime) - - -def Event44MessageStartReferenceTimeVector(builder, numElems): - return builder.StartVector(8, numElems, 8) - - +def Event44MessageStartReferenceTimeVector(builder, numElems): return builder.StartVector(8, numElems, 8) def StartReferenceTimeVector(builder, numElems): return Event44MessageStartReferenceTimeVector(builder, numElems) - - -def Event44MessageAddReferenceTimeIndex(builder, referenceTimeIndex): - builder.PrependUOffsetTRelativeSlot(2, flatbuffers.number_types.UOffsetTFlags.py_type(referenceTimeIndex), 0) - - +def Event44MessageAddReferenceTimeIndex(builder, referenceTimeIndex): builder.PrependUOffsetTRelativeSlot(3, flatbuffers.number_types.UOffsetTFlags.py_type(referenceTimeIndex), 0) def AddReferenceTimeIndex(builder, referenceTimeIndex): return Event44MessageAddReferenceTimeIndex(builder, referenceTimeIndex) - - -def Event44MessageStartReferenceTimeIndexVector(builder, numElems): - return builder.StartVector(4, numElems, 4) - - +def Event44MessageStartReferenceTimeIndexVector(builder, numElems): return builder.StartVector(4, numElems, 4) def StartReferenceTimeIndexVector(builder, numElems): return Event44MessageStartReferenceTimeIndexVector(builder, numElems) - - -def Event44MessageAddTimeOfFlight(builder, timeOfFlight): - builder.PrependUOffsetTRelativeSlot(3, flatbuffers.number_types.UOffsetTFlags.py_type(timeOfFlight), 0) - - +def Event44MessageAddTimeOfFlight(builder, timeOfFlight): builder.PrependUOffsetTRelativeSlot(4, flatbuffers.number_types.UOffsetTFlags.py_type(timeOfFlight), 0) def AddTimeOfFlight(builder, timeOfFlight): return Event44MessageAddTimeOfFlight(builder, timeOfFlight) - - -def 
Event44MessageStartTimeOfFlightVector(builder, numElems): - return builder.StartVector(4, numElems, 4) - - +def Event44MessageStartTimeOfFlightVector(builder, numElems): return builder.StartVector(4, numElems, 4) def StartTimeOfFlightVector(builder, numElems): return Event44MessageStartTimeOfFlightVector(builder, numElems) - - -def Event44MessageAddPixelId(builder, pixelId): - builder.PrependUOffsetTRelativeSlot(4, flatbuffers.number_types.UOffsetTFlags.py_type(pixelId), 0) - - +def Event44MessageAddPixelId(builder, pixelId): builder.PrependUOffsetTRelativeSlot(5, flatbuffers.number_types.UOffsetTFlags.py_type(pixelId), 0) def AddPixelId(builder, pixelId): return Event44MessageAddPixelId(builder, pixelId) - - -def Event44MessageStartPixelIdVector(builder, numElems): - return builder.StartVector(4, numElems, 4) - - +def Event44MessageStartPixelIdVector(builder, numElems): return builder.StartVector(4, numElems, 4) def StartPixelIdVector(builder, numElems): return Event44MessageStartPixelIdVector(builder, numElems) - - -def Event44MessageEnd(builder): - return builder.EndObject() - - +def Event44MessageEnd(builder): return builder.EndObject() def End(builder): - return Event44MessageEnd(builder) + return Event44MessageEnd(builder) \ No newline at end of file diff --git a/tests/test_ev44.py b/tests/test_ev44.py index f1aa72c..c0b7687 100644 --- a/tests/test_ev44.py +++ b/tests/test_ev44.py @@ -1,7 +1,9 @@ import numpy as np +import pytest from streaming_data_types import DESERIALISERS, SERIALISERS from streaming_data_types.eventdata_ev44 import deserialise_ev44, serialise_ev44 +from streaming_data_types.exceptions import WrongSchemaException class TestSerialisationEv44: @@ -11,6 +13,7 @@ def test_serialises_and_deserialises_ev44_message_correctly(self): """ original_entry = { "source_name": "some_source", + "message_id": 123456, "reference_time": [1618573589123781958, 1618573590133830371, 1618573593677164112, 1618573594185190549, 1618573596217316066, 
1618573596725363109, 1618573601295720976, 1618573601799761445, 1618573607354064836], @@ -23,6 +26,7 @@ def test_serialises_and_deserialises_ev44_message_correctly(self): entry = deserialise_ev44(buf) assert entry.source_name == original_entry["source_name"] + assert entry.message_id == original_entry["message_id"] assert np.array_equal(entry.reference_time, original_entry["reference_time"]) assert np.array_equal(entry.reference_time_index, original_entry["reference_time_index"]) assert np.array_equal(entry.time_of_flight, original_entry["time_of_flight"]) @@ -34,6 +38,7 @@ def test_serialises_and_deserialises_ev44_message_correctly_for_numpy_arrays(sel """ original_entry = { "source_name": "some_source", + "message_id": 123456, "reference_time": np.array([1618573589123781958, 1618573590133830371, 1618573593677164112, 1618573594185190549, 1618573596217316066, 1618573596725363109, @@ -48,11 +53,34 @@ def test_serialises_and_deserialises_ev44_message_correctly_for_numpy_arrays(sel entry = deserialise_ev44(buf) assert entry.source_name == original_entry["source_name"] + assert entry.message_id == original_entry["message_id"] assert np.array_equal(entry.reference_time, original_entry["reference_time"]) assert np.array_equal(entry.reference_time_index, original_entry["reference_time_index"]) assert np.array_equal(entry.time_of_flight, original_entry["time_of_flight"]) assert np.array_equal(entry.pixel_id, original_entry["pixel_id"]) + def test_if_buffer_has_wrong_id_then_throws(self): + original_entry = { + "source_name": "some_source", + "message_id": 123456, + "reference_time": np.array([1618573589123781958, 1618573590133830371, + 1618573593677164112, 1618573594185190549, + 1618573596217316066, 1618573596725363109, + 1618573601295720976, 1618573601799761445, + 1618573607354064836]), + "reference_time_index": np.array([2, 4, 5, 7]), + "time_of_flight": np.array([100, 200, 300, 400, 500, 600, 700, 800, 900]), + "pixel_id": np.array([10, 20, 30, 40, 50, 60, 70, 80, 90]), 
+ } + buf = serialise_ev44(**original_entry) + + # Manually introduce error in id. + buf = bytearray(buf) + buf[4:8] = b"1234" + + with pytest.raises(WrongSchemaException): + deserialise_ev44(buf) + def test_schema_type_is_in_global_serialisers_list(self): assert "ev44" in SERIALISERS assert "ev44" in DESERIALISERS From e46029e2a225bf404e127b56720daade6249cff3 Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Wed, 16 Nov 2022 07:39:07 +0100 Subject: [PATCH 276/363] added vscode to gitignore --- .gitignore | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.gitignore b/.gitignore index 50af712..37260d0 100644 --- a/.gitignore +++ b/.gitignore @@ -130,3 +130,7 @@ dmypy.json # PyCharm .idea + +# VSCode +.vscode + From b5e693028c00ee347580a4fb4fd0086573ea8fcd Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Wed, 16 Nov 2022 07:13:51 +0000 Subject: [PATCH 277/363] Add al00 alarms schema (#62) * added helpers * make timestamps explicitly ns --- README.md | 3 +- streaming_data_types/__init__.py | 55 +++++++++------- streaming_data_types/alarm_al00.py | 63 +++++++++++++++++++ .../fbschemas/alarm_al00/Alarm.py | 54 ++++++++++++++++ .../fbschemas/alarm_al00/Severity.py | 10 +++ tests/test_al00.py | 34 ++++++++++ 6 files changed, 195 insertions(+), 24 deletions(-) create mode 100644 streaming_data_types/alarm_al00.py create mode 100644 streaming_data_types/fbschemas/alarm_al00/Alarm.py create mode 100644 streaming_data_types/fbschemas/alarm_al00/Severity.py create mode 100644 tests/test_al00.py diff --git a/README.md b/README.md index 9e24fa8..1a80f57 100644 --- a/README.md +++ b/README.md @@ -24,7 +24,8 @@ https://github.com/ess-dmsc/streaming-data-types |answ|File-writer command response| |wrdn|File-writer finished writing| |NDAr|**Deprecated**| -|ADAr|For storing EPICS areaDetector data| +|ADAr|EPICS areaDetector data| +|al00|Alarm/status messages used by the Forwarder and NICOS| ### hs00 and hs01 Schema for histogram data. 
It is one of the more complicated to use schemas. diff --git a/streaming_data_types/__init__.py b/streaming_data_types/__init__.py index 36a89e5..ce2aa34 100644 --- a/streaming_data_types/__init__.py +++ b/streaming_data_types/__init__.py @@ -1,31 +1,38 @@ from streaming_data_types._version import version -from streaming_data_types.action_response_answ import deserialise_answ, serialise_answ -from streaming_data_types.area_detector_ADAr import deserialise_ADAr, serialise_ADAr -from streaming_data_types.area_detector_NDAr import deserialise_ndar, serialise_ndar -from streaming_data_types.epics_connection_info_ep00 import ( - deserialise_ep00, - serialise_ep00, -) -from streaming_data_types.eventdata_ev42 import deserialise_ev42, serialise_ev42 -from streaming_data_types.eventdata_ev43 import deserialise_ev43, serialise_ev43 -from streaming_data_types.eventdata_ev44 import deserialise_ev44, serialise_ev44 -from streaming_data_types.finished_writing_wrdn import deserialise_wrdn, serialise_wrdn +from streaming_data_types.action_response_answ import (deserialise_answ, + serialise_answ) +from streaming_data_types.alarm_al00 import deserialise_al00, serialise_al00 +from streaming_data_types.area_detector_ADAr import (deserialise_ADAr, + serialise_ADAr) +from streaming_data_types.area_detector_NDAr import (deserialise_ndar, + serialise_ndar) +from streaming_data_types.epics_connection_info_ep00 import (deserialise_ep00, + serialise_ep00) +from streaming_data_types.eventdata_ev42 import (deserialise_ev42, + serialise_ev42) +from streaming_data_types.eventdata_ev43 import (deserialise_ev43, + serialise_ev43) +from streaming_data_types.eventdata_ev44 import (deserialise_ev44, + serialise_ev44) +from streaming_data_types.finished_writing_wrdn import (deserialise_wrdn, + serialise_wrdn) from streaming_data_types.forwarder_config_update_rf5k import ( - deserialise_rf5k, - serialise_rf5k, -) -from streaming_data_types.histogram_hs00 import deserialise_hs00, serialise_hs00 -from 
streaming_data_types.histogram_hs01 import deserialise_hs01, serialise_hs01 + deserialise_rf5k, serialise_rf5k) +from streaming_data_types.histogram_hs00 import (deserialise_hs00, + serialise_hs00) +from streaming_data_types.histogram_hs01 import (deserialise_hs01, + serialise_hs01) from streaming_data_types.logdata_f142 import deserialise_f142, serialise_f142 -from streaming_data_types.nicos_cache_ns10 import deserialise_ns10, serialise_ns10 -from streaming_data_types.run_start_pl72 import deserialise_pl72, serialise_pl72 +from streaming_data_types.nicos_cache_ns10 import (deserialise_ns10, + serialise_ns10) +from streaming_data_types.run_start_pl72 import (deserialise_pl72, + serialise_pl72) from streaming_data_types.run_stop_6s4t import deserialise_6s4t, serialise_6s4t -from streaming_data_types.sample_environment_senv import ( - deserialise_senv, - serialise_senv, -) +from streaming_data_types.sample_environment_senv import (deserialise_senv, + serialise_senv) from streaming_data_types.status_x5f2 import deserialise_x5f2, serialise_x5f2 -from streaming_data_types.timestamps_tdct import deserialise_tdct, serialise_tdct +from streaming_data_types.timestamps_tdct import (deserialise_tdct, + serialise_tdct) __version__ = version @@ -48,6 +55,7 @@ "NDAr": serialise_ndar, "senv": serialise_senv, "ADAr": serialise_ADAr, + "al00": serialise_al00, } @@ -70,4 +78,5 @@ "NDAr": deserialise_ndar, "senv": deserialise_senv, "ADAr": deserialise_ADAr, + "al00": deserialise_al00, } diff --git a/streaming_data_types/alarm_al00.py b/streaming_data_types/alarm_al00.py new file mode 100644 index 0000000..ee206c2 --- /dev/null +++ b/streaming_data_types/alarm_al00.py @@ -0,0 +1,63 @@ +from collections import namedtuple +from enum import Enum + +import flatbuffers + +import streaming_data_types.fbschemas.alarm_al00.Alarm as Alarm +import streaming_data_types.fbschemas.alarm_al00.Severity as FBSeverity +from streaming_data_types.utils import check_schema_identifier + +FILE_IDENTIFIER = 
b"al00" + +AlarmInfo = namedtuple("AlarmInfo", ("source", "timestamp_ns", "severity", "message")) + + +class Severity(Enum): + OK = 0, + MINOR = 1, + MAJOR = 2, + INVALID = 3 + + +_enum_to_severity = { + Severity.OK: FBSeverity.Severity.OK, + Severity.MINOR: FBSeverity.Severity.MINOR, + Severity.MAJOR: FBSeverity.Severity.MAJOR, + Severity.INVALID: FBSeverity.Severity.INVALID, +} + +_severity_to_enum = { + FBSeverity.Severity.OK: Severity.OK, + FBSeverity.Severity.MINOR: Severity.MINOR, + FBSeverity.Severity.MAJOR: Severity.MAJOR, + FBSeverity.Severity.INVALID: Severity.INVALID, +} + + +def deserialise_al00(buffer): + check_schema_identifier(buffer, FILE_IDENTIFIER) + alarm = Alarm.Alarm.GetRootAsAlarm(buffer, 0) + + return AlarmInfo( + alarm.SourceName().decode("utf-8") if alarm.SourceName() else "", + alarm.Timestamp(), + _severity_to_enum[alarm.Severity()], + alarm.Message().decode("utf-8") if alarm.Message() else "" + ) + + +def serialise_al00(source: str, timestamp_ns: int, severity: Severity, message: str): + builder = flatbuffers.Builder(128) + + message_offset = builder.CreateString(message) + source_offset = builder.CreateString(source) + + Alarm.AlarmStart(builder) + Alarm.AlarmAddSourceName(builder, source_offset) + Alarm.AlarmAddTimestamp(builder, timestamp_ns) + Alarm.AlarmAddSeverity(builder, _enum_to_severity[severity]) + Alarm.AlarmAddMessage(builder, message_offset) + alarm = Alarm.AlarmEnd(builder) + + builder.Finish(alarm, file_identifier=FILE_IDENTIFIER) + return bytes(builder.Output()) diff --git a/streaming_data_types/fbschemas/alarm_al00/Alarm.py b/streaming_data_types/fbschemas/alarm_al00/Alarm.py new file mode 100644 index 0000000..e81dda1 --- /dev/null +++ b/streaming_data_types/fbschemas/alarm_al00/Alarm.py @@ -0,0 +1,54 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers + +class Alarm(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAsAlarm(cls, buf, offset): + n = 
flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = Alarm() + x.Init(buf, n + offset) + return x + + # Alarm + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # Alarm + def SourceName(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + # Alarm + def Timestamp(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int64Flags, o + self._tab.Pos) + return 0 + + # Alarm + def Severity(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int16Flags, o + self._tab.Pos) + return 0 + + # Alarm + def Message(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + +def AlarmStart(builder): builder.StartObject(4) +def AlarmAddSourceName(builder, sourceName): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(sourceName), 0) +def AlarmAddTimestamp(builder, timestamp): builder.PrependInt64Slot(1, timestamp, 0) +def AlarmAddSeverity(builder, severity): builder.PrependInt16Slot(2, severity, 0) +def AlarmAddMessage(builder, message): builder.PrependUOffsetTRelativeSlot(3, flatbuffers.number_types.UOffsetTFlags.py_type(message), 0) +def AlarmEnd(builder): return builder.EndObject() diff --git a/streaming_data_types/fbschemas/alarm_al00/Severity.py b/streaming_data_types/fbschemas/alarm_al00/Severity.py new file mode 100644 index 0000000..b9e39c5 --- /dev/null +++ b/streaming_data_types/fbschemas/alarm_al00/Severity.py @@ -0,0 +1,10 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +class Severity(object): + OK = 0 + MINOR = 1 + MAJOR = 2 + INVALID = 3 + diff --git a/tests/test_al00.py 
b/tests/test_al00.py new file mode 100644 index 0000000..84c83b3 --- /dev/null +++ b/tests/test_al00.py @@ -0,0 +1,34 @@ +import pytest + +from streaming_data_types import DESERIALISERS, SERIALISERS +from streaming_data_types.alarm_al00 import (Severity, deserialise_al00, + serialise_al00) +from streaming_data_types.exceptions import WrongSchemaException + + +class TestSerialisationAl00: + def test_serialises_and_deserialises_al00_message_correctly(self): + """ + Round-trip to check what we serialise is what we get back. + """ + buf = serialise_al00("some_source", 1234567890, Severity.MAJOR, "Some message") + entry = deserialise_al00(buf) + + assert entry.source == "some_source" + assert entry.timestamp_ns == 1234567890 + assert entry.severity == Severity.MAJOR + assert entry.message == "Some message" + + def test_if_buffer_has_wrong_id_then_throws(self): + buf = serialise_al00("some_source", 1234567890, Severity.MAJOR, "Some message") + + # Manually hack the id + buf = bytearray(buf) + buf[4:8] = b"1234" + + with pytest.raises(WrongSchemaException): + deserialise_al00(buf) + + def test_schema_type_is_in_global_serialisers_list(self): + assert "al00" in SERIALISERS + assert "al00" in DESERIALISERS From e4338f78c885d2db76667a71c4e079d8cdecdaf4 Mon Sep 17 00:00:00 2001 From: hurvan <43351280+hurvan@users.noreply.github.com> Date: Wed, 16 Nov 2022 12:01:10 +0100 Subject: [PATCH 278/363] ECDC-3190 add se00 schema (#61) * Adding se00 schema Adding the new se00 schema that is replacing the senv schema. Also updating black since there was a bug in a previous version. The pre-commit formatted some ev44 files as well. 
* updating readme * reverting black version for seperate PR * reverting ev44 formatting * Update README.md * Merge conflicts * removing local variable * changing timestamp from datetime to ns time int * remove unused import * moving maps and imports out of functions * removing year 9000 check * changing tests to not use time_ns() for python36 Co-authored-by: Matt Clarke Co-authored-by: Matt Clarke --- README.md | 2 + streaming_data_types/__init__.py | 4 + streaming_data_types/array_1d_se00.py | 149 ++++++++++++ .../fbschemas/array_1d_se00/DoubleArray.py | 98 ++++++++ .../fbschemas/array_1d_se00/FloatArray.py | 98 ++++++++ .../fbschemas/array_1d_se00/Int16Array.py | 98 ++++++++ .../fbschemas/array_1d_se00/Int32Array.py | 98 ++++++++ .../fbschemas/array_1d_se00/Int64Array.py | 98 ++++++++ .../fbschemas/array_1d_se00/Int8Array.py | 98 ++++++++ .../fbschemas/array_1d_se00/Location.py | 10 + .../array_1d_se00/SampleEnvironmentData.py | 228 ++++++++++++++++++ .../fbschemas/array_1d_se00/UInt16Array.py | 98 ++++++++ .../fbschemas/array_1d_se00/UInt32Array.py | 98 ++++++++ .../fbschemas/array_1d_se00/UInt64Array.py | 98 ++++++++ .../fbschemas/array_1d_se00/UInt8Array.py | 98 ++++++++ .../fbschemas/array_1d_se00/ValueUnion.py | 17 ++ .../fbschemas/array_1d_se00/__init__.py | 0 tests/test_se00.py | 73 ++++++ 18 files changed, 1463 insertions(+) create mode 100644 streaming_data_types/array_1d_se00.py create mode 100644 streaming_data_types/fbschemas/array_1d_se00/DoubleArray.py create mode 100644 streaming_data_types/fbschemas/array_1d_se00/FloatArray.py create mode 100644 streaming_data_types/fbschemas/array_1d_se00/Int16Array.py create mode 100644 streaming_data_types/fbschemas/array_1d_se00/Int32Array.py create mode 100644 streaming_data_types/fbschemas/array_1d_se00/Int64Array.py create mode 100644 streaming_data_types/fbschemas/array_1d_se00/Int8Array.py create mode 100644 streaming_data_types/fbschemas/array_1d_se00/Location.py create mode 100644 
streaming_data_types/fbschemas/array_1d_se00/SampleEnvironmentData.py create mode 100644 streaming_data_types/fbschemas/array_1d_se00/UInt16Array.py create mode 100644 streaming_data_types/fbschemas/array_1d_se00/UInt32Array.py create mode 100644 streaming_data_types/fbschemas/array_1d_se00/UInt64Array.py create mode 100644 streaming_data_types/fbschemas/array_1d_se00/UInt8Array.py create mode 100644 streaming_data_types/fbschemas/array_1d_se00/ValueUnion.py create mode 100644 streaming_data_types/fbschemas/array_1d_se00/__init__.py create mode 100644 tests/test_se00.py diff --git a/README.md b/README.md index 1a80f57..7df0c01 100644 --- a/README.md +++ b/README.md @@ -26,6 +26,8 @@ https://github.com/ess-dmsc/streaming-data-types |NDAr|**Deprecated**| |ADAr|EPICS areaDetector data| |al00|Alarm/status messages used by the Forwarder and NICOS| +|senv|**Deprecated**| +|se00|Arrays with optional timestamps, for example waveform data. Replaces _senv_. | ### hs00 and hs01 Schema for histogram data. It is one of the more complicated to use schemas. 
diff --git a/streaming_data_types/__init__.py b/streaming_data_types/__init__.py index ce2aa34..84aa78c 100644 --- a/streaming_data_types/__init__.py +++ b/streaming_data_types/__init__.py @@ -6,6 +6,8 @@ serialise_ADAr) from streaming_data_types.area_detector_NDAr import (deserialise_ndar, serialise_ndar) +from streaming_data_types.array_1d_se00 import (deserialise_se00, + serialise_se00) from streaming_data_types.epics_connection_info_ep00 import (deserialise_ep00, serialise_ep00) from streaming_data_types.eventdata_ev42 import (deserialise_ev42, @@ -54,6 +56,7 @@ "wrdn": serialise_wrdn, "NDAr": serialise_ndar, "senv": serialise_senv, + "se00": serialise_se00, "ADAr": serialise_ADAr, "al00": serialise_al00, } @@ -77,6 +80,7 @@ "wrdn": deserialise_wrdn, "NDAr": deserialise_ndar, "senv": deserialise_senv, + "se00": deserialise_se00, "ADAr": deserialise_ADAr, "al00": deserialise_al00, } diff --git a/streaming_data_types/array_1d_se00.py b/streaming_data_types/array_1d_se00.py new file mode 100644 index 0000000..ffcc160 --- /dev/null +++ b/streaming_data_types/array_1d_se00.py @@ -0,0 +1,149 @@ +from typing import List, NamedTuple, Optional, Union + +import flatbuffers +import numpy as np +from flatbuffers.number_types import ( + Float32Flags, + Float64Flags, + Int8Flags, + Int16Flags, + Int32Flags, + Int64Flags, + Uint8Flags, + Uint16Flags, + Uint32Flags, + Uint64Flags, +) + +from streaming_data_types.fbschemas.array_1d_se00.Location import Location +from streaming_data_types.fbschemas.array_1d_se00.SampleEnvironmentData import ( + SampleEnvironmentData, + SampleEnvironmentDataAddChannel, + SampleEnvironmentDataAddMessageCounter, + SampleEnvironmentDataAddName, + SampleEnvironmentDataAddPacketTimestamp, + SampleEnvironmentDataAddTimeDelta, + SampleEnvironmentDataAddTimestampLocation, + SampleEnvironmentDataAddTimestamps, + SampleEnvironmentDataAddValues, + SampleEnvironmentDataAddValuesType, + SampleEnvironmentDataEnd, + SampleEnvironmentDataStart, +) +from 
streaming_data_types.fbschemas.array_1d_se00.ValueUnion import ValueUnion +from streaming_data_types.utils import check_schema_identifier + +FILE_IDENTIFIER = b"se00" + +flag_map = { + ValueUnion.Int8Array: Int8Flags, + ValueUnion.UInt8Array: Uint8Flags, + ValueUnion.Int16Array: Int16Flags, + ValueUnion.UInt16Array: Uint16Flags, + ValueUnion.Int32Array: Int32Flags, + ValueUnion.UInt32Array: Uint32Flags, + ValueUnion.Int64Array: Int64Flags, + ValueUnion.UInt64Array: Uint64Flags, + ValueUnion.DoubleArray: Float64Flags, + ValueUnion.FloatArray: Float32Flags, +} + +numpy_type_map = { + np.dtype("int8"): ValueUnion.Int8Array, + np.dtype("uint8"): ValueUnion.UInt8Array, + np.dtype("int16"): ValueUnion.Int16Array, + np.dtype("uint16"): ValueUnion.UInt16Array, + np.dtype("int32"): ValueUnion.Int32Array, + np.dtype("uint32"): ValueUnion.UInt32Array, + np.dtype("int64"): ValueUnion.Int64Array, + np.dtype("uint64"): ValueUnion.UInt64Array, + np.dtype("float64"): ValueUnion.DoubleArray, + np.dtype("float32"): ValueUnion.FloatArray, +} + +Response = NamedTuple( + "SampleEnvironmentData", + ( + ("name", str), + ("channel", int), + ("timestamp_unix_ns", int), + ("sample_ts_delta", int), + ("ts_location", Location), + ("message_counter", int), + ("values", np.ndarray), + ("value_ts", Optional[np.ndarray]), + ), +) + + +def serialise_se00( + name: str, + channel: int, + timestamp_unix_ns: int, + sample_ts_delta: int, + message_counter: int, + values: Union[np.ndarray, List], + ts_location: Location = Location.Middle, + value_timestamps: Union[np.ndarray, List, None] = None, +) -> bytes: + builder = flatbuffers.Builder(1024) + + if value_timestamps is not None: + used_timestamps = np.atleast_1d(np.asarray(value_timestamps)).astype(np.uint64) + timestamps_offset = builder.CreateNumpyVector(used_timestamps) + + temp_values = np.atleast_1d(np.asarray(values)) + + value_array_offset = builder.CreateNumpyVector(temp_values) + + # Some flatbuffer fu in order to avoid >200 lines of code + 
builder.StartObject(1) + builder.PrependUOffsetTRelativeSlot( + 0, flatbuffers.number_types.UOffsetTFlags.py_type(value_array_offset), 0 + ) + value_offset = builder.EndObject() + + name_offset = builder.CreateString(name) + + SampleEnvironmentDataStart(builder) + SampleEnvironmentDataAddName(builder, name_offset) + SampleEnvironmentDataAddTimeDelta(builder, sample_ts_delta) + SampleEnvironmentDataAddTimestampLocation(builder, ts_location) + SampleEnvironmentDataAddMessageCounter(builder, message_counter) + SampleEnvironmentDataAddChannel(builder, channel) + SampleEnvironmentDataAddPacketTimestamp(builder, timestamp_unix_ns) + SampleEnvironmentDataAddValues(builder, value_offset) + SampleEnvironmentDataAddValuesType(builder, numpy_type_map[temp_values.dtype]) + if value_timestamps is not None: + SampleEnvironmentDataAddTimestamps(builder, timestamps_offset) + + SE_Message = SampleEnvironmentDataEnd(builder) + + builder.Finish(SE_Message, file_identifier=FILE_IDENTIFIER) + return bytes(builder.Output()) + + +def deserialise_se00(buffer: Union[bytearray, bytes]) -> Response: + check_schema_identifier(buffer, FILE_IDENTIFIER) + + SE_data = SampleEnvironmentData.GetRootAsSampleEnvironmentData(buffer, 0) + + value_timestamps = None + if not SE_data.TimestampsIsNone(): + value_timestamps = SE_data.TimestampsAsNumpy() + + # Some flatbuffers fu in order to avoid >200 lines of code + value_offset = SE_data.Values() + value_type = SE_data.ValuesType() + values = value_offset.GetVectorAsNumpy(flag_map[value_type], 4) + + return Response( + name=SE_data.Name().decode(), + channel=SE_data.Channel(), + timestamp_unix_ns=SE_data.PacketTimestamp(), + sample_ts_delta=SE_data.TimeDelta(), + ts_location=SE_data.TimestampLocation(), + message_counter=SE_data.MessageCounter(), + values=values, + value_ts=value_timestamps, + ) diff --git a/streaming_data_types/fbschemas/array_1d_se00/DoubleArray.py b/streaming_data_types/fbschemas/array_1d_se00/DoubleArray.py new file mode 100644 index 
0000000..36432c7 --- /dev/null +++ b/streaming_data_types/fbschemas/array_1d_se00/DoubleArray.py @@ -0,0 +1,98 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers +from flatbuffers.compat import import_numpy + +np = import_numpy() + + +class DoubleArray(object): + __slots__ = ["_tab"] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = DoubleArray() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsDoubleArray(cls, buf, offset=0): + """This method is deprecated. Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + + @classmethod + def DoubleArrayBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier( + buf, offset, b"\x73\x65\x30\x30", size_prefixed=size_prefixed + ) + + # DoubleArray + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # DoubleArray + def Value(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get( + flatbuffers.number_types.Float64Flags, + a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8), + ) + return 0 + + # DoubleArray + def ValueAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Float64Flags, o) + return 0 + + # DoubleArray + def ValueLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # DoubleArray + def ValueIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + return o == 0 + + +def DoubleArrayStart(builder): + builder.StartObject(1) + + +def Start(builder): + return DoubleArrayStart(builder) + + +def DoubleArrayAddValue(builder, value): + 
builder.PrependUOffsetTRelativeSlot( + 0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0 + ) + + +def AddValue(builder, value): + return DoubleArrayAddValue(builder, value) + + +def DoubleArrayStartValueVector(builder, numElems): + return builder.StartVector(8, numElems, 8) + + +def StartValueVector(builder, numElems): + return DoubleArrayStartValueVector(builder, numElems) + + +def DoubleArrayEnd(builder): + return builder.EndObject() + + +def End(builder): + return DoubleArrayEnd(builder) diff --git a/streaming_data_types/fbschemas/array_1d_se00/FloatArray.py b/streaming_data_types/fbschemas/array_1d_se00/FloatArray.py new file mode 100644 index 0000000..ec2238f --- /dev/null +++ b/streaming_data_types/fbschemas/array_1d_se00/FloatArray.py @@ -0,0 +1,98 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers +from flatbuffers.compat import import_numpy + +np = import_numpy() + + +class FloatArray(object): + __slots__ = ["_tab"] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = FloatArray() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsFloatArray(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + + @classmethod + def FloatArrayBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier( + buf, offset, b"\x73\x65\x30\x30", size_prefixed=size_prefixed + ) + + # FloatArray + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # FloatArray + def Value(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get( + flatbuffers.number_types.Float32Flags, + a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4), + ) + return 0 + + # FloatArray + def ValueAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Float32Flags, o) + return 0 + + # FloatArray + def ValueLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # FloatArray + def ValueIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + return o == 0 + + +def FloatArrayStart(builder): + builder.StartObject(1) + + +def Start(builder): + return FloatArrayStart(builder) + + +def FloatArrayAddValue(builder, value): + builder.PrependUOffsetTRelativeSlot( + 0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0 + ) + + +def AddValue(builder, value): + return FloatArrayAddValue(builder, value) + + +def FloatArrayStartValueVector(builder, numElems): + return builder.StartVector(4, numElems, 4) + + +def StartValueVector(builder, numElems): + return FloatArrayStartValueVector(builder, numElems) + + +def FloatArrayEnd(builder): + return builder.EndObject() + + +def End(builder): + return FloatArrayEnd(builder) diff --git a/streaming_data_types/fbschemas/array_1d_se00/Int16Array.py b/streaming_data_types/fbschemas/array_1d_se00/Int16Array.py new file mode 
100644 index 0000000..801d2ec --- /dev/null +++ b/streaming_data_types/fbschemas/array_1d_se00/Int16Array.py @@ -0,0 +1,98 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers +from flatbuffers.compat import import_numpy + +np = import_numpy() + + +class Int16Array(object): + __slots__ = ["_tab"] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = Int16Array() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsInt16Array(cls, buf, offset=0): + """This method is deprecated. Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + + @classmethod + def Int16ArrayBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier( + buf, offset, b"\x73\x65\x30\x30", size_prefixed=size_prefixed + ) + + # Int16Array + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # Int16Array + def Value(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get( + flatbuffers.number_types.Int16Flags, + a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 2), + ) + return 0 + + # Int16Array + def ValueAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int16Flags, o) + return 0 + + # Int16Array + def ValueLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # Int16Array + def ValueIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + return o == 0 + + +def Int16ArrayStart(builder): + builder.StartObject(1) + + +def Start(builder): + return Int16ArrayStart(builder) + + +def Int16ArrayAddValue(builder, value): + 
builder.PrependUOffsetTRelativeSlot( + 0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0 + ) + + +def AddValue(builder, value): + return Int16ArrayAddValue(builder, value) + + +def Int16ArrayStartValueVector(builder, numElems): + return builder.StartVector(2, numElems, 2) + + +def StartValueVector(builder, numElems): + return Int16ArrayStartValueVector(builder, numElems) + + +def Int16ArrayEnd(builder): + return builder.EndObject() + + +def End(builder): + return Int16ArrayEnd(builder) diff --git a/streaming_data_types/fbschemas/array_1d_se00/Int32Array.py b/streaming_data_types/fbschemas/array_1d_se00/Int32Array.py new file mode 100644 index 0000000..d716e81 --- /dev/null +++ b/streaming_data_types/fbschemas/array_1d_se00/Int32Array.py @@ -0,0 +1,98 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers +from flatbuffers.compat import import_numpy + +np = import_numpy() + + +class Int32Array(object): + __slots__ = ["_tab"] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = Int32Array() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsInt32Array(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + + @classmethod + def Int32ArrayBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier( + buf, offset, b"\x73\x65\x30\x30", size_prefixed=size_prefixed + ) + + # Int32Array + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # Int32Array + def Value(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get( + flatbuffers.number_types.Int32Flags, + a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4), + ) + return 0 + + # Int32Array + def ValueAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int32Flags, o) + return 0 + + # Int32Array + def ValueLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # Int32Array + def ValueIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + return o == 0 + + +def Int32ArrayStart(builder): + builder.StartObject(1) + + +def Start(builder): + return Int32ArrayStart(builder) + + +def Int32ArrayAddValue(builder, value): + builder.PrependUOffsetTRelativeSlot( + 0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0 + ) + + +def AddValue(builder, value): + return Int32ArrayAddValue(builder, value) + + +def Int32ArrayStartValueVector(builder, numElems): + return builder.StartVector(4, numElems, 4) + + +def StartValueVector(builder, numElems): + return Int32ArrayStartValueVector(builder, numElems) + + +def Int32ArrayEnd(builder): + return builder.EndObject() + + +def End(builder): + return Int32ArrayEnd(builder) diff --git a/streaming_data_types/fbschemas/array_1d_se00/Int64Array.py b/streaming_data_types/fbschemas/array_1d_se00/Int64Array.py new file mode 
100644 index 0000000..f31fc83 --- /dev/null +++ b/streaming_data_types/fbschemas/array_1d_se00/Int64Array.py @@ -0,0 +1,98 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers +from flatbuffers.compat import import_numpy + +np = import_numpy() + + +class Int64Array(object): + __slots__ = ["_tab"] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = Int64Array() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsInt64Array(cls, buf, offset=0): + """This method is deprecated. Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + + @classmethod + def Int64ArrayBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier( + buf, offset, b"\x73\x65\x30\x30", size_prefixed=size_prefixed + ) + + # Int64Array + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # Int64Array + def Value(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get( + flatbuffers.number_types.Int64Flags, + a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8), + ) + return 0 + + # Int64Array + def ValueAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int64Flags, o) + return 0 + + # Int64Array + def ValueLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # Int64Array + def ValueIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + return o == 0 + + +def Int64ArrayStart(builder): + builder.StartObject(1) + + +def Start(builder): + return Int64ArrayStart(builder) + + +def Int64ArrayAddValue(builder, value): + 
builder.PrependUOffsetTRelativeSlot( + 0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0 + ) + + +def AddValue(builder, value): + return Int64ArrayAddValue(builder, value) + + +def Int64ArrayStartValueVector(builder, numElems): + return builder.StartVector(8, numElems, 8) + + +def StartValueVector(builder, numElems): + return Int64ArrayStartValueVector(builder, numElems) + + +def Int64ArrayEnd(builder): + return builder.EndObject() + + +def End(builder): + return Int64ArrayEnd(builder) diff --git a/streaming_data_types/fbschemas/array_1d_se00/Int8Array.py b/streaming_data_types/fbschemas/array_1d_se00/Int8Array.py new file mode 100644 index 0000000..06f283f --- /dev/null +++ b/streaming_data_types/fbschemas/array_1d_se00/Int8Array.py @@ -0,0 +1,98 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers +from flatbuffers.compat import import_numpy + +np = import_numpy() + + +class Int8Array(object): + __slots__ = ["_tab"] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = Int8Array() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsInt8Array(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + + @classmethod + def Int8ArrayBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier( + buf, offset, b"\x73\x65\x30\x30", size_prefixed=size_prefixed + ) + + # Int8Array + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # Int8Array + def Value(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get( + flatbuffers.number_types.Int8Flags, + a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1), + ) + return 0 + + # Int8Array + def ValueAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int8Flags, o) + return 0 + + # Int8Array + def ValueLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # Int8Array + def ValueIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + return o == 0 + + +def Int8ArrayStart(builder): + builder.StartObject(1) + + +def Start(builder): + return Int8ArrayStart(builder) + + +def Int8ArrayAddValue(builder, value): + builder.PrependUOffsetTRelativeSlot( + 0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0 + ) + + +def AddValue(builder, value): + return Int8ArrayAddValue(builder, value) + + +def Int8ArrayStartValueVector(builder, numElems): + return builder.StartVector(1, numElems, 1) + + +def StartValueVector(builder, numElems): + return Int8ArrayStartValueVector(builder, numElems) + + +def Int8ArrayEnd(builder): + return builder.EndObject() + + +def End(builder): + return Int8ArrayEnd(builder) diff --git a/streaming_data_types/fbschemas/array_1d_se00/Location.py b/streaming_data_types/fbschemas/array_1d_se00/Location.py new file mode 100644 index 
0000000..7af8fcf --- /dev/null +++ b/streaming_data_types/fbschemas/array_1d_se00/Location.py @@ -0,0 +1,10 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + + +class Location(object): + Unknown = 0 + Start = 1 + Middle = 2 + End = 3 diff --git a/streaming_data_types/fbschemas/array_1d_se00/SampleEnvironmentData.py b/streaming_data_types/fbschemas/array_1d_se00/SampleEnvironmentData.py new file mode 100644 index 0000000..1cfb476 --- /dev/null +++ b/streaming_data_types/fbschemas/array_1d_se00/SampleEnvironmentData.py @@ -0,0 +1,228 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers +from flatbuffers.compat import import_numpy + +np = import_numpy() + + +class SampleEnvironmentData(object): + __slots__ = ["_tab"] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = SampleEnvironmentData() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsSampleEnvironmentData(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + + @classmethod + def SampleEnvironmentDataBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier( + buf, offset, b"\x73\x65\x30\x30", size_prefixed=size_prefixed + ) + + # SampleEnvironmentData + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # SampleEnvironmentData + def Name(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + # SampleEnvironmentData + def Channel(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) + return 0 + + # SampleEnvironmentData + def PacketTimestamp(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int64Flags, o + self._tab.Pos) + return 0 + + # SampleEnvironmentData + def TimeDelta(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + if o != 0: + return self._tab.Get( + flatbuffers.number_types.Float64Flags, o + self._tab.Pos + ) + return 0.0 + + # SampleEnvironmentData + def TimestampLocation(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) + return 0 + + # SampleEnvironmentData + def ValuesType(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Uint8Flags, o + self._tab.Pos) + return 0 + + # SampleEnvironmentData + def Values(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16)) + if o != 0: + from flatbuffers.table import Table + + obj = Table(bytearray(), 0) + self._tab.Union(obj, o) + return obj + 
return None + + # SampleEnvironmentData + def Timestamps(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(18)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get( + flatbuffers.number_types.Int64Flags, + a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8), + ) + return 0 + + # SampleEnvironmentData + def TimestampsAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(18)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int64Flags, o) + return 0 + + # SampleEnvironmentData + def TimestampsLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(18)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # SampleEnvironmentData + def TimestampsIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(18)) + return o == 0 + + # SampleEnvironmentData + def MessageCounter(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(20)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int64Flags, o + self._tab.Pos) + return 0 + + +def SampleEnvironmentDataStart(builder): + builder.StartObject(9) + + +def Start(builder): + return SampleEnvironmentDataStart(builder) + + +def SampleEnvironmentDataAddName(builder, name): + builder.PrependUOffsetTRelativeSlot( + 0, flatbuffers.number_types.UOffsetTFlags.py_type(name), 0 + ) + + +def AddName(builder, name): + return SampleEnvironmentDataAddName(builder, name) + + +def SampleEnvironmentDataAddChannel(builder, channel): + builder.PrependInt32Slot(1, channel, 0) + + +def AddChannel(builder, channel): + return SampleEnvironmentDataAddChannel(builder, channel) + + +def SampleEnvironmentDataAddPacketTimestamp(builder, packetTimestamp): + builder.PrependInt64Slot(2, packetTimestamp, 0) + + +def AddPacketTimestamp(builder, packetTimestamp): + return SampleEnvironmentDataAddPacketTimestamp(builder, packetTimestamp) + + +def 
SampleEnvironmentDataAddTimeDelta(builder, timeDelta): + builder.PrependFloat64Slot(3, timeDelta, 0.0) + + +def AddTimeDelta(builder, timeDelta): + return SampleEnvironmentDataAddTimeDelta(builder, timeDelta) + + +def SampleEnvironmentDataAddTimestampLocation(builder, timestampLocation): + builder.PrependInt8Slot(4, timestampLocation, 0) + + +def AddTimestampLocation(builder, timestampLocation): + return SampleEnvironmentDataAddTimestampLocation(builder, timestampLocation) + + +def SampleEnvironmentDataAddValuesType(builder, valuesType): + builder.PrependUint8Slot(5, valuesType, 0) + + +def AddValuesType(builder, valuesType): + return SampleEnvironmentDataAddValuesType(builder, valuesType) + + +def SampleEnvironmentDataAddValues(builder, values): + builder.PrependUOffsetTRelativeSlot( + 6, flatbuffers.number_types.UOffsetTFlags.py_type(values), 0 + ) + + +def AddValues(builder, values): + return SampleEnvironmentDataAddValues(builder, values) + + +def SampleEnvironmentDataAddTimestamps(builder, timestamps): + builder.PrependUOffsetTRelativeSlot( + 7, flatbuffers.number_types.UOffsetTFlags.py_type(timestamps), 0 + ) + + +def AddTimestamps(builder, timestamps): + return SampleEnvironmentDataAddTimestamps(builder, timestamps) + + +def SampleEnvironmentDataStartTimestampsVector(builder, numElems): + return builder.StartVector(8, numElems, 8) + + +def StartTimestampsVector(builder, numElems): + return SampleEnvironmentDataStartTimestampsVector(builder, numElems) + + +def SampleEnvironmentDataAddMessageCounter(builder, messageCounter): + builder.PrependInt64Slot(8, messageCounter, 0) + + +def AddMessageCounter(builder, messageCounter): + return SampleEnvironmentDataAddMessageCounter(builder, messageCounter) + + +def SampleEnvironmentDataEnd(builder): + return builder.EndObject() + + +def End(builder): + return SampleEnvironmentDataEnd(builder) diff --git a/streaming_data_types/fbschemas/array_1d_se00/UInt16Array.py 
b/streaming_data_types/fbschemas/array_1d_se00/UInt16Array.py new file mode 100644 index 0000000..0d8a7a3 --- /dev/null +++ b/streaming_data_types/fbschemas/array_1d_se00/UInt16Array.py @@ -0,0 +1,98 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers +from flatbuffers.compat import import_numpy + +np = import_numpy() + + +class UInt16Array(object): + __slots__ = ["_tab"] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = UInt16Array() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsUInt16Array(cls, buf, offset=0): + """This method is deprecated. Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + + @classmethod + def UInt16ArrayBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier( + buf, offset, b"\x73\x65\x30\x30", size_prefixed=size_prefixed + ) + + # UInt16Array + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # UInt16Array + def Value(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get( + flatbuffers.number_types.Uint16Flags, + a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 2), + ) + return 0 + + # UInt16Array + def ValueAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint16Flags, o) + return 0 + + # UInt16Array + def ValueLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # UInt16Array + def ValueIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + return o == 0 + + +def UInt16ArrayStart(builder): + builder.StartObject(1) + + +def Start(builder): + return 
UInt16ArrayStart(builder) + + +def UInt16ArrayAddValue(builder, value): + builder.PrependUOffsetTRelativeSlot( + 0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0 + ) + + +def AddValue(builder, value): + return UInt16ArrayAddValue(builder, value) + + +def UInt16ArrayStartValueVector(builder, numElems): + return builder.StartVector(2, numElems, 2) + + +def StartValueVector(builder, numElems): + return UInt16ArrayStartValueVector(builder, numElems) + + +def UInt16ArrayEnd(builder): + return builder.EndObject() + + +def End(builder): + return UInt16ArrayEnd(builder) diff --git a/streaming_data_types/fbschemas/array_1d_se00/UInt32Array.py b/streaming_data_types/fbschemas/array_1d_se00/UInt32Array.py new file mode 100644 index 0000000..a69431c --- /dev/null +++ b/streaming_data_types/fbschemas/array_1d_se00/UInt32Array.py @@ -0,0 +1,98 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers +from flatbuffers.compat import import_numpy + +np = import_numpy() + + +class UInt32Array(object): + __slots__ = ["_tab"] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = UInt32Array() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsUInt32Array(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + + @classmethod + def UInt32ArrayBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier( + buf, offset, b"\x73\x65\x30\x30", size_prefixed=size_prefixed + ) + + # UInt32Array + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # UInt32Array + def Value(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get( + flatbuffers.number_types.Uint32Flags, + a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4), + ) + return 0 + + # UInt32Array + def ValueAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint32Flags, o) + return 0 + + # UInt32Array + def ValueLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # UInt32Array + def ValueIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + return o == 0 + + +def UInt32ArrayStart(builder): + builder.StartObject(1) + + +def Start(builder): + return UInt32ArrayStart(builder) + + +def UInt32ArrayAddValue(builder, value): + builder.PrependUOffsetTRelativeSlot( + 0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0 + ) + + +def AddValue(builder, value): + return UInt32ArrayAddValue(builder, value) + + +def UInt32ArrayStartValueVector(builder, numElems): + return builder.StartVector(4, numElems, 4) + + +def StartValueVector(builder, numElems): + return UInt32ArrayStartValueVector(builder, numElems) + + +def UInt32ArrayEnd(builder): + return builder.EndObject() + + +def End(builder): + return UInt32ArrayEnd(builder) diff --git a/streaming_data_types/fbschemas/array_1d_se00/UInt64Array.py b/streaming_data_types/fbschemas/array_1d_se00/UInt64Array.py 
new file mode 100644 index 0000000..c9af60b --- /dev/null +++ b/streaming_data_types/fbschemas/array_1d_se00/UInt64Array.py @@ -0,0 +1,98 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers +from flatbuffers.compat import import_numpy + +np = import_numpy() + + +class UInt64Array(object): + __slots__ = ["_tab"] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = UInt64Array() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsUInt64Array(cls, buf, offset=0): + """This method is deprecated. Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + + @classmethod + def UInt64ArrayBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier( + buf, offset, b"\x73\x65\x30\x30", size_prefixed=size_prefixed + ) + + # UInt64Array + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # UInt64Array + def Value(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get( + flatbuffers.number_types.Uint64Flags, + a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8), + ) + return 0 + + # UInt64Array + def ValueAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint64Flags, o) + return 0 + + # UInt64Array + def ValueLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # UInt64Array + def ValueIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + return o == 0 + + +def UInt64ArrayStart(builder): + builder.StartObject(1) + + +def Start(builder): + return UInt64ArrayStart(builder) + + +def UInt64ArrayAddValue(builder, 
value): + builder.PrependUOffsetTRelativeSlot( + 0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0 + ) + + +def AddValue(builder, value): + return UInt64ArrayAddValue(builder, value) + + +def UInt64ArrayStartValueVector(builder, numElems): + return builder.StartVector(8, numElems, 8) + + +def StartValueVector(builder, numElems): + return UInt64ArrayStartValueVector(builder, numElems) + + +def UInt64ArrayEnd(builder): + return builder.EndObject() + + +def End(builder): + return UInt64ArrayEnd(builder) diff --git a/streaming_data_types/fbschemas/array_1d_se00/UInt8Array.py b/streaming_data_types/fbschemas/array_1d_se00/UInt8Array.py new file mode 100644 index 0000000..fcd56eb --- /dev/null +++ b/streaming_data_types/fbschemas/array_1d_se00/UInt8Array.py @@ -0,0 +1,98 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers +from flatbuffers.compat import import_numpy + +np = import_numpy() + + +class UInt8Array(object): + __slots__ = ["_tab"] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = UInt8Array() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsUInt8Array(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + + @classmethod + def UInt8ArrayBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier( + buf, offset, b"\x73\x65\x30\x30", size_prefixed=size_prefixed + ) + + # UInt8Array + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # UInt8Array + def Value(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get( + flatbuffers.number_types.Uint8Flags, + a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1), + ) + return 0 + + # UInt8Array + def ValueAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint8Flags, o) + return 0 + + # UInt8Array + def ValueLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # UInt8Array + def ValueIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + return o == 0 + + +def UInt8ArrayStart(builder): + builder.StartObject(1) + + +def Start(builder): + return UInt8ArrayStart(builder) + + +def UInt8ArrayAddValue(builder, value): + builder.PrependUOffsetTRelativeSlot( + 0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0 + ) + + +def AddValue(builder, value): + return UInt8ArrayAddValue(builder, value) + + +def UInt8ArrayStartValueVector(builder, numElems): + return builder.StartVector(1, numElems, 1) + + +def StartValueVector(builder, numElems): + return UInt8ArrayStartValueVector(builder, numElems) + + +def UInt8ArrayEnd(builder): + return builder.EndObject() + + +def End(builder): + return UInt8ArrayEnd(builder) diff --git a/streaming_data_types/fbschemas/array_1d_se00/ValueUnion.py b/streaming_data_types/fbschemas/array_1d_se00/ValueUnion.py new file mode 
100644 index 0000000..4dd1bf1 --- /dev/null +++ b/streaming_data_types/fbschemas/array_1d_se00/ValueUnion.py @@ -0,0 +1,17 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + + +class ValueUnion(object): + NONE = 0 + Int8Array = 1 + UInt8Array = 2 + Int16Array = 3 + UInt16Array = 4 + Int32Array = 5 + UInt32Array = 6 + Int64Array = 7 + UInt64Array = 8 + DoubleArray = 9 + FloatArray = 10 diff --git a/streaming_data_types/fbschemas/array_1d_se00/__init__.py b/streaming_data_types/fbschemas/array_1d_se00/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/test_se00.py b/tests/test_se00.py new file mode 100644 index 0000000..49121f3 --- /dev/null +++ b/tests/test_se00.py @@ -0,0 +1,73 @@ +import numpy as np +import pytest + +from streaming_data_types import DESERIALISERS, SERIALISERS +from streaming_data_types.array_1d_se00 import deserialise_se00, serialise_se00 +from streaming_data_types.fbschemas.array_1d_se00.Location import Location + +entry_1 = { + "name": "some_name", + "timestamp_unix_ns": 1668593863397138093, + "channel": 42, + "message_counter": 123456, + "sample_ts_delta": 0.005, + "values": np.arange(100, dtype=np.uint16), + "value_timestamps": np.arange(50) + 1111, + "ts_location": Location.End, +} + +entry_2 = { + "name": "some_name_other_name", + "timestamp_unix_ns": 1668593863397138094, + "channel": 11, + "message_counter": 654321, + "sample_ts_delta": 1.666, + "values": np.arange(1000, dtype=np.int64), + "value_timestamps": None, + "ts_location": Location.Middle, +} + +entry_3 = { + "name": "some_float_name", + "timestamp_unix_ns": 1668593863397138095, + "channel": 11, + "message_counter": 231465, + "sample_ts_delta": 1.666, + "values": np.arange(1000, dtype=np.float32), + "value_timestamps": None, + "ts_location": Location.Middle, +} + +entry_4 = { + "name": "some_double_name", + "timestamp_unix_ns": 1668593863397138096, + "channel": 11, + "message_counter": 324156, + "sample_ts_delta": 
1.666, + "values": np.arange(1000, dtype=np.float64), + "value_timestamps": None, + "ts_location": Location.Middle, +} + + +class TestSerialisationSenv: + @pytest.mark.parametrize("input_entry", [entry_1, entry_2, entry_3, entry_4]) + def test_serialises_and_deserialises_se00(self, input_entry): + buf = serialise_se00(**input_entry) + deserialised_tuple = deserialise_se00(buf) + + assert input_entry["name"] == deserialised_tuple.name + assert input_entry["timestamp_unix_ns"] == deserialised_tuple.timestamp_unix_ns + assert input_entry["channel"] == deserialised_tuple.channel + assert input_entry["message_counter"] == deserialised_tuple.message_counter + assert input_entry["sample_ts_delta"] == deserialised_tuple.sample_ts_delta + assert np.array_equal(input_entry["values"], deserialised_tuple.values) + assert np.array_equal( + input_entry["value_timestamps"], deserialised_tuple.value_ts + ) + assert input_entry["values"].dtype == deserialised_tuple.values.dtype + assert input_entry["ts_location"] == deserialised_tuple.ts_location + + def test_schema_type_is_in_global_serialisers_list(self): + assert "se00" in SERIALISERS + assert "se00" in DESERIALISERS From 78a0bd2a00b491548e069988632c2dfec8045a50 Mon Sep 17 00:00:00 2001 From: Jonas Petersson Date: Thu, 17 Nov 2022 07:54:55 +0100 Subject: [PATCH 279/363] updating black version in pre-commit --- .pre-commit-config.yaml | 2 +- requirements-dev.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 8affa1b..ad23bd9 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,6 +1,6 @@ repos: - repo: https://github.com/psf/black - rev: 21.9b0 + rev: 22.3.0 hooks: - id: black language_version: python3 diff --git a/requirements-dev.txt b/requirements-dev.txt index b05031e..0d111a1 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -1,5 +1,5 @@ -r requirements.txt -black==21.9b0 # Pinned to match pre-commit config 
+black==22.3.0 # Pinned to match pre-commit config flake8==4.0.1 # Pinned to match pre-commit config isort==5.10.1 # Pinned to match pre-commit configblack flake8 From 1485ab6545f6818633068cbd02696f6964997277 Mon Sep 17 00:00:00 2001 From: kmurica <76992076+kmurica@users.noreply.github.com> Date: Thu, 17 Nov 2022 09:55:11 +0100 Subject: [PATCH 280/363] adding init file to al00 module for consistency (#64) --- streaming_data_types/fbschemas/alarm_al00/__init__.py | 0 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 streaming_data_types/fbschemas/alarm_al00/__init__.py diff --git a/streaming_data_types/fbschemas/alarm_al00/__init__.py b/streaming_data_types/fbschemas/alarm_al00/__init__.py new file mode 100644 index 0000000..e69de29 From da5cae4bf8fc52e1dcd9c7d4ed49cff1c3072322 Mon Sep 17 00:00:00 2001 From: Kenan Muric Date: Thu, 17 Nov 2022 10:29:23 +0100 Subject: [PATCH 281/363] adding ep01 serialiser and deserialiser --- streaming_data_types/__init__.py | 4 + streaming_data_types/epics_connection_ep01.py | 62 ++++++++++ .../epics_connection_ep01/ConnectionInfo.py | 13 +++ .../EpicsPVConnectionInfo.py | 106 ++++++++++++++++++ .../epics_connection_ep01/__init__.py | 0 tests/test_ep01.py | 41 +++++++ 6 files changed, 226 insertions(+) create mode 100644 streaming_data_types/epics_connection_ep01.py create mode 100644 streaming_data_types/fbschemas/epics_connection_ep01/ConnectionInfo.py create mode 100644 streaming_data_types/fbschemas/epics_connection_ep01/EpicsPVConnectionInfo.py create mode 100644 streaming_data_types/fbschemas/epics_connection_ep01/__init__.py create mode 100644 tests/test_ep01.py diff --git a/streaming_data_types/__init__.py b/streaming_data_types/__init__.py index 84aa78c..8060d3e 100644 --- a/streaming_data_types/__init__.py +++ b/streaming_data_types/__init__.py @@ -10,6 +10,8 @@ serialise_se00) from streaming_data_types.epics_connection_info_ep00 import (deserialise_ep00, serialise_ep00) +from 
streaming_data_types.epics_connection_ep01 import (deserialise_ep01, + serialise_ep01) from streaming_data_types.eventdata_ev42 import (deserialise_ev42, serialise_ev42) from streaming_data_types.eventdata_ev43 import (deserialise_ev43, @@ -50,6 +52,7 @@ "6s4t": serialise_6s4t, "x5f2": serialise_x5f2, "ep00": serialise_ep00, + "ep01": serialise_ep01, "tdct": serialise_tdct, "rf5k": serialise_rf5k, "answ": serialise_answ, @@ -74,6 +77,7 @@ "6s4t": deserialise_6s4t, "x5f2": deserialise_x5f2, "ep00": deserialise_ep00, + "ep01": deserialise_ep01, "tdct": deserialise_tdct, "rf5k": deserialise_rf5k, "answ": deserialise_answ, diff --git a/streaming_data_types/epics_connection_ep01.py b/streaming_data_types/epics_connection_ep01.py new file mode 100644 index 0000000..b0717cb --- /dev/null +++ b/streaming_data_types/epics_connection_ep01.py @@ -0,0 +1,62 @@ +from collections import namedtuple +from typing import Optional, Union + +import flatbuffers + +from streaming_data_types.fbschemas.epics_connection_ep01 import ( + EpicsPVConnectionInfo, + ConnectionInfo, +) +from streaming_data_types.utils import check_schema_identifier + +FILE_IDENTIFIER = b"ep01" + + +def serialise_ep01( + timestamp_ns: int, + connection_info: ConnectionInfo, + source_name: str, + service_id: Optional[str] = None, +) -> bytes: + builder = flatbuffers.Builder(136) + builder.ForceDefaults(True) + + if service_id is not None: + service_id_offset = builder.CreateString(service_id) + source_name_offset = builder.CreateString(source_name) + + EpicsPVConnectionInfo.EpicsPVConnectionInfoStart(builder) + if service_id is not None: + EpicsPVConnectionInfo.EpicsPVConnectionInfoAddServiceId(builder, service_id_offset) + EpicsPVConnectionInfo.EpicsPVConnectionInfoAddSourceName(builder, source_name_offset) + EpicsPVConnectionInfo.EpicsPVConnectionInfoAddStatus(builder, connection_info) + EpicsPVConnectionInfo.EpicsPVConnectionInfoAddTimestamp(builder, timestamp_ns) + + end = 
EpicsPVConnectionInfo.EpicsPVConnectionInfoEnd(builder) + builder.Finish(end, file_identifier=FILE_IDENTIFIER) + return bytes(builder.Output()) + + +EpicsPVConnection = namedtuple( + "EpicsPVConnection", ("timestamp", "connection_info", "source_name", "service_id") +) + + +def deserialise_ep01(buffer: Union[bytearray, bytes]) -> EpicsPVConnection: + check_schema_identifier(buffer, FILE_IDENTIFIER) + + epics_connection = ( + EpicsPVConnectionInfo.EpicsPVConnectionInfo.GetRootAsEpicsPVConnectionInfo(buffer, 0) + ) + + source_name = ( + epics_connection.SourceName() if epics_connection.SourceName() else b"" + ) + service_id = epics_connection.ServiceId() if epics_connection.ServiceId() else b"" + + return EpicsPVConnection( + epics_connection.Timestamp(), + epics_connection.Status(), + source_name.decode(), + service_id.decode(), + ) diff --git a/streaming_data_types/fbschemas/epics_connection_ep01/ConnectionInfo.py b/streaming_data_types/fbschemas/epics_connection_ep01/ConnectionInfo.py new file mode 100644 index 0000000..a5c486c --- /dev/null +++ b/streaming_data_types/fbschemas/epics_connection_ep01/ConnectionInfo.py @@ -0,0 +1,13 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +class ConnectionInfo(object): + UNKNOWN = 0 + NEVER_CONNECTED = 1 + CONNECTED = 2 + DISCONNECTED = 3 + DESTROYED = 4 + CANCELLED = 5 + FINISHED = 6 + REMOTE_ERROR = 7 diff --git a/streaming_data_types/fbschemas/epics_connection_ep01/EpicsPVConnectionInfo.py b/streaming_data_types/fbschemas/epics_connection_ep01/EpicsPVConnectionInfo.py new file mode 100644 index 0000000..06667b1 --- /dev/null +++ b/streaming_data_types/fbschemas/epics_connection_ep01/EpicsPVConnectionInfo.py @@ -0,0 +1,106 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers +from flatbuffers.compat import import_numpy +np = import_numpy() + + +class EpicsPVConnectionInfo(object): + __slots__ = ['_tab'] + + @classmethod + 
def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = EpicsPVConnectionInfo() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsEpicsPVConnectionInfo(cls, buf, offset=0): + """This method is deprecated. Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def EpicsPVConnectionInfoBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x65\x70\x30\x31", size_prefixed=size_prefixed) + + # EpicsPVConnectionInfo + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # EpicsPVConnectionInfo + def Timestamp(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int64Flags, o + self._tab.Pos) + return 0 + + # EpicsPVConnectionInfo + def Status(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int16Flags, o + self._tab.Pos) + return 0 + + # EpicsPVConnectionInfo + def SourceName(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + # EpicsPVConnectionInfo + def ServiceId(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + +def EpicsPVConnectionInfoStart(builder): + builder.StartObject(4) + + +def Start(builder): + return EpicsPVConnectionInfoStart(builder) + + +def EpicsPVConnectionInfoAddTimestamp(builder, timestamp): + builder.PrependInt64Slot(0, timestamp, 0) + + +def AddTimestamp(builder, timestamp): + return EpicsPVConnectionInfoAddTimestamp(builder, timestamp) + + +def EpicsPVConnectionInfoAddStatus(builder, status): + builder.PrependInt16Slot(1, status, 0) + + +def AddStatus(builder, 
status): + return EpicsPVConnectionInfoAddStatus(builder, status) + + +def EpicsPVConnectionInfoAddSourceName(builder, sourceName): + builder.PrependUOffsetTRelativeSlot(2, flatbuffers.number_types.UOffsetTFlags.py_type(sourceName), 0) + + +def AddSourceName(builder, sourceName): + return EpicsPVConnectionInfoAddSourceName(builder, sourceName) + + +def EpicsPVConnectionInfoAddServiceId(builder, serviceId): + builder.PrependUOffsetTRelativeSlot(3, flatbuffers.number_types.UOffsetTFlags.py_type(serviceId), 0) + + +def AddServiceId(builder, serviceId): + return EpicsPVConnectionInfoAddServiceId(builder, serviceId) + + +def EpicsPVConnectionInfoEnd(builder): + return builder.EndObject() + + +def End(builder): + return EpicsPVConnectionInfoEnd(builder) diff --git a/streaming_data_types/fbschemas/epics_connection_ep01/__init__.py b/streaming_data_types/fbschemas/epics_connection_ep01/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/test_ep01.py b/tests/test_ep01.py new file mode 100644 index 0000000..e319f8a --- /dev/null +++ b/tests/test_ep01.py @@ -0,0 +1,41 @@ +import pytest + +from streaming_data_types import DESERIALISERS, SERIALISERS +from streaming_data_types.epics_connection_ep01 import ( + deserialise_ep01, + serialise_ep01, +) +from streaming_data_types.exceptions import WrongSchemaException +from streaming_data_types.fbschemas.epics_connection_ep01 import ConnectionInfo + + +class TestSerialisationEp01: + original_entry = { + "timestamp_ns": 1593620746000000000, + "connection_info": ConnectionInfo.ConnectionInfo.DISCONNECTED, + "source_name": "test_source", + "service_id": "test_service", + } + + def test_serialises_and_deserialises_ep01_message_correctly(self): + buf = serialise_ep01(**self.original_entry) + deserialised_tuple = deserialise_ep01(buf) + + assert deserialised_tuple.timestamp == self.original_entry["timestamp_ns"] + assert deserialised_tuple.connection_info == self.original_entry["connection_info"] + assert 
deserialised_tuple.source_name == self.original_entry["source_name"] + assert deserialised_tuple.service_id == self.original_entry["service_id"] + + def test_if_buffer_has_wrong_id_then_throws(self): + buf = serialise_ep01(**self.original_entry) + + # Manually hack the id + buf = bytearray(buf) + buf[4:8] = b"1234" + + with pytest.raises(WrongSchemaException): + deserialise_ep01(buf) + + def test_schema_type_is_in_global_serialisers_list(self): + assert "ep00" in SERIALISERS + assert "ep00" in DESERIALISERS From 564e8fb59bdfdce356aa13bf6ecd18d0c8db86d6 Mon Sep 17 00:00:00 2001 From: Kenan Muric Date: Thu, 17 Nov 2022 13:00:46 +0100 Subject: [PATCH 282/363] additional fixes --- streaming_data_types/epics_connection_ep01.py | 16 ++++++++-------- tests/test_ep01.py | 8 ++++---- 2 files changed, 12 insertions(+), 12 deletions(-) diff --git a/streaming_data_types/epics_connection_ep01.py b/streaming_data_types/epics_connection_ep01.py index b0717cb..69bf214 100644 --- a/streaming_data_types/epics_connection_ep01.py +++ b/streaming_data_types/epics_connection_ep01.py @@ -14,7 +14,7 @@ def serialise_ep01( timestamp_ns: int, - connection_info: ConnectionInfo, + status: ConnectionInfo, source_name: str, service_id: Optional[str] = None, ) -> bytes: @@ -29,7 +29,7 @@ def serialise_ep01( if service_id is not None: EpicsPVConnectionInfo.EpicsPVConnectionInfoAddServiceId(builder, service_id_offset) EpicsPVConnectionInfo.EpicsPVConnectionInfoAddSourceName(builder, source_name_offset) - EpicsPVConnectionInfo.EpicsPVConnectionInfoAddStatus(builder, connection_info) + EpicsPVConnectionInfo.EpicsPVConnectionInfoAddStatus(builder, status) EpicsPVConnectionInfo.EpicsPVConnectionInfoAddTimestamp(builder, timestamp_ns) end = EpicsPVConnectionInfo.EpicsPVConnectionInfoEnd(builder) @@ -38,7 +38,7 @@ def serialise_ep01( EpicsPVConnection = namedtuple( - "EpicsPVConnection", ("timestamp", "connection_info", "source_name", "service_id") + "EpicsPVConnection", ("timestamp", "status", 
"source_name", "service_id") ) @@ -52,11 +52,11 @@ def deserialise_ep01(buffer: Union[bytearray, bytes]) -> EpicsPVConnection: source_name = ( epics_connection.SourceName() if epics_connection.SourceName() else b"" ) - service_id = epics_connection.ServiceId() if epics_connection.ServiceId() else b"" + service_id = epics_connection.ServiceId() if epics_connection.ServiceId() else None return EpicsPVConnection( - epics_connection.Timestamp(), - epics_connection.Status(), - source_name.decode(), - service_id.decode(), + timestamp=epics_connection.Timestamp(), + status=epics_connection.Status(), + source_name=source_name.decode(), + service_id=service_id.decode(), ) diff --git a/tests/test_ep01.py b/tests/test_ep01.py index e319f8a..1a55d83 100644 --- a/tests/test_ep01.py +++ b/tests/test_ep01.py @@ -12,7 +12,7 @@ class TestSerialisationEp01: original_entry = { "timestamp_ns": 1593620746000000000, - "connection_info": ConnectionInfo.ConnectionInfo.DISCONNECTED, + "status": ConnectionInfo.ConnectionInfo.DISCONNECTED, "source_name": "test_source", "service_id": "test_service", } @@ -22,7 +22,7 @@ def test_serialises_and_deserialises_ep01_message_correctly(self): deserialised_tuple = deserialise_ep01(buf) assert deserialised_tuple.timestamp == self.original_entry["timestamp_ns"] - assert deserialised_tuple.connection_info == self.original_entry["connection_info"] + assert deserialised_tuple.status == self.original_entry["status"] assert deserialised_tuple.source_name == self.original_entry["source_name"] assert deserialised_tuple.service_id == self.original_entry["service_id"] @@ -37,5 +37,5 @@ def test_if_buffer_has_wrong_id_then_throws(self): deserialise_ep01(buf) def test_schema_type_is_in_global_serialisers_list(self): - assert "ep00" in SERIALISERS - assert "ep00" in DESERIALISERS + assert "ep01" in SERIALISERS + assert "ep01" in DESERIALISERS From cb2ec7c116ede04ba5758720ca2dffb970925ffd Mon Sep 17 00:00:00 2001 From: Kenan Muric Date: Thu, 17 Nov 2022 14:19:10 
+0100 Subject: [PATCH 283/363] hiding status from fb schema --- streaming_data_types/epics_connection_ep01.py | 47 ++++++++++++++++--- tests/test_ep01.py | 4 +- 2 files changed, 43 insertions(+), 8 deletions(-) diff --git a/streaming_data_types/epics_connection_ep01.py b/streaming_data_types/epics_connection_ep01.py index 69bf214..d782de4 100644 --- a/streaming_data_types/epics_connection_ep01.py +++ b/streaming_data_types/epics_connection_ep01.py @@ -1,17 +1,52 @@ from collections import namedtuple +from enum import Enum from typing import Optional, Union import flatbuffers -from streaming_data_types.fbschemas.epics_connection_ep01 import ( - EpicsPVConnectionInfo, - ConnectionInfo, -) +from streaming_data_types.fbschemas.epics_connection_ep01.ConnectionInfo import \ + ConnectionInfo as FBConnectionInfo +from streaming_data_types.fbschemas.epics_connection_ep01 import \ + EpicsPVConnectionInfo from streaming_data_types.utils import check_schema_identifier FILE_IDENTIFIER = b"ep01" +class ConnectionInfo(Enum): + UNKNOWN = 0 + NEVER_CONNECTED = 1 + CONNECTED = 2 + DISCONNECTED = 3 + DESTROYED = 4 + CANCELLED = 5 + FINISHED = 6 + REMOTE_ERROR = 7 + + +_enum_to_status = { + ConnectionInfo.UNKNOWN: FBConnectionInfo.UNKNOWN, + ConnectionInfo.NEVER_CONNECTED: FBConnectionInfo.NEVER_CONNECTED, + ConnectionInfo.CONNECTED: FBConnectionInfo.CONNECTED, + ConnectionInfo.DISCONNECTED: FBConnectionInfo.DISCONNECTED, + ConnectionInfo.DESTROYED: FBConnectionInfo.DESTROYED, + ConnectionInfo.CANCELLED: FBConnectionInfo.CANCELLED, + ConnectionInfo.FINISHED: FBConnectionInfo.FINISHED, + ConnectionInfo.REMOTE_ERROR: FBConnectionInfo.REMOTE_ERROR, +} + +_status_to_enum = { + FBConnectionInfo.UNKNOWN: ConnectionInfo.UNKNOWN, + FBConnectionInfo.NEVER_CONNECTED: ConnectionInfo.NEVER_CONNECTED, + FBConnectionInfo.CONNECTED: ConnectionInfo.CONNECTED, + FBConnectionInfo.DISCONNECTED: ConnectionInfo.DISCONNECTED, + FBConnectionInfo.DESTROYED: ConnectionInfo.DESTROYED, + 
FBConnectionInfo.CANCELLED: ConnectionInfo.CANCELLED, + FBConnectionInfo.FINISHED: ConnectionInfo.FINISHED, + FBConnectionInfo.REMOTE_ERROR: ConnectionInfo.REMOTE_ERROR, +} + + def serialise_ep01( timestamp_ns: int, status: ConnectionInfo, @@ -29,7 +64,7 @@ def serialise_ep01( if service_id is not None: EpicsPVConnectionInfo.EpicsPVConnectionInfoAddServiceId(builder, service_id_offset) EpicsPVConnectionInfo.EpicsPVConnectionInfoAddSourceName(builder, source_name_offset) - EpicsPVConnectionInfo.EpicsPVConnectionInfoAddStatus(builder, status) + EpicsPVConnectionInfo.EpicsPVConnectionInfoAddStatus(builder, _enum_to_status[status]) EpicsPVConnectionInfo.EpicsPVConnectionInfoAddTimestamp(builder, timestamp_ns) end = EpicsPVConnectionInfo.EpicsPVConnectionInfoEnd(builder) @@ -56,7 +91,7 @@ def deserialise_ep01(buffer: Union[bytearray, bytes]) -> EpicsPVConnection: return EpicsPVConnection( timestamp=epics_connection.Timestamp(), - status=epics_connection.Status(), + status=_status_to_enum[epics_connection.Status()], source_name=source_name.decode(), service_id=service_id.decode(), ) diff --git a/tests/test_ep01.py b/tests/test_ep01.py index 1a55d83..a39e515 100644 --- a/tests/test_ep01.py +++ b/tests/test_ep01.py @@ -2,17 +2,17 @@ from streaming_data_types import DESERIALISERS, SERIALISERS from streaming_data_types.epics_connection_ep01 import ( + ConnectionInfo, deserialise_ep01, serialise_ep01, ) from streaming_data_types.exceptions import WrongSchemaException -from streaming_data_types.fbschemas.epics_connection_ep01 import ConnectionInfo class TestSerialisationEp01: original_entry = { "timestamp_ns": 1593620746000000000, - "status": ConnectionInfo.ConnectionInfo.DISCONNECTED, + "status": ConnectionInfo.DISCONNECTED, "source_name": "test_source", "service_id": "test_service", } From 5392db82eca47494bde53464e14aea0de6f9c81e Mon Sep 17 00:00:00 2001 From: Daniel Cacabelos Date: Thu, 24 Nov 2022 11:22:02 +0100 Subject: [PATCH 284/363] Fix syntax error appending item to 
list --- streaming_data_types/logdata_f142.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/streaming_data_types/logdata_f142.py b/streaming_data_types/logdata_f142.py index 68de89f..b38e7fc 100644 --- a/streaming_data_types/logdata_f142.py +++ b/streaming_data_types/logdata_f142.py @@ -501,7 +501,7 @@ def _serialise_value( # There are a few numpy types we don't try to handle, for example complex numbers raise NotImplementedError( f"Cannot serialise data of type {value.dtype}, must use one of " - f"{list(_map_scalar_type_to_serialiser.keys()).append(np.unicode_)}" + f"{list(_map_scalar_type_to_serialiser.keys()) + [np.unicode_]}" ) From 933f60f43ce7ea41ed6d6f28cf644bc826b4f239 Mon Sep 17 00:00:00 2001 From: kmurica <76992076+kmurica@users.noreply.github.com> Date: Thu, 24 Nov 2022 11:59:28 +0100 Subject: [PATCH 285/363] ECDC-3221: Adding f144 schema (#66) * adding f144 fbs * adding code for serialising and deserialising according to f144 fbs * further updates to dserialisation * fixing errors and implementing unit tests * adding empty line * flake8 fix --- streaming_data_types/__init__.py | 3 + .../fbschemas/logdata_f144/ArrayByte.py | 69 ++++ .../fbschemas/logdata_f144/ArrayDouble.py | 69 ++++ .../fbschemas/logdata_f144/ArrayFloat.py | 69 ++++ .../fbschemas/logdata_f144/ArrayInt.py | 69 ++++ .../fbschemas/logdata_f144/ArrayLong.py | 69 ++++ .../fbschemas/logdata_f144/ArrayShort.py | 69 ++++ .../fbschemas/logdata_f144/ArrayUByte.py | 69 ++++ .../fbschemas/logdata_f144/ArrayUInt.py | 69 ++++ .../fbschemas/logdata_f144/ArrayULong.py | 69 ++++ .../fbschemas/logdata_f144/ArrayUShort.py | 69 ++++ .../fbschemas/logdata_f144/Byte.py | 46 +++ .../fbschemas/logdata_f144/Double.py | 46 +++ .../fbschemas/logdata_f144/Float.py | 46 +++ .../fbschemas/logdata_f144/Int.py | 46 +++ .../fbschemas/logdata_f144/LogData.py | 79 +++++ .../fbschemas/logdata_f144/Long.py | 46 +++ .../fbschemas/logdata_f144/Short.py | 46 +++ .../fbschemas/logdata_f144/UByte.py | 46 +++ 
.../fbschemas/logdata_f144/UInt.py | 46 +++ .../fbschemas/logdata_f144/ULong.py | 46 +++ .../fbschemas/logdata_f144/UShort.py | 46 +++ .../fbschemas/logdata_f144/Value.py | 26 ++ .../fbschemas/logdata_f144/__init__.py | 0 streaming_data_types/logdata_f144.py | 307 ++++++++++++++++++ tests/test_f144.py | 157 +++++++++ 26 files changed, 1722 insertions(+) create mode 100644 streaming_data_types/fbschemas/logdata_f144/ArrayByte.py create mode 100644 streaming_data_types/fbschemas/logdata_f144/ArrayDouble.py create mode 100644 streaming_data_types/fbschemas/logdata_f144/ArrayFloat.py create mode 100644 streaming_data_types/fbschemas/logdata_f144/ArrayInt.py create mode 100644 streaming_data_types/fbschemas/logdata_f144/ArrayLong.py create mode 100644 streaming_data_types/fbschemas/logdata_f144/ArrayShort.py create mode 100644 streaming_data_types/fbschemas/logdata_f144/ArrayUByte.py create mode 100644 streaming_data_types/fbschemas/logdata_f144/ArrayUInt.py create mode 100644 streaming_data_types/fbschemas/logdata_f144/ArrayULong.py create mode 100644 streaming_data_types/fbschemas/logdata_f144/ArrayUShort.py create mode 100644 streaming_data_types/fbschemas/logdata_f144/Byte.py create mode 100644 streaming_data_types/fbschemas/logdata_f144/Double.py create mode 100644 streaming_data_types/fbschemas/logdata_f144/Float.py create mode 100644 streaming_data_types/fbschemas/logdata_f144/Int.py create mode 100644 streaming_data_types/fbschemas/logdata_f144/LogData.py create mode 100644 streaming_data_types/fbschemas/logdata_f144/Long.py create mode 100644 streaming_data_types/fbschemas/logdata_f144/Short.py create mode 100644 streaming_data_types/fbschemas/logdata_f144/UByte.py create mode 100644 streaming_data_types/fbschemas/logdata_f144/UInt.py create mode 100644 streaming_data_types/fbschemas/logdata_f144/ULong.py create mode 100644 streaming_data_types/fbschemas/logdata_f144/UShort.py create mode 100644 streaming_data_types/fbschemas/logdata_f144/Value.py create mode 
100644 streaming_data_types/fbschemas/logdata_f144/__init__.py create mode 100644 streaming_data_types/logdata_f144.py create mode 100644 tests/test_f144.py diff --git a/streaming_data_types/__init__.py b/streaming_data_types/__init__.py index 8060d3e..4d65cb7 100644 --- a/streaming_data_types/__init__.py +++ b/streaming_data_types/__init__.py @@ -27,6 +27,7 @@ from streaming_data_types.histogram_hs01 import (deserialise_hs01, serialise_hs01) from streaming_data_types.logdata_f142 import deserialise_f142, serialise_f142 +from streaming_data_types.logdata_f144 import serialise_f144, deserialise_f144 from streaming_data_types.nicos_cache_ns10 import (deserialise_ns10, serialise_ns10) from streaming_data_types.run_start_pl72 import (deserialise_pl72, @@ -47,6 +48,7 @@ "hs00": serialise_hs00, "hs01": serialise_hs01, "f142": serialise_f142, + "f144": serialise_f144, "ns10": serialise_ns10, "pl72": serialise_pl72, "6s4t": serialise_6s4t, @@ -72,6 +74,7 @@ "hs00": deserialise_hs00, "hs01": deserialise_hs01, "f142": deserialise_f142, + "f144": deserialise_f144, "ns10": deserialise_ns10, "pl72": deserialise_pl72, "6s4t": deserialise_6s4t, diff --git a/streaming_data_types/fbschemas/logdata_f144/ArrayByte.py b/streaming_data_types/fbschemas/logdata_f144/ArrayByte.py new file mode 100644 index 0000000..aa82442 --- /dev/null +++ b/streaming_data_types/fbschemas/logdata_f144/ArrayByte.py @@ -0,0 +1,69 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers +from flatbuffers.compat import import_numpy +np = import_numpy() + +class ArrayByte(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = ArrayByte() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsArrayByte(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def ArrayByteBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x66\x31\x34\x34", size_prefixed=size_prefixed) + + # ArrayByte + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # ArrayByte + def Value(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get(flatbuffers.number_types.Int8Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1)) + return 0 + + # ArrayByte + def ValueAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int8Flags, o) + return 0 + + # ArrayByte + def ValueLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # ArrayByte + def ValueIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + return o == 0 + +def ArrayByteStart(builder): builder.StartObject(1) +def Start(builder): + return ArrayByteStart(builder) +def ArrayByteAddValue(builder, value): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0) +def AddValue(builder, value): + return ArrayByteAddValue(builder, value) +def ArrayByteStartValueVector(builder, numElems): return builder.StartVector(1, numElems, 1) +def StartValueVector(builder, numElems): + return ArrayByteStartValueVector(builder, numElems) +def ArrayByteEnd(builder): return builder.EndObject() +def End(builder): + return ArrayByteEnd(builder) \ No newline at end of file diff --git a/streaming_data_types/fbschemas/logdata_f144/ArrayDouble.py b/streaming_data_types/fbschemas/logdata_f144/ArrayDouble.py new file mode 100644 index 0000000..0d465b0 --- /dev/null +++ 
b/streaming_data_types/fbschemas/logdata_f144/ArrayDouble.py @@ -0,0 +1,69 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers +from flatbuffers.compat import import_numpy +np = import_numpy() + +class ArrayDouble(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = ArrayDouble() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsArrayDouble(cls, buf, offset=0): + """This method is deprecated. Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def ArrayDoubleBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x66\x31\x34\x34", size_prefixed=size_prefixed) + + # ArrayDouble + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # ArrayDouble + def Value(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get(flatbuffers.number_types.Float64Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8)) + return 0 + + # ArrayDouble + def ValueAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Float64Flags, o) + return 0 + + # ArrayDouble + def ValueLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # ArrayDouble + def ValueIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + return o == 0 + +def ArrayDoubleStart(builder): builder.StartObject(1) +def Start(builder): + return ArrayDoubleStart(builder) +def ArrayDoubleAddValue(builder, value): builder.PrependUOffsetTRelativeSlot(0, 
flatbuffers.number_types.UOffsetTFlags.py_type(value), 0) +def AddValue(builder, value): + return ArrayDoubleAddValue(builder, value) +def ArrayDoubleStartValueVector(builder, numElems): return builder.StartVector(8, numElems, 8) +def StartValueVector(builder, numElems): + return ArrayDoubleStartValueVector(builder, numElems) +def ArrayDoubleEnd(builder): return builder.EndObject() +def End(builder): + return ArrayDoubleEnd(builder) \ No newline at end of file diff --git a/streaming_data_types/fbschemas/logdata_f144/ArrayFloat.py b/streaming_data_types/fbschemas/logdata_f144/ArrayFloat.py new file mode 100644 index 0000000..79fc64f --- /dev/null +++ b/streaming_data_types/fbschemas/logdata_f144/ArrayFloat.py @@ -0,0 +1,69 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers +from flatbuffers.compat import import_numpy +np = import_numpy() + +class ArrayFloat(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = ArrayFloat() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsArrayFloat(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def ArrayFloatBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x66\x31\x34\x34", size_prefixed=size_prefixed) + + # ArrayFloat + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # ArrayFloat + def Value(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get(flatbuffers.number_types.Float32Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4)) + return 0 + + # ArrayFloat + def ValueAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Float32Flags, o) + return 0 + + # ArrayFloat + def ValueLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # ArrayFloat + def ValueIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + return o == 0 + +def ArrayFloatStart(builder): builder.StartObject(1) +def Start(builder): + return ArrayFloatStart(builder) +def ArrayFloatAddValue(builder, value): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0) +def AddValue(builder, value): + return ArrayFloatAddValue(builder, value) +def ArrayFloatStartValueVector(builder, numElems): return builder.StartVector(4, numElems, 4) +def StartValueVector(builder, numElems): + return ArrayFloatStartValueVector(builder, numElems) +def ArrayFloatEnd(builder): return builder.EndObject() +def End(builder): + return ArrayFloatEnd(builder) \ No newline at end of file diff --git a/streaming_data_types/fbschemas/logdata_f144/ArrayInt.py b/streaming_data_types/fbschemas/logdata_f144/ArrayInt.py new file mode 100644 index 0000000..c408e2b --- 
/dev/null +++ b/streaming_data_types/fbschemas/logdata_f144/ArrayInt.py @@ -0,0 +1,69 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers +from flatbuffers.compat import import_numpy +np = import_numpy() + +class ArrayInt(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = ArrayInt() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsArrayInt(cls, buf, offset=0): + """This method is deprecated. Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def ArrayIntBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x66\x31\x34\x34", size_prefixed=size_prefixed) + + # ArrayInt + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # ArrayInt + def Value(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get(flatbuffers.number_types.Int32Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4)) + return 0 + + # ArrayInt + def ValueAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int32Flags, o) + return 0 + + # ArrayInt + def ValueLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # ArrayInt + def ValueIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + return o == 0 + +def ArrayIntStart(builder): builder.StartObject(1) +def Start(builder): + return ArrayIntStart(builder) +def ArrayIntAddValue(builder, value): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0) +def 
AddValue(builder, value): + return ArrayIntAddValue(builder, value) +def ArrayIntStartValueVector(builder, numElems): return builder.StartVector(4, numElems, 4) +def StartValueVector(builder, numElems): + return ArrayIntStartValueVector(builder, numElems) +def ArrayIntEnd(builder): return builder.EndObject() +def End(builder): + return ArrayIntEnd(builder) \ No newline at end of file diff --git a/streaming_data_types/fbschemas/logdata_f144/ArrayLong.py b/streaming_data_types/fbschemas/logdata_f144/ArrayLong.py new file mode 100644 index 0000000..5090fd5 --- /dev/null +++ b/streaming_data_types/fbschemas/logdata_f144/ArrayLong.py @@ -0,0 +1,69 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers +from flatbuffers.compat import import_numpy +np = import_numpy() + +class ArrayLong(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = ArrayLong() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsArrayLong(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def ArrayLongBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x66\x31\x34\x34", size_prefixed=size_prefixed) + + # ArrayLong + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # ArrayLong + def Value(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get(flatbuffers.number_types.Int64Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8)) + return 0 + + # ArrayLong + def ValueAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int64Flags, o) + return 0 + + # ArrayLong + def ValueLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # ArrayLong + def ValueIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + return o == 0 + +def ArrayLongStart(builder): builder.StartObject(1) +def Start(builder): + return ArrayLongStart(builder) +def ArrayLongAddValue(builder, value): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0) +def AddValue(builder, value): + return ArrayLongAddValue(builder, value) +def ArrayLongStartValueVector(builder, numElems): return builder.StartVector(8, numElems, 8) +def StartValueVector(builder, numElems): + return ArrayLongStartValueVector(builder, numElems) +def ArrayLongEnd(builder): return builder.EndObject() +def End(builder): + return ArrayLongEnd(builder) \ No newline at end of file diff --git a/streaming_data_types/fbschemas/logdata_f144/ArrayShort.py b/streaming_data_types/fbschemas/logdata_f144/ArrayShort.py new file mode 100644 index 0000000..d963c00 --- /dev/null +++ 
b/streaming_data_types/fbschemas/logdata_f144/ArrayShort.py @@ -0,0 +1,69 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers +from flatbuffers.compat import import_numpy +np = import_numpy() + +class ArrayShort(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = ArrayShort() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsArrayShort(cls, buf, offset=0): + """This method is deprecated. Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def ArrayShortBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x66\x31\x34\x34", size_prefixed=size_prefixed) + + # ArrayShort + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # ArrayShort + def Value(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get(flatbuffers.number_types.Int16Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 2)) + return 0 + + # ArrayShort + def ValueAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int16Flags, o) + return 0 + + # ArrayShort + def ValueLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # ArrayShort + def ValueIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + return o == 0 + +def ArrayShortStart(builder): builder.StartObject(1) +def Start(builder): + return ArrayShortStart(builder) +def ArrayShortAddValue(builder, value): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0) +def 
AddValue(builder, value): + return ArrayShortAddValue(builder, value) +def ArrayShortStartValueVector(builder, numElems): return builder.StartVector(2, numElems, 2) +def StartValueVector(builder, numElems): + return ArrayShortStartValueVector(builder, numElems) +def ArrayShortEnd(builder): return builder.EndObject() +def End(builder): + return ArrayShortEnd(builder) \ No newline at end of file diff --git a/streaming_data_types/fbschemas/logdata_f144/ArrayUByte.py b/streaming_data_types/fbschemas/logdata_f144/ArrayUByte.py new file mode 100644 index 0000000..be79240 --- /dev/null +++ b/streaming_data_types/fbschemas/logdata_f144/ArrayUByte.py @@ -0,0 +1,69 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers +from flatbuffers.compat import import_numpy +np = import_numpy() + +class ArrayUByte(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = ArrayUByte() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsArrayUByte(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def ArrayUByteBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x66\x31\x34\x34", size_prefixed=size_prefixed) + + # ArrayUByte + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # ArrayUByte + def Value(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get(flatbuffers.number_types.Uint8Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1)) + return 0 + + # ArrayUByte + def ValueAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint8Flags, o) + return 0 + + # ArrayUByte + def ValueLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # ArrayUByte + def ValueIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + return o == 0 + +def ArrayUByteStart(builder): builder.StartObject(1) +def Start(builder): + return ArrayUByteStart(builder) +def ArrayUByteAddValue(builder, value): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0) +def AddValue(builder, value): + return ArrayUByteAddValue(builder, value) +def ArrayUByteStartValueVector(builder, numElems): return builder.StartVector(1, numElems, 1) +def StartValueVector(builder, numElems): + return ArrayUByteStartValueVector(builder, numElems) +def ArrayUByteEnd(builder): return builder.EndObject() +def End(builder): + return ArrayUByteEnd(builder) \ No newline at end of file diff --git a/streaming_data_types/fbschemas/logdata_f144/ArrayUInt.py b/streaming_data_types/fbschemas/logdata_f144/ArrayUInt.py new file mode 100644 index 0000000..7fba9ae --- 
/dev/null +++ b/streaming_data_types/fbschemas/logdata_f144/ArrayUInt.py @@ -0,0 +1,69 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers +from flatbuffers.compat import import_numpy +np = import_numpy() + +class ArrayUInt(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = ArrayUInt() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsArrayUInt(cls, buf, offset=0): + """This method is deprecated. Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def ArrayUIntBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x66\x31\x34\x34", size_prefixed=size_prefixed) + + # ArrayUInt + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # ArrayUInt + def Value(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get(flatbuffers.number_types.Uint32Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4)) + return 0 + + # ArrayUInt + def ValueAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint32Flags, o) + return 0 + + # ArrayUInt + def ValueLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # ArrayUInt + def ValueIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + return o == 0 + +def ArrayUIntStart(builder): builder.StartObject(1) +def Start(builder): + return ArrayUIntStart(builder) +def ArrayUIntAddValue(builder, value): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0) +def 
AddValue(builder, value): + return ArrayUIntAddValue(builder, value) +def ArrayUIntStartValueVector(builder, numElems): return builder.StartVector(4, numElems, 4) +def StartValueVector(builder, numElems): + return ArrayUIntStartValueVector(builder, numElems) +def ArrayUIntEnd(builder): return builder.EndObject() +def End(builder): + return ArrayUIntEnd(builder) \ No newline at end of file diff --git a/streaming_data_types/fbschemas/logdata_f144/ArrayULong.py b/streaming_data_types/fbschemas/logdata_f144/ArrayULong.py new file mode 100644 index 0000000..b2a406c --- /dev/null +++ b/streaming_data_types/fbschemas/logdata_f144/ArrayULong.py @@ -0,0 +1,69 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers +from flatbuffers.compat import import_numpy +np = import_numpy() + +class ArrayULong(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = ArrayULong() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsArrayULong(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def ArrayULongBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x66\x31\x34\x34", size_prefixed=size_prefixed) + + # ArrayULong + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # ArrayULong + def Value(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get(flatbuffers.number_types.Uint64Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8)) + return 0 + + # ArrayULong + def ValueAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint64Flags, o) + return 0 + + # ArrayULong + def ValueLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # ArrayULong + def ValueIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + return o == 0 + +def ArrayULongStart(builder): builder.StartObject(1) +def Start(builder): + return ArrayULongStart(builder) +def ArrayULongAddValue(builder, value): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0) +def AddValue(builder, value): + return ArrayULongAddValue(builder, value) +def ArrayULongStartValueVector(builder, numElems): return builder.StartVector(8, numElems, 8) +def StartValueVector(builder, numElems): + return ArrayULongStartValueVector(builder, numElems) +def ArrayULongEnd(builder): return builder.EndObject() +def End(builder): + return ArrayULongEnd(builder) \ No newline at end of file diff --git a/streaming_data_types/fbschemas/logdata_f144/ArrayUShort.py b/streaming_data_types/fbschemas/logdata_f144/ArrayUShort.py new file mode 100644 index 0000000..6ea886a --- 
/dev/null +++ b/streaming_data_types/fbschemas/logdata_f144/ArrayUShort.py @@ -0,0 +1,69 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers +from flatbuffers.compat import import_numpy +np = import_numpy() + +class ArrayUShort(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = ArrayUShort() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsArrayUShort(cls, buf, offset=0): + """This method is deprecated. Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def ArrayUShortBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x66\x31\x34\x34", size_prefixed=size_prefixed) + + # ArrayUShort + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # ArrayUShort + def Value(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get(flatbuffers.number_types.Uint16Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 2)) + return 0 + + # ArrayUShort + def ValueAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint16Flags, o) + return 0 + + # ArrayUShort + def ValueLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # ArrayUShort + def ValueIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + return o == 0 + +def ArrayUShortStart(builder): builder.StartObject(1) +def Start(builder): + return ArrayUShortStart(builder) +def ArrayUShortAddValue(builder, value): builder.PrependUOffsetTRelativeSlot(0, 
flatbuffers.number_types.UOffsetTFlags.py_type(value), 0) +def AddValue(builder, value): + return ArrayUShortAddValue(builder, value) +def ArrayUShortStartValueVector(builder, numElems): return builder.StartVector(2, numElems, 2) +def StartValueVector(builder, numElems): + return ArrayUShortStartValueVector(builder, numElems) +def ArrayUShortEnd(builder): return builder.EndObject() +def End(builder): + return ArrayUShortEnd(builder) \ No newline at end of file diff --git a/streaming_data_types/fbschemas/logdata_f144/Byte.py b/streaming_data_types/fbschemas/logdata_f144/Byte.py new file mode 100644 index 0000000..96492ba --- /dev/null +++ b/streaming_data_types/fbschemas/logdata_f144/Byte.py @@ -0,0 +1,46 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers +from flatbuffers.compat import import_numpy +np = import_numpy() + +class Byte(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = Byte() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsByte(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def ByteBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x66\x31\x34\x34", size_prefixed=size_prefixed) + + # Byte + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # Byte + def Value(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) + return 0 + +def ByteStart(builder): builder.StartObject(1) +def Start(builder): + return ByteStart(builder) +def ByteAddValue(builder, value): builder.PrependInt8Slot(0, value, 0) +def AddValue(builder, value): + return ByteAddValue(builder, value) +def ByteEnd(builder): return builder.EndObject() +def End(builder): + return ByteEnd(builder) \ No newline at end of file diff --git a/streaming_data_types/fbschemas/logdata_f144/Double.py b/streaming_data_types/fbschemas/logdata_f144/Double.py new file mode 100644 index 0000000..8e664b1 --- /dev/null +++ b/streaming_data_types/fbschemas/logdata_f144/Double.py @@ -0,0 +1,46 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers +from flatbuffers.compat import import_numpy +np = import_numpy() + +class Double(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = Double() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsDouble(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def DoubleBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x66\x31\x34\x34", size_prefixed=size_prefixed) + + # Double + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # Double + def Value(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Float64Flags, o + self._tab.Pos) + return 0.0 + +def DoubleStart(builder): builder.StartObject(1) +def Start(builder): + return DoubleStart(builder) +def DoubleAddValue(builder, value): builder.PrependFloat64Slot(0, value, 0.0) +def AddValue(builder, value): + return DoubleAddValue(builder, value) +def DoubleEnd(builder): return builder.EndObject() +def End(builder): + return DoubleEnd(builder) \ No newline at end of file diff --git a/streaming_data_types/fbschemas/logdata_f144/Float.py b/streaming_data_types/fbschemas/logdata_f144/Float.py new file mode 100644 index 0000000..0c02389 --- /dev/null +++ b/streaming_data_types/fbschemas/logdata_f144/Float.py @@ -0,0 +1,46 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers +from flatbuffers.compat import import_numpy +np = import_numpy() + +class Float(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = Float() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsFloat(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def FloatBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x66\x31\x34\x34", size_prefixed=size_prefixed) + + # Float + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # Float + def Value(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Float32Flags, o + self._tab.Pos) + return 0.0 + +def FloatStart(builder): builder.StartObject(1) +def Start(builder): + return FloatStart(builder) +def FloatAddValue(builder, value): builder.PrependFloat32Slot(0, value, 0.0) +def AddValue(builder, value): + return FloatAddValue(builder, value) +def FloatEnd(builder): return builder.EndObject() +def End(builder): + return FloatEnd(builder) \ No newline at end of file diff --git a/streaming_data_types/fbschemas/logdata_f144/Int.py b/streaming_data_types/fbschemas/logdata_f144/Int.py new file mode 100644 index 0000000..df398d9 --- /dev/null +++ b/streaming_data_types/fbschemas/logdata_f144/Int.py @@ -0,0 +1,46 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers +from flatbuffers.compat import import_numpy +np = import_numpy() + +class Int(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = Int() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsInt(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def IntBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x66\x31\x34\x34", size_prefixed=size_prefixed) + + # Int + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # Int + def Value(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) + return 0 + +def IntStart(builder): builder.StartObject(1) +def Start(builder): + return IntStart(builder) +def IntAddValue(builder, value): builder.PrependInt32Slot(0, value, 0) +def AddValue(builder, value): + return IntAddValue(builder, value) +def IntEnd(builder): return builder.EndObject() +def End(builder): + return IntEnd(builder) \ No newline at end of file diff --git a/streaming_data_types/fbschemas/logdata_f144/LogData.py b/streaming_data_types/fbschemas/logdata_f144/LogData.py new file mode 100644 index 0000000..1e307ae --- /dev/null +++ b/streaming_data_types/fbschemas/logdata_f144/LogData.py @@ -0,0 +1,79 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers +from flatbuffers.compat import import_numpy +np = import_numpy() + +class LogData(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = LogData() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsLogData(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def LogDataBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x66\x31\x34\x34", size_prefixed=size_prefixed) + + # LogData + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # LogData + def SourceName(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + # LogData + def Timestamp(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int64Flags, o + self._tab.Pos) + return 0 + + # LogData + def ValueType(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Uint8Flags, o + self._tab.Pos) + return 0 + + # LogData + def Value(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + if o != 0: + from flatbuffers.table import Table + obj = Table(bytearray(), 0) + self._tab.Union(obj, o) + return obj + return None + +def LogDataStart(builder): builder.StartObject(4) +def Start(builder): + return LogDataStart(builder) +def LogDataAddSourceName(builder, sourceName): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(sourceName), 0) +def AddSourceName(builder, sourceName): + return LogDataAddSourceName(builder, sourceName) +def LogDataAddTimestamp(builder, timestamp): builder.PrependInt64Slot(1, timestamp, 0) +def AddTimestamp(builder, timestamp): + return LogDataAddTimestamp(builder, timestamp) +def LogDataAddValueType(builder, valueType): builder.PrependUint8Slot(2, valueType, 0) +def AddValueType(builder, valueType): + return LogDataAddValueType(builder, valueType) +def LogDataAddValue(builder, value): builder.PrependUOffsetTRelativeSlot(3, 
flatbuffers.number_types.UOffsetTFlags.py_type(value), 0) +def AddValue(builder, value): + return LogDataAddValue(builder, value) +def LogDataEnd(builder): return builder.EndObject() +def End(builder): + return LogDataEnd(builder) \ No newline at end of file diff --git a/streaming_data_types/fbschemas/logdata_f144/Long.py b/streaming_data_types/fbschemas/logdata_f144/Long.py new file mode 100644 index 0000000..e3b3778 --- /dev/null +++ b/streaming_data_types/fbschemas/logdata_f144/Long.py @@ -0,0 +1,46 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers +from flatbuffers.compat import import_numpy +np = import_numpy() + +class Long(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = Long() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsLong(cls, buf, offset=0): + """This method is deprecated. Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def LongBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x66\x31\x34\x34", size_prefixed=size_prefixed) + + # Long + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # Long + def Value(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int64Flags, o + self._tab.Pos) + return 0 + +def LongStart(builder): builder.StartObject(1) +def Start(builder): + return LongStart(builder) +def LongAddValue(builder, value): builder.PrependInt64Slot(0, value, 0) +def AddValue(builder, value): + return LongAddValue(builder, value) +def LongEnd(builder): return builder.EndObject() +def End(builder): + return LongEnd(builder) \ No newline at end of file diff --git a/streaming_data_types/fbschemas/logdata_f144/Short.py 
b/streaming_data_types/fbschemas/logdata_f144/Short.py new file mode 100644 index 0000000..c7ef950 --- /dev/null +++ b/streaming_data_types/fbschemas/logdata_f144/Short.py @@ -0,0 +1,46 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers +from flatbuffers.compat import import_numpy +np = import_numpy() + +class Short(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = Short() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsShort(cls, buf, offset=0): + """This method is deprecated. Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def ShortBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x66\x31\x34\x34", size_prefixed=size_prefixed) + + # Short + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # Short + def Value(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int16Flags, o + self._tab.Pos) + return 0 + +def ShortStart(builder): builder.StartObject(1) +def Start(builder): + return ShortStart(builder) +def ShortAddValue(builder, value): builder.PrependInt16Slot(0, value, 0) +def AddValue(builder, value): + return ShortAddValue(builder, value) +def ShortEnd(builder): return builder.EndObject() +def End(builder): + return ShortEnd(builder) \ No newline at end of file diff --git a/streaming_data_types/fbschemas/logdata_f144/UByte.py b/streaming_data_types/fbschemas/logdata_f144/UByte.py new file mode 100644 index 0000000..9f847d9 --- /dev/null +++ b/streaming_data_types/fbschemas/logdata_f144/UByte.py @@ -0,0 +1,46 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers +from flatbuffers.compat import 
import_numpy +np = import_numpy() + +class UByte(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = UByte() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsUByte(cls, buf, offset=0): + """This method is deprecated. Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def UByteBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x66\x31\x34\x34", size_prefixed=size_prefixed) + + # UByte + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # UByte + def Value(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Uint8Flags, o + self._tab.Pos) + return 0 + +def UByteStart(builder): builder.StartObject(1) +def Start(builder): + return UByteStart(builder) +def UByteAddValue(builder, value): builder.PrependUint8Slot(0, value, 0) +def AddValue(builder, value): + return UByteAddValue(builder, value) +def UByteEnd(builder): return builder.EndObject() +def End(builder): + return UByteEnd(builder) \ No newline at end of file diff --git a/streaming_data_types/fbschemas/logdata_f144/UInt.py b/streaming_data_types/fbschemas/logdata_f144/UInt.py new file mode 100644 index 0000000..4475968 --- /dev/null +++ b/streaming_data_types/fbschemas/logdata_f144/UInt.py @@ -0,0 +1,46 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers +from flatbuffers.compat import import_numpy +np = import_numpy() + +class UInt(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = UInt() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsUInt(cls, buf, offset=0): + """This method is 
deprecated. Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def UIntBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x66\x31\x34\x34", size_prefixed=size_prefixed) + + # UInt + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # UInt + def Value(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Uint32Flags, o + self._tab.Pos) + return 0 + +def UIntStart(builder): builder.StartObject(1) +def Start(builder): + return UIntStart(builder) +def UIntAddValue(builder, value): builder.PrependUint32Slot(0, value, 0) +def AddValue(builder, value): + return UIntAddValue(builder, value) +def UIntEnd(builder): return builder.EndObject() +def End(builder): + return UIntEnd(builder) \ No newline at end of file diff --git a/streaming_data_types/fbschemas/logdata_f144/ULong.py b/streaming_data_types/fbschemas/logdata_f144/ULong.py new file mode 100644 index 0000000..a450c89 --- /dev/null +++ b/streaming_data_types/fbschemas/logdata_f144/ULong.py @@ -0,0 +1,46 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers +from flatbuffers.compat import import_numpy +np = import_numpy() + +class ULong(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = ULong() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsULong(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def ULongBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x66\x31\x34\x34", size_prefixed=size_prefixed) + + # ULong + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # ULong + def Value(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Uint64Flags, o + self._tab.Pos) + return 0 + +def ULongStart(builder): builder.StartObject(1) +def Start(builder): + return ULongStart(builder) +def ULongAddValue(builder, value): builder.PrependUint64Slot(0, value, 0) +def AddValue(builder, value): + return ULongAddValue(builder, value) +def ULongEnd(builder): return builder.EndObject() +def End(builder): + return ULongEnd(builder) \ No newline at end of file diff --git a/streaming_data_types/fbschemas/logdata_f144/UShort.py b/streaming_data_types/fbschemas/logdata_f144/UShort.py new file mode 100644 index 0000000..590f040 --- /dev/null +++ b/streaming_data_types/fbschemas/logdata_f144/UShort.py @@ -0,0 +1,46 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers +from flatbuffers.compat import import_numpy +np = import_numpy() + +class UShort(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = UShort() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsUShort(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def UShortBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x66\x31\x34\x34", size_prefixed=size_prefixed) + + # UShort + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # UShort + def Value(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Uint16Flags, o + self._tab.Pos) + return 0 + +def UShortStart(builder): builder.StartObject(1) +def Start(builder): + return UShortStart(builder) +def UShortAddValue(builder, value): builder.PrependUint16Slot(0, value, 0) +def AddValue(builder, value): + return UShortAddValue(builder, value) +def UShortEnd(builder): return builder.EndObject() +def End(builder): + return UShortEnd(builder) \ No newline at end of file diff --git a/streaming_data_types/fbschemas/logdata_f144/Value.py b/streaming_data_types/fbschemas/logdata_f144/Value.py new file mode 100644 index 0000000..b6f8232 --- /dev/null +++ b/streaming_data_types/fbschemas/logdata_f144/Value.py @@ -0,0 +1,26 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +class Value(object): + NONE = 0 + Byte = 1 + UByte = 2 + Short = 3 + UShort = 4 + Int = 5 + UInt = 6 + Long = 7 + ULong = 8 + Float = 9 + Double = 10 + ArrayByte = 11 + ArrayUByte = 12 + ArrayShort = 13 + ArrayUShort = 14 + ArrayInt = 15 + ArrayUInt = 16 + ArrayLong = 17 + ArrayULong = 18 + ArrayFloat = 19 + ArrayDouble = 20 diff --git a/streaming_data_types/fbschemas/logdata_f144/__init__.py b/streaming_data_types/fbschemas/logdata_f144/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/streaming_data_types/logdata_f144.py b/streaming_data_types/logdata_f144.py new file mode 100644 index 0000000..8d29177 --- /dev/null +++ b/streaming_data_types/logdata_f144.py @@ -0,0 +1,307 @@ +from 
collections import namedtuple +from datetime import datetime +from typing import Any, Union, NamedTuple + +import flatbuffers +import numpy as np + +from streaming_data_types.fbschemas.logdata_f144 import LogData +from streaming_data_types.fbschemas.logdata_f144.ArrayByte import ( + ArrayByte, + ArrayByteAddValue, + ArrayByteEnd, + ArrayByteStart, +) +from streaming_data_types.fbschemas.logdata_f144.ArrayDouble import ( + ArrayDouble, + ArrayDoubleAddValue, + ArrayDoubleEnd, + ArrayDoubleStart, +) +from streaming_data_types.fbschemas.logdata_f144.ArrayFloat import ( + ArrayFloat, + ArrayFloatAddValue, + ArrayFloatEnd, + ArrayFloatStart, +) +from streaming_data_types.fbschemas.logdata_f144.ArrayInt import ( + ArrayInt, + ArrayIntAddValue, + ArrayIntEnd, + ArrayIntStart, +) +from streaming_data_types.fbschemas.logdata_f144.ArrayLong import ( + ArrayLong, + ArrayLongAddValue, + ArrayLongEnd, + ArrayLongStart, +) +from streaming_data_types.fbschemas.logdata_f144.ArrayShort import ( + ArrayShort, + ArrayShortAddValue, + ArrayShortEnd, + ArrayShortStart, +) +from streaming_data_types.fbschemas.logdata_f144.ArrayUByte import ( + ArrayUByte, + ArrayUByteAddValue, + ArrayUByteEnd, + ArrayUByteStart, +) +from streaming_data_types.fbschemas.logdata_f144.ArrayUInt import ( + ArrayUInt, + ArrayUIntAddValue, + ArrayUIntEnd, + ArrayUIntStart, +) +from streaming_data_types.fbschemas.logdata_f144.ArrayULong import ( + ArrayULong, + ArrayULongAddValue, + ArrayULongEnd, + ArrayULongStart, +) +from streaming_data_types.fbschemas.logdata_f144.ArrayUShort import ( + ArrayUShort, + ArrayUShortAddValue, + ArrayUShortEnd, + ArrayUShortStart, +) +from streaming_data_types.fbschemas.logdata_f144.Byte import ( + Byte, + ByteAddValue, + ByteEnd, + ByteStart, +) +from streaming_data_types.fbschemas.logdata_f144.Double import ( + Double, + DoubleAddValue, + DoubleEnd, + DoubleStart, +) +from streaming_data_types.fbschemas.logdata_f144.Float import ( + Float, + FloatAddValue, + FloatEnd, + 
FloatStart, +) +from streaming_data_types.fbschemas.logdata_f144.Int import ( + Int, + IntAddValue, + IntEnd, + IntStart, +) +from streaming_data_types.fbschemas.logdata_f144.Long import ( + Long, + LongAddValue, + LongEnd, + LongStart, +) +from streaming_data_types.fbschemas.logdata_f144.Short import ( + Short, + ShortAddValue, + ShortEnd, + ShortStart, +) +from streaming_data_types.fbschemas.logdata_f144.UByte import ( + UByte, + UByteAddValue, + UByteEnd, + UByteStart, +) +from streaming_data_types.fbschemas.logdata_f144.UInt import ( + UInt, + UIntAddValue, + UIntEnd, + UIntStart, +) +from streaming_data_types.fbschemas.logdata_f144.ULong import ( + ULong, + ULongAddValue, + ULongEnd, + ULongStart, +) +from streaming_data_types.fbschemas.logdata_f144.UShort import ( + UShort, + UShortAddValue, + UShortEnd, + UShortStart, +) +from streaming_data_types.fbschemas.logdata_f144.Value import Value +from streaming_data_types.utils import check_schema_identifier + +FILE_IDENTIFIER = b"f144" + +SerialiserFunctions = namedtuple( + "SerialiserFunctionMap", + ("StartFunction", "AddValueFunction", "EndFunction", "value_type_enum"), +) + + +def _serialise_value( + builder: flatbuffers.Builder, value: Any, function_map: SerialiserFunctions +): + function_map.StartFunction(builder) + function_map.AddValueFunction(builder, value) + return function_map.EndFunction(builder) + + +_map_scalar_type_to_serialiser = { + np.dtype("byte"): SerialiserFunctions( + ByteStart, ByteAddValue, ByteEnd, Value.Byte + ), + np.dtype("ubyte"): SerialiserFunctions( + UByteStart, UByteAddValue, UByteEnd, Value.UByte + ), + np.dtype("int16"): SerialiserFunctions( + ShortStart, ShortAddValue, ShortEnd, Value.Short + ), + np.dtype("uint16"): SerialiserFunctions( + UShortStart, UShortAddValue, UShortEnd, Value.UShort + ), + np.dtype("int32"): SerialiserFunctions( + IntStart, IntAddValue, IntEnd, Value.Int + ), + np.dtype("uint32"): SerialiserFunctions( + UIntStart, UIntAddValue, UIntEnd, Value.UInt + ), 
+ np.dtype("int64"): SerialiserFunctions( + LongStart, LongAddValue, LongEnd, Value.Long + ), + np.dtype("uint64"): SerialiserFunctions( + ULongStart, ULongAddValue, ULongEnd, Value.ULong + ), + np.dtype("float32"): SerialiserFunctions( + FloatStart, FloatAddValue, FloatEnd, Value.Float + ), + np.dtype("float64"): SerialiserFunctions( + DoubleStart, DoubleAddValue, DoubleEnd, Value.Double + ), +} + +_map_array_type_to_serialiser = { + np.dtype("byte"): SerialiserFunctions( + ArrayByteStart, ArrayByteAddValue, ArrayByteEnd, Value.ArrayByte + ), + np.dtype("int16"): SerialiserFunctions( + ArrayShortStart, ArrayShortAddValue, ArrayShortEnd, Value.ArrayShort + ), + np.dtype("int32"): SerialiserFunctions( + ArrayIntStart, ArrayIntAddValue, ArrayIntEnd, Value.ArrayInt + ), + np.dtype("int64"): SerialiserFunctions( + ArrayLongStart, ArrayLongAddValue, ArrayLongEnd, Value.ArrayLong + ), + np.dtype("ubyte"): SerialiserFunctions( + ArrayUByteStart, ArrayUByteAddValue, ArrayUByteEnd, Value.ArrayUByte + ), + np.dtype("uint16"): SerialiserFunctions( + ArrayUShortStart, ArrayUShortAddValue, ArrayUShortEnd, Value.ArrayUShort + ), + np.dtype("uint32"): SerialiserFunctions( + ArrayUIntStart, ArrayUIntAddValue, ArrayUIntEnd, Value.ArrayUInt + ), + np.dtype("uint64"): SerialiserFunctions( + ArrayULongStart, ArrayULongAddValue, ArrayULongEnd, Value.ArrayULong + ), + np.dtype("float32"): SerialiserFunctions( + ArrayFloatStart, ArrayFloatAddValue, ArrayFloatEnd, Value.ArrayFloat + ), + np.dtype("float64"): SerialiserFunctions( + ArrayDoubleStart, ArrayDoubleAddValue, ArrayDoubleEnd, Value.ArrayDouble + ), +} + + +def serialise_f144( + source_name: str, + value: Any, + timestamp_unix_ns: int = 0, +) -> bytes: + builder = flatbuffers.Builder(1024) + source_name_offset = builder.CreateString(source_name) + value = np.array(value) + if value.ndim == 1: + try: + c_func_map = _map_array_type_to_serialiser[value.dtype] + value_offset = _serialise_value( + builder, 
builder.CreateNumpyVector(value), c_func_map + ) + value_type = c_func_map.value_type_enum + except KeyError: + raise NotImplementedError( + f'f144 flatbuffer does not support values of type {value.dtype}.' + ) + elif value.ndim == 0: + try: + c_func_map = _map_scalar_type_to_serialiser[value.dtype] + value_offset = _serialise_value(builder, value, c_func_map) + value_type = c_func_map.value_type_enum + except KeyError: + raise NotImplementedError( + f'f144 flatbuffer does not support values of type {value.dtype}.' + ) + else: + raise NotImplementedError( + "f144 only supports scalars or 1D array values" + ) + LogData.LogDataStart(builder) + LogData.LogDataAddSourceName(builder, source_name_offset) + LogData.LogDataAddValue(builder, value_offset) + LogData.LogDataAddValueType(builder, value_type) + LogData.LogDataAddTimestamp(builder, timestamp_unix_ns) + end = LogData.LogDataEnd(builder) + builder.Finish(end, file_identifier=FILE_IDENTIFIER) + return bytes(builder.Output()) + + +_map_fb_enum_to_type = { + Value.Byte: Byte, + Value.UByte: UByte, + Value.Short: Short, + Value.UShort: UShort, + Value.Int: Int, + Value.UInt: UInt, + Value.Long: Long, + Value.ULong: ULong, + Value.Float: Float, + Value.Double: Double, + Value.ArrayByte: ArrayByte, + Value.ArrayUByte: ArrayUByte, + Value.ArrayShort: ArrayShort, + Value.ArrayUShort: ArrayUShort, + Value.ArrayInt: ArrayInt, + Value.ArrayUInt: ArrayUInt, + Value.ArrayLong: ArrayLong, + Value.ArrayULong: ArrayULong, + Value.ArrayFloat: ArrayFloat, + Value.ArrayDouble: ArrayDouble, +} + + +ExtractedLogData = NamedTuple( + "LogData", + ( + ("source_name", str), + ("value", Any), + ("timestamp_unix_ns", datetime), + ), +) + + +def deserialise_f144(buffer: Union[bytearray, bytes]) -> ExtractedLogData: + check_schema_identifier(buffer, FILE_IDENTIFIER) + log_data = LogData.LogData.GetRootAs(buffer, 0) + source_name = log_data.SourceName() if log_data.SourceName() else b"" + + value_offset = log_data.Value() + value_fb = 
_map_fb_enum_to_type[log_data.ValueType()]() + value_fb.Init(value_offset.Bytes, value_offset.Pos) + if hasattr(value_fb, "ValueAsNumpy"): + value = value_fb.ValueAsNumpy() + else: + value = value_fb.Value() + return ExtractedLogData( + source_name=source_name.decode(), + value=value, + timestamp_unix_ns=log_data.Timestamp() + ) diff --git a/tests/test_f144.py b/tests/test_f144.py new file mode 100644 index 0000000..3e567fb --- /dev/null +++ b/tests/test_f144.py @@ -0,0 +1,157 @@ +import numpy as np +import pytest + +from streaming_data_types import DESERIALISERS, SERIALISERS +from streaming_data_types.exceptions import WrongSchemaException +from streaming_data_types.logdata_f144 import deserialise_f144, serialise_f144 + + +class TestSerialisationF144: + original_entry = { + "source_name": "some_source", + "value": 578214, + "timestamp_unix_ns": 1585332414000000000, + } + + def test_serialises_and_deserialises_integer_f144_message_correctly(self): + buf = serialise_f144(**self.original_entry) + deserialised_tuple = deserialise_f144(buf) + + assert deserialised_tuple.source_name == self.original_entry["source_name"] + assert deserialised_tuple.value == self.original_entry["value"] + assert ( + deserialised_tuple.timestamp_unix_ns + == self.original_entry["timestamp_unix_ns"] + ) + + def test_serialises_and_deserialises_byte_f144_message_correctly(self): + byte_log = { + "source_name": "some_source", + "value": 0x7F, + "timestamp_unix_ns": 1585332414000000000, + } + buf = serialise_f144(**byte_log) + deserialised_tuple = deserialise_f144(buf) + + assert deserialised_tuple.source_name == byte_log["source_name"] + assert deserialised_tuple.value == byte_log["value"] + assert deserialised_tuple.timestamp_unix_ns == byte_log["timestamp_unix_ns"] + + def test_serialises_and_deserialises_float_f144_message_correctly(self): + float_log = { + "source_name": "some_source", + "value": 1.234, + "timestamp_unix_ns": 1585332414000000000, + } + buf = serialise_f144(**float_log) + 
deserialised_tuple = deserialise_f144(buf) + + assert deserialised_tuple.source_name == float_log["source_name"] + assert deserialised_tuple.value == float_log["value"] + assert deserialised_tuple.timestamp_unix_ns == float_log["timestamp_unix_ns"] + + def test_serialises_and_deserialises_scalar_ndarray_f144_message_correctly(self): + numpy_log = { + "source_name": "some_source", + "value": np.array(42), + "timestamp_unix_ns": 1585332414000000000, + } + buf = serialise_f144(**numpy_log) + deserialised_tuple = deserialise_f144(buf) + + assert deserialised_tuple.source_name == numpy_log["source_name"] + assert deserialised_tuple.value == np.array(numpy_log["value"]) + assert deserialised_tuple.timestamp_unix_ns == numpy_log["timestamp_unix_ns"] + + def test_serialises_and_deserialises_native_list_correctly(self): + list_log = { + "source_name": "some_source", + "value": [1, 2, 3], + "timestamp_unix_ns": 1585332414000000000, + } + buf = serialise_f144(**list_log) + deserialised_tuple = deserialise_f144(buf) + + assert deserialised_tuple.source_name == list_log["source_name"] + # Array values are output as numpy array + assert np.array_equal(deserialised_tuple.value, np.array(list_log["value"])) + assert deserialised_tuple.timestamp_unix_ns == list_log["timestamp_unix_ns"] + + def test_serialises_and_deserialises_numpy_array_integers_correctly(self): + array_log = { + "source_name": "some_source", + "value": np.array([1, 2, 3]), + "timestamp_unix_ns": 1585332414000000000, + } + buf = serialise_f144(**array_log) + deserialised_tuple = deserialise_f144(buf) + + assert deserialised_tuple.source_name == array_log["source_name"] + assert np.array_equal(deserialised_tuple.value, array_log["value"]) + assert deserialised_tuple.timestamp_unix_ns == array_log["timestamp_unix_ns"] + + def test_serialises_and_deserialises_numpy_array_preserves_byte_type_correctly( + self, + ): + array_log = { + "source_name": "some_source", + "value": np.array([1, 2, 3], dtype=np.uint8), + 
"timestamp_unix_ns": 1585332414000000000, + } + buf = serialise_f144(**array_log) + deserialised_tuple = deserialise_f144(buf) + + assert np.array_equal(deserialised_tuple.value, array_log["value"]) + assert deserialised_tuple.value.dtype == array_log["value"].dtype + + def test_serialises_and_deserialises_numpy_array_preserves_integer_type_correctly( + self, + ): + array_log = { + "source_name": "some_source", + "value": np.array([1, 2, 3], dtype=np.uint16), + "timestamp_unix_ns": 1585332414000000000, + } + buf = serialise_f144(**array_log) + deserialised_tuple = deserialise_f144(buf) + + assert np.array_equal(deserialised_tuple.value, array_log["value"]) + assert deserialised_tuple.value.dtype == array_log["value"].dtype + + def test_serialises_and_deserialises_numpy_array_floats_correctly(self): + array_log = { + "source_name": "some_source", + "value": np.array([1.1, 2.2, 3.3]), + "timestamp_unix_ns": 1585332414000000000, + } + buf = serialise_f144(**array_log) + deserialised_tuple = deserialise_f144(buf) + + assert deserialised_tuple.source_name == array_log["source_name"] + assert np.allclose(deserialised_tuple.value, array_log["value"]) + assert deserialised_tuple.timestamp_unix_ns == array_log["timestamp_unix_ns"] + + def test_raises_not_implemented_error_when_trying_to_serialise_numpy_complex_number_type( + self, + ): + complex_log = { + "source_name": "some_source", + "value": np.complex(3, 4), + "timestamp_unix_ns": 1585332414000000000, + } + with pytest.raises(NotImplementedError): + serialise_f144(**complex_log) + + def test_if_buffer_has_wrong_id_then_throws(self): + buf = serialise_f144(**self.original_entry) + + # Manually hack the id + buf = bytearray(buf) + buf[4:8] = b"1234" + + with pytest.raises(WrongSchemaException): + deserialise_f144(buf) + + def test_schema_type_is_in_global_serialisers_list(self): + assert "f144" in SERIALISERS + assert "f144" in DESERIALISERS From df4a9e5f31435dfd7544db37faa89319addfc5d2 Mon Sep 17 00:00:00 2001 From: 
Jonas Petersson Date: Thu, 24 Nov 2022 13:17:30 +0100 Subject: [PATCH 286/363] merging main and reformatting --- streaming_data_types/__init__.py | 60 ++++----- streaming_data_types/alarm_al00.py | 8 +- streaming_data_types/epics_connection_ep01.py | 24 ++-- streaming_data_types/eventdata_ev44.py | 22 +-- .../fbschemas/alarm_al00/Alarm.py | 38 ++++-- .../fbschemas/alarm_al00/Severity.py | 4 +- .../epics_connection_ep01/ConnectionInfo.py | 3 +- .../EpicsPVConnectionInfo.py | 18 ++- .../eventdata_ev44/Event44Message.py | 126 +++++++++++++++--- .../fbschemas/logdata_f144/ArrayByte.py | 47 +++++-- .../fbschemas/logdata_f144/ArrayDouble.py | 47 +++++-- .../fbschemas/logdata_f144/ArrayFloat.py | 47 +++++-- .../fbschemas/logdata_f144/ArrayInt.py | 47 +++++-- .../fbschemas/logdata_f144/ArrayLong.py | 47 +++++-- .../fbschemas/logdata_f144/ArrayShort.py | 47 +++++-- .../fbschemas/logdata_f144/ArrayUByte.py | 47 +++++-- .../fbschemas/logdata_f144/ArrayUInt.py | 47 +++++-- .../fbschemas/logdata_f144/ArrayULong.py | 47 +++++-- .../fbschemas/logdata_f144/ArrayUShort.py | 47 +++++-- .../fbschemas/logdata_f144/Byte.py | 33 ++++- .../fbschemas/logdata_f144/Double.py | 37 +++-- .../fbschemas/logdata_f144/Float.py | 37 +++-- .../fbschemas/logdata_f144/Int.py | 33 ++++- .../fbschemas/logdata_f144/LogData.py | 59 ++++++-- .../fbschemas/logdata_f144/Long.py | 33 ++++- .../fbschemas/logdata_f144/Short.py | 33 ++++- .../fbschemas/logdata_f144/UByte.py | 33 ++++- .../fbschemas/logdata_f144/UInt.py | 37 +++-- .../fbschemas/logdata_f144/ULong.py | 37 +++-- .../fbschemas/logdata_f144/UShort.py | 37 +++-- .../fbschemas/logdata_f144/Value.py | 3 +- streaming_data_types/logdata_f144.py | 20 +-- tests/test_al00.py | 3 +- tests/test_ev44.py | 58 +++++--- 34 files changed, 970 insertions(+), 296 deletions(-) diff --git a/streaming_data_types/__init__.py b/streaming_data_types/__init__.py index 4d65cb7..ec16dfb 100644 --- a/streaming_data_types/__init__.py +++ b/streaming_data_types/__init__.py @@ 
-1,43 +1,35 @@ from streaming_data_types._version import version -from streaming_data_types.action_response_answ import (deserialise_answ, - serialise_answ) +from streaming_data_types.action_response_answ import deserialise_answ, serialise_answ from streaming_data_types.alarm_al00 import deserialise_al00, serialise_al00 -from streaming_data_types.area_detector_ADAr import (deserialise_ADAr, - serialise_ADAr) -from streaming_data_types.area_detector_NDAr import (deserialise_ndar, - serialise_ndar) -from streaming_data_types.array_1d_se00 import (deserialise_se00, - serialise_se00) -from streaming_data_types.epics_connection_info_ep00 import (deserialise_ep00, - serialise_ep00) -from streaming_data_types.epics_connection_ep01 import (deserialise_ep01, - serialise_ep01) -from streaming_data_types.eventdata_ev42 import (deserialise_ev42, - serialise_ev42) -from streaming_data_types.eventdata_ev43 import (deserialise_ev43, - serialise_ev43) -from streaming_data_types.eventdata_ev44 import (deserialise_ev44, - serialise_ev44) -from streaming_data_types.finished_writing_wrdn import (deserialise_wrdn, - serialise_wrdn) +from streaming_data_types.area_detector_ADAr import deserialise_ADAr, serialise_ADAr +from streaming_data_types.area_detector_NDAr import deserialise_ndar, serialise_ndar +from streaming_data_types.array_1d_se00 import deserialise_se00, serialise_se00 +from streaming_data_types.epics_connection_ep01 import deserialise_ep01, serialise_ep01 +from streaming_data_types.epics_connection_info_ep00 import ( + deserialise_ep00, + serialise_ep00, +) +from streaming_data_types.eventdata_ev42 import deserialise_ev42, serialise_ev42 +from streaming_data_types.eventdata_ev43 import deserialise_ev43, serialise_ev43 +from streaming_data_types.eventdata_ev44 import deserialise_ev44, serialise_ev44 +from streaming_data_types.finished_writing_wrdn import deserialise_wrdn, serialise_wrdn from streaming_data_types.forwarder_config_update_rf5k import ( - deserialise_rf5k, 
serialise_rf5k) -from streaming_data_types.histogram_hs00 import (deserialise_hs00, - serialise_hs00) -from streaming_data_types.histogram_hs01 import (deserialise_hs01, - serialise_hs01) + deserialise_rf5k, + serialise_rf5k, +) +from streaming_data_types.histogram_hs00 import deserialise_hs00, serialise_hs00 +from streaming_data_types.histogram_hs01 import deserialise_hs01, serialise_hs01 from streaming_data_types.logdata_f142 import deserialise_f142, serialise_f142 -from streaming_data_types.logdata_f144 import serialise_f144, deserialise_f144 -from streaming_data_types.nicos_cache_ns10 import (deserialise_ns10, - serialise_ns10) -from streaming_data_types.run_start_pl72 import (deserialise_pl72, - serialise_pl72) +from streaming_data_types.logdata_f144 import deserialise_f144, serialise_f144 +from streaming_data_types.nicos_cache_ns10 import deserialise_ns10, serialise_ns10 +from streaming_data_types.run_start_pl72 import deserialise_pl72, serialise_pl72 from streaming_data_types.run_stop_6s4t import deserialise_6s4t, serialise_6s4t -from streaming_data_types.sample_environment_senv import (deserialise_senv, - serialise_senv) +from streaming_data_types.sample_environment_senv import ( + deserialise_senv, + serialise_senv, +) from streaming_data_types.status_x5f2 import deserialise_x5f2, serialise_x5f2 -from streaming_data_types.timestamps_tdct import (deserialise_tdct, - serialise_tdct) +from streaming_data_types.timestamps_tdct import deserialise_tdct, serialise_tdct __version__ = version diff --git a/streaming_data_types/alarm_al00.py b/streaming_data_types/alarm_al00.py index ee206c2..53947b9 100644 --- a/streaming_data_types/alarm_al00.py +++ b/streaming_data_types/alarm_al00.py @@ -13,9 +13,9 @@ class Severity(Enum): - OK = 0, - MINOR = 1, - MAJOR = 2, + OK = (0,) + MINOR = (1,) + MAJOR = (2,) INVALID = 3 @@ -42,7 +42,7 @@ def deserialise_al00(buffer): alarm.SourceName().decode("utf-8") if alarm.SourceName() else "", alarm.Timestamp(), 
_severity_to_enum[alarm.Severity()], - alarm.Message().decode("utf-8") if alarm.Message() else "" + alarm.Message().decode("utf-8") if alarm.Message() else "", ) diff --git a/streaming_data_types/epics_connection_ep01.py b/streaming_data_types/epics_connection_ep01.py index d782de4..05756d5 100644 --- a/streaming_data_types/epics_connection_ep01.py +++ b/streaming_data_types/epics_connection_ep01.py @@ -4,10 +4,10 @@ import flatbuffers -from streaming_data_types.fbschemas.epics_connection_ep01.ConnectionInfo import \ - ConnectionInfo as FBConnectionInfo -from streaming_data_types.fbschemas.epics_connection_ep01 import \ - EpicsPVConnectionInfo +from streaming_data_types.fbschemas.epics_connection_ep01 import EpicsPVConnectionInfo +from streaming_data_types.fbschemas.epics_connection_ep01.ConnectionInfo import ( + ConnectionInfo as FBConnectionInfo, +) from streaming_data_types.utils import check_schema_identifier FILE_IDENTIFIER = b"ep01" @@ -62,9 +62,15 @@ def serialise_ep01( EpicsPVConnectionInfo.EpicsPVConnectionInfoStart(builder) if service_id is not None: - EpicsPVConnectionInfo.EpicsPVConnectionInfoAddServiceId(builder, service_id_offset) - EpicsPVConnectionInfo.EpicsPVConnectionInfoAddSourceName(builder, source_name_offset) - EpicsPVConnectionInfo.EpicsPVConnectionInfoAddStatus(builder, _enum_to_status[status]) + EpicsPVConnectionInfo.EpicsPVConnectionInfoAddServiceId( + builder, service_id_offset + ) + EpicsPVConnectionInfo.EpicsPVConnectionInfoAddSourceName( + builder, source_name_offset + ) + EpicsPVConnectionInfo.EpicsPVConnectionInfoAddStatus( + builder, _enum_to_status[status] + ) EpicsPVConnectionInfo.EpicsPVConnectionInfoAddTimestamp(builder, timestamp_ns) end = EpicsPVConnectionInfo.EpicsPVConnectionInfoEnd(builder) @@ -81,7 +87,9 @@ def deserialise_ep01(buffer: Union[bytearray, bytes]) -> EpicsPVConnection: check_schema_identifier(buffer, FILE_IDENTIFIER) epics_connection = ( - 
EpicsPVConnectionInfo.EpicsPVConnectionInfo.GetRootAsEpicsPVConnectionInfo(buffer, 0) + EpicsPVConnectionInfo.EpicsPVConnectionInfo.GetRootAsEpicsPVConnectionInfo( + buffer, 0 + ) ) source_name = ( diff --git a/streaming_data_types/eventdata_ev44.py b/streaming_data_types/eventdata_ev44.py index 77d54a1..bb48f2d 100644 --- a/streaming_data_types/eventdata_ev44.py +++ b/streaming_data_types/eventdata_ev44.py @@ -43,12 +43,14 @@ def deserialise_ev44(buffer): ) -def serialise_ev44(source_name, - message_id, - reference_time, - reference_time_index, - time_of_flight, - pixel_id): +def serialise_ev44( + source_name, + message_id, + reference_time, + reference_time_index, + time_of_flight, + pixel_id, +): """ Serialise event data as an ev44 FlatBuffers message. @@ -64,8 +66,12 @@ def serialise_ev44(source_name, builder.ForceDefaults(True) source = builder.CreateString(source_name) - ref_time_data = builder.CreateNumpyVector(np.asarray(reference_time).astype(np.int64)) - ref_time_index_data = builder.CreateNumpyVector(np.asarray(reference_time_index).astype(np.int32)) + ref_time_data = builder.CreateNumpyVector( + np.asarray(reference_time).astype(np.int64) + ) + ref_time_index_data = builder.CreateNumpyVector( + np.asarray(reference_time_index).astype(np.int32) + ) tof_data = builder.CreateNumpyVector(np.asarray(time_of_flight).astype(np.int32)) pixel_id_data = builder.CreateNumpyVector(np.asarray(pixel_id).astype(np.int32)) diff --git a/streaming_data_types/fbschemas/alarm_al00/Alarm.py b/streaming_data_types/fbschemas/alarm_al00/Alarm.py index e81dda1..d292d46 100644 --- a/streaming_data_types/fbschemas/alarm_al00/Alarm.py +++ b/streaming_data_types/fbschemas/alarm_al00/Alarm.py @@ -1,11 +1,12 @@ # automatically generated by the FlatBuffers compiler, do not modify -# namespace: +# namespace: import flatbuffers + class Alarm(object): - __slots__ = ['_tab'] + __slots__ = ["_tab"] @classmethod def GetRootAsAlarm(cls, buf, offset): @@ -46,9 +47,30 @@ def Message(self): 
return self._tab.String(o + self._tab.Pos) return None -def AlarmStart(builder): builder.StartObject(4) -def AlarmAddSourceName(builder, sourceName): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(sourceName), 0) -def AlarmAddTimestamp(builder, timestamp): builder.PrependInt64Slot(1, timestamp, 0) -def AlarmAddSeverity(builder, severity): builder.PrependInt16Slot(2, severity, 0) -def AlarmAddMessage(builder, message): builder.PrependUOffsetTRelativeSlot(3, flatbuffers.number_types.UOffsetTFlags.py_type(message), 0) -def AlarmEnd(builder): return builder.EndObject() + +def AlarmStart(builder): + builder.StartObject(4) + + +def AlarmAddSourceName(builder, sourceName): + builder.PrependUOffsetTRelativeSlot( + 0, flatbuffers.number_types.UOffsetTFlags.py_type(sourceName), 0 + ) + + +def AlarmAddTimestamp(builder, timestamp): + builder.PrependInt64Slot(1, timestamp, 0) + + +def AlarmAddSeverity(builder, severity): + builder.PrependInt16Slot(2, severity, 0) + + +def AlarmAddMessage(builder, message): + builder.PrependUOffsetTRelativeSlot( + 3, flatbuffers.number_types.UOffsetTFlags.py_type(message), 0 + ) + + +def AlarmEnd(builder): + return builder.EndObject() diff --git a/streaming_data_types/fbschemas/alarm_al00/Severity.py b/streaming_data_types/fbschemas/alarm_al00/Severity.py index b9e39c5..8571963 100644 --- a/streaming_data_types/fbschemas/alarm_al00/Severity.py +++ b/streaming_data_types/fbschemas/alarm_al00/Severity.py @@ -1,10 +1,10 @@ # automatically generated by the FlatBuffers compiler, do not modify -# namespace: +# namespace: + class Severity(object): OK = 0 MINOR = 1 MAJOR = 2 INVALID = 3 - diff --git a/streaming_data_types/fbschemas/epics_connection_ep01/ConnectionInfo.py b/streaming_data_types/fbschemas/epics_connection_ep01/ConnectionInfo.py index a5c486c..b8033c6 100644 --- a/streaming_data_types/fbschemas/epics_connection_ep01/ConnectionInfo.py +++ 
b/streaming_data_types/fbschemas/epics_connection_ep01/ConnectionInfo.py @@ -1,6 +1,7 @@ # automatically generated by the FlatBuffers compiler, do not modify -# namespace: +# namespace: + class ConnectionInfo(object): UNKNOWN = 0 diff --git a/streaming_data_types/fbschemas/epics_connection_ep01/EpicsPVConnectionInfo.py b/streaming_data_types/fbschemas/epics_connection_ep01/EpicsPVConnectionInfo.py index 06667b1..28084db 100644 --- a/streaming_data_types/fbschemas/epics_connection_ep01/EpicsPVConnectionInfo.py +++ b/streaming_data_types/fbschemas/epics_connection_ep01/EpicsPVConnectionInfo.py @@ -1,14 +1,15 @@ # automatically generated by the FlatBuffers compiler, do not modify -# namespace: +# namespace: import flatbuffers from flatbuffers.compat import import_numpy + np = import_numpy() class EpicsPVConnectionInfo(object): - __slots__ = ['_tab'] + __slots__ = ["_tab"] @classmethod def GetRootAs(cls, buf, offset=0): @@ -21,9 +22,12 @@ def GetRootAs(cls, buf, offset=0): def GetRootAsEpicsPVConnectionInfo(cls, buf, offset=0): """This method is deprecated. 
Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) + @classmethod def EpicsPVConnectionInfoBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x65\x70\x30\x31", size_prefixed=size_prefixed) + return flatbuffers.util.BufferHasIdentifier( + buf, offset, b"\x65\x70\x30\x31", size_prefixed=size_prefixed + ) # EpicsPVConnectionInfo def Init(self, buf, pos): @@ -83,7 +87,9 @@ def AddStatus(builder, status): def EpicsPVConnectionInfoAddSourceName(builder, sourceName): - builder.PrependUOffsetTRelativeSlot(2, flatbuffers.number_types.UOffsetTFlags.py_type(sourceName), 0) + builder.PrependUOffsetTRelativeSlot( + 2, flatbuffers.number_types.UOffsetTFlags.py_type(sourceName), 0 + ) def AddSourceName(builder, sourceName): @@ -91,7 +97,9 @@ def AddSourceName(builder, sourceName): def EpicsPVConnectionInfoAddServiceId(builder, serviceId): - builder.PrependUOffsetTRelativeSlot(3, flatbuffers.number_types.UOffsetTFlags.py_type(serviceId), 0) + builder.PrependUOffsetTRelativeSlot( + 3, flatbuffers.number_types.UOffsetTFlags.py_type(serviceId), 0 + ) def AddServiceId(builder, serviceId): diff --git a/streaming_data_types/fbschemas/eventdata_ev44/Event44Message.py b/streaming_data_types/fbschemas/eventdata_ev44/Event44Message.py index 28e463f..a9fc017 100644 --- a/streaming_data_types/fbschemas/eventdata_ev44/Event44Message.py +++ b/streaming_data_types/fbschemas/eventdata_ev44/Event44Message.py @@ -1,13 +1,15 @@ # automatically generated by the FlatBuffers compiler, do not modify -# namespace: +# namespace: import flatbuffers from flatbuffers.compat import import_numpy + np = import_numpy() + class Event44Message(object): - __slots__ = ['_tab'] + __slots__ = ["_tab"] @classmethod def GetRootAs(cls, buf, offset=0): @@ -20,9 +22,12 @@ def GetRootAs(cls, buf, offset=0): def GetRootAsEvent44Message(cls, buf, offset=0): """This method is deprecated. 
Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) + @classmethod def Event44MessageBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x65\x76\x34\x34", size_prefixed=size_prefixed) + return flatbuffers.util.BufferHasIdentifier( + buf, offset, b"\x65\x76\x34\x34", size_prefixed=size_prefixed + ) # Event44Message def Init(self, buf, pos): @@ -47,7 +52,10 @@ def ReferenceTime(self, j): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) if o != 0: a = self._tab.Vector(o) - return self._tab.Get(flatbuffers.number_types.Int64Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8)) + return self._tab.Get( + flatbuffers.number_types.Int64Flags, + a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8), + ) return 0 # Event44Message @@ -74,7 +82,10 @@ def ReferenceTimeIndex(self, j): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) if o != 0: a = self._tab.Vector(o) - return self._tab.Get(flatbuffers.number_types.Int32Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4)) + return self._tab.Get( + flatbuffers.number_types.Int32Flags, + a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4), + ) return 0 # Event44Message @@ -101,7 +112,10 @@ def TimeOfFlight(self, j): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) if o != 0: a = self._tab.Vector(o) - return self._tab.Get(flatbuffers.number_types.Int32Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4)) + return self._tab.Get( + flatbuffers.number_types.Int32Flags, + a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4), + ) return 0 # Event44Message @@ -128,7 +142,10 @@ def PixelId(self, j): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) if o != 0: a = self._tab.Vector(o) - return self._tab.Get(flatbuffers.number_types.Int32Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4)) + return 
self._tab.Get( + flatbuffers.number_types.Int32Flags, + a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4), + ) return 0 # Event44Message @@ -150,39 +167,108 @@ def PixelIdIsNone(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) return o == 0 -def Event44MessageStart(builder): builder.StartObject(6) + +def Event44MessageStart(builder): + builder.StartObject(6) + + def Start(builder): return Event44MessageStart(builder) -def Event44MessageAddSourceName(builder, sourceName): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(sourceName), 0) + + +def Event44MessageAddSourceName(builder, sourceName): + builder.PrependUOffsetTRelativeSlot( + 0, flatbuffers.number_types.UOffsetTFlags.py_type(sourceName), 0 + ) + + def AddSourceName(builder, sourceName): return Event44MessageAddSourceName(builder, sourceName) -def Event44MessageAddMessageId(builder, messageId): builder.PrependInt64Slot(1, messageId, 0) + + +def Event44MessageAddMessageId(builder, messageId): + builder.PrependInt64Slot(1, messageId, 0) + + def AddMessageId(builder, messageId): return Event44MessageAddMessageId(builder, messageId) -def Event44MessageAddReferenceTime(builder, referenceTime): builder.PrependUOffsetTRelativeSlot(2, flatbuffers.number_types.UOffsetTFlags.py_type(referenceTime), 0) + + +def Event44MessageAddReferenceTime(builder, referenceTime): + builder.PrependUOffsetTRelativeSlot( + 2, flatbuffers.number_types.UOffsetTFlags.py_type(referenceTime), 0 + ) + + def AddReferenceTime(builder, referenceTime): return Event44MessageAddReferenceTime(builder, referenceTime) -def Event44MessageStartReferenceTimeVector(builder, numElems): return builder.StartVector(8, numElems, 8) + + +def Event44MessageStartReferenceTimeVector(builder, numElems): + return builder.StartVector(8, numElems, 8) + + def StartReferenceTimeVector(builder, numElems): return Event44MessageStartReferenceTimeVector(builder, numElems) -def 
Event44MessageAddReferenceTimeIndex(builder, referenceTimeIndex): builder.PrependUOffsetTRelativeSlot(3, flatbuffers.number_types.UOffsetTFlags.py_type(referenceTimeIndex), 0) + + +def Event44MessageAddReferenceTimeIndex(builder, referenceTimeIndex): + builder.PrependUOffsetTRelativeSlot( + 3, flatbuffers.number_types.UOffsetTFlags.py_type(referenceTimeIndex), 0 + ) + + def AddReferenceTimeIndex(builder, referenceTimeIndex): return Event44MessageAddReferenceTimeIndex(builder, referenceTimeIndex) -def Event44MessageStartReferenceTimeIndexVector(builder, numElems): return builder.StartVector(4, numElems, 4) + + +def Event44MessageStartReferenceTimeIndexVector(builder, numElems): + return builder.StartVector(4, numElems, 4) + + def StartReferenceTimeIndexVector(builder, numElems): return Event44MessageStartReferenceTimeIndexVector(builder, numElems) -def Event44MessageAddTimeOfFlight(builder, timeOfFlight): builder.PrependUOffsetTRelativeSlot(4, flatbuffers.number_types.UOffsetTFlags.py_type(timeOfFlight), 0) + + +def Event44MessageAddTimeOfFlight(builder, timeOfFlight): + builder.PrependUOffsetTRelativeSlot( + 4, flatbuffers.number_types.UOffsetTFlags.py_type(timeOfFlight), 0 + ) + + def AddTimeOfFlight(builder, timeOfFlight): return Event44MessageAddTimeOfFlight(builder, timeOfFlight) -def Event44MessageStartTimeOfFlightVector(builder, numElems): return builder.StartVector(4, numElems, 4) + + +def Event44MessageStartTimeOfFlightVector(builder, numElems): + return builder.StartVector(4, numElems, 4) + + def StartTimeOfFlightVector(builder, numElems): return Event44MessageStartTimeOfFlightVector(builder, numElems) -def Event44MessageAddPixelId(builder, pixelId): builder.PrependUOffsetTRelativeSlot(5, flatbuffers.number_types.UOffsetTFlags.py_type(pixelId), 0) + + +def Event44MessageAddPixelId(builder, pixelId): + builder.PrependUOffsetTRelativeSlot( + 5, flatbuffers.number_types.UOffsetTFlags.py_type(pixelId), 0 + ) + + def AddPixelId(builder, pixelId): return 
Event44MessageAddPixelId(builder, pixelId) -def Event44MessageStartPixelIdVector(builder, numElems): return builder.StartVector(4, numElems, 4) + + +def Event44MessageStartPixelIdVector(builder, numElems): + return builder.StartVector(4, numElems, 4) + + def StartPixelIdVector(builder, numElems): return Event44MessageStartPixelIdVector(builder, numElems) -def Event44MessageEnd(builder): return builder.EndObject() + + +def Event44MessageEnd(builder): + return builder.EndObject() + + def End(builder): - return Event44MessageEnd(builder) \ No newline at end of file + return Event44MessageEnd(builder) diff --git a/streaming_data_types/fbschemas/logdata_f144/ArrayByte.py b/streaming_data_types/fbschemas/logdata_f144/ArrayByte.py index aa82442..7b26853 100644 --- a/streaming_data_types/fbschemas/logdata_f144/ArrayByte.py +++ b/streaming_data_types/fbschemas/logdata_f144/ArrayByte.py @@ -1,13 +1,15 @@ # automatically generated by the FlatBuffers compiler, do not modify -# namespace: +# namespace: import flatbuffers from flatbuffers.compat import import_numpy + np = import_numpy() + class ArrayByte(object): - __slots__ = ['_tab'] + __slots__ = ["_tab"] @classmethod def GetRootAs(cls, buf, offset=0): @@ -20,9 +22,12 @@ def GetRootAs(cls, buf, offset=0): def GetRootAsArrayByte(cls, buf, offset=0): """This method is deprecated. 
Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) + @classmethod def ArrayByteBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x66\x31\x34\x34", size_prefixed=size_prefixed) + return flatbuffers.util.BufferHasIdentifier( + buf, offset, b"\x66\x31\x34\x34", size_prefixed=size_prefixed + ) # ArrayByte def Init(self, buf, pos): @@ -33,7 +38,10 @@ def Value(self, j): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) if o != 0: a = self._tab.Vector(o) - return self._tab.Get(flatbuffers.number_types.Int8Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1)) + return self._tab.Get( + flatbuffers.number_types.Int8Flags, + a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1), + ) return 0 # ArrayByte @@ -55,15 +63,36 @@ def ValueIsNone(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) return o == 0 -def ArrayByteStart(builder): builder.StartObject(1) + +def ArrayByteStart(builder): + builder.StartObject(1) + + def Start(builder): return ArrayByteStart(builder) -def ArrayByteAddValue(builder, value): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0) + + +def ArrayByteAddValue(builder, value): + builder.PrependUOffsetTRelativeSlot( + 0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0 + ) + + def AddValue(builder, value): return ArrayByteAddValue(builder, value) -def ArrayByteStartValueVector(builder, numElems): return builder.StartVector(1, numElems, 1) + + +def ArrayByteStartValueVector(builder, numElems): + return builder.StartVector(1, numElems, 1) + + def StartValueVector(builder, numElems): return ArrayByteStartValueVector(builder, numElems) -def ArrayByteEnd(builder): return builder.EndObject() + + +def ArrayByteEnd(builder): + return builder.EndObject() + + def End(builder): - return ArrayByteEnd(builder) \ No newline at end of file + return 
ArrayByteEnd(builder) diff --git a/streaming_data_types/fbschemas/logdata_f144/ArrayDouble.py b/streaming_data_types/fbschemas/logdata_f144/ArrayDouble.py index 0d465b0..a4b01de 100644 --- a/streaming_data_types/fbschemas/logdata_f144/ArrayDouble.py +++ b/streaming_data_types/fbschemas/logdata_f144/ArrayDouble.py @@ -1,13 +1,15 @@ # automatically generated by the FlatBuffers compiler, do not modify -# namespace: +# namespace: import flatbuffers from flatbuffers.compat import import_numpy + np = import_numpy() + class ArrayDouble(object): - __slots__ = ['_tab'] + __slots__ = ["_tab"] @classmethod def GetRootAs(cls, buf, offset=0): @@ -20,9 +22,12 @@ def GetRootAs(cls, buf, offset=0): def GetRootAsArrayDouble(cls, buf, offset=0): """This method is deprecated. Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) + @classmethod def ArrayDoubleBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x66\x31\x34\x34", size_prefixed=size_prefixed) + return flatbuffers.util.BufferHasIdentifier( + buf, offset, b"\x66\x31\x34\x34", size_prefixed=size_prefixed + ) # ArrayDouble def Init(self, buf, pos): @@ -33,7 +38,10 @@ def Value(self, j): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) if o != 0: a = self._tab.Vector(o) - return self._tab.Get(flatbuffers.number_types.Float64Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8)) + return self._tab.Get( + flatbuffers.number_types.Float64Flags, + a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8), + ) return 0 # ArrayDouble @@ -55,15 +63,36 @@ def ValueIsNone(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) return o == 0 -def ArrayDoubleStart(builder): builder.StartObject(1) + +def ArrayDoubleStart(builder): + builder.StartObject(1) + + def Start(builder): return ArrayDoubleStart(builder) -def ArrayDoubleAddValue(builder, value): builder.PrependUOffsetTRelativeSlot(0, 
flatbuffers.number_types.UOffsetTFlags.py_type(value), 0) + + +def ArrayDoubleAddValue(builder, value): + builder.PrependUOffsetTRelativeSlot( + 0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0 + ) + + def AddValue(builder, value): return ArrayDoubleAddValue(builder, value) -def ArrayDoubleStartValueVector(builder, numElems): return builder.StartVector(8, numElems, 8) + + +def ArrayDoubleStartValueVector(builder, numElems): + return builder.StartVector(8, numElems, 8) + + def StartValueVector(builder, numElems): return ArrayDoubleStartValueVector(builder, numElems) -def ArrayDoubleEnd(builder): return builder.EndObject() + + +def ArrayDoubleEnd(builder): + return builder.EndObject() + + def End(builder): - return ArrayDoubleEnd(builder) \ No newline at end of file + return ArrayDoubleEnd(builder) diff --git a/streaming_data_types/fbschemas/logdata_f144/ArrayFloat.py b/streaming_data_types/fbschemas/logdata_f144/ArrayFloat.py index 79fc64f..5af7077 100644 --- a/streaming_data_types/fbschemas/logdata_f144/ArrayFloat.py +++ b/streaming_data_types/fbschemas/logdata_f144/ArrayFloat.py @@ -1,13 +1,15 @@ # automatically generated by the FlatBuffers compiler, do not modify -# namespace: +# namespace: import flatbuffers from flatbuffers.compat import import_numpy + np = import_numpy() + class ArrayFloat(object): - __slots__ = ['_tab'] + __slots__ = ["_tab"] @classmethod def GetRootAs(cls, buf, offset=0): @@ -20,9 +22,12 @@ def GetRootAs(cls, buf, offset=0): def GetRootAsArrayFloat(cls, buf, offset=0): """This method is deprecated. 
Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) + @classmethod def ArrayFloatBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x66\x31\x34\x34", size_prefixed=size_prefixed) + return flatbuffers.util.BufferHasIdentifier( + buf, offset, b"\x66\x31\x34\x34", size_prefixed=size_prefixed + ) # ArrayFloat def Init(self, buf, pos): @@ -33,7 +38,10 @@ def Value(self, j): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) if o != 0: a = self._tab.Vector(o) - return self._tab.Get(flatbuffers.number_types.Float32Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4)) + return self._tab.Get( + flatbuffers.number_types.Float32Flags, + a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4), + ) return 0 # ArrayFloat @@ -55,15 +63,36 @@ def ValueIsNone(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) return o == 0 -def ArrayFloatStart(builder): builder.StartObject(1) + +def ArrayFloatStart(builder): + builder.StartObject(1) + + def Start(builder): return ArrayFloatStart(builder) -def ArrayFloatAddValue(builder, value): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0) + + +def ArrayFloatAddValue(builder, value): + builder.PrependUOffsetTRelativeSlot( + 0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0 + ) + + def AddValue(builder, value): return ArrayFloatAddValue(builder, value) -def ArrayFloatStartValueVector(builder, numElems): return builder.StartVector(4, numElems, 4) + + +def ArrayFloatStartValueVector(builder, numElems): + return builder.StartVector(4, numElems, 4) + + def StartValueVector(builder, numElems): return ArrayFloatStartValueVector(builder, numElems) -def ArrayFloatEnd(builder): return builder.EndObject() + + +def ArrayFloatEnd(builder): + return builder.EndObject() + + def End(builder): - return ArrayFloatEnd(builder) \ No newline at end of file + return 
ArrayFloatEnd(builder) diff --git a/streaming_data_types/fbschemas/logdata_f144/ArrayInt.py b/streaming_data_types/fbschemas/logdata_f144/ArrayInt.py index c408e2b..9a7a825 100644 --- a/streaming_data_types/fbschemas/logdata_f144/ArrayInt.py +++ b/streaming_data_types/fbschemas/logdata_f144/ArrayInt.py @@ -1,13 +1,15 @@ # automatically generated by the FlatBuffers compiler, do not modify -# namespace: +# namespace: import flatbuffers from flatbuffers.compat import import_numpy + np = import_numpy() + class ArrayInt(object): - __slots__ = ['_tab'] + __slots__ = ["_tab"] @classmethod def GetRootAs(cls, buf, offset=0): @@ -20,9 +22,12 @@ def GetRootAs(cls, buf, offset=0): def GetRootAsArrayInt(cls, buf, offset=0): """This method is deprecated. Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) + @classmethod def ArrayIntBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x66\x31\x34\x34", size_prefixed=size_prefixed) + return flatbuffers.util.BufferHasIdentifier( + buf, offset, b"\x66\x31\x34\x34", size_prefixed=size_prefixed + ) # ArrayInt def Init(self, buf, pos): @@ -33,7 +38,10 @@ def Value(self, j): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) if o != 0: a = self._tab.Vector(o) - return self._tab.Get(flatbuffers.number_types.Int32Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4)) + return self._tab.Get( + flatbuffers.number_types.Int32Flags, + a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4), + ) return 0 # ArrayInt @@ -55,15 +63,36 @@ def ValueIsNone(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) return o == 0 -def ArrayIntStart(builder): builder.StartObject(1) + +def ArrayIntStart(builder): + builder.StartObject(1) + + def Start(builder): return ArrayIntStart(builder) -def ArrayIntAddValue(builder, value): builder.PrependUOffsetTRelativeSlot(0, 
flatbuffers.number_types.UOffsetTFlags.py_type(value), 0) + + +def ArrayIntAddValue(builder, value): + builder.PrependUOffsetTRelativeSlot( + 0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0 + ) + + def AddValue(builder, value): return ArrayIntAddValue(builder, value) -def ArrayIntStartValueVector(builder, numElems): return builder.StartVector(4, numElems, 4) + + +def ArrayIntStartValueVector(builder, numElems): + return builder.StartVector(4, numElems, 4) + + def StartValueVector(builder, numElems): return ArrayIntStartValueVector(builder, numElems) -def ArrayIntEnd(builder): return builder.EndObject() + + +def ArrayIntEnd(builder): + return builder.EndObject() + + def End(builder): - return ArrayIntEnd(builder) \ No newline at end of file + return ArrayIntEnd(builder) diff --git a/streaming_data_types/fbschemas/logdata_f144/ArrayLong.py b/streaming_data_types/fbschemas/logdata_f144/ArrayLong.py index 5090fd5..6a18d77 100644 --- a/streaming_data_types/fbschemas/logdata_f144/ArrayLong.py +++ b/streaming_data_types/fbschemas/logdata_f144/ArrayLong.py @@ -1,13 +1,15 @@ # automatically generated by the FlatBuffers compiler, do not modify -# namespace: +# namespace: import flatbuffers from flatbuffers.compat import import_numpy + np = import_numpy() + class ArrayLong(object): - __slots__ = ['_tab'] + __slots__ = ["_tab"] @classmethod def GetRootAs(cls, buf, offset=0): @@ -20,9 +22,12 @@ def GetRootAs(cls, buf, offset=0): def GetRootAsArrayLong(cls, buf, offset=0): """This method is deprecated. 
Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) + @classmethod def ArrayLongBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x66\x31\x34\x34", size_prefixed=size_prefixed) + return flatbuffers.util.BufferHasIdentifier( + buf, offset, b"\x66\x31\x34\x34", size_prefixed=size_prefixed + ) # ArrayLong def Init(self, buf, pos): @@ -33,7 +38,10 @@ def Value(self, j): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) if o != 0: a = self._tab.Vector(o) - return self._tab.Get(flatbuffers.number_types.Int64Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8)) + return self._tab.Get( + flatbuffers.number_types.Int64Flags, + a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8), + ) return 0 # ArrayLong @@ -55,15 +63,36 @@ def ValueIsNone(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) return o == 0 -def ArrayLongStart(builder): builder.StartObject(1) + +def ArrayLongStart(builder): + builder.StartObject(1) + + def Start(builder): return ArrayLongStart(builder) -def ArrayLongAddValue(builder, value): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0) + + +def ArrayLongAddValue(builder, value): + builder.PrependUOffsetTRelativeSlot( + 0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0 + ) + + def AddValue(builder, value): return ArrayLongAddValue(builder, value) -def ArrayLongStartValueVector(builder, numElems): return builder.StartVector(8, numElems, 8) + + +def ArrayLongStartValueVector(builder, numElems): + return builder.StartVector(8, numElems, 8) + + def StartValueVector(builder, numElems): return ArrayLongStartValueVector(builder, numElems) -def ArrayLongEnd(builder): return builder.EndObject() + + +def ArrayLongEnd(builder): + return builder.EndObject() + + def End(builder): - return ArrayLongEnd(builder) \ No newline at end of file + return 
ArrayLongEnd(builder) diff --git a/streaming_data_types/fbschemas/logdata_f144/ArrayShort.py b/streaming_data_types/fbschemas/logdata_f144/ArrayShort.py index d963c00..819263b 100644 --- a/streaming_data_types/fbschemas/logdata_f144/ArrayShort.py +++ b/streaming_data_types/fbschemas/logdata_f144/ArrayShort.py @@ -1,13 +1,15 @@ # automatically generated by the FlatBuffers compiler, do not modify -# namespace: +# namespace: import flatbuffers from flatbuffers.compat import import_numpy + np = import_numpy() + class ArrayShort(object): - __slots__ = ['_tab'] + __slots__ = ["_tab"] @classmethod def GetRootAs(cls, buf, offset=0): @@ -20,9 +22,12 @@ def GetRootAs(cls, buf, offset=0): def GetRootAsArrayShort(cls, buf, offset=0): """This method is deprecated. Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) + @classmethod def ArrayShortBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x66\x31\x34\x34", size_prefixed=size_prefixed) + return flatbuffers.util.BufferHasIdentifier( + buf, offset, b"\x66\x31\x34\x34", size_prefixed=size_prefixed + ) # ArrayShort def Init(self, buf, pos): @@ -33,7 +38,10 @@ def Value(self, j): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) if o != 0: a = self._tab.Vector(o) - return self._tab.Get(flatbuffers.number_types.Int16Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 2)) + return self._tab.Get( + flatbuffers.number_types.Int16Flags, + a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 2), + ) return 0 # ArrayShort @@ -55,15 +63,36 @@ def ValueIsNone(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) return o == 0 -def ArrayShortStart(builder): builder.StartObject(1) + +def ArrayShortStart(builder): + builder.StartObject(1) + + def Start(builder): return ArrayShortStart(builder) -def ArrayShortAddValue(builder, value): builder.PrependUOffsetTRelativeSlot(0, 
flatbuffers.number_types.UOffsetTFlags.py_type(value), 0) + + +def ArrayShortAddValue(builder, value): + builder.PrependUOffsetTRelativeSlot( + 0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0 + ) + + def AddValue(builder, value): return ArrayShortAddValue(builder, value) -def ArrayShortStartValueVector(builder, numElems): return builder.StartVector(2, numElems, 2) + + +def ArrayShortStartValueVector(builder, numElems): + return builder.StartVector(2, numElems, 2) + + def StartValueVector(builder, numElems): return ArrayShortStartValueVector(builder, numElems) -def ArrayShortEnd(builder): return builder.EndObject() + + +def ArrayShortEnd(builder): + return builder.EndObject() + + def End(builder): - return ArrayShortEnd(builder) \ No newline at end of file + return ArrayShortEnd(builder) diff --git a/streaming_data_types/fbschemas/logdata_f144/ArrayUByte.py b/streaming_data_types/fbschemas/logdata_f144/ArrayUByte.py index be79240..aa03a5c 100644 --- a/streaming_data_types/fbschemas/logdata_f144/ArrayUByte.py +++ b/streaming_data_types/fbschemas/logdata_f144/ArrayUByte.py @@ -1,13 +1,15 @@ # automatically generated by the FlatBuffers compiler, do not modify -# namespace: +# namespace: import flatbuffers from flatbuffers.compat import import_numpy + np = import_numpy() + class ArrayUByte(object): - __slots__ = ['_tab'] + __slots__ = ["_tab"] @classmethod def GetRootAs(cls, buf, offset=0): @@ -20,9 +22,12 @@ def GetRootAs(cls, buf, offset=0): def GetRootAsArrayUByte(cls, buf, offset=0): """This method is deprecated. 
Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) + @classmethod def ArrayUByteBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x66\x31\x34\x34", size_prefixed=size_prefixed) + return flatbuffers.util.BufferHasIdentifier( + buf, offset, b"\x66\x31\x34\x34", size_prefixed=size_prefixed + ) # ArrayUByte def Init(self, buf, pos): @@ -33,7 +38,10 @@ def Value(self, j): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) if o != 0: a = self._tab.Vector(o) - return self._tab.Get(flatbuffers.number_types.Uint8Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1)) + return self._tab.Get( + flatbuffers.number_types.Uint8Flags, + a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1), + ) return 0 # ArrayUByte @@ -55,15 +63,36 @@ def ValueIsNone(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) return o == 0 -def ArrayUByteStart(builder): builder.StartObject(1) + +def ArrayUByteStart(builder): + builder.StartObject(1) + + def Start(builder): return ArrayUByteStart(builder) -def ArrayUByteAddValue(builder, value): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0) + + +def ArrayUByteAddValue(builder, value): + builder.PrependUOffsetTRelativeSlot( + 0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0 + ) + + def AddValue(builder, value): return ArrayUByteAddValue(builder, value) -def ArrayUByteStartValueVector(builder, numElems): return builder.StartVector(1, numElems, 1) + + +def ArrayUByteStartValueVector(builder, numElems): + return builder.StartVector(1, numElems, 1) + + def StartValueVector(builder, numElems): return ArrayUByteStartValueVector(builder, numElems) -def ArrayUByteEnd(builder): return builder.EndObject() + + +def ArrayUByteEnd(builder): + return builder.EndObject() + + def End(builder): - return ArrayUByteEnd(builder) \ No newline at end of file + return 
ArrayUByteEnd(builder) diff --git a/streaming_data_types/fbschemas/logdata_f144/ArrayUInt.py b/streaming_data_types/fbschemas/logdata_f144/ArrayUInt.py index 7fba9ae..c1e2821 100644 --- a/streaming_data_types/fbschemas/logdata_f144/ArrayUInt.py +++ b/streaming_data_types/fbschemas/logdata_f144/ArrayUInt.py @@ -1,13 +1,15 @@ # automatically generated by the FlatBuffers compiler, do not modify -# namespace: +# namespace: import flatbuffers from flatbuffers.compat import import_numpy + np = import_numpy() + class ArrayUInt(object): - __slots__ = ['_tab'] + __slots__ = ["_tab"] @classmethod def GetRootAs(cls, buf, offset=0): @@ -20,9 +22,12 @@ def GetRootAs(cls, buf, offset=0): def GetRootAsArrayUInt(cls, buf, offset=0): """This method is deprecated. Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) + @classmethod def ArrayUIntBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x66\x31\x34\x34", size_prefixed=size_prefixed) + return flatbuffers.util.BufferHasIdentifier( + buf, offset, b"\x66\x31\x34\x34", size_prefixed=size_prefixed + ) # ArrayUInt def Init(self, buf, pos): @@ -33,7 +38,10 @@ def Value(self, j): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) if o != 0: a = self._tab.Vector(o) - return self._tab.Get(flatbuffers.number_types.Uint32Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4)) + return self._tab.Get( + flatbuffers.number_types.Uint32Flags, + a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4), + ) return 0 # ArrayUInt @@ -55,15 +63,36 @@ def ValueIsNone(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) return o == 0 -def ArrayUIntStart(builder): builder.StartObject(1) + +def ArrayUIntStart(builder): + builder.StartObject(1) + + def Start(builder): return ArrayUIntStart(builder) -def ArrayUIntAddValue(builder, value): builder.PrependUOffsetTRelativeSlot(0, 
flatbuffers.number_types.UOffsetTFlags.py_type(value), 0) + + +def ArrayUIntAddValue(builder, value): + builder.PrependUOffsetTRelativeSlot( + 0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0 + ) + + def AddValue(builder, value): return ArrayUIntAddValue(builder, value) -def ArrayUIntStartValueVector(builder, numElems): return builder.StartVector(4, numElems, 4) + + +def ArrayUIntStartValueVector(builder, numElems): + return builder.StartVector(4, numElems, 4) + + def StartValueVector(builder, numElems): return ArrayUIntStartValueVector(builder, numElems) -def ArrayUIntEnd(builder): return builder.EndObject() + + +def ArrayUIntEnd(builder): + return builder.EndObject() + + def End(builder): - return ArrayUIntEnd(builder) \ No newline at end of file + return ArrayUIntEnd(builder) diff --git a/streaming_data_types/fbschemas/logdata_f144/ArrayULong.py b/streaming_data_types/fbschemas/logdata_f144/ArrayULong.py index b2a406c..0c6969b 100644 --- a/streaming_data_types/fbschemas/logdata_f144/ArrayULong.py +++ b/streaming_data_types/fbschemas/logdata_f144/ArrayULong.py @@ -1,13 +1,15 @@ # automatically generated by the FlatBuffers compiler, do not modify -# namespace: +# namespace: import flatbuffers from flatbuffers.compat import import_numpy + np = import_numpy() + class ArrayULong(object): - __slots__ = ['_tab'] + __slots__ = ["_tab"] @classmethod def GetRootAs(cls, buf, offset=0): @@ -20,9 +22,12 @@ def GetRootAs(cls, buf, offset=0): def GetRootAsArrayULong(cls, buf, offset=0): """This method is deprecated. 
Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) + @classmethod def ArrayULongBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x66\x31\x34\x34", size_prefixed=size_prefixed) + return flatbuffers.util.BufferHasIdentifier( + buf, offset, b"\x66\x31\x34\x34", size_prefixed=size_prefixed + ) # ArrayULong def Init(self, buf, pos): @@ -33,7 +38,10 @@ def Value(self, j): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) if o != 0: a = self._tab.Vector(o) - return self._tab.Get(flatbuffers.number_types.Uint64Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8)) + return self._tab.Get( + flatbuffers.number_types.Uint64Flags, + a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8), + ) return 0 # ArrayULong @@ -55,15 +63,36 @@ def ValueIsNone(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) return o == 0 -def ArrayULongStart(builder): builder.StartObject(1) + +def ArrayULongStart(builder): + builder.StartObject(1) + + def Start(builder): return ArrayULongStart(builder) -def ArrayULongAddValue(builder, value): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0) + + +def ArrayULongAddValue(builder, value): + builder.PrependUOffsetTRelativeSlot( + 0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0 + ) + + def AddValue(builder, value): return ArrayULongAddValue(builder, value) -def ArrayULongStartValueVector(builder, numElems): return builder.StartVector(8, numElems, 8) + + +def ArrayULongStartValueVector(builder, numElems): + return builder.StartVector(8, numElems, 8) + + def StartValueVector(builder, numElems): return ArrayULongStartValueVector(builder, numElems) -def ArrayULongEnd(builder): return builder.EndObject() + + +def ArrayULongEnd(builder): + return builder.EndObject() + + def End(builder): - return ArrayULongEnd(builder) \ No newline at end of file + return 
ArrayULongEnd(builder) diff --git a/streaming_data_types/fbschemas/logdata_f144/ArrayUShort.py b/streaming_data_types/fbschemas/logdata_f144/ArrayUShort.py index 6ea886a..4b5f49c 100644 --- a/streaming_data_types/fbschemas/logdata_f144/ArrayUShort.py +++ b/streaming_data_types/fbschemas/logdata_f144/ArrayUShort.py @@ -1,13 +1,15 @@ # automatically generated by the FlatBuffers compiler, do not modify -# namespace: +# namespace: import flatbuffers from flatbuffers.compat import import_numpy + np = import_numpy() + class ArrayUShort(object): - __slots__ = ['_tab'] + __slots__ = ["_tab"] @classmethod def GetRootAs(cls, buf, offset=0): @@ -20,9 +22,12 @@ def GetRootAs(cls, buf, offset=0): def GetRootAsArrayUShort(cls, buf, offset=0): """This method is deprecated. Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) + @classmethod def ArrayUShortBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x66\x31\x34\x34", size_prefixed=size_prefixed) + return flatbuffers.util.BufferHasIdentifier( + buf, offset, b"\x66\x31\x34\x34", size_prefixed=size_prefixed + ) # ArrayUShort def Init(self, buf, pos): @@ -33,7 +38,10 @@ def Value(self, j): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) if o != 0: a = self._tab.Vector(o) - return self._tab.Get(flatbuffers.number_types.Uint16Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 2)) + return self._tab.Get( + flatbuffers.number_types.Uint16Flags, + a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 2), + ) return 0 # ArrayUShort @@ -55,15 +63,36 @@ def ValueIsNone(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) return o == 0 -def ArrayUShortStart(builder): builder.StartObject(1) + +def ArrayUShortStart(builder): + builder.StartObject(1) + + def Start(builder): return ArrayUShortStart(builder) -def ArrayUShortAddValue(builder, value): builder.PrependUOffsetTRelativeSlot(0, 
flatbuffers.number_types.UOffsetTFlags.py_type(value), 0) + + +def ArrayUShortAddValue(builder, value): + builder.PrependUOffsetTRelativeSlot( + 0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0 + ) + + def AddValue(builder, value): return ArrayUShortAddValue(builder, value) -def ArrayUShortStartValueVector(builder, numElems): return builder.StartVector(2, numElems, 2) + + +def ArrayUShortStartValueVector(builder, numElems): + return builder.StartVector(2, numElems, 2) + + def StartValueVector(builder, numElems): return ArrayUShortStartValueVector(builder, numElems) -def ArrayUShortEnd(builder): return builder.EndObject() + + +def ArrayUShortEnd(builder): + return builder.EndObject() + + def End(builder): - return ArrayUShortEnd(builder) \ No newline at end of file + return ArrayUShortEnd(builder) diff --git a/streaming_data_types/fbschemas/logdata_f144/Byte.py b/streaming_data_types/fbschemas/logdata_f144/Byte.py index 96492ba..50a49fd 100644 --- a/streaming_data_types/fbschemas/logdata_f144/Byte.py +++ b/streaming_data_types/fbschemas/logdata_f144/Byte.py @@ -1,13 +1,15 @@ # automatically generated by the FlatBuffers compiler, do not modify -# namespace: +# namespace: import flatbuffers from flatbuffers.compat import import_numpy + np = import_numpy() + class Byte(object): - __slots__ = ['_tab'] + __slots__ = ["_tab"] @classmethod def GetRootAs(cls, buf, offset=0): @@ -20,9 +22,12 @@ def GetRootAs(cls, buf, offset=0): def GetRootAsByte(cls, buf, offset=0): """This method is deprecated. 
Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) + @classmethod def ByteBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x66\x31\x34\x34", size_prefixed=size_prefixed) + return flatbuffers.util.BufferHasIdentifier( + buf, offset, b"\x66\x31\x34\x34", size_prefixed=size_prefixed + ) # Byte def Init(self, buf, pos): @@ -35,12 +40,26 @@ def Value(self): return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) return 0 -def ByteStart(builder): builder.StartObject(1) + +def ByteStart(builder): + builder.StartObject(1) + + def Start(builder): return ByteStart(builder) -def ByteAddValue(builder, value): builder.PrependInt8Slot(0, value, 0) + + +def ByteAddValue(builder, value): + builder.PrependInt8Slot(0, value, 0) + + def AddValue(builder, value): return ByteAddValue(builder, value) -def ByteEnd(builder): return builder.EndObject() + + +def ByteEnd(builder): + return builder.EndObject() + + def End(builder): - return ByteEnd(builder) \ No newline at end of file + return ByteEnd(builder) diff --git a/streaming_data_types/fbschemas/logdata_f144/Double.py b/streaming_data_types/fbschemas/logdata_f144/Double.py index 8e664b1..f67a8f7 100644 --- a/streaming_data_types/fbschemas/logdata_f144/Double.py +++ b/streaming_data_types/fbschemas/logdata_f144/Double.py @@ -1,13 +1,15 @@ # automatically generated by the FlatBuffers compiler, do not modify -# namespace: +# namespace: import flatbuffers from flatbuffers.compat import import_numpy + np = import_numpy() + class Double(object): - __slots__ = ['_tab'] + __slots__ = ["_tab"] @classmethod def GetRootAs(cls, buf, offset=0): @@ -20,9 +22,12 @@ def GetRootAs(cls, buf, offset=0): def GetRootAsDouble(cls, buf, offset=0): """This method is deprecated. 
Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) + @classmethod def DoubleBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x66\x31\x34\x34", size_prefixed=size_prefixed) + return flatbuffers.util.BufferHasIdentifier( + buf, offset, b"\x66\x31\x34\x34", size_prefixed=size_prefixed + ) # Double def Init(self, buf, pos): @@ -32,15 +37,31 @@ def Init(self, buf, pos): def Value(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) if o != 0: - return self._tab.Get(flatbuffers.number_types.Float64Flags, o + self._tab.Pos) + return self._tab.Get( + flatbuffers.number_types.Float64Flags, o + self._tab.Pos + ) return 0.0 -def DoubleStart(builder): builder.StartObject(1) + +def DoubleStart(builder): + builder.StartObject(1) + + def Start(builder): return DoubleStart(builder) -def DoubleAddValue(builder, value): builder.PrependFloat64Slot(0, value, 0.0) + + +def DoubleAddValue(builder, value): + builder.PrependFloat64Slot(0, value, 0.0) + + def AddValue(builder, value): return DoubleAddValue(builder, value) -def DoubleEnd(builder): return builder.EndObject() + + +def DoubleEnd(builder): + return builder.EndObject() + + def End(builder): - return DoubleEnd(builder) \ No newline at end of file + return DoubleEnd(builder) diff --git a/streaming_data_types/fbschemas/logdata_f144/Float.py b/streaming_data_types/fbschemas/logdata_f144/Float.py index 0c02389..7f2ef22 100644 --- a/streaming_data_types/fbschemas/logdata_f144/Float.py +++ b/streaming_data_types/fbschemas/logdata_f144/Float.py @@ -1,13 +1,15 @@ # automatically generated by the FlatBuffers compiler, do not modify -# namespace: +# namespace: import flatbuffers from flatbuffers.compat import import_numpy + np = import_numpy() + class Float(object): - __slots__ = ['_tab'] + __slots__ = ["_tab"] @classmethod def GetRootAs(cls, buf, offset=0): @@ -20,9 +22,12 @@ def GetRootAs(cls, buf, offset=0): def 
GetRootAsFloat(cls, buf, offset=0): """This method is deprecated. Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) + @classmethod def FloatBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x66\x31\x34\x34", size_prefixed=size_prefixed) + return flatbuffers.util.BufferHasIdentifier( + buf, offset, b"\x66\x31\x34\x34", size_prefixed=size_prefixed + ) # Float def Init(self, buf, pos): @@ -32,15 +37,31 @@ def Init(self, buf, pos): def Value(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) if o != 0: - return self._tab.Get(flatbuffers.number_types.Float32Flags, o + self._tab.Pos) + return self._tab.Get( + flatbuffers.number_types.Float32Flags, o + self._tab.Pos + ) return 0.0 -def FloatStart(builder): builder.StartObject(1) + +def FloatStart(builder): + builder.StartObject(1) + + def Start(builder): return FloatStart(builder) -def FloatAddValue(builder, value): builder.PrependFloat32Slot(0, value, 0.0) + + +def FloatAddValue(builder, value): + builder.PrependFloat32Slot(0, value, 0.0) + + def AddValue(builder, value): return FloatAddValue(builder, value) -def FloatEnd(builder): return builder.EndObject() + + +def FloatEnd(builder): + return builder.EndObject() + + def End(builder): - return FloatEnd(builder) \ No newline at end of file + return FloatEnd(builder) diff --git a/streaming_data_types/fbschemas/logdata_f144/Int.py b/streaming_data_types/fbschemas/logdata_f144/Int.py index df398d9..c35a22f 100644 --- a/streaming_data_types/fbschemas/logdata_f144/Int.py +++ b/streaming_data_types/fbschemas/logdata_f144/Int.py @@ -1,13 +1,15 @@ # automatically generated by the FlatBuffers compiler, do not modify -# namespace: +# namespace: import flatbuffers from flatbuffers.compat import import_numpy + np = import_numpy() + class Int(object): - __slots__ = ['_tab'] + __slots__ = ["_tab"] @classmethod def GetRootAs(cls, buf, offset=0): @@ -20,9 +22,12 @@ def 
GetRootAs(cls, buf, offset=0): def GetRootAsInt(cls, buf, offset=0): """This method is deprecated. Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) + @classmethod def IntBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x66\x31\x34\x34", size_prefixed=size_prefixed) + return flatbuffers.util.BufferHasIdentifier( + buf, offset, b"\x66\x31\x34\x34", size_prefixed=size_prefixed + ) # Int def Init(self, buf, pos): @@ -35,12 +40,26 @@ def Value(self): return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) return 0 -def IntStart(builder): builder.StartObject(1) + +def IntStart(builder): + builder.StartObject(1) + + def Start(builder): return IntStart(builder) -def IntAddValue(builder, value): builder.PrependInt32Slot(0, value, 0) + + +def IntAddValue(builder, value): + builder.PrependInt32Slot(0, value, 0) + + def AddValue(builder, value): return IntAddValue(builder, value) -def IntEnd(builder): return builder.EndObject() + + +def IntEnd(builder): + return builder.EndObject() + + def End(builder): - return IntEnd(builder) \ No newline at end of file + return IntEnd(builder) diff --git a/streaming_data_types/fbschemas/logdata_f144/LogData.py b/streaming_data_types/fbschemas/logdata_f144/LogData.py index 1e307ae..39a6019 100644 --- a/streaming_data_types/fbschemas/logdata_f144/LogData.py +++ b/streaming_data_types/fbschemas/logdata_f144/LogData.py @@ -1,13 +1,15 @@ # automatically generated by the FlatBuffers compiler, do not modify -# namespace: +# namespace: import flatbuffers from flatbuffers.compat import import_numpy + np = import_numpy() + class LogData(object): - __slots__ = ['_tab'] + __slots__ = ["_tab"] @classmethod def GetRootAs(cls, buf, offset=0): @@ -20,9 +22,12 @@ def GetRootAs(cls, buf, offset=0): def GetRootAsLogData(cls, buf, offset=0): """This method is deprecated. 
Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) + @classmethod def LogDataBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x66\x31\x34\x34", size_prefixed=size_prefixed) + return flatbuffers.util.BufferHasIdentifier( + buf, offset, b"\x66\x31\x34\x34", size_prefixed=size_prefixed + ) # LogData def Init(self, buf, pos): @@ -54,26 +59,60 @@ def Value(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) if o != 0: from flatbuffers.table import Table + obj = Table(bytearray(), 0) self._tab.Union(obj, o) return obj return None -def LogDataStart(builder): builder.StartObject(4) + +def LogDataStart(builder): + builder.StartObject(4) + + def Start(builder): return LogDataStart(builder) -def LogDataAddSourceName(builder, sourceName): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(sourceName), 0) + + +def LogDataAddSourceName(builder, sourceName): + builder.PrependUOffsetTRelativeSlot( + 0, flatbuffers.number_types.UOffsetTFlags.py_type(sourceName), 0 + ) + + def AddSourceName(builder, sourceName): return LogDataAddSourceName(builder, sourceName) -def LogDataAddTimestamp(builder, timestamp): builder.PrependInt64Slot(1, timestamp, 0) + + +def LogDataAddTimestamp(builder, timestamp): + builder.PrependInt64Slot(1, timestamp, 0) + + def AddTimestamp(builder, timestamp): return LogDataAddTimestamp(builder, timestamp) -def LogDataAddValueType(builder, valueType): builder.PrependUint8Slot(2, valueType, 0) + + +def LogDataAddValueType(builder, valueType): + builder.PrependUint8Slot(2, valueType, 0) + + def AddValueType(builder, valueType): return LogDataAddValueType(builder, valueType) -def LogDataAddValue(builder, value): builder.PrependUOffsetTRelativeSlot(3, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0) + + +def LogDataAddValue(builder, value): + builder.PrependUOffsetTRelativeSlot( + 3, 
flatbuffers.number_types.UOffsetTFlags.py_type(value), 0 + ) + + def AddValue(builder, value): return LogDataAddValue(builder, value) -def LogDataEnd(builder): return builder.EndObject() + + +def LogDataEnd(builder): + return builder.EndObject() + + def End(builder): - return LogDataEnd(builder) \ No newline at end of file + return LogDataEnd(builder) diff --git a/streaming_data_types/fbschemas/logdata_f144/Long.py b/streaming_data_types/fbschemas/logdata_f144/Long.py index e3b3778..4a6ca03 100644 --- a/streaming_data_types/fbschemas/logdata_f144/Long.py +++ b/streaming_data_types/fbschemas/logdata_f144/Long.py @@ -1,13 +1,15 @@ # automatically generated by the FlatBuffers compiler, do not modify -# namespace: +# namespace: import flatbuffers from flatbuffers.compat import import_numpy + np = import_numpy() + class Long(object): - __slots__ = ['_tab'] + __slots__ = ["_tab"] @classmethod def GetRootAs(cls, buf, offset=0): @@ -20,9 +22,12 @@ def GetRootAs(cls, buf, offset=0): def GetRootAsLong(cls, buf, offset=0): """This method is deprecated. 
Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) + @classmethod def LongBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x66\x31\x34\x34", size_prefixed=size_prefixed) + return flatbuffers.util.BufferHasIdentifier( + buf, offset, b"\x66\x31\x34\x34", size_prefixed=size_prefixed + ) # Long def Init(self, buf, pos): @@ -35,12 +40,26 @@ def Value(self): return self._tab.Get(flatbuffers.number_types.Int64Flags, o + self._tab.Pos) return 0 -def LongStart(builder): builder.StartObject(1) + +def LongStart(builder): + builder.StartObject(1) + + def Start(builder): return LongStart(builder) -def LongAddValue(builder, value): builder.PrependInt64Slot(0, value, 0) + + +def LongAddValue(builder, value): + builder.PrependInt64Slot(0, value, 0) + + def AddValue(builder, value): return LongAddValue(builder, value) -def LongEnd(builder): return builder.EndObject() + + +def LongEnd(builder): + return builder.EndObject() + + def End(builder): - return LongEnd(builder) \ No newline at end of file + return LongEnd(builder) diff --git a/streaming_data_types/fbschemas/logdata_f144/Short.py b/streaming_data_types/fbschemas/logdata_f144/Short.py index c7ef950..ca6d598 100644 --- a/streaming_data_types/fbschemas/logdata_f144/Short.py +++ b/streaming_data_types/fbschemas/logdata_f144/Short.py @@ -1,13 +1,15 @@ # automatically generated by the FlatBuffers compiler, do not modify -# namespace: +# namespace: import flatbuffers from flatbuffers.compat import import_numpy + np = import_numpy() + class Short(object): - __slots__ = ['_tab'] + __slots__ = ["_tab"] @classmethod def GetRootAs(cls, buf, offset=0): @@ -20,9 +22,12 @@ def GetRootAs(cls, buf, offset=0): def GetRootAsShort(cls, buf, offset=0): """This method is deprecated. 
Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) + @classmethod def ShortBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x66\x31\x34\x34", size_prefixed=size_prefixed) + return flatbuffers.util.BufferHasIdentifier( + buf, offset, b"\x66\x31\x34\x34", size_prefixed=size_prefixed + ) # Short def Init(self, buf, pos): @@ -35,12 +40,26 @@ def Value(self): return self._tab.Get(flatbuffers.number_types.Int16Flags, o + self._tab.Pos) return 0 -def ShortStart(builder): builder.StartObject(1) + +def ShortStart(builder): + builder.StartObject(1) + + def Start(builder): return ShortStart(builder) -def ShortAddValue(builder, value): builder.PrependInt16Slot(0, value, 0) + + +def ShortAddValue(builder, value): + builder.PrependInt16Slot(0, value, 0) + + def AddValue(builder, value): return ShortAddValue(builder, value) -def ShortEnd(builder): return builder.EndObject() + + +def ShortEnd(builder): + return builder.EndObject() + + def End(builder): - return ShortEnd(builder) \ No newline at end of file + return ShortEnd(builder) diff --git a/streaming_data_types/fbschemas/logdata_f144/UByte.py b/streaming_data_types/fbschemas/logdata_f144/UByte.py index 9f847d9..42a1927 100644 --- a/streaming_data_types/fbschemas/logdata_f144/UByte.py +++ b/streaming_data_types/fbschemas/logdata_f144/UByte.py @@ -1,13 +1,15 @@ # automatically generated by the FlatBuffers compiler, do not modify -# namespace: +# namespace: import flatbuffers from flatbuffers.compat import import_numpy + np = import_numpy() + class UByte(object): - __slots__ = ['_tab'] + __slots__ = ["_tab"] @classmethod def GetRootAs(cls, buf, offset=0): @@ -20,9 +22,12 @@ def GetRootAs(cls, buf, offset=0): def GetRootAsUByte(cls, buf, offset=0): """This method is deprecated. 
Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) + @classmethod def UByteBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x66\x31\x34\x34", size_prefixed=size_prefixed) + return flatbuffers.util.BufferHasIdentifier( + buf, offset, b"\x66\x31\x34\x34", size_prefixed=size_prefixed + ) # UByte def Init(self, buf, pos): @@ -35,12 +40,26 @@ def Value(self): return self._tab.Get(flatbuffers.number_types.Uint8Flags, o + self._tab.Pos) return 0 -def UByteStart(builder): builder.StartObject(1) + +def UByteStart(builder): + builder.StartObject(1) + + def Start(builder): return UByteStart(builder) -def UByteAddValue(builder, value): builder.PrependUint8Slot(0, value, 0) + + +def UByteAddValue(builder, value): + builder.PrependUint8Slot(0, value, 0) + + def AddValue(builder, value): return UByteAddValue(builder, value) -def UByteEnd(builder): return builder.EndObject() + + +def UByteEnd(builder): + return builder.EndObject() + + def End(builder): - return UByteEnd(builder) \ No newline at end of file + return UByteEnd(builder) diff --git a/streaming_data_types/fbschemas/logdata_f144/UInt.py b/streaming_data_types/fbschemas/logdata_f144/UInt.py index 4475968..10771f0 100644 --- a/streaming_data_types/fbschemas/logdata_f144/UInt.py +++ b/streaming_data_types/fbschemas/logdata_f144/UInt.py @@ -1,13 +1,15 @@ # automatically generated by the FlatBuffers compiler, do not modify -# namespace: +# namespace: import flatbuffers from flatbuffers.compat import import_numpy + np = import_numpy() + class UInt(object): - __slots__ = ['_tab'] + __slots__ = ["_tab"] @classmethod def GetRootAs(cls, buf, offset=0): @@ -20,9 +22,12 @@ def GetRootAs(cls, buf, offset=0): def GetRootAsUInt(cls, buf, offset=0): """This method is deprecated. 
Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) + @classmethod def UIntBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x66\x31\x34\x34", size_prefixed=size_prefixed) + return flatbuffers.util.BufferHasIdentifier( + buf, offset, b"\x66\x31\x34\x34", size_prefixed=size_prefixed + ) # UInt def Init(self, buf, pos): @@ -32,15 +37,31 @@ def Init(self, buf, pos): def Value(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) if o != 0: - return self._tab.Get(flatbuffers.number_types.Uint32Flags, o + self._tab.Pos) + return self._tab.Get( + flatbuffers.number_types.Uint32Flags, o + self._tab.Pos + ) return 0 -def UIntStart(builder): builder.StartObject(1) + +def UIntStart(builder): + builder.StartObject(1) + + def Start(builder): return UIntStart(builder) -def UIntAddValue(builder, value): builder.PrependUint32Slot(0, value, 0) + + +def UIntAddValue(builder, value): + builder.PrependUint32Slot(0, value, 0) + + def AddValue(builder, value): return UIntAddValue(builder, value) -def UIntEnd(builder): return builder.EndObject() + + +def UIntEnd(builder): + return builder.EndObject() + + def End(builder): - return UIntEnd(builder) \ No newline at end of file + return UIntEnd(builder) diff --git a/streaming_data_types/fbschemas/logdata_f144/ULong.py b/streaming_data_types/fbschemas/logdata_f144/ULong.py index a450c89..bdf75e4 100644 --- a/streaming_data_types/fbschemas/logdata_f144/ULong.py +++ b/streaming_data_types/fbschemas/logdata_f144/ULong.py @@ -1,13 +1,15 @@ # automatically generated by the FlatBuffers compiler, do not modify -# namespace: +# namespace: import flatbuffers from flatbuffers.compat import import_numpy + np = import_numpy() + class ULong(object): - __slots__ = ['_tab'] + __slots__ = ["_tab"] @classmethod def GetRootAs(cls, buf, offset=0): @@ -20,9 +22,12 @@ def GetRootAs(cls, buf, offset=0): def GetRootAsULong(cls, buf, offset=0): """This 
method is deprecated. Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) + @classmethod def ULongBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x66\x31\x34\x34", size_prefixed=size_prefixed) + return flatbuffers.util.BufferHasIdentifier( + buf, offset, b"\x66\x31\x34\x34", size_prefixed=size_prefixed + ) # ULong def Init(self, buf, pos): @@ -32,15 +37,31 @@ def Init(self, buf, pos): def Value(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) if o != 0: - return self._tab.Get(flatbuffers.number_types.Uint64Flags, o + self._tab.Pos) + return self._tab.Get( + flatbuffers.number_types.Uint64Flags, o + self._tab.Pos + ) return 0 -def ULongStart(builder): builder.StartObject(1) + +def ULongStart(builder): + builder.StartObject(1) + + def Start(builder): return ULongStart(builder) -def ULongAddValue(builder, value): builder.PrependUint64Slot(0, value, 0) + + +def ULongAddValue(builder, value): + builder.PrependUint64Slot(0, value, 0) + + def AddValue(builder, value): return ULongAddValue(builder, value) -def ULongEnd(builder): return builder.EndObject() + + +def ULongEnd(builder): + return builder.EndObject() + + def End(builder): - return ULongEnd(builder) \ No newline at end of file + return ULongEnd(builder) diff --git a/streaming_data_types/fbschemas/logdata_f144/UShort.py b/streaming_data_types/fbschemas/logdata_f144/UShort.py index 590f040..1dca26b 100644 --- a/streaming_data_types/fbschemas/logdata_f144/UShort.py +++ b/streaming_data_types/fbschemas/logdata_f144/UShort.py @@ -1,13 +1,15 @@ # automatically generated by the FlatBuffers compiler, do not modify -# namespace: +# namespace: import flatbuffers from flatbuffers.compat import import_numpy + np = import_numpy() + class UShort(object): - __slots__ = ['_tab'] + __slots__ = ["_tab"] @classmethod def GetRootAs(cls, buf, offset=0): @@ -20,9 +22,12 @@ def GetRootAs(cls, buf, offset=0): def 
GetRootAsUShort(cls, buf, offset=0): """This method is deprecated. Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) + @classmethod def UShortBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x66\x31\x34\x34", size_prefixed=size_prefixed) + return flatbuffers.util.BufferHasIdentifier( + buf, offset, b"\x66\x31\x34\x34", size_prefixed=size_prefixed + ) # UShort def Init(self, buf, pos): @@ -32,15 +37,31 @@ def Init(self, buf, pos): def Value(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) if o != 0: - return self._tab.Get(flatbuffers.number_types.Uint16Flags, o + self._tab.Pos) + return self._tab.Get( + flatbuffers.number_types.Uint16Flags, o + self._tab.Pos + ) return 0 -def UShortStart(builder): builder.StartObject(1) + +def UShortStart(builder): + builder.StartObject(1) + + def Start(builder): return UShortStart(builder) -def UShortAddValue(builder, value): builder.PrependUint16Slot(0, value, 0) + + +def UShortAddValue(builder, value): + builder.PrependUint16Slot(0, value, 0) + + def AddValue(builder, value): return UShortAddValue(builder, value) -def UShortEnd(builder): return builder.EndObject() + + +def UShortEnd(builder): + return builder.EndObject() + + def End(builder): - return UShortEnd(builder) \ No newline at end of file + return UShortEnd(builder) diff --git a/streaming_data_types/fbschemas/logdata_f144/Value.py b/streaming_data_types/fbschemas/logdata_f144/Value.py index b6f8232..0f39b02 100644 --- a/streaming_data_types/fbschemas/logdata_f144/Value.py +++ b/streaming_data_types/fbschemas/logdata_f144/Value.py @@ -1,6 +1,7 @@ # automatically generated by the FlatBuffers compiler, do not modify -# namespace: +# namespace: + class Value(object): NONE = 0 diff --git a/streaming_data_types/logdata_f144.py b/streaming_data_types/logdata_f144.py index 8d29177..7123605 100644 --- a/streaming_data_types/logdata_f144.py +++ 
b/streaming_data_types/logdata_f144.py @@ -1,6 +1,6 @@ from collections import namedtuple from datetime import datetime -from typing import Any, Union, NamedTuple +from typing import Any, NamedTuple, Union import flatbuffers import numpy as np @@ -146,9 +146,7 @@ def _serialise_value( _map_scalar_type_to_serialiser = { - np.dtype("byte"): SerialiserFunctions( - ByteStart, ByteAddValue, ByteEnd, Value.Byte - ), + np.dtype("byte"): SerialiserFunctions(ByteStart, ByteAddValue, ByteEnd, Value.Byte), np.dtype("ubyte"): SerialiserFunctions( UByteStart, UByteAddValue, UByteEnd, Value.UByte ), @@ -158,9 +156,7 @@ def _serialise_value( np.dtype("uint16"): SerialiserFunctions( UShortStart, UShortAddValue, UShortEnd, Value.UShort ), - np.dtype("int32"): SerialiserFunctions( - IntStart, IntAddValue, IntEnd, Value.Int - ), + np.dtype("int32"): SerialiserFunctions(IntStart, IntAddValue, IntEnd, Value.Int), np.dtype("uint32"): SerialiserFunctions( UIntStart, UIntAddValue, UIntEnd, Value.UInt ), @@ -229,7 +225,7 @@ def serialise_f144( value_type = c_func_map.value_type_enum except KeyError: raise NotImplementedError( - f'f144 flatbuffer does not support values of type {value.dtype}.' + f"f144 flatbuffer does not support values of type {value.dtype}." ) elif value.ndim == 0: try: @@ -238,12 +234,10 @@ def serialise_f144( value_type = c_func_map.value_type_enum except KeyError: raise NotImplementedError( - f'f144 flatbuffer does not support values of type {value.dtype}.' + f"f144 flatbuffer does not support values of type {value.dtype}." 
) else: - raise NotImplementedError( - "f144 only supports scalars or 1D array values" - ) + raise NotImplementedError("f144 only supports scalars or 1D array values") LogData.LogDataStart(builder) LogData.LogDataAddSourceName(builder, source_name_offset) LogData.LogDataAddValue(builder, value_offset) @@ -303,5 +297,5 @@ def deserialise_f144(buffer: Union[bytearray, bytes]) -> ExtractedLogData: return ExtractedLogData( source_name=source_name.decode(), value=value, - timestamp_unix_ns=log_data.Timestamp() + timestamp_unix_ns=log_data.Timestamp(), ) diff --git a/tests/test_al00.py b/tests/test_al00.py index 84c83b3..6b933d9 100644 --- a/tests/test_al00.py +++ b/tests/test_al00.py @@ -1,8 +1,7 @@ import pytest from streaming_data_types import DESERIALISERS, SERIALISERS -from streaming_data_types.alarm_al00 import (Severity, deserialise_al00, - serialise_al00) +from streaming_data_types.alarm_al00 import Severity, deserialise_al00, serialise_al00 from streaming_data_types.exceptions import WrongSchemaException diff --git a/tests/test_ev44.py b/tests/test_ev44.py index c0b7687..92cd699 100644 --- a/tests/test_ev44.py +++ b/tests/test_ev44.py @@ -14,9 +14,17 @@ def test_serialises_and_deserialises_ev44_message_correctly(self): original_entry = { "source_name": "some_source", "message_id": 123456, - "reference_time": [1618573589123781958, 1618573590133830371, 1618573593677164112, - 1618573594185190549, 1618573596217316066, 1618573596725363109, - 1618573601295720976, 1618573601799761445, 1618573607354064836], + "reference_time": [ + 1618573589123781958, + 1618573590133830371, + 1618573593677164112, + 1618573594185190549, + 1618573596217316066, + 1618573596725363109, + 1618573601295720976, + 1618573601799761445, + 1618573607354064836, + ], "reference_time_index": [2, 4, 5, 7], "time_of_flight": [100, 200, 300, 400, 500, 600, 700, 800, 900], "pixel_id": [10, 20, 30, 40, 50, 60, 70, 80, 90], @@ -28,7 +36,9 @@ def test_serialises_and_deserialises_ev44_message_correctly(self): 
assert entry.source_name == original_entry["source_name"] assert entry.message_id == original_entry["message_id"] assert np.array_equal(entry.reference_time, original_entry["reference_time"]) - assert np.array_equal(entry.reference_time_index, original_entry["reference_time_index"]) + assert np.array_equal( + entry.reference_time_index, original_entry["reference_time_index"] + ) assert np.array_equal(entry.time_of_flight, original_entry["time_of_flight"]) assert np.array_equal(entry.pixel_id, original_entry["pixel_id"]) @@ -39,11 +49,19 @@ def test_serialises_and_deserialises_ev44_message_correctly_for_numpy_arrays(sel original_entry = { "source_name": "some_source", "message_id": 123456, - "reference_time": np.array([1618573589123781958, 1618573590133830371, - 1618573593677164112, 1618573594185190549, - 1618573596217316066, 1618573596725363109, - 1618573601295720976, 1618573601799761445, - 1618573607354064836]), + "reference_time": np.array( + [ + 1618573589123781958, + 1618573590133830371, + 1618573593677164112, + 1618573594185190549, + 1618573596217316066, + 1618573596725363109, + 1618573601295720976, + 1618573601799761445, + 1618573607354064836, + ] + ), "reference_time_index": np.array([2, 4, 5, 7]), "time_of_flight": np.array([100, 200, 300, 400, 500, 600, 700, 800, 900]), "pixel_id": np.array([10, 20, 30, 40, 50, 60, 70, 80, 90]), @@ -55,7 +73,9 @@ def test_serialises_and_deserialises_ev44_message_correctly_for_numpy_arrays(sel assert entry.source_name == original_entry["source_name"] assert entry.message_id == original_entry["message_id"] assert np.array_equal(entry.reference_time, original_entry["reference_time"]) - assert np.array_equal(entry.reference_time_index, original_entry["reference_time_index"]) + assert np.array_equal( + entry.reference_time_index, original_entry["reference_time_index"] + ) assert np.array_equal(entry.time_of_flight, original_entry["time_of_flight"]) assert np.array_equal(entry.pixel_id, original_entry["pixel_id"]) @@ -63,11 
+83,19 @@ def test_if_buffer_has_wrong_id_then_throws(self): original_entry = { "source_name": "some_source", "message_id": 123456, - "reference_time": np.array([1618573589123781958, 1618573590133830371, - 1618573593677164112, 1618573594185190549, - 1618573596217316066, 1618573596725363109, - 1618573601295720976, 1618573601799761445, - 1618573607354064836]), + "reference_time": np.array( + [ + 1618573589123781958, + 1618573590133830371, + 1618573593677164112, + 1618573594185190549, + 1618573596217316066, + 1618573596725363109, + 1618573601295720976, + 1618573601799761445, + 1618573607354064836, + ] + ), "reference_time_index": np.array([2, 4, 5, 7]), "time_of_flight": np.array([100, 200, 300, 400, 500, 600, 700, 800, 900]), "pixel_id": np.array([10, 20, 30, 40, 50, 60, 70, 80, 90]), From 0a205dbb563a8c7d50367b59cbf02ca2b3ffc468 Mon Sep 17 00:00:00 2001 From: Jonas Petersson Date: Thu, 24 Nov 2022 14:31:13 +0100 Subject: [PATCH 287/363] fixing bug --- streaming_data_types/alarm_al00.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/streaming_data_types/alarm_al00.py b/streaming_data_types/alarm_al00.py index 53947b9..a44f394 100644 --- a/streaming_data_types/alarm_al00.py +++ b/streaming_data_types/alarm_al00.py @@ -13,9 +13,9 @@ class Severity(Enum): - OK = (0,) - MINOR = (1,) - MAJOR = (2,) + OK = 0 + MINOR = 1 + MAJOR = 2 INVALID = 3 From 327edeb41950bd242d8c1750e55d35cabd630705 Mon Sep 17 00:00:00 2001 From: kmurica <76992076+kmurica@users.noreply.github.com> Date: Thu, 24 Nov 2022 15:00:25 +0100 Subject: [PATCH 288/363] bumping up version and updating readme (#71) --- README.md | 6 ++++-- streaming_data_types/_version.py | 2 +- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index 7df0c01..016aeb6 100644 --- a/README.md +++ b/README.md @@ -13,13 +13,15 @@ https://github.com/ess-dmsc/streaming-data-types |ns10|NICOS cache entry schema| |pl72|Run start| |6s4t|Run stop| -|f142|Log data| +|f142|Log data 
(deprecated in favour of f144)| +|f144|Log data| |ev42|Event data (deprecated in favour of ev44)| |ev43|Event data from multiple pulses| |ev44|Event data with signed data types| |x5f2|Status messages| |tdct|Timestamps| -|ep00|EPICS connection info| +|ep00|EPICS connection info (deprecated in favour of ep01)| +|ep01|EPICS connection info| |rf5k|Forwarder configuration update| |answ|File-writer command response| |wrdn|File-writer finished writing| diff --git a/streaming_data_types/_version.py b/streaming_data_types/_version.py index 378fd35..c1e614e 100644 --- a/streaming_data_types/_version.py +++ b/streaming_data_types/_version.py @@ -1,4 +1,4 @@ # Version is not directly defined in __init__ because that causes all # run time dependencies to become build-time dependencies when it is # imported in setup.py -version = "0.16.0" +version = "0.18.0" From 3fba70c7a5e8f021bb986a6ec70e5b64ba8387ad Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Thu, 24 Nov 2022 14:01:39 +0000 Subject: [PATCH 289/363] Add buffer tests (#70) * added buffer for ev42 * added buffer for hs01 --- tests/example_buffers/ev42.bin | Bin 0 -> 13112 bytes tests/example_buffers/hs01.bin | Bin 0 -> 104896 bytes tests/test_ev42.py | 18 ++++++++++++++++++ tests/test_hs01.py | 19 +++++++++++++++++++ 4 files changed, 37 insertions(+) create mode 100644 tests/example_buffers/ev42.bin create mode 100644 tests/example_buffers/hs01.bin diff --git a/tests/example_buffers/ev42.bin b/tests/example_buffers/ev42.bin new file mode 100644 index 0000000000000000000000000000000000000000..783f737cb4f5e8249bc99117dfa9ff9bb524a669 GIT binary patch literal 13112 zcmeI2<#$#`*T%1zISC;^aCeHk6%w?#r)Vj~-6eQ&w*bYZc!AbeTx>Bf|OOB0+dMUl!<>yiHbPnH0CJ^iw0F4&$Xp#f_enG`bkiX%1^BpUWuESED#3t**=I z_?d%x(Wje6GunEpaE|rdw|bS`sVl$VywK=+R-U!lYSnqC z3yGYnWiu);$EfT~t1!=MZ+5G}E3D$);&;!j5*=}xe9|b}IivZkt5AJUdny`b7;9B( ztdyx7^=@FbudY$rSx)c2TJ7O^ZL6}B>!5TzSNg`_m=M8mM)yI+TPP?M@FNg9gU9tV>Nz< 
z(eu(q0)Ap1tmK%9~yKwBFP8)J`|LS{=?{&L|87;&Q|mfLV}Fs#uczkN{|Zh{OK+&hug>WGLaTo! zIGq}36j%_<_p%CKYE?L`(`vX9aoi|!j8)~KQtnu*sH#%FfmVKSFA;dDywS?PjM3;h zR#Puhi@sJ}!B{{zUj;o)``hX4exqisojQWe!mX?Zes+2s2;V*%h4Obr*GS(4I2Au( zRHvboCXZ8@YF1x)N2wu3@1{u4r@%iyt0$wZ(tT(2iqGB7z@DaBE$<8u+QUmYKCeIA zy62Roy{E-5jdFv-YlqO1qE^cStR5r>S0A1B){;iBmcSBDbg6)0Y@MY(ygw#4Tzp{l1zrTBi+ka*e-wN@ZB$~H)jE3727MWj&+0O~Di7YrPXtrw z<0>%T{jAl8x_o_cx_QMYeYn*Ho;x{8S{`B*{e!e(1{$zg`Z!lg|D&{_E$>NTbtl^B z7JqvI-6%+(c3(zEH(4dmMXk#6>`K%EO{>isOTdF}H>HZHc%Jd}dmAH4t8;Ms*(KhG z9!<$&)v=dRl1EmXMP0yF=iyefN?48VW>ssGbh4RM_UV%S9&AN9-O6vYC4r}{^mF)J z>X4I~RY9xK(}2IJ9r%lVDDA&t74pPteh+@O-l!!#j_z%gI-}F?Z_#>c)&N~P1rFoW z{+q#T ze5dZ;TGfDy7dILWD}XkQwz`I=O*|=mJK4xXNAj`ODS_~2w)7R<9QVQ~nmPo(bgDwn zJ5zs49k#>y5cK(IWwaMA-|b}8ZztN81?@;7-HHcRsBa`aT2sKP-XyTF8a){1>1uVS zzbYFg%L`{#8O38ibJoG%p6D9;JVK3+@w`metg7=!3+vms&?pD* z=-AO{8dwMolLny=vlfGkW=8jet!!qaGVC(|-Ffcvh!e_}M7PTMQrQz0at9 z4WmuPt#Usm*HGtzE$~9#XP0`K7;1GJ97XZ0U)or`&Ftyp9%+gLlVECdeks)|u&{(0 zma+Pj2~YXU>ShWs?is}oGdc~H)3TlqaG@>hzW5vO!Pgoj^;C-Qr%I6vnp@=oPY=m5 zUJW$3msMn6{O22d1dSO7hAP#fXXx$HAMrjk?=bI5y^MUr`#nvn91voxSA40c0K4I)vwcg44Os{H$c4e=3q02Y`LFugW3x zWT>YT!@*-hPu?b{!cEbI{#L@f9#g9{^`veUoyOARJJj5=&*T@Jn!m*=<4VRc)>Vu zl+Wr$W%3RDX;;|OBr;JM_7n>DGnd4h)=I~_puts*mY~`N zYRg{MH=lev0q#xo{_Er4V9Bo!d6+zOhI*yI7o*U!58cUI_*wSEPC;ahcWBMgm#p^( zqd`fnE>#A@thEGOx6M2?Cfn`bhOWW8(a)VeM>u6ZWz>+|+XuY2o{vs0ppL0MRRWI} z@{xtWZ$3Yxz0@n-Y^Olhyc?gpyUi&Bp7L8CqaO5U{YZQ$lI+q*dYK6C9dGq+rBhNc zm$o(B{+-XUmJQR5-X=22Sb&)UUe*OGm&oR4J{YxWj_yBD$sSVHCN*}?-=4are3$w#VY2$kMO%GR= zf-mg7cUJTVF7<0-70VpZ1f0%6heIBMlek8QgYd}mR>_V!{nUw^j*e9hbDC8ZZKPIz zf|VY4&ksYaMvx!=389~0KN=3Cr1!2N`d7wi{Tr)4_;*r#ZDIp*HdtM-g&GcVN_9c% zjz(8a?&;hsqpgFa9;_`COwVd49m2~a!B+%2Hb+p>Oix>_E% zLpD!LA8+qR3&82=7jR*QCky6R(8rC#$Sd&b23oZNKdIsGDKA_;)=B!Z6Rs5^50Byb zaO}xX%slkn!p~0hG0zriG)~%z#%?BqL|!L@EqD4D5A30VMITt5CJ%+*KnrSHEh1|_ z%}2ckgFm$4>2GLY1T#`?qmNIlM!|{H;AZGMso`d$n1j;eH1rd#8yM}B3B61~fBO!i zUw4cipMj_S$&2KZj{JNLKGT0E+7GAZk+aT_aZcYMV=+?{ze``y^k>X~v(g)dU6+o% 
zMGJB;Tl^#qsK;!1+h}JoJT(jXCkZ`#=5(9?9$1~P>*PQ9zkUR>CR`ozfb7{vis{5W zR-G&bcWU+``%>Sy!{IS_pNJOpz*`^g!;isa*H=z2*lz_sHx*0;y){a~ngTm`ioFSL z+8IstJhfx5@8I}`an$30@EWINkH|%-oXWw&F<^biBi3;e?c7i8GFu%1`~S`x9^O(5 zo~DL3S8Brdc2b@VAp~p zq0%Y%SGEP77>Gv?W`DdlD%2=JDW`_)rF<=?2(TU88qaBiURzIBnEgJI34+Kw!}+_c zEj<+<#Q6i5zmo<&pzmc<8zoGLhEm(RyuUUYkf^ay+P@BekTHo$Tlcvqo+*qeBBSe~%tyCws7u z-sSoG_D=C;g3T75lAPphBrco{a+(h(^UfqEqHjCkRDxT0!UVWV{=eWa6+a~X4h|PR zVL#L+jJ-6#cRr4m{*EE@kVRfn$7}fO8gx1-yze;4$dJo6;Wc|7I=$&eHkdADo6Ffw zCwz|Fu@g;;TN6!sPiCCP`AK5VEGls36^@_J_f%j#XGG3u*J8W_T{sbLbdTrF#bd&L z=4^xf*#gaMiAOoKv_4w&Dw(I;d@jQY>Uq$pAJ}Q$1+GSd89cc#HC{TK%!gl8y<>Hs z*=QXadYSbc+7FJ&iPO-S>tLiQ`El_(@;7tS{`k^h_E~lfJZ;C^OYLf)U%f|y`+0gOnVt%8wq-SV*>+E?3g zi!7`O-rbnan$O~0)F6KlzLysqXTZ<*IGs;{=dGlV1({#q{mN-%^at=On6sxm_)rh4 zZ0VUX2Eut}l^Xa*QJxn_jn3Y;YC*j!x8$sZ{f+5H24By)A2pj<4V;oG{?*^ptZQv; zGJQ9^EH4^`X4U$`sADts@QOJrfI8OT+zc(*1K;9-wGifq(eQX~ZZwekCqN@Uud*^z zJUs&!;oz_vIJlaVvz9`5C0c#|1HO;WZe>j|WQoYDPOqtN%xZE=DEWrHeu|WOrQ`ew z9c#i~rHxhv-=&d-_~P3CMZ!7e;B2rhwrza!*Y z{JrdP&KoP?tD(%ltikb_v-o@g&hZA1uv*A0`kuM59@>(=5$?O#H4Za806Tp{03X?aQ z!YStBD&v{6Ir}-hjI6?QI&9%=Hi^?)v}r;dtHqoHB;JMB-k`q0o+9Tl@1w~-!_i@V z;S2m-MTVN6lQRIY@##6|%p0iLG&nj!+OwN;le0#b`1|>BJ-raK{{gGs)WKWInelSw zRC=GcG-pL6;XS&LGn-VGdZ#N)UE**)91VAGgO!B%Km5Cbmc;}+=2G(EdFH)w;1bV^ zE=gGzMpk{|{>vXpwxbud$)7yuP6S`9 zcMf^~5xfW`&s4(eJ|{p+_}vV2Xed6Fp%>>z^rb-=__~@if}gE^%Z9I^Eib@Q#8GBI z^kd;b&W5JY%Om*ZQS_q}IA{IA=;YN{tFtXQlU!+)Bpb5>zFNJ#Q9H6uvs#=hCt;QZ zds)lz?ms#2EWj+2lQY|$U|=nH#JB3xgA~+j^Hi0(ow@*XQ*L&@^F2lUSXV}a!wpOkE}pFYooDGW6=uo<Yo6Mt5*Bhx~SL5Nkb6CMN&MQDS2JLo>;|y~N?}w+UD;gbGV>FUn6*h#r|H%I1Fo&*^TJU+_y^HVO#dq)GyLa*3 zyZG*1eD^NCdl%omi|^jWckkl6ck$i3`0ibN_b$GB7vH^$@7~3C@8Y|6@!h-l?p=KM aF1~vg-@VKK&%KME`2UQ1wrt&&@BahRpuW=p literal 0 HcmV?d00001 diff --git a/tests/example_buffers/hs01.bin b/tests/example_buffers/hs01.bin new file mode 100644 index 0000000000000000000000000000000000000000..4875ec0724987ccebecf063acc311742727e148e GIT binary patch literal 104896 
zcmeI5OORySRh~DGz!_l&&Il9X3POVu9SWj?Ktu|VRDeKR7a>wSM1lkqPn|AFC@&8w zjV`!s4Bl>ITz==UJUhc0=CFomSf1suhG$sA9C%Qk7eVetEDTdBCh^g zYyInAkG;>Sw&||?U`lEJ^~r}*`oVOVM(LlVgQuPkJpJzle_rHk^NAN>J#Zd2 z@UVf04Loe%VFM2vc-X+h1|Bx>{@B37pKsqE=k(C!!v-EU@UVf04LrYrho4{1@8fk_ zuY13*Tkmzx`<<@$y7vE~IxoumPT%|cWxo5)>%RE6`<(mw-{=2!pXW{Y_pqloz432c z@4oZ6FaC|yAEFN%crzP#Q}^^u)p;{pd;L}J+fR%~`&B!7T{^M;H%m9xiT0bW`|Vcm zz4r57^F5E(rK8@fqJ4-}C)e*@=$kwFFO+qcq>gx(@&5LFtLOSw*MHH|i+$c_N8QMe z@o#tgMf-nIUYyswh>`bRZS}udI#EB`*4^hfz7OA2-o85f{QLTSUH(`%+SaN5tmnnB z=Ix>17vGb1zdP?J>PJ*SzqY79V#+dJ4b{6ZJM!+cqh934xVF5A#v|Uxd(pVM^0me2 zkM=&^eSYKj;fvObJne|Fo^k!r)-UdhSAUkd!|H$Do$mNUUZvxY{QK;h&o}bL7=Ky& zUY)#Cyq8DeJ3R6USE#%gk9N&?xo@gIY5VNxzwdoCdd2?k)VXJzSLN@^t9{dd<%M~w zpZ>HX+P?mMw*LDt>g?mcC~sf>edinVUbSA`H-dL`a;)@fwz?6cn(^r0XGgwvME9v* zy=dzfV_aL5_oDiD@~Xh|-^h#jJS)}rx;*3BqW+4xez*H3*flTsL)IO$<`H>S+dMJu zSHEc7hV+a3{KiFjcQ6M}v5);y!pjV4>`=Za&wT9~CtX{ts=0pm{5Qd0^Me2Rq`U9) zng>xkqJ4{Z;-OHCYsdT_uwT$UiTmC|>fML2UT8BJ7p*I*e?WiQ^2NxPw-4{ji*arB zw8iQV1G+y^UPSvh-MIQ<^lOXet5^Md_E+O3?&0mL?_8!wo_&Zhu5JFlb2eX;FFHre zw?E@z)yXRC(0Ih!Cw$}TXzxSwjn}H^8P{KVp`JYJIU{t^ zJ`eKD^XGv){i1$(QD1-T=RSYbSJ!&l@}eDi*7Ny@{b;MBy-#1ib+kPP?Wm(a=2=&N zwDs@PyHD4?V!r!uKgMG{{nk;>K8%*{jPZs#v97xEtzYYs7j1d! 
zX~#bF%M;zJarL$3X&V>yTW_B&U!J<6dD^1;FkhZJ+Vb6_w)1I69rHzX)zNPs`sJxt zJdFnf-I@P8x>zk*3KGS|e~`K5sV}-m(LDLq(>87$v7wH-^0n0wqfYdjr;hz;%a8up#}Cn$e08la>X#=v zpLONyH_y0ezH$51uPsKuI->O=-+5zS>RI1=X1;pzB1Yb$ut)n+U$hVN^=lh9-#%i0 z@{P-nXxw~l>o}Kr+VZrGM^vW?^P6v<^0b}XJkffhJol@PeOO1^xO&>wRabN`+VZTc zUwv)s%Clayt!G?x&!Rl@ z?aO{5&w1oWp0;u8x{s)@-}yxAs;5pw>zL>K<~yI~q@H=|8IS#_BhR>K+`6Njo_$$I zp0@L=XWbtmPrv;=!tWgBYpc7j?uW^@4|$39{`^F;fyu730NNA!I3Ti<+bQNR6o z?~IGqm1iAMp8J*Od71BCMf0`ody><$uMsFu9qZYzd7}H)HqUw8uPE;k_2gT}ITCUC z#_iL(`bGWLv(7$@`r7tye;=ivkAOb`+V2GW$3g9QU(I))_O0#y^{a1OJ$d@&>9-Hh z+qkG-zV}JLy6!<<%r_qA((m5fN90+@y3YM$?9q9xYajAFFKzQ;-B{-l`s&$VMEkP7 zdG5>p)Q_mXd7^dHv2W|yzj~3kub+=GSF9snef##F7`I>hlV^Q--hcNmUzF!PP{%yy zvQKT}=Bej?#F%G2@0WAxw|`Mx=asK*{aD97jmuY8zxvv~kIZ)t^F0Uo=3C!6yyx=N zQP(}ok7%CvB+hNVb={}^*r)aM$2|Ape8%;w<2{dYdDd|b@0)vw{lz@zbx!r{OTPN< zMV|54pZjrsdDamt{jdkZ1j$ByOEg2L4ZB%VV$Q=kWhDcm&#~_9yT= zw{vRSr_ZH%o}Yf}*uSxOGJ1 z-Z$s=e4JDKIPnv zMbB3~`*MHwWgYu*&+0ngBkrHJ`u6QPTi<=gxVqMNPwq#3&n4b7b)82&>uSrhFYD=- zXMdtR_baNeEl@2mEwIY;-)d%wJe-=71Y z+S&Sx_Roj>cn;Y0zS@uHV4W%YK2Po;-Us{TzWH;i_QiX?)(gC(o^yF`ox^_Jk390M zlkCHJ)YW!A{qC2&CH3vsxnn=_t*@;f`HBAP`5L#r_gmX~?Ablu*^m0<7v`2P);#;O z-+gn+3;Rvh^W5Oo^E9qLcI`u(dsX*go^erq`;g~zYyaxOE8bi8>bZMwwLJ&!L+!`s z)OyzEIhR-GR?ogg`|#dqi{^zsl79CrKlW*!=jFWijZW>?du5(=g1`8@8TWZn*ZHDN zpGEY1)N_yOIakz?Z+)Npcpua;-{%0ko{#s@Ij!qH<%hWW#d*wg4$sFt`*m*p>YK;@ zJ(uKrS@axzABg(hhkgB0PTQ~Y3p-4pKL--^(`R<^@{91W(=X%q{SkE1XW_$3%u&?y z{r0Qy{CNS^erCj}U*8Mp7x}+JKc2VuLBH>J{q`Sp+(+GmeLBDMI4}Jb{q`e2==*cV zeb(p3y@dXKKi7S-uj2XHpXXw}{b}2e`w+uk>OS3*dG1TT{YUEwe9V*dJBR0Bzi}_x=J`B0zx`Xs`{*9rqx&PTSI2q0 z_s;2DzCY|wzIDB~*!BHm+&aFWoZI_kJ^kjX>s;=^Ipn!N^1XMSZ++g}pU;o=n5*Vl zM?U>mJ^9vk&)ygJz;j#gllMhk^W7(Nc`p6ux&L7P+|JINXU5d6<{mk>=VM+m-@5kgxw#j0e6Gv`)wNFDujguC)T#H)xH`^F z|DJ2{{b8T>h2Of)UC&iL`gTv=qa5~B`=*~vf9}gUOPI&^y!$Tb_VPgUtm8RmdCpCJ z&&NH~{ym47=X=I{_1Hs_@BXx%*Zb+dh+E(MKKPB-emSQM`sauGbINnE5ATJ#?$7fM z{nY)L&-3d3lkF%&n-D;=uf@OJfi29>92QQQ9qcd>s-Bl 
z_u~EcoQ#`2Fuw4?BU``0t|oJM;lx{a&7Z!Z+?bCG=fBG7`^Jpjf%JQufZ2QXem3)4j&wi=rTv;1_kFMt+_Qpl` zZ~yj#ojq^-<`?3%j(X;)W8W?J$hpvM@aI3^e%S|o_3jP6`*vQ@b8^r2H(1y6L%;6L zecBi3{mjf&=%X-?@!osn&wJ;!j{3~!JkFu*9_*|5UiKXAAG`RxxgYzpf6;wnC$I1p7$Z!kNwea!S-C7FS|!|+@t;Cw_oo^;ye=jOyPVpdG?c>$NPir z{tNs~4t)Uz+oPrYpa?m@J^y7)bZo_Xw}&WoP9?yc@Ko!_|z@_PK;mv(V&??G0dy$|pk{Ms#b%;&3R!7h(;+J0%v>rV34oqP}V?$h(9kIufmr||Mg-liw_+djs2 z4(#51^3utUH-No->yy_o{{Z&tX`|cVPv!;N{tETft$Og=Fjs?qCf=DJ`b_4D-0zmT zYG3p%-+BYO1NqjmzfM2>sAqm-e%v#8&Z|y>Z-2e{@V%$bpY*$T`1a8}&T0E4zntW) zne1O)M}BjXx8XF9r+)Q^JsD3Yd7D=A%&+q?f8lxRPV$zX`jfncdjHGgot^9lbU(w% zF0QWiJMzTL`S$LIx-I$EY3&PsV;p-RKZ70omU>kWe-rBU_|qTm`Yy!HYvqGI^^Mng zd=J4J?DO$X*M5rUL;oGTEZ=;v<`J*`IH&p4DgK;f9{K89Zy-N^C8zDzU|)ZX{YN{j zar#a0^V`J#WQWashaK^P-OJZcUWdMVNxmqbehT(LJ?k{+wD^1Z)EUrguv_>IJpILf z!0df6|4*3{ojR8~d$Ibi>_-5!elhWYik>XL8&nSEF{u~+lN%z3KQSx-ba&ezkA^&0N2^EAW<_K<>} z^Y{4eH^qBFee>nnXHR@E4=m`m%spV2aDQ9+YdkOXwCzV6oJ+k>-+c_;PxoR!@O$gw z&tOCU9lRF1-gkBA!#M}%M|U7UW}XuFMBNU*=i>ameDpf7 zP`?44b5MtS;(Yo$`W=6r_rZeqBe?t*J6mko_QM;?CWYZ@q*)fwv;R3v-#T zuKWdg>(J-gy6O@y*27N3Gj@aRJ_q;>xb~djxA;5uWxjjcpl3hE$K;QR4`74-4D9iD z^4Lobw(kY&F3{I@uWS5^kZ*sY^*ZN--(cI9buxRbRH$L0>69JmN$0q)NSdb=bY^;5ii6&m)3mh z%)m^46Z^+D&%L$iF5&g+Sr5HU(C_J^#jkDs2Awfz9~pb89(D=-|gHsSfV4Y;7>xG1^mvRlf(z? 
zj7|@4hTndy+n~RMXWwE$w*w3OjNMw-djM{n6Ws*Q{cY%HgWvgk_X{r25#39RZja8= zKERf|277{S|MsK3Mn^r*E1BoHptn??ycxFV)P#N7zk2cp^j7G%*!FFI>Mz|JeDj>A z_8;QvdM{S=o9MH^-%x*sJ;okmtDo@~`YG6TF7&Ny-Wb36zCTvv%bUPk<6naI(Gp+K z*AndDt*{sH)H6Q7n}O$yx$w*_F?{# z{4tot(BFjo3Go&2G4rg6uZZ{94VcsgTl^WczJ0HZ!!O`~-+5B7*YMKm?meiE?=9_~ z_ltVs8odcvz%ld6Tgt<~1{a{uhwsA`{iJ`j>nr`69Tx1_zuwu+UlPaPz?+14fmh#m z#23_A{MAlp1+N7Mc=oY|KgQotZvlTyA1(1EIK#idme=9mi0IGk7p#4eua13o_%r+t zod(^R`y-yQ8|z>v_1VW5oWS!O?AtmU^6a}{JD2vFekSy1{{#L)ADMjnTR0auCf~aD zIU}!!x5REi^AfhGzUPw2FZ31dRzErm`xmXN4Yu?xe}lh=Z~vLSyWbf+_q2we>2pn7 ze<3~?w;%TFdCFVh*IuGG33c34i+`goxYTz4=(gD2pB4J{-;+0kHzB^l-?O(4f5LCS z<`?qyJJ%R~BEH6M*>lEjh%e=V>WlW-lDDM4G4qbe>xfT4&vWhl1${ne>M^JNFVx3h z?;rj`UILx3rw{vdulB!!?|GQN#6NT2=vb%mT;VxahPUSDTtogw9WayEgW4PGFjo(M z68ZS;-+mW=y?c+h;G*p8HFm*11)G1ni_ibv4v+9}@XMQUPkQ_-cnLfJ&0oWt;J*UR z8)2`(C0OWVM!ie?3vdG3*8%($^_<7N6?5CSeOb@FwCE1#WN?nZ0WYX$|111c&^|M^ zx(Qpqb(Zwkgnha1F>&kJzkGF_*Z78d4LEZyZ2P?qw)_eF1=ztmWBxIA0Xy#pwt1eD zxI}Lh&h3~y_kN%*{1)5(PRZ|xZ>VqHE&M`!68g43^ACwH!Bg_DLmlg@e+};loP+0J zPkry{fZw|E5Ad%eGMDG?-e>SK*u%TQUQzcDyMWG}n9Dv}>|?O@fo?`mRA1dWdG_VJ z$@<*m8+hjT#0T`75cm1r5Wj*y!|u_QHz97HV|k!^*6&^~$lHJ&{3GmRa1JhrH`oKX z1Sg@MI%DGAw>AAwK>N6XXI=Lx-}7tX$@e)p#D50b*WkYBZ$f;7Jv`a{9Ju`VJ3IuJ z;5qR*_5fz^n0Ns<;E4E29(eROIc>jYPx2Ph<&(UH)DgdWlDB4>z-#eegBvh`4QPMX zzreqKlDBf{3_W=h;~G7SaLn(UZJ2(=|N%wV&~mybY%r`kB0&Cwc2m z7v!z+*Z!X5Z8%xKd6KtgT0Y5JNZ061$+K_kuAbyAEp_OR;n`my-?>k*SMa9T&UK1^ z1ZLv1plg4J_|HMlZT2K@+e!!ITerYdFF(m!T3X}xJbUa5(0kx}dVy~DByZDG!k)nI zzzyg;H`wm)0Q>k!-m0V{Z1dMo^46V1<7ZFu)}7ps=Y04iZ`~<9$=m-;&f~ro=sVvP z_7XgLlDApu7TdX2*a!b%7auY2?UU#20p1k<9NrweCB6m^i5Kis(EBsNUU9$dcWgeo zSHu_iufYrAl^=BG@Xq16-v<9N{rOyY-Z$_&cvH~$lDunp2gL0|o_m@RzXTI_MgAe! 
z)4zS61)k?MI45}<>=AsQhc&kK`=IB2_nhtj5Z*$h&YU_+{GP9S@*WQG?bCf6h5KM# z@45Bd+XZ!|f1A_x>y-O>gU$u^ns^2W@+SXrmuH=Vo!}jlKLux?_qxTtfWOUgBSY2k1<|qV0W0cMX5TzQ*t}e)qn@KLVHJ zZ{VGS33Sd4{4sp{G;c=y7SumJ+nrmF-+AZQr_^yDE%uQ*%;~wd*q%cVzoRer)8Jo& z3*zc7^@H}cMt>gj>E{|eBJLd5*cZg@!~1=Ou6sF$cjkQ9o{ziuATo`>@t z<6nad`b*fxwG;cXo_Z_trsR1JSJ(}xPR6#Lb==buJ^OTDxA^7Hu&!73nk=R_colTdESGbyi-uW{T)%){u+4hZ3uG|Z0ltB7ohta zagI0e-H&yg*ZgC6m*^zyL+laQgPx0huJN1iUK4)fEq$HimuElDVci+~b*>ffwNu_F z8}Q7%pr8KJ?tOCzuHh|%?fYV7ocuYw4fZARIrc5+d+mn!3_K_Q6y6HV#K-@c_aXiZ z{5`lK{{XbUdIP*`5#9vdG4>4oLwM?V4oCkbyqDoQf5Kj%dyTI9RqvQOBje;Xpm_`Z z@Qy_CFFD6^ckNw)G`Iqp$zs_?=-h{Y)9b?bf{|fsCd%#{{doQo? zAA)D-Z_Een>zcUrF3_{zv!JKnx;Nxslh?3c`&g5I1v-DhUh(rJ1)YxgTwT!n?>?r~ zTZ32Vw7eg$@h|Z=_^1DM_x?P#(DRJLdiFh^+ zs|a1t_iwlL3fOOZ9eE6N1^@(zI$!aUBIj7$@z{r5B>J(y|kYd z`Iq1sx&!tcJf{8yc7b;aI`_!AsP7!N=BZ1b`&^TEOngb)eCKu#?spz^d_So-Vjms6 z1UjGR?H(KA&h6fuyG1YV&%Gx4x4v^^{JyW<$B25)H!>e|&J6DczxV0@+n*-|`wT4V zgPzldxcSa+A1m|0W6*tf=&d-nDX2dYcTe6E@3(cA|84i)PoU>@j_rGEf!-K@1~06` zxyU=GpE>mCRw&;Ee@kIODTk0szd$(dG53O@4NeO z4bT2(@a)@rV0=T|^B9qT3%b_^djcMVmtl{_m+0n@w}3yRU(ay^&wjk0?sH-ucm=Mg zbAavnyO#!k56}7L)GNf#i7)VbE^GWZ;J~>!r+d64-jeV69^&uFGk$Ep#GSMH(P@Y~ zkA0jGSI_xo)(5@kjW&97@Yeb1?+V@(_5tTK!asw5fbIPDaY>%%KZkb;9+7`W{VDdE zJo{A7J}2~XXdmPsk#`Nc_shU@{}<$0|BU)`a7^A3w7&s+Odan_kKevJbQaV(1g+l^ zzeQ(FzP5AUk~d{;_iVpM#>qcGcY|NP_eR_2&$&ED&&zwXLD#%B`p!M4FQ1#D=+q&9st)^f zfAS~r$ILqgukas&4f#jJtve@v47T7AX#GSV^|{7gu@~$3^W=hj&)<8f-Z^paW%T>L zyMiayKJX{z@*F*{IeiWAYJc=?f1acJID>x)Z;gG8Z66!_3-DGRxQ4#~&%rM6&+yOa zcZuD@yJSA^pL(%xbu)Pd+@N=fev5yG-}*QB$M`+x9{&XYHMZw)h5rD&0j+ZY-}%k= zed-*}nR!0dAE77Dd$GW;p3i}EUNCQCK7FWD@Oz$@*yrrqdwGTJ9_C>lb$ow};Ab$w zA3*O@Lx0!su00?6xCK2Q@7Wj~=grt>B6&+}>mFmfj||WG53w6~?xV%;-foE>fh+o- zVqbur`v-mgJr{MO-Vu8C=^o@=qVJwN`tv?4u~*!q1Nh$G4RQD4{5`xW@e%eZ{rl&h zYy6&f55I&zN0_JX%s#*i@)qt1yZ7E;`|~Pc$Np~6b1yAC@fN)fbPpqVp7+AJ;H|J{ z@UFoze(P#4Li`Y0-8r`VUSjK?p?ihj=Vya|!=9G*h24Tva8A6=#XJX~eR_Y!_?P4# zV;B1HoEP|wPwfNV3iN)gh)=*V@g+Jw2Lo~U&{!9=j(f15F?s&~lbmw@j`8>8`}{b! 
zdz{h70G^5Dsgu+}_ZB?BKf^BI4Sx69;$H-G9}D+Rp8HtA-+&k3DR@Jk=VHCY9GBn- zzVlhP!+#0RsK3NM#-4&1{~ENu=VqN7;+LF<`|?~5mb*XSo}uf0FR{ zNZzS>X1Jr-#qtL?;(8eljr60=6(`-bNC57Kz{{ZupggG{h9m% zufyJ;qyHTL0{pJ{Wd?tI|UQCu}|y?xI`y|OZ@7O=0dpIUO1Diee2>%Uz`1b(pFA*OR_na^APx1TzKA7X5ut)nb z-VyhF?XS*hAMpHp6xKEG0A7Yy|L;5Oq2NCR8~A6~YtVCaU$tNQzl1j z*+1BU`mO8V0~r%f?DY(L0O$BO)VJ;hare;Dk8>`ut?xV;oiTCub&FmG*VNVToW`f{ zZqT{LJ|fROu8F(HB|Pgl*a_YlI0D_&+i!z^4*w9m1Y7ryZUGPU!<*q>WBdNlc5h4K_IpO2 ze;?AnKYL}r@ZH}ne$UZ8jqL+$(VfC~&+bvaxG^98HTm{e-%s#vi1*+IyZ{TlHTDu+ z_q@V?3(x0biQjWu6ZhP&vDMdqguex2A6NLD)BCW{Pu)}PfF1p>@Ow_RE`P7qz3Er) zhCKV*pyxTNJHYc^j)LtyI>w*Tx!^vkZyBYtT%mB}S$8wC` zIVev&M}G!hk$(Wc#h!!q?Z11Q;9^-^rqkv-oQLJpmF~_lXgSi0-TV4ihTh( zzq~X2*2~xhG~c;ANAv7&MZFE!gI%cW9K|^3y&E&9eYE7)`wjnwe_wP2{|Me%JPm)0ZCeI3I;2j}h&-XVEC@eTG2+w(lY9+P*3Z67Q9#((Ah@mseL zZ@~fde%-=fgVyo9?aOmW+u>UYHb_sC1^73Va?9zgG_?=R=F{tdia>KtQF>H8?m zIm5mM?bqMW;Li)6$MR(Nf2Ucqck68MufT@-hwzNA@%tV)gMSL|65HP|;J>pU6F($> zifuoqiK&^TKJjxF?Q`2{}I>`pMd%gvAqX#_|C6=j9>c_+q$C=KVlB| z;hf?r@muoFK>gOY?{oO}v4nr_T-f%p!7kte-U6L9w*9#G1Nhg(kFamBry+h3bjH|+ z@SNimyXE}di}N}E1fKWCx()tY@Ra*LrC!#LJtyuQm&BLEtv6sF>jy{lX}oI?k%)}fCLydL!TiDc{}a0cJ!{Rln#IFtvDzzKQh*v?^|`*t54yi4?F-~l@7 z&EVPpjJ;>(jlZAF-{aNr`?3oBgFU!}H%4y`?;P~^ajE0)^YZt0ox-#3CH4qZ&)?&9 zh=1|dIc>lE{bSJNe{~v7K}3bI3f`@a#uD`$_QZM}6n>_p8a9pg(i3_5&JE z*zVK*4(Lby7M{P?%yT`XP6yw-0eb~z(0O|FmZ10J3|>p$)-i8H+&&s?=X4Lp^kqK_ ze(&A}{sdf_4;JzY{55{hr6F#<=Wq+}5VQ~bo`iE)VmrV4zaih}c>(Vj|3F{H{k=qq zc!9qGTYj(Vl=v+04w>f~zkMh7fX<9_YT-HmfZuay;d{R`{A=_wadAc5ISY0JZva=+ z*${V6YvT5QNL>An_$2h-(vN$;B<~8;9pJ%`#Lv;6;`e-( z%wfL!vW|5p@W!BZ?C%`i8Sy3eqrqOVC+E}eeenLdw@kl1y7sk3uLp04dtV!Db=<4{ zjnLiDU*Y%oPOv+Ef3LrSG@*a@Cx1iU5-jisa09NGH#;AG`AM6;t>Zbi z(oB+XLQG)ar-sC zMsEWS)G=>?Js1Z&{5|^ayCXiqpXg@-F7T_{!neO3-6o)U1^)~z>VYkFHrnte@TA#JAKntZ6?zT0QC5Logbd_t>{B^PU|d*`*}wLdiK|1FVP)?^2X$=+tHu( zR`AVtj-L6{Dew~h2HhF{6}kid7VPOy-VC1eZR`i#jd@@p-}>&u{DJyw`1U8yxOFpp z_qU?HwtMj0%parQ(C0GDA+Mlc;4kRE)sNj^yWcT(4*T-Y6-hsJ8~Y(|7V7w%IoA+A 
zkIecVI^GNKnfV*y-UoS^y0s5@?rGtC_*Y;H7Usy#!(Lb69;oAadye*}PEtSgWk0np zybLDl55YgNKlJ<@OZynJ?_wNm@w-R&v_^j>AA1Ql)HC0H7VN`5{anS&&xI9wo@Wbx zf!>&S4*nLu^?U3R_F(=p)akHi>bNh^{{6hk;cL4;$NBk&em{|4Qv|JB``>bLfVe!Y+AswdCS zA6*b{$xHO(e2sdce!_O{nR)mNc@4Jn7WeQcyZ+U&FZVGEeT|*NeDrg$-Fv34^L6lg z=L84*tvc+r+B|3O!MW^v%-nu{Xd%C#x1bO8*4Rtp=}*~1=wrZM!S91^rhoUggy-BH z`S#Ti-{7~u0bTp(&7&{#lKTc{@SJ0ezPhUrpAh%wi@N%|px=@|fE)4`V8(A=PkwKn z_Yu1xKEdwM>EW&MkHH0KANsA=u(u8!-_sp>_O}jw`+TW0&{u2!@U7dRH=&;u@j)Bx z$=6@^WPNn3m%={eJD2lk^6UK6>#R%M{`438J$qWgcb<)X;a_6UK>N(x!!F#%p1juQ z$9Y0s`&i?zbI_MMeh#{y``*v_Ir2w49R6^JnY`g`o?Fn*VNdY$AHxTmFYoO7Iq>ND zdF|?1x2667Hrmw7)bVrS6MjF}-9Ean?CPr5U^nEO*Et9A6!YZs-tLJP^E@Z)hPel@ z&Mn_M%vI!p*6**xX7cvvtQ3iUF?Ru?EN8bedn@2>n5K2!u-iR`}KV6%Rk=` zZ;7jC-|7s+3%UbXoClr49E10RdX0QAlh@0)Z|p=o*6G}jdf3)$$;-rRUHg+yyocY0 zzJ_qGllANuJ?CiYzs_$x{5`h&E!Z8XesXX0rQiEwerF%@)WaU=cfjAYyXSp40kNCM zya#w6^kDbJo!{rP^?iXqli#D)f>|Bxf_}oE$sdSYKm9>Y+pmUrN8Vr`*89Sa*PZOp zey!{C+OhXKhq~mo=&RG&#@x)x|6&Or0?v;C%AMgAFC_l+#F8y&o z)#m)`-pOxH&QO~_*&kKKopXt-QtPXqZ;5XFkh&S}D{veNdBHr0I{_^`dZNIFe zPF7!354(W!%SqnKqyo?U!kq0%-iA|ilDCjrpC9-Gdhxz=&P6<*cdl~&QG56%h$GEZ(s0>=V71P?%zGwm-|--er8WuzVXnv?R8wF&F}}xo7w@^=lvITh}m7bYINL&4wMhx_o{@w(AeB>%{Zwxo`GW z=-2&vj=l3($8%zCpQk+I{b+rjvn&$t&BIQ=#=SRR8y)9K#53{otGm1;PhILIpC{wL z!d~>FlYZ&{-?w#p^O?JtPd)oepGC(x)hFLQ6!$^A?$7<3FOu&)%l4x_b?YAJuMoGM z`>>w;(oBI59Xnt z@RzVZ`zZDi^3AI}-ap>!;yt4;{m$XOa=btG<6PbcZQfT2-E3dM_Rnv{`l9@>hdQU{ z4&QyNlRRH^diMk0xO#Wa-MY?0zIzaR>&Ul`{eiWPd$SMw3;UHH^_|ms(r0$hai&g> zj{PP3aj)2(OOHSMo|FfcIG6eMEzdpS_x{*#t*4%O=+${bezt$>x>x43zhWM|df!4l z@0)S+de15Jl|IFL^5;RGm-NfnzK=Nv&!bF<|J)Am_p3s`u669ob8zqa%?Ev- z`8@mcvi`iI?_|F3Q!(_H)YosG_rbcJi}y9|-?^+K-#yCLj{C8m`}F6ad7iI(mLKal zk9+W3ozFP>#q)LV#;NNb3wC{uox{H5Yg^ZSMP2jtJGb+wXTJQ*J$B#LbD#0Pn+N(_ zC--Mv(yIc+b_f zp7$%pgZ`cO4t2aIz8~y!?{nrkTG#$!zrG*ie(m4A+6VJ{ZoW5aKh_P;tM8LKpL-7b zEcRzV-dCQd6z*xkF5FM;BwxKF?zj8@1U6XgTm7FQ?!6a(cIUrS4}bb8^3?I?m~rbG 
z*Zv9eKE9{!TlnqQd#*p~YpW+OBK;Qp^<3@G{@sVqpM8HUr)OXK&GS6a_1x%NJrQ~lxm>Ur9y{YO2|RkXfyJCFBJJ^Adp?$5c+r>=ea=OXu!tf!uP zF<+z)>(u<8>2g`OfJ+)Q4C5_$Ybu&G$U42l`&W)4%u8KI6RVdQa37t?zwxzuNR) zMD*$$^lQBC#W_Ux;60%WCxbZ+cg*YlER-1n+_?#sHKn|y8Wsd>($-}|MW z^BFfUoNJoo^z6%f{v+5w4$2qZi~Ca7_oMcQ$q(FQ2=Un>byAO3@+`8Ti`;7c} ze(I{Hj{O_gwr}oJ@m`z9Io0RHynSe0&qE#Ob|20m%6IST$k#8=Iple6_T}@eKRz%1 ze37TF=zB<8e2e?2o;>yRJGcECw{Pp(uX)Br=T=8_E_tGRcCYfSAI>}3zk8H#|JL#O zu#Wv2cR%V_N50R4{W-UF)YEou(Y~C^I_g#3`rdC{+~_0(Nvl8-VgcKwO{9x zFUqrjc|Slu=0{y^(f-W)0QuHYN89tao__m~ukHQzT&!oF_uakNmuMaLZN7Z#_}-GQ z-#X@d4#u6=_lCOGk*D7|>`z`q_h4P)+V-KXo^?E5^PEe5^xLPXj=G+kb=`}$dG67B zZQQ!%-B(|}y3XTX)Ky1}{i>s1TOD=nOI^|W=E+k}bia}B^J%_u_iNwg%eO!KkZ-={ zVSW3OZ@zx@we3fqaZw$gKkMuF-h0mOF`{|CAIz6$Tw7gj`?QYv>Wy=H_9ZXs?bET2 zdfKA(MC+*^ZSzEV=8NWO$NJi$_d{EK=Zdzv*7v^1H!en9?HIRjb=23EZ$0^*k9N%S z+)_@@zMMz2AM5D1j<)*tVI6I8pI<$7?8~}h%(p-LP)EOY&EI_D*_cTk(LHI~hp4`K z@m}bcZ+)M4^W$9l&9}b!_8oQAiTUb`a(ecqp8cv9^W~}MKI9v>59cy2+PA3RzC``< z)HN>JkM(0-wB?EFSVuH&zV+Rk`-%D5_9agp^}UD2t!G?RC*Diz#B*@(=85XZJm<1M zG3E5^%X!t&FPa~9&C{=Kzxtg^J$dSC%eN18%oihHUGvn9`P%YgJ#|HS>No65{e5Vi zNARQV9HKhf*0HW}(f-ucmakv*{yDE`Uewh#U)%o7cR$`^dFF}sW!yaLi^koXes#3% z!#(MjXMdu)qWy^K$X7?c{Tg>(>+a*lIjp0edFpB#w?E^ee*28JJkdU4-{#wwX#Loa zdZKYrJ^PWTUt82~f9i;lXMg6&yAPd5J&O%BLp0Ah zqHUgj`-y#WVQg&wWN)zWI?S-+A`& z^vhF6f3)3;`o^{0!#=-#%U93())nQe7j1R)nW|;#=N;CI80*loibvWB+k~^28daPjy9k+G6$RohRD+c={{m-FJ8L@yGe^@DOtD zI8Wun^S(x1!cYHYbp3AkjW(#BD7)6xu_rI;M;+7e_|dC=Z0D}JfhS*-r!A_Zy^m)+ z^7Y?`=819MeSURoJj`!AV&of-e*184ZTX`9%A*hC@gC@}d<68@`=~H__9q%v$2#h0 z%Zqu|i9GYw)fVNeC(5sAVfxpUAN92FoTI+HedzgG=Dz&M(~kQ3)zg-zT`|}1o_$k!`bBBc)(<+DSoH$0 z_J^K4QF_#gad{CVZ=bDSR7ZOs#=c@+wD04`I(0t^iP>+SNAvYoJMi_}k9@KE_vFc| z7%?!M6Zzd8woi&p11easpkxRA8()EzMij^(!Kore8yh>^R$>F~FIw+q`TKO$5hHJ(Kl0wIeP5jzkM_&f-Xi{kG6g7^Bb?2ci+huYkbc)^29x~>V8pPO?dcbw}JeP z7ZLXw?QfiYH~L=l?<&3?&wK6bzWn>*uV;#P|D37btJZzhJnOz!jP;}avijfZ{Fm+b z+l~LS`+HG-^zkv^#4;y&c zz{3U}Ht;$(@bGiwb#C)P>rHLo;oiQfO+VCm*ucXE9yai>frkw|Y~W!7yA3@2`Rm~( 
zJZ#`$0}mT`*ucXE-rNQre(t`xT|ZQM*ucXE{-JN+yPqy#`F$yU?^FES|MPc$w|M&f zm!AIj^y_;@PygrY>3@Im)W41Yuc!Y#|5e}DdFS`vLHc_+{QX`29v6RKR|EeM=< z_ro6I_xB6?`=b5*_7md%zIuQEy`QJ>6U6;I2!C(ApX+i&+|MWQ^HKcVhL02X^LYGR z7eB8i@8b3IkNkWuKey{G;$Z#!Pd`WLQ^Y|(Z_dvn^mBUa=R^AWXnyY4lzh<7!S!=9 z{hZADIca|Wq@RoV%jAQ}&ja=IGe1il^mB5HpC>!p#pCBL*UxqRHR7P3zuaT{InVjm zsRQ~s)9G{MgZ1;5vHjfW{CV<0Kc6}M2KitC{oL#Ho5aBa`nk{Pw}^uU?7?(Q94ufD zrr#zG7O)4??+^zI*n{bJiGu~~!Ss8?!2X0Qbd*nvGbfN4%X zn86k-Ud&KRL%;h!+v}5`{~IGCb%jGoPFO{HGrM)ze!4`00Ot^ygoH P=R+rNfAvFezmxtSA4e4k literal 0 HcmV?d00001 diff --git a/tests/test_ev42.py b/tests/test_ev42.py index 27f30b8..410f373 100644 --- a/tests/test_ev42.py +++ b/tests/test_ev42.py @@ -1,3 +1,4 @@ +import pathlib import numpy as np import pytest @@ -98,3 +99,20 @@ def test_if_buffer_has_wrong_id_then_throws(self): def test_schema_type_is_in_global_serialisers_list(self): assert "ev42" in SERIALISERS assert "ev42" in DESERIALISERS + + def test_converts_real_buffer(self): + file_path = pathlib.Path(__file__).parent / "example_buffers" / "ev42.bin" + with open(file_path, "rb") as file: + buffer = file.read() + + result = deserialise_ev42(buffer) + + assert result.source_name == "grace" + assert result.message_id == 1669290683232688000 + assert result.pulse_time == 1669290683232688000 + assert len(result.time_of_flight) == 1629 + assert result.time_of_flight[0] == 160436 + assert result.time_of_flight[~0] == 147296 + assert len(result.detector_id) == 1629 + assert result.detector_id[0] == 160436 + assert result.detector_id[~0] == 147296 diff --git a/tests/test_hs01.py b/tests/test_hs01.py index f27b83a..348bfb9 100644 --- a/tests/test_hs01.py +++ b/tests/test_hs01.py @@ -1,3 +1,4 @@ +import pathlib import numpy as np import pytest @@ -363,3 +364,21 @@ def test_serialises_and_deserialises_hs01_message_correctly_when_int_input_is_no def test_schema_type_is_in_global_serialisers_list(self): assert "hs01" in SERIALISERS assert "hs01" in DESERIALISERS + + def 
test_converts_real_buffer(self): + file_path = pathlib.Path(__file__).parent / "example_buffers" / "hs01.bin" + with open(file_path, "rb") as file: + buffer = file.read() + + result = deserialise_hs01(buffer) + + assert result['current_shape'] == [64, 200] + assert result['source'] == 'just-bin-it' + assert result['timestamp'] == 1668605515930621000 + assert len(result['data']) == 64 + assert result['data'][0][0] == 0 + assert result['data'][~0][~0] == 0 + assert len(result['dim_metadata'][0]['bin_boundaries']) == 65 + assert result['dim_metadata'][0]['bin_boundaries'][0] == 0 + assert result['dim_metadata'][0]['bin_boundaries'][64] == 64 + assert result['info'] == '{"id": "nicos-det_image1-1668605510", "start": 1668605510775, "stop": 1668605515775, "state": "FINISHED"}' From 527eaf33816c281bdbb3f1b1c18b47ec358e7ca8 Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Fri, 25 Nov 2022 13:09:00 +0100 Subject: [PATCH 290/363] added test for al00 buffer --- tests/example_buffers/al00.bin | Bin 0 -> 72 bytes tests/test_al00.py | 13 +++++++++++++ 2 files changed, 13 insertions(+) create mode 100644 tests/example_buffers/al00.bin diff --git a/tests/example_buffers/al00.bin b/tests/example_buffers/al00.bin new file mode 100644 index 0000000000000000000000000000000000000000..bf25d3d9414d5922d4894a8a866f422152a9ff90 GIT binary patch literal 72 zcmb1OU|>khF)#p93_J`H3?d91Ko$#-&A^}n5 Date: Fri, 25 Nov 2022 13:16:01 +0100 Subject: [PATCH 291/363] added test for f144 buffer --- tests/test_f144.py | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/tests/test_f144.py b/tests/test_f144.py index 3e567fb..078bb28 100644 --- a/tests/test_f144.py +++ b/tests/test_f144.py @@ -1,4 +1,5 @@ import numpy as np +import pathlib import pytest from streaming_data_types import DESERIALISERS, SERIALISERS @@ -155,3 +156,14 @@ def test_if_buffer_has_wrong_id_then_throws(self): def test_schema_type_is_in_global_serialisers_list(self): assert "f144" in SERIALISERS assert "f144" in 
DESERIALISERS + + def test_converts_real_buffer(self): + file_path = pathlib.Path(__file__).parent / "example_buffers" / "f144.bin" + with open(file_path, "rb") as file: + buffer = file.read() + + result = deserialise_f144(buffer) + + assert result.source_name == "t_julabo" + assert result.timestamp_unix_ns == 1666004422815024128 + assert result.value == 19 From 4714d5a9505b9f6fe7c848f512d7b3a6719d9c3f Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Fri, 25 Nov 2022 13:35:24 +0100 Subject: [PATCH 292/363] added test for f144 buffer --- tests/example_buffers/f144.bin | Bin 0 -> 80 bytes 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 tests/example_buffers/f144.bin diff --git a/tests/example_buffers/f144.bin b/tests/example_buffers/f144.bin new file mode 100644 index 0000000000000000000000000000000000000000..a91be71aa110cb4bb7f25fc1162a9da7d2dbff64 GIT binary patch literal 80 zcmWe(U|>iyG%?{}kYW&FU}4~A5MbZ|iZQgvNf=#|69=-m1b~DDNQ{9ED8~XMVZhjd R1IR3i&nnGHOv(p|0ssSF2aNy# literal 0 HcmV?d00001 From 294dc747534a96e34275a47a7b32beeac0fe7149 Mon Sep 17 00:00:00 2001 From: danesss <116186241+danesss@users.noreply.github.com> Date: Tue, 29 Nov 2022 09:36:05 +0100 Subject: [PATCH 293/363] Update README.md --- README.md | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/README.md b/README.md index 016aeb6..6d10b9e 100644 --- a/README.md +++ b/README.md @@ -66,3 +66,8 @@ hist = { ``` The arrays passed in for `data`, `errors` and `bin_boundaries` can be NumPy arrays or regular lists, but on deserialisation they will be NumPy arrays. 
+ + +## Developer documentation + +See [README_DEV.md](README_DEV.md) From d975fd25ef64a3d5060c00c9f646c8efa285ee6f Mon Sep 17 00:00:00 2001 From: Daniel Cacabelos Date: Tue, 29 Nov 2022 16:09:11 +0100 Subject: [PATCH 294/363] Fix: service_id should be empty bytes instead of None --- streaming_data_types/epics_connection_ep01.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/streaming_data_types/epics_connection_ep01.py b/streaming_data_types/epics_connection_ep01.py index 05756d5..62bb1c1 100644 --- a/streaming_data_types/epics_connection_ep01.py +++ b/streaming_data_types/epics_connection_ep01.py @@ -95,7 +95,7 @@ def deserialise_ep01(buffer: Union[bytearray, bytes]) -> EpicsPVConnection: source_name = ( epics_connection.SourceName() if epics_connection.SourceName() else b"" ) - service_id = epics_connection.ServiceId() if epics_connection.ServiceId() else None + service_id = epics_connection.ServiceId() if epics_connection.ServiceId() else b"" return EpicsPVConnection( timestamp=epics_connection.Timestamp(), From 9364e53c9750391793fb3a7a95c81434ce3283b3 Mon Sep 17 00:00:00 2001 From: Daniel Cacabelos Date: Wed, 30 Nov 2022 12:55:36 +0100 Subject: [PATCH 295/363] Bump version to 0.19.0 --- streaming_data_types/_version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/streaming_data_types/_version.py b/streaming_data_types/_version.py index c1e614e..6bd697b 100644 --- a/streaming_data_types/_version.py +++ b/streaming_data_types/_version.py @@ -1,4 +1,4 @@ # Version is not directly defined in __init__ because that causes all # run time dependencies to become build-time dependencies when it is # imported in setup.py -version = "0.18.0" +version = "0.19.0" From 52ade5d49489d3645add3e6e6989ff3d10f810da Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Tue, 6 Dec 2022 07:54:52 +0100 Subject: [PATCH 296/363] moved f144 over to new root type --- .../fbschemas/logdata_f144/ArrayByte.py | 68 ++-------- 
.../fbschemas/logdata_f144/ArrayDouble.py | 68 ++-------- .../fbschemas/logdata_f144/ArrayFloat.py | 68 ++-------- .../fbschemas/logdata_f144/ArrayInt.py | 68 ++-------- .../fbschemas/logdata_f144/ArrayLong.py | 68 ++-------- .../fbschemas/logdata_f144/ArrayShort.py | 68 ++-------- .../fbschemas/logdata_f144/ArrayUByte.py | 68 ++-------- .../fbschemas/logdata_f144/ArrayUInt.py | 68 ++-------- .../fbschemas/logdata_f144/ArrayULong.py | 68 ++-------- .../fbschemas/logdata_f144/ArrayUShort.py | 68 ++-------- .../fbschemas/logdata_f144/Byte.py | 47 +------ .../fbschemas/logdata_f144/Double.py | 51 ++------ .../fbschemas/logdata_f144/Float.py | 51 ++------ .../fbschemas/logdata_f144/Int.py | 47 +------ .../fbschemas/logdata_f144/LogData.py | 118 ------------------ .../fbschemas/logdata_f144/Long.py | 47 +------ .../fbschemas/logdata_f144/Short.py | 47 +------ .../fbschemas/logdata_f144/UByte.py | 47 +------ .../fbschemas/logdata_f144/UInt.py | 51 ++------ .../fbschemas/logdata_f144/ULong.py | 51 ++------ .../fbschemas/logdata_f144/UShort.py | 51 ++------ .../fbschemas/logdata_f144/Value.py | 4 +- .../fbschemas/logdata_f144/f144_LogData.py | 57 +++++++++ streaming_data_types/logdata_f144.py | 16 +-- 24 files changed, 212 insertions(+), 1153 deletions(-) delete mode 100644 streaming_data_types/fbschemas/logdata_f144/LogData.py create mode 100644 streaming_data_types/fbschemas/logdata_f144/f144_LogData.py diff --git a/streaming_data_types/fbschemas/logdata_f144/ArrayByte.py b/streaming_data_types/fbschemas/logdata_f144/ArrayByte.py index 7b26853..3efb547 100644 --- a/streaming_data_types/fbschemas/logdata_f144/ArrayByte.py +++ b/streaming_data_types/fbschemas/logdata_f144/ArrayByte.py @@ -1,34 +1,19 @@ # automatically generated by the FlatBuffers compiler, do not modify -# namespace: +# namespace: import flatbuffers -from flatbuffers.compat import import_numpy - -np = import_numpy() - class ArrayByte(object): - __slots__ = ["_tab"] + __slots__ = ['_tab'] @classmethod - def 
GetRootAs(cls, buf, offset=0): + def GetRootAsArrayByte(cls, buf, offset): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) x = ArrayByte() x.Init(buf, n + offset) return x - @classmethod - def GetRootAsArrayByte(cls, buf, offset=0): - """This method is deprecated. Please switch to GetRootAs.""" - return cls.GetRootAs(buf, offset) - - @classmethod - def ArrayByteBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier( - buf, offset, b"\x66\x31\x34\x34", size_prefixed=size_prefixed - ) - # ArrayByte def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) @@ -38,10 +23,7 @@ def Value(self, j): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) if o != 0: a = self._tab.Vector(o) - return self._tab.Get( - flatbuffers.number_types.Int8Flags, - a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1), - ) + return self._tab.Get(flatbuffers.number_types.Int8Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1)) return 0 # ArrayByte @@ -58,41 +40,7 @@ def ValueLength(self): return self._tab.VectorLen(o) return 0 - # ArrayByte - def ValueIsNone(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - return o == 0 - - -def ArrayByteStart(builder): - builder.StartObject(1) - - -def Start(builder): - return ArrayByteStart(builder) - - -def ArrayByteAddValue(builder, value): - builder.PrependUOffsetTRelativeSlot( - 0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0 - ) - - -def AddValue(builder, value): - return ArrayByteAddValue(builder, value) - - -def ArrayByteStartValueVector(builder, numElems): - return builder.StartVector(1, numElems, 1) - - -def StartValueVector(builder, numElems): - return ArrayByteStartValueVector(builder, numElems) - - -def ArrayByteEnd(builder): - return builder.EndObject() - - -def End(builder): - return ArrayByteEnd(builder) +def ArrayByteStart(builder): builder.StartObject(1) +def 
ArrayByteAddValue(builder, value): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0) +def ArrayByteStartValueVector(builder, numElems): return builder.StartVector(1, numElems, 1) +def ArrayByteEnd(builder): return builder.EndObject() diff --git a/streaming_data_types/fbschemas/logdata_f144/ArrayDouble.py b/streaming_data_types/fbschemas/logdata_f144/ArrayDouble.py index a4b01de..f92a478 100644 --- a/streaming_data_types/fbschemas/logdata_f144/ArrayDouble.py +++ b/streaming_data_types/fbschemas/logdata_f144/ArrayDouble.py @@ -1,34 +1,19 @@ # automatically generated by the FlatBuffers compiler, do not modify -# namespace: +# namespace: import flatbuffers -from flatbuffers.compat import import_numpy - -np = import_numpy() - class ArrayDouble(object): - __slots__ = ["_tab"] + __slots__ = ['_tab'] @classmethod - def GetRootAs(cls, buf, offset=0): + def GetRootAsArrayDouble(cls, buf, offset): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) x = ArrayDouble() x.Init(buf, n + offset) return x - @classmethod - def GetRootAsArrayDouble(cls, buf, offset=0): - """This method is deprecated. 
Please switch to GetRootAs.""" - return cls.GetRootAs(buf, offset) - - @classmethod - def ArrayDoubleBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier( - buf, offset, b"\x66\x31\x34\x34", size_prefixed=size_prefixed - ) - # ArrayDouble def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) @@ -38,10 +23,7 @@ def Value(self, j): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) if o != 0: a = self._tab.Vector(o) - return self._tab.Get( - flatbuffers.number_types.Float64Flags, - a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8), - ) + return self._tab.Get(flatbuffers.number_types.Float64Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8)) return 0 # ArrayDouble @@ -58,41 +40,7 @@ def ValueLength(self): return self._tab.VectorLen(o) return 0 - # ArrayDouble - def ValueIsNone(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - return o == 0 - - -def ArrayDoubleStart(builder): - builder.StartObject(1) - - -def Start(builder): - return ArrayDoubleStart(builder) - - -def ArrayDoubleAddValue(builder, value): - builder.PrependUOffsetTRelativeSlot( - 0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0 - ) - - -def AddValue(builder, value): - return ArrayDoubleAddValue(builder, value) - - -def ArrayDoubleStartValueVector(builder, numElems): - return builder.StartVector(8, numElems, 8) - - -def StartValueVector(builder, numElems): - return ArrayDoubleStartValueVector(builder, numElems) - - -def ArrayDoubleEnd(builder): - return builder.EndObject() - - -def End(builder): - return ArrayDoubleEnd(builder) +def ArrayDoubleStart(builder): builder.StartObject(1) +def ArrayDoubleAddValue(builder, value): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0) +def ArrayDoubleStartValueVector(builder, numElems): return builder.StartVector(8, numElems, 8) +def ArrayDoubleEnd(builder): return 
builder.EndObject() diff --git a/streaming_data_types/fbschemas/logdata_f144/ArrayFloat.py b/streaming_data_types/fbschemas/logdata_f144/ArrayFloat.py index 5af7077..584197d 100644 --- a/streaming_data_types/fbschemas/logdata_f144/ArrayFloat.py +++ b/streaming_data_types/fbschemas/logdata_f144/ArrayFloat.py @@ -1,34 +1,19 @@ # automatically generated by the FlatBuffers compiler, do not modify -# namespace: +# namespace: import flatbuffers -from flatbuffers.compat import import_numpy - -np = import_numpy() - class ArrayFloat(object): - __slots__ = ["_tab"] + __slots__ = ['_tab'] @classmethod - def GetRootAs(cls, buf, offset=0): + def GetRootAsArrayFloat(cls, buf, offset): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) x = ArrayFloat() x.Init(buf, n + offset) return x - @classmethod - def GetRootAsArrayFloat(cls, buf, offset=0): - """This method is deprecated. Please switch to GetRootAs.""" - return cls.GetRootAs(buf, offset) - - @classmethod - def ArrayFloatBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier( - buf, offset, b"\x66\x31\x34\x34", size_prefixed=size_prefixed - ) - # ArrayFloat def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) @@ -38,10 +23,7 @@ def Value(self, j): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) if o != 0: a = self._tab.Vector(o) - return self._tab.Get( - flatbuffers.number_types.Float32Flags, - a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4), - ) + return self._tab.Get(flatbuffers.number_types.Float32Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4)) return 0 # ArrayFloat @@ -58,41 +40,7 @@ def ValueLength(self): return self._tab.VectorLen(o) return 0 - # ArrayFloat - def ValueIsNone(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - return o == 0 - - -def ArrayFloatStart(builder): - builder.StartObject(1) - - -def Start(builder): - return 
ArrayFloatStart(builder) - - -def ArrayFloatAddValue(builder, value): - builder.PrependUOffsetTRelativeSlot( - 0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0 - ) - - -def AddValue(builder, value): - return ArrayFloatAddValue(builder, value) - - -def ArrayFloatStartValueVector(builder, numElems): - return builder.StartVector(4, numElems, 4) - - -def StartValueVector(builder, numElems): - return ArrayFloatStartValueVector(builder, numElems) - - -def ArrayFloatEnd(builder): - return builder.EndObject() - - -def End(builder): - return ArrayFloatEnd(builder) +def ArrayFloatStart(builder): builder.StartObject(1) +def ArrayFloatAddValue(builder, value): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0) +def ArrayFloatStartValueVector(builder, numElems): return builder.StartVector(4, numElems, 4) +def ArrayFloatEnd(builder): return builder.EndObject() diff --git a/streaming_data_types/fbschemas/logdata_f144/ArrayInt.py b/streaming_data_types/fbschemas/logdata_f144/ArrayInt.py index 9a7a825..58b658a 100644 --- a/streaming_data_types/fbschemas/logdata_f144/ArrayInt.py +++ b/streaming_data_types/fbschemas/logdata_f144/ArrayInt.py @@ -1,34 +1,19 @@ # automatically generated by the FlatBuffers compiler, do not modify -# namespace: +# namespace: import flatbuffers -from flatbuffers.compat import import_numpy - -np = import_numpy() - class ArrayInt(object): - __slots__ = ["_tab"] + __slots__ = ['_tab'] @classmethod - def GetRootAs(cls, buf, offset=0): + def GetRootAsArrayInt(cls, buf, offset): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) x = ArrayInt() x.Init(buf, n + offset) return x - @classmethod - def GetRootAsArrayInt(cls, buf, offset=0): - """This method is deprecated. 
Please switch to GetRootAs.""" - return cls.GetRootAs(buf, offset) - - @classmethod - def ArrayIntBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier( - buf, offset, b"\x66\x31\x34\x34", size_prefixed=size_prefixed - ) - # ArrayInt def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) @@ -38,10 +23,7 @@ def Value(self, j): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) if o != 0: a = self._tab.Vector(o) - return self._tab.Get( - flatbuffers.number_types.Int32Flags, - a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4), - ) + return self._tab.Get(flatbuffers.number_types.Int32Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4)) return 0 # ArrayInt @@ -58,41 +40,7 @@ def ValueLength(self): return self._tab.VectorLen(o) return 0 - # ArrayInt - def ValueIsNone(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - return o == 0 - - -def ArrayIntStart(builder): - builder.StartObject(1) - - -def Start(builder): - return ArrayIntStart(builder) - - -def ArrayIntAddValue(builder, value): - builder.PrependUOffsetTRelativeSlot( - 0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0 - ) - - -def AddValue(builder, value): - return ArrayIntAddValue(builder, value) - - -def ArrayIntStartValueVector(builder, numElems): - return builder.StartVector(4, numElems, 4) - - -def StartValueVector(builder, numElems): - return ArrayIntStartValueVector(builder, numElems) - - -def ArrayIntEnd(builder): - return builder.EndObject() - - -def End(builder): - return ArrayIntEnd(builder) +def ArrayIntStart(builder): builder.StartObject(1) +def ArrayIntAddValue(builder, value): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0) +def ArrayIntStartValueVector(builder, numElems): return builder.StartVector(4, numElems, 4) +def ArrayIntEnd(builder): return builder.EndObject() diff --git 
a/streaming_data_types/fbschemas/logdata_f144/ArrayLong.py b/streaming_data_types/fbschemas/logdata_f144/ArrayLong.py index 6a18d77..bad5528 100644 --- a/streaming_data_types/fbschemas/logdata_f144/ArrayLong.py +++ b/streaming_data_types/fbschemas/logdata_f144/ArrayLong.py @@ -1,34 +1,19 @@ # automatically generated by the FlatBuffers compiler, do not modify -# namespace: +# namespace: import flatbuffers -from flatbuffers.compat import import_numpy - -np = import_numpy() - class ArrayLong(object): - __slots__ = ["_tab"] + __slots__ = ['_tab'] @classmethod - def GetRootAs(cls, buf, offset=0): + def GetRootAsArrayLong(cls, buf, offset): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) x = ArrayLong() x.Init(buf, n + offset) return x - @classmethod - def GetRootAsArrayLong(cls, buf, offset=0): - """This method is deprecated. Please switch to GetRootAs.""" - return cls.GetRootAs(buf, offset) - - @classmethod - def ArrayLongBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier( - buf, offset, b"\x66\x31\x34\x34", size_prefixed=size_prefixed - ) - # ArrayLong def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) @@ -38,10 +23,7 @@ def Value(self, j): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) if o != 0: a = self._tab.Vector(o) - return self._tab.Get( - flatbuffers.number_types.Int64Flags, - a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8), - ) + return self._tab.Get(flatbuffers.number_types.Int64Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8)) return 0 # ArrayLong @@ -58,41 +40,7 @@ def ValueLength(self): return self._tab.VectorLen(o) return 0 - # ArrayLong - def ValueIsNone(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - return o == 0 - - -def ArrayLongStart(builder): - builder.StartObject(1) - - -def Start(builder): - return ArrayLongStart(builder) - - -def ArrayLongAddValue(builder, value): - 
builder.PrependUOffsetTRelativeSlot( - 0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0 - ) - - -def AddValue(builder, value): - return ArrayLongAddValue(builder, value) - - -def ArrayLongStartValueVector(builder, numElems): - return builder.StartVector(8, numElems, 8) - - -def StartValueVector(builder, numElems): - return ArrayLongStartValueVector(builder, numElems) - - -def ArrayLongEnd(builder): - return builder.EndObject() - - -def End(builder): - return ArrayLongEnd(builder) +def ArrayLongStart(builder): builder.StartObject(1) +def ArrayLongAddValue(builder, value): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0) +def ArrayLongStartValueVector(builder, numElems): return builder.StartVector(8, numElems, 8) +def ArrayLongEnd(builder): return builder.EndObject() diff --git a/streaming_data_types/fbschemas/logdata_f144/ArrayShort.py b/streaming_data_types/fbschemas/logdata_f144/ArrayShort.py index 819263b..424e613 100644 --- a/streaming_data_types/fbschemas/logdata_f144/ArrayShort.py +++ b/streaming_data_types/fbschemas/logdata_f144/ArrayShort.py @@ -1,34 +1,19 @@ # automatically generated by the FlatBuffers compiler, do not modify -# namespace: +# namespace: import flatbuffers -from flatbuffers.compat import import_numpy - -np = import_numpy() - class ArrayShort(object): - __slots__ = ["_tab"] + __slots__ = ['_tab'] @classmethod - def GetRootAs(cls, buf, offset=0): + def GetRootAsArrayShort(cls, buf, offset): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) x = ArrayShort() x.Init(buf, n + offset) return x - @classmethod - def GetRootAsArrayShort(cls, buf, offset=0): - """This method is deprecated. 
Please switch to GetRootAs.""" - return cls.GetRootAs(buf, offset) - - @classmethod - def ArrayShortBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier( - buf, offset, b"\x66\x31\x34\x34", size_prefixed=size_prefixed - ) - # ArrayShort def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) @@ -38,10 +23,7 @@ def Value(self, j): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) if o != 0: a = self._tab.Vector(o) - return self._tab.Get( - flatbuffers.number_types.Int16Flags, - a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 2), - ) + return self._tab.Get(flatbuffers.number_types.Int16Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 2)) return 0 # ArrayShort @@ -58,41 +40,7 @@ def ValueLength(self): return self._tab.VectorLen(o) return 0 - # ArrayShort - def ValueIsNone(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - return o == 0 - - -def ArrayShortStart(builder): - builder.StartObject(1) - - -def Start(builder): - return ArrayShortStart(builder) - - -def ArrayShortAddValue(builder, value): - builder.PrependUOffsetTRelativeSlot( - 0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0 - ) - - -def AddValue(builder, value): - return ArrayShortAddValue(builder, value) - - -def ArrayShortStartValueVector(builder, numElems): - return builder.StartVector(2, numElems, 2) - - -def StartValueVector(builder, numElems): - return ArrayShortStartValueVector(builder, numElems) - - -def ArrayShortEnd(builder): - return builder.EndObject() - - -def End(builder): - return ArrayShortEnd(builder) +def ArrayShortStart(builder): builder.StartObject(1) +def ArrayShortAddValue(builder, value): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0) +def ArrayShortStartValueVector(builder, numElems): return builder.StartVector(2, numElems, 2) +def ArrayShortEnd(builder): return builder.EndObject() 
diff --git a/streaming_data_types/fbschemas/logdata_f144/ArrayUByte.py b/streaming_data_types/fbschemas/logdata_f144/ArrayUByte.py index aa03a5c..a5c9ca2 100644 --- a/streaming_data_types/fbschemas/logdata_f144/ArrayUByte.py +++ b/streaming_data_types/fbschemas/logdata_f144/ArrayUByte.py @@ -1,34 +1,19 @@ # automatically generated by the FlatBuffers compiler, do not modify -# namespace: +# namespace: import flatbuffers -from flatbuffers.compat import import_numpy - -np = import_numpy() - class ArrayUByte(object): - __slots__ = ["_tab"] + __slots__ = ['_tab'] @classmethod - def GetRootAs(cls, buf, offset=0): + def GetRootAsArrayUByte(cls, buf, offset): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) x = ArrayUByte() x.Init(buf, n + offset) return x - @classmethod - def GetRootAsArrayUByte(cls, buf, offset=0): - """This method is deprecated. Please switch to GetRootAs.""" - return cls.GetRootAs(buf, offset) - - @classmethod - def ArrayUByteBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier( - buf, offset, b"\x66\x31\x34\x34", size_prefixed=size_prefixed - ) - # ArrayUByte def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) @@ -38,10 +23,7 @@ def Value(self, j): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) if o != 0: a = self._tab.Vector(o) - return self._tab.Get( - flatbuffers.number_types.Uint8Flags, - a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1), - ) + return self._tab.Get(flatbuffers.number_types.Uint8Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1)) return 0 # ArrayUByte @@ -58,41 +40,7 @@ def ValueLength(self): return self._tab.VectorLen(o) return 0 - # ArrayUByte - def ValueIsNone(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - return o == 0 - - -def ArrayUByteStart(builder): - builder.StartObject(1) - - -def Start(builder): - return ArrayUByteStart(builder) - - -def 
ArrayUByteAddValue(builder, value): - builder.PrependUOffsetTRelativeSlot( - 0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0 - ) - - -def AddValue(builder, value): - return ArrayUByteAddValue(builder, value) - - -def ArrayUByteStartValueVector(builder, numElems): - return builder.StartVector(1, numElems, 1) - - -def StartValueVector(builder, numElems): - return ArrayUByteStartValueVector(builder, numElems) - - -def ArrayUByteEnd(builder): - return builder.EndObject() - - -def End(builder): - return ArrayUByteEnd(builder) +def ArrayUByteStart(builder): builder.StartObject(1) +def ArrayUByteAddValue(builder, value): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0) +def ArrayUByteStartValueVector(builder, numElems): return builder.StartVector(1, numElems, 1) +def ArrayUByteEnd(builder): return builder.EndObject() diff --git a/streaming_data_types/fbschemas/logdata_f144/ArrayUInt.py b/streaming_data_types/fbschemas/logdata_f144/ArrayUInt.py index c1e2821..63fc129 100644 --- a/streaming_data_types/fbschemas/logdata_f144/ArrayUInt.py +++ b/streaming_data_types/fbschemas/logdata_f144/ArrayUInt.py @@ -1,34 +1,19 @@ # automatically generated by the FlatBuffers compiler, do not modify -# namespace: +# namespace: import flatbuffers -from flatbuffers.compat import import_numpy - -np = import_numpy() - class ArrayUInt(object): - __slots__ = ["_tab"] + __slots__ = ['_tab'] @classmethod - def GetRootAs(cls, buf, offset=0): + def GetRootAsArrayUInt(cls, buf, offset): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) x = ArrayUInt() x.Init(buf, n + offset) return x - @classmethod - def GetRootAsArrayUInt(cls, buf, offset=0): - """This method is deprecated. 
Please switch to GetRootAs.""" - return cls.GetRootAs(buf, offset) - - @classmethod - def ArrayUIntBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier( - buf, offset, b"\x66\x31\x34\x34", size_prefixed=size_prefixed - ) - # ArrayUInt def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) @@ -38,10 +23,7 @@ def Value(self, j): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) if o != 0: a = self._tab.Vector(o) - return self._tab.Get( - flatbuffers.number_types.Uint32Flags, - a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4), - ) + return self._tab.Get(flatbuffers.number_types.Uint32Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4)) return 0 # ArrayUInt @@ -58,41 +40,7 @@ def ValueLength(self): return self._tab.VectorLen(o) return 0 - # ArrayUInt - def ValueIsNone(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - return o == 0 - - -def ArrayUIntStart(builder): - builder.StartObject(1) - - -def Start(builder): - return ArrayUIntStart(builder) - - -def ArrayUIntAddValue(builder, value): - builder.PrependUOffsetTRelativeSlot( - 0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0 - ) - - -def AddValue(builder, value): - return ArrayUIntAddValue(builder, value) - - -def ArrayUIntStartValueVector(builder, numElems): - return builder.StartVector(4, numElems, 4) - - -def StartValueVector(builder, numElems): - return ArrayUIntStartValueVector(builder, numElems) - - -def ArrayUIntEnd(builder): - return builder.EndObject() - - -def End(builder): - return ArrayUIntEnd(builder) +def ArrayUIntStart(builder): builder.StartObject(1) +def ArrayUIntAddValue(builder, value): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0) +def ArrayUIntStartValueVector(builder, numElems): return builder.StartVector(4, numElems, 4) +def ArrayUIntEnd(builder): return builder.EndObject() diff --git 
a/streaming_data_types/fbschemas/logdata_f144/ArrayULong.py b/streaming_data_types/fbschemas/logdata_f144/ArrayULong.py index 0c6969b..56f4c46 100644 --- a/streaming_data_types/fbschemas/logdata_f144/ArrayULong.py +++ b/streaming_data_types/fbschemas/logdata_f144/ArrayULong.py @@ -1,34 +1,19 @@ # automatically generated by the FlatBuffers compiler, do not modify -# namespace: +# namespace: import flatbuffers -from flatbuffers.compat import import_numpy - -np = import_numpy() - class ArrayULong(object): - __slots__ = ["_tab"] + __slots__ = ['_tab'] @classmethod - def GetRootAs(cls, buf, offset=0): + def GetRootAsArrayULong(cls, buf, offset): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) x = ArrayULong() x.Init(buf, n + offset) return x - @classmethod - def GetRootAsArrayULong(cls, buf, offset=0): - """This method is deprecated. Please switch to GetRootAs.""" - return cls.GetRootAs(buf, offset) - - @classmethod - def ArrayULongBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier( - buf, offset, b"\x66\x31\x34\x34", size_prefixed=size_prefixed - ) - # ArrayULong def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) @@ -38,10 +23,7 @@ def Value(self, j): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) if o != 0: a = self._tab.Vector(o) - return self._tab.Get( - flatbuffers.number_types.Uint64Flags, - a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8), - ) + return self._tab.Get(flatbuffers.number_types.Uint64Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8)) return 0 # ArrayULong @@ -58,41 +40,7 @@ def ValueLength(self): return self._tab.VectorLen(o) return 0 - # ArrayULong - def ValueIsNone(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - return o == 0 - - -def ArrayULongStart(builder): - builder.StartObject(1) - - -def Start(builder): - return ArrayULongStart(builder) - - -def 
ArrayULongAddValue(builder, value): - builder.PrependUOffsetTRelativeSlot( - 0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0 - ) - - -def AddValue(builder, value): - return ArrayULongAddValue(builder, value) - - -def ArrayULongStartValueVector(builder, numElems): - return builder.StartVector(8, numElems, 8) - - -def StartValueVector(builder, numElems): - return ArrayULongStartValueVector(builder, numElems) - - -def ArrayULongEnd(builder): - return builder.EndObject() - - -def End(builder): - return ArrayULongEnd(builder) +def ArrayULongStart(builder): builder.StartObject(1) +def ArrayULongAddValue(builder, value): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0) +def ArrayULongStartValueVector(builder, numElems): return builder.StartVector(8, numElems, 8) +def ArrayULongEnd(builder): return builder.EndObject() diff --git a/streaming_data_types/fbschemas/logdata_f144/ArrayUShort.py b/streaming_data_types/fbschemas/logdata_f144/ArrayUShort.py index 4b5f49c..07b928e 100644 --- a/streaming_data_types/fbschemas/logdata_f144/ArrayUShort.py +++ b/streaming_data_types/fbschemas/logdata_f144/ArrayUShort.py @@ -1,34 +1,19 @@ # automatically generated by the FlatBuffers compiler, do not modify -# namespace: +# namespace: import flatbuffers -from flatbuffers.compat import import_numpy - -np = import_numpy() - class ArrayUShort(object): - __slots__ = ["_tab"] + __slots__ = ['_tab'] @classmethod - def GetRootAs(cls, buf, offset=0): + def GetRootAsArrayUShort(cls, buf, offset): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) x = ArrayUShort() x.Init(buf, n + offset) return x - @classmethod - def GetRootAsArrayUShort(cls, buf, offset=0): - """This method is deprecated. 
Please switch to GetRootAs.""" - return cls.GetRootAs(buf, offset) - - @classmethod - def ArrayUShortBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier( - buf, offset, b"\x66\x31\x34\x34", size_prefixed=size_prefixed - ) - # ArrayUShort def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) @@ -38,10 +23,7 @@ def Value(self, j): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) if o != 0: a = self._tab.Vector(o) - return self._tab.Get( - flatbuffers.number_types.Uint16Flags, - a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 2), - ) + return self._tab.Get(flatbuffers.number_types.Uint16Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 2)) return 0 # ArrayUShort @@ -58,41 +40,7 @@ def ValueLength(self): return self._tab.VectorLen(o) return 0 - # ArrayUShort - def ValueIsNone(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - return o == 0 - - -def ArrayUShortStart(builder): - builder.StartObject(1) - - -def Start(builder): - return ArrayUShortStart(builder) - - -def ArrayUShortAddValue(builder, value): - builder.PrependUOffsetTRelativeSlot( - 0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0 - ) - - -def AddValue(builder, value): - return ArrayUShortAddValue(builder, value) - - -def ArrayUShortStartValueVector(builder, numElems): - return builder.StartVector(2, numElems, 2) - - -def StartValueVector(builder, numElems): - return ArrayUShortStartValueVector(builder, numElems) - - -def ArrayUShortEnd(builder): - return builder.EndObject() - - -def End(builder): - return ArrayUShortEnd(builder) +def ArrayUShortStart(builder): builder.StartObject(1) +def ArrayUShortAddValue(builder, value): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0) +def ArrayUShortStartValueVector(builder, numElems): return builder.StartVector(2, numElems, 2) +def ArrayUShortEnd(builder): return 
builder.EndObject() diff --git a/streaming_data_types/fbschemas/logdata_f144/Byte.py b/streaming_data_types/fbschemas/logdata_f144/Byte.py index 50a49fd..03b1f1d 100644 --- a/streaming_data_types/fbschemas/logdata_f144/Byte.py +++ b/streaming_data_types/fbschemas/logdata_f144/Byte.py @@ -1,34 +1,19 @@ # automatically generated by the FlatBuffers compiler, do not modify -# namespace: +# namespace: import flatbuffers -from flatbuffers.compat import import_numpy - -np = import_numpy() - class Byte(object): - __slots__ = ["_tab"] + __slots__ = ['_tab'] @classmethod - def GetRootAs(cls, buf, offset=0): + def GetRootAsByte(cls, buf, offset): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) x = Byte() x.Init(buf, n + offset) return x - @classmethod - def GetRootAsByte(cls, buf, offset=0): - """This method is deprecated. Please switch to GetRootAs.""" - return cls.GetRootAs(buf, offset) - - @classmethod - def ByteBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier( - buf, offset, b"\x66\x31\x34\x34", size_prefixed=size_prefixed - ) - # Byte def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) @@ -40,26 +25,6 @@ def Value(self): return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) return 0 - -def ByteStart(builder): - builder.StartObject(1) - - -def Start(builder): - return ByteStart(builder) - - -def ByteAddValue(builder, value): - builder.PrependInt8Slot(0, value, 0) - - -def AddValue(builder, value): - return ByteAddValue(builder, value) - - -def ByteEnd(builder): - return builder.EndObject() - - -def End(builder): - return ByteEnd(builder) +def ByteStart(builder): builder.StartObject(1) +def ByteAddValue(builder, value): builder.PrependInt8Slot(0, value, 0) +def ByteEnd(builder): return builder.EndObject() diff --git a/streaming_data_types/fbschemas/logdata_f144/Double.py b/streaming_data_types/fbschemas/logdata_f144/Double.py index f67a8f7..912557e 100644 
--- a/streaming_data_types/fbschemas/logdata_f144/Double.py +++ b/streaming_data_types/fbschemas/logdata_f144/Double.py @@ -1,34 +1,19 @@ # automatically generated by the FlatBuffers compiler, do not modify -# namespace: +# namespace: import flatbuffers -from flatbuffers.compat import import_numpy - -np = import_numpy() - class Double(object): - __slots__ = ["_tab"] + __slots__ = ['_tab'] @classmethod - def GetRootAs(cls, buf, offset=0): + def GetRootAsDouble(cls, buf, offset): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) x = Double() x.Init(buf, n + offset) return x - @classmethod - def GetRootAsDouble(cls, buf, offset=0): - """This method is deprecated. Please switch to GetRootAs.""" - return cls.GetRootAs(buf, offset) - - @classmethod - def DoubleBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier( - buf, offset, b"\x66\x31\x34\x34", size_prefixed=size_prefixed - ) - # Double def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) @@ -37,31 +22,9 @@ def Init(self, buf, pos): def Value(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) if o != 0: - return self._tab.Get( - flatbuffers.number_types.Float64Flags, o + self._tab.Pos - ) + return self._tab.Get(flatbuffers.number_types.Float64Flags, o + self._tab.Pos) return 0.0 - -def DoubleStart(builder): - builder.StartObject(1) - - -def Start(builder): - return DoubleStart(builder) - - -def DoubleAddValue(builder, value): - builder.PrependFloat64Slot(0, value, 0.0) - - -def AddValue(builder, value): - return DoubleAddValue(builder, value) - - -def DoubleEnd(builder): - return builder.EndObject() - - -def End(builder): - return DoubleEnd(builder) +def DoubleStart(builder): builder.StartObject(1) +def DoubleAddValue(builder, value): builder.PrependFloat64Slot(0, value, 0.0) +def DoubleEnd(builder): return builder.EndObject() diff --git a/streaming_data_types/fbschemas/logdata_f144/Float.py 
b/streaming_data_types/fbschemas/logdata_f144/Float.py index 7f2ef22..e99f662 100644 --- a/streaming_data_types/fbschemas/logdata_f144/Float.py +++ b/streaming_data_types/fbschemas/logdata_f144/Float.py @@ -1,34 +1,19 @@ # automatically generated by the FlatBuffers compiler, do not modify -# namespace: +# namespace: import flatbuffers -from flatbuffers.compat import import_numpy - -np = import_numpy() - class Float(object): - __slots__ = ["_tab"] + __slots__ = ['_tab'] @classmethod - def GetRootAs(cls, buf, offset=0): + def GetRootAsFloat(cls, buf, offset): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) x = Float() x.Init(buf, n + offset) return x - @classmethod - def GetRootAsFloat(cls, buf, offset=0): - """This method is deprecated. Please switch to GetRootAs.""" - return cls.GetRootAs(buf, offset) - - @classmethod - def FloatBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier( - buf, offset, b"\x66\x31\x34\x34", size_prefixed=size_prefixed - ) - # Float def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) @@ -37,31 +22,9 @@ def Init(self, buf, pos): def Value(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) if o != 0: - return self._tab.Get( - flatbuffers.number_types.Float32Flags, o + self._tab.Pos - ) + return self._tab.Get(flatbuffers.number_types.Float32Flags, o + self._tab.Pos) return 0.0 - -def FloatStart(builder): - builder.StartObject(1) - - -def Start(builder): - return FloatStart(builder) - - -def FloatAddValue(builder, value): - builder.PrependFloat32Slot(0, value, 0.0) - - -def AddValue(builder, value): - return FloatAddValue(builder, value) - - -def FloatEnd(builder): - return builder.EndObject() - - -def End(builder): - return FloatEnd(builder) +def FloatStart(builder): builder.StartObject(1) +def FloatAddValue(builder, value): builder.PrependFloat32Slot(0, value, 0.0) +def FloatEnd(builder): return builder.EndObject() diff 
--git a/streaming_data_types/fbschemas/logdata_f144/Int.py b/streaming_data_types/fbschemas/logdata_f144/Int.py index c35a22f..ae7a4f3 100644 --- a/streaming_data_types/fbschemas/logdata_f144/Int.py +++ b/streaming_data_types/fbschemas/logdata_f144/Int.py @@ -1,34 +1,19 @@ # automatically generated by the FlatBuffers compiler, do not modify -# namespace: +# namespace: import flatbuffers -from flatbuffers.compat import import_numpy - -np = import_numpy() - class Int(object): - __slots__ = ["_tab"] + __slots__ = ['_tab'] @classmethod - def GetRootAs(cls, buf, offset=0): + def GetRootAsInt(cls, buf, offset): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) x = Int() x.Init(buf, n + offset) return x - @classmethod - def GetRootAsInt(cls, buf, offset=0): - """This method is deprecated. Please switch to GetRootAs.""" - return cls.GetRootAs(buf, offset) - - @classmethod - def IntBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier( - buf, offset, b"\x66\x31\x34\x34", size_prefixed=size_prefixed - ) - # Int def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) @@ -40,26 +25,6 @@ def Value(self): return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) return 0 - -def IntStart(builder): - builder.StartObject(1) - - -def Start(builder): - return IntStart(builder) - - -def IntAddValue(builder, value): - builder.PrependInt32Slot(0, value, 0) - - -def AddValue(builder, value): - return IntAddValue(builder, value) - - -def IntEnd(builder): - return builder.EndObject() - - -def End(builder): - return IntEnd(builder) +def IntStart(builder): builder.StartObject(1) +def IntAddValue(builder, value): builder.PrependInt32Slot(0, value, 0) +def IntEnd(builder): return builder.EndObject() diff --git a/streaming_data_types/fbschemas/logdata_f144/LogData.py b/streaming_data_types/fbschemas/logdata_f144/LogData.py deleted file mode 100644 index 39a6019..0000000 --- 
a/streaming_data_types/fbschemas/logdata_f144/LogData.py +++ /dev/null @@ -1,118 +0,0 @@ -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: - -import flatbuffers -from flatbuffers.compat import import_numpy - -np = import_numpy() - - -class LogData(object): - __slots__ = ["_tab"] - - @classmethod - def GetRootAs(cls, buf, offset=0): - n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = LogData() - x.Init(buf, n + offset) - return x - - @classmethod - def GetRootAsLogData(cls, buf, offset=0): - """This method is deprecated. Please switch to GetRootAs.""" - return cls.GetRootAs(buf, offset) - - @classmethod - def LogDataBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier( - buf, offset, b"\x66\x31\x34\x34", size_prefixed=size_prefixed - ) - - # LogData - def Init(self, buf, pos): - self._tab = flatbuffers.table.Table(buf, pos) - - # LogData - def SourceName(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.String(o + self._tab.Pos) - return None - - # LogData - def Timestamp(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) - if o != 0: - return self._tab.Get(flatbuffers.number_types.Int64Flags, o + self._tab.Pos) - return 0 - - # LogData - def ValueType(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) - if o != 0: - return self._tab.Get(flatbuffers.number_types.Uint8Flags, o + self._tab.Pos) - return 0 - - # LogData - def Value(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) - if o != 0: - from flatbuffers.table import Table - - obj = Table(bytearray(), 0) - self._tab.Union(obj, o) - return obj - return None - - -def LogDataStart(builder): - builder.StartObject(4) - - -def Start(builder): - return LogDataStart(builder) - - -def LogDataAddSourceName(builder, sourceName): - 
builder.PrependUOffsetTRelativeSlot( - 0, flatbuffers.number_types.UOffsetTFlags.py_type(sourceName), 0 - ) - - -def AddSourceName(builder, sourceName): - return LogDataAddSourceName(builder, sourceName) - - -def LogDataAddTimestamp(builder, timestamp): - builder.PrependInt64Slot(1, timestamp, 0) - - -def AddTimestamp(builder, timestamp): - return LogDataAddTimestamp(builder, timestamp) - - -def LogDataAddValueType(builder, valueType): - builder.PrependUint8Slot(2, valueType, 0) - - -def AddValueType(builder, valueType): - return LogDataAddValueType(builder, valueType) - - -def LogDataAddValue(builder, value): - builder.PrependUOffsetTRelativeSlot( - 3, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0 - ) - - -def AddValue(builder, value): - return LogDataAddValue(builder, value) - - -def LogDataEnd(builder): - return builder.EndObject() - - -def End(builder): - return LogDataEnd(builder) diff --git a/streaming_data_types/fbschemas/logdata_f144/Long.py b/streaming_data_types/fbschemas/logdata_f144/Long.py index 4a6ca03..3708376 100644 --- a/streaming_data_types/fbschemas/logdata_f144/Long.py +++ b/streaming_data_types/fbschemas/logdata_f144/Long.py @@ -1,34 +1,19 @@ # automatically generated by the FlatBuffers compiler, do not modify -# namespace: +# namespace: import flatbuffers -from flatbuffers.compat import import_numpy - -np = import_numpy() - class Long(object): - __slots__ = ["_tab"] + __slots__ = ['_tab'] @classmethod - def GetRootAs(cls, buf, offset=0): + def GetRootAsLong(cls, buf, offset): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) x = Long() x.Init(buf, n + offset) return x - @classmethod - def GetRootAsLong(cls, buf, offset=0): - """This method is deprecated. 
Please switch to GetRootAs.""" - return cls.GetRootAs(buf, offset) - - @classmethod - def LongBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier( - buf, offset, b"\x66\x31\x34\x34", size_prefixed=size_prefixed - ) - # Long def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) @@ -40,26 +25,6 @@ def Value(self): return self._tab.Get(flatbuffers.number_types.Int64Flags, o + self._tab.Pos) return 0 - -def LongStart(builder): - builder.StartObject(1) - - -def Start(builder): - return LongStart(builder) - - -def LongAddValue(builder, value): - builder.PrependInt64Slot(0, value, 0) - - -def AddValue(builder, value): - return LongAddValue(builder, value) - - -def LongEnd(builder): - return builder.EndObject() - - -def End(builder): - return LongEnd(builder) +def LongStart(builder): builder.StartObject(1) +def LongAddValue(builder, value): builder.PrependInt64Slot(0, value, 0) +def LongEnd(builder): return builder.EndObject() diff --git a/streaming_data_types/fbschemas/logdata_f144/Short.py b/streaming_data_types/fbschemas/logdata_f144/Short.py index ca6d598..116cfc2 100644 --- a/streaming_data_types/fbschemas/logdata_f144/Short.py +++ b/streaming_data_types/fbschemas/logdata_f144/Short.py @@ -1,34 +1,19 @@ # automatically generated by the FlatBuffers compiler, do not modify -# namespace: +# namespace: import flatbuffers -from flatbuffers.compat import import_numpy - -np = import_numpy() - class Short(object): - __slots__ = ["_tab"] + __slots__ = ['_tab'] @classmethod - def GetRootAs(cls, buf, offset=0): + def GetRootAsShort(cls, buf, offset): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) x = Short() x.Init(buf, n + offset) return x - @classmethod - def GetRootAsShort(cls, buf, offset=0): - """This method is deprecated. 
Please switch to GetRootAs.""" - return cls.GetRootAs(buf, offset) - - @classmethod - def ShortBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier( - buf, offset, b"\x66\x31\x34\x34", size_prefixed=size_prefixed - ) - # Short def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) @@ -40,26 +25,6 @@ def Value(self): return self._tab.Get(flatbuffers.number_types.Int16Flags, o + self._tab.Pos) return 0 - -def ShortStart(builder): - builder.StartObject(1) - - -def Start(builder): - return ShortStart(builder) - - -def ShortAddValue(builder, value): - builder.PrependInt16Slot(0, value, 0) - - -def AddValue(builder, value): - return ShortAddValue(builder, value) - - -def ShortEnd(builder): - return builder.EndObject() - - -def End(builder): - return ShortEnd(builder) +def ShortStart(builder): builder.StartObject(1) +def ShortAddValue(builder, value): builder.PrependInt16Slot(0, value, 0) +def ShortEnd(builder): return builder.EndObject() diff --git a/streaming_data_types/fbschemas/logdata_f144/UByte.py b/streaming_data_types/fbschemas/logdata_f144/UByte.py index 42a1927..483b5f9 100644 --- a/streaming_data_types/fbschemas/logdata_f144/UByte.py +++ b/streaming_data_types/fbschemas/logdata_f144/UByte.py @@ -1,34 +1,19 @@ # automatically generated by the FlatBuffers compiler, do not modify -# namespace: +# namespace: import flatbuffers -from flatbuffers.compat import import_numpy - -np = import_numpy() - class UByte(object): - __slots__ = ["_tab"] + __slots__ = ['_tab'] @classmethod - def GetRootAs(cls, buf, offset=0): + def GetRootAsUByte(cls, buf, offset): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) x = UByte() x.Init(buf, n + offset) return x - @classmethod - def GetRootAsUByte(cls, buf, offset=0): - """This method is deprecated. 
Please switch to GetRootAs.""" - return cls.GetRootAs(buf, offset) - - @classmethod - def UByteBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier( - buf, offset, b"\x66\x31\x34\x34", size_prefixed=size_prefixed - ) - # UByte def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) @@ -40,26 +25,6 @@ def Value(self): return self._tab.Get(flatbuffers.number_types.Uint8Flags, o + self._tab.Pos) return 0 - -def UByteStart(builder): - builder.StartObject(1) - - -def Start(builder): - return UByteStart(builder) - - -def UByteAddValue(builder, value): - builder.PrependUint8Slot(0, value, 0) - - -def AddValue(builder, value): - return UByteAddValue(builder, value) - - -def UByteEnd(builder): - return builder.EndObject() - - -def End(builder): - return UByteEnd(builder) +def UByteStart(builder): builder.StartObject(1) +def UByteAddValue(builder, value): builder.PrependUint8Slot(0, value, 0) +def UByteEnd(builder): return builder.EndObject() diff --git a/streaming_data_types/fbschemas/logdata_f144/UInt.py b/streaming_data_types/fbschemas/logdata_f144/UInt.py index 10771f0..91be34a 100644 --- a/streaming_data_types/fbschemas/logdata_f144/UInt.py +++ b/streaming_data_types/fbschemas/logdata_f144/UInt.py @@ -1,34 +1,19 @@ # automatically generated by the FlatBuffers compiler, do not modify -# namespace: +# namespace: import flatbuffers -from flatbuffers.compat import import_numpy - -np = import_numpy() - class UInt(object): - __slots__ = ["_tab"] + __slots__ = ['_tab'] @classmethod - def GetRootAs(cls, buf, offset=0): + def GetRootAsUInt(cls, buf, offset): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) x = UInt() x.Init(buf, n + offset) return x - @classmethod - def GetRootAsUInt(cls, buf, offset=0): - """This method is deprecated. 
Please switch to GetRootAs.""" - return cls.GetRootAs(buf, offset) - - @classmethod - def UIntBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier( - buf, offset, b"\x66\x31\x34\x34", size_prefixed=size_prefixed - ) - # UInt def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) @@ -37,31 +22,9 @@ def Init(self, buf, pos): def Value(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) if o != 0: - return self._tab.Get( - flatbuffers.number_types.Uint32Flags, o + self._tab.Pos - ) + return self._tab.Get(flatbuffers.number_types.Uint32Flags, o + self._tab.Pos) return 0 - -def UIntStart(builder): - builder.StartObject(1) - - -def Start(builder): - return UIntStart(builder) - - -def UIntAddValue(builder, value): - builder.PrependUint32Slot(0, value, 0) - - -def AddValue(builder, value): - return UIntAddValue(builder, value) - - -def UIntEnd(builder): - return builder.EndObject() - - -def End(builder): - return UIntEnd(builder) +def UIntStart(builder): builder.StartObject(1) +def UIntAddValue(builder, value): builder.PrependUint32Slot(0, value, 0) +def UIntEnd(builder): return builder.EndObject() diff --git a/streaming_data_types/fbschemas/logdata_f144/ULong.py b/streaming_data_types/fbschemas/logdata_f144/ULong.py index bdf75e4..3db8717 100644 --- a/streaming_data_types/fbschemas/logdata_f144/ULong.py +++ b/streaming_data_types/fbschemas/logdata_f144/ULong.py @@ -1,34 +1,19 @@ # automatically generated by the FlatBuffers compiler, do not modify -# namespace: +# namespace: import flatbuffers -from flatbuffers.compat import import_numpy - -np = import_numpy() - class ULong(object): - __slots__ = ["_tab"] + __slots__ = ['_tab'] @classmethod - def GetRootAs(cls, buf, offset=0): + def GetRootAsULong(cls, buf, offset): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) x = ULong() x.Init(buf, n + offset) return x - @classmethod - def GetRootAsULong(cls, buf, 
offset=0): - """This method is deprecated. Please switch to GetRootAs.""" - return cls.GetRootAs(buf, offset) - - @classmethod - def ULongBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier( - buf, offset, b"\x66\x31\x34\x34", size_prefixed=size_prefixed - ) - # ULong def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) @@ -37,31 +22,9 @@ def Init(self, buf, pos): def Value(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) if o != 0: - return self._tab.Get( - flatbuffers.number_types.Uint64Flags, o + self._tab.Pos - ) + return self._tab.Get(flatbuffers.number_types.Uint64Flags, o + self._tab.Pos) return 0 - -def ULongStart(builder): - builder.StartObject(1) - - -def Start(builder): - return ULongStart(builder) - - -def ULongAddValue(builder, value): - builder.PrependUint64Slot(0, value, 0) - - -def AddValue(builder, value): - return ULongAddValue(builder, value) - - -def ULongEnd(builder): - return builder.EndObject() - - -def End(builder): - return ULongEnd(builder) +def ULongStart(builder): builder.StartObject(1) +def ULongAddValue(builder, value): builder.PrependUint64Slot(0, value, 0) +def ULongEnd(builder): return builder.EndObject() diff --git a/streaming_data_types/fbschemas/logdata_f144/UShort.py b/streaming_data_types/fbschemas/logdata_f144/UShort.py index 1dca26b..92b6ab8 100644 --- a/streaming_data_types/fbschemas/logdata_f144/UShort.py +++ b/streaming_data_types/fbschemas/logdata_f144/UShort.py @@ -1,34 +1,19 @@ # automatically generated by the FlatBuffers compiler, do not modify -# namespace: +# namespace: import flatbuffers -from flatbuffers.compat import import_numpy - -np = import_numpy() - class UShort(object): - __slots__ = ["_tab"] + __slots__ = ['_tab'] @classmethod - def GetRootAs(cls, buf, offset=0): + def GetRootAsUShort(cls, buf, offset): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) x = UShort() x.Init(buf, n + offset) 
return x - @classmethod - def GetRootAsUShort(cls, buf, offset=0): - """This method is deprecated. Please switch to GetRootAs.""" - return cls.GetRootAs(buf, offset) - - @classmethod - def UShortBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier( - buf, offset, b"\x66\x31\x34\x34", size_prefixed=size_prefixed - ) - # UShort def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) @@ -37,31 +22,9 @@ def Init(self, buf, pos): def Value(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) if o != 0: - return self._tab.Get( - flatbuffers.number_types.Uint16Flags, o + self._tab.Pos - ) + return self._tab.Get(flatbuffers.number_types.Uint16Flags, o + self._tab.Pos) return 0 - -def UShortStart(builder): - builder.StartObject(1) - - -def Start(builder): - return UShortStart(builder) - - -def UShortAddValue(builder, value): - builder.PrependUint16Slot(0, value, 0) - - -def AddValue(builder, value): - return UShortAddValue(builder, value) - - -def UShortEnd(builder): - return builder.EndObject() - - -def End(builder): - return UShortEnd(builder) +def UShortStart(builder): builder.StartObject(1) +def UShortAddValue(builder, value): builder.PrependUint16Slot(0, value, 0) +def UShortEnd(builder): return builder.EndObject() diff --git a/streaming_data_types/fbschemas/logdata_f144/Value.py b/streaming_data_types/fbschemas/logdata_f144/Value.py index 0f39b02..ae593ef 100644 --- a/streaming_data_types/fbschemas/logdata_f144/Value.py +++ b/streaming_data_types/fbschemas/logdata_f144/Value.py @@ -1,7 +1,6 @@ # automatically generated by the FlatBuffers compiler, do not modify -# namespace: - +# namespace: class Value(object): NONE = 0 @@ -25,3 +24,4 @@ class Value(object): ArrayULong = 18 ArrayFloat = 19 ArrayDouble = 20 + diff --git a/streaming_data_types/fbschemas/logdata_f144/f144_LogData.py b/streaming_data_types/fbschemas/logdata_f144/f144_LogData.py new file mode 100644 index 
0000000..6af1e50 --- /dev/null +++ b/streaming_data_types/fbschemas/logdata_f144/f144_LogData.py @@ -0,0 +1,57 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers + +class f144_LogData(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAsf144_LogData(cls, buf, offset): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = f144_LogData() + x.Init(buf, n + offset) + return x + + # f144_LogData + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # f144_LogData + def SourceName(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + # f144_LogData + def Timestamp(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int64Flags, o + self._tab.Pos) + return 0 + + # f144_LogData + def ValueType(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Uint8Flags, o + self._tab.Pos) + return 0 + + # f144_LogData + def Value(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + if o != 0: + from flatbuffers.table import Table + obj = Table(bytearray(), 0) + self._tab.Union(obj, o) + return obj + return None + +def f144_LogDataStart(builder): builder.StartObject(4) +def f144_LogDataAddSourceName(builder, sourceName): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(sourceName), 0) +def f144_LogDataAddTimestamp(builder, timestamp): builder.PrependInt64Slot(1, timestamp, 0) +def f144_LogDataAddValueType(builder, valueType): builder.PrependUint8Slot(2, valueType, 0) +def f144_LogDataAddValue(builder, value): builder.PrependUOffsetTRelativeSlot(3, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0) +def 
f144_LogDataEnd(builder): return builder.EndObject() diff --git a/streaming_data_types/logdata_f144.py b/streaming_data_types/logdata_f144.py index 7123605..10a0b65 100644 --- a/streaming_data_types/logdata_f144.py +++ b/streaming_data_types/logdata_f144.py @@ -5,7 +5,7 @@ import flatbuffers import numpy as np -from streaming_data_types.fbschemas.logdata_f144 import LogData +from streaming_data_types.fbschemas.logdata_f144 import f144_LogData from streaming_data_types.fbschemas.logdata_f144.ArrayByte import ( ArrayByte, ArrayByteAddValue, @@ -238,12 +238,12 @@ def serialise_f144( ) else: raise NotImplementedError("f144 only supports scalars or 1D array values") - LogData.LogDataStart(builder) - LogData.LogDataAddSourceName(builder, source_name_offset) - LogData.LogDataAddValue(builder, value_offset) - LogData.LogDataAddValueType(builder, value_type) - LogData.LogDataAddTimestamp(builder, timestamp_unix_ns) - end = LogData.LogDataEnd(builder) + f144_LogData.f144_LogDataStart(builder) + f144_LogData.f144_LogDataAddSourceName(builder, source_name_offset) + f144_LogData.f144_LogDataAddValue(builder, value_offset) + f144_LogData.f144_LogDataAddValueType(builder, value_type) + f144_LogData.f144_LogDataAddTimestamp(builder, timestamp_unix_ns) + end = f144_LogData.f144_LogDataEnd(builder) builder.Finish(end, file_identifier=FILE_IDENTIFIER) return bytes(builder.Output()) @@ -284,7 +284,7 @@ def serialise_f144( def deserialise_f144(buffer: Union[bytearray, bytes]) -> ExtractedLogData: check_schema_identifier(buffer, FILE_IDENTIFIER) - log_data = LogData.LogData.GetRootAs(buffer, 0) + log_data = f144_LogData.f144_LogData.GetRootAsf144_LogData(buffer, 0) source_name = log_data.SourceName() if log_data.SourceName() else b"" value_offset = log_data.Value() From 15d842dc1f957c6343cb6b0749d433663f982e3a Mon Sep 17 00:00:00 2001 From: Kenan Muric Date: Wed, 7 Dec 2022 13:09:25 +0100 Subject: [PATCH 297/363] fixes due to changes in se00 schema in streaming-data-types --- .gitignore 
| 4 +- streaming_data_types/array_1d_se00.py | 50 +++--- .../fbschemas/array_1d_se00/DoubleArray.py | 47 +---- .../fbschemas/array_1d_se00/FloatArray.py | 47 +---- .../fbschemas/array_1d_se00/Int16Array.py | 47 +---- .../fbschemas/array_1d_se00/Int32Array.py | 47 +---- .../fbschemas/array_1d_se00/Int64Array.py | 47 +---- .../fbschemas/array_1d_se00/Int8Array.py | 47 +---- .../fbschemas/array_1d_se00/Location.py | 3 +- .../fbschemas/array_1d_se00/UInt16Array.py | 47 +---- .../fbschemas/array_1d_se00/UInt32Array.py | 47 +---- .../fbschemas/array_1d_se00/UInt64Array.py | 47 +---- .../fbschemas/array_1d_se00/UInt8Array.py | 47 +---- .../fbschemas/array_1d_se00/ValueUnion.py | 3 +- ...tData.py => se00_SampleEnvironmentData.py} | 168 +++++------------- 15 files changed, 166 insertions(+), 532 deletions(-) rename streaming_data_types/fbschemas/array_1d_se00/{SampleEnvironmentData.py => se00_SampleEnvironmentData.py} (51%) diff --git a/.gitignore b/.gitignore index 37260d0..7ba83bc 100644 --- a/.gitignore +++ b/.gitignore @@ -50,11 +50,13 @@ coverage.xml *.py,cover .hypothesis/ .pytest_cache/ - # Translations *.mo *.pot +# macOS artifacts +*.DS_Store + # Django stuff: *.log local_settings.py diff --git a/streaming_data_types/array_1d_se00.py b/streaming_data_types/array_1d_se00.py index ffcc160..bec7760 100644 --- a/streaming_data_types/array_1d_se00.py +++ b/streaming_data_types/array_1d_se00.py @@ -16,19 +16,19 @@ ) from streaming_data_types.fbschemas.array_1d_se00.Location import Location -from streaming_data_types.fbschemas.array_1d_se00.SampleEnvironmentData import ( - SampleEnvironmentData, - SampleEnvironmentDataAddChannel, - SampleEnvironmentDataAddMessageCounter, - SampleEnvironmentDataAddName, - SampleEnvironmentDataAddPacketTimestamp, - SampleEnvironmentDataAddTimeDelta, - SampleEnvironmentDataAddTimestampLocation, - SampleEnvironmentDataAddTimestamps, - SampleEnvironmentDataAddValues, - SampleEnvironmentDataAddValuesType, - SampleEnvironmentDataEnd, - 
SampleEnvironmentDataStart, +from streaming_data_types.fbschemas.array_1d_se00.se00_SampleEnvironmentData import ( + se00_SampleEnvironmentData, + se00_SampleEnvironmentDataAddChannel, + se00_SampleEnvironmentDataAddMessageCounter, + se00_SampleEnvironmentDataAddName, + se00_SampleEnvironmentDataAddPacketTimestamp, + se00_SampleEnvironmentDataAddTimeDelta, + se00_SampleEnvironmentDataAddTimestampLocation, + se00_SampleEnvironmentDataAddTimestamps, + se00_SampleEnvironmentDataAddValues, + se00_SampleEnvironmentDataAddValuesType, + se00_SampleEnvironmentDataEnd, + se00_SampleEnvironmentDataStart, ) from streaming_data_types.fbschemas.array_1d_se00.ValueUnion import ValueUnion from streaming_data_types.utils import check_schema_identifier @@ -105,19 +105,19 @@ def serialise_se00( name_offset = builder.CreateString(name) - SampleEnvironmentDataStart(builder) - SampleEnvironmentDataAddName(builder, name_offset) - SampleEnvironmentDataAddTimeDelta(builder, sample_ts_delta) - SampleEnvironmentDataAddTimestampLocation(builder, ts_location) - SampleEnvironmentDataAddMessageCounter(builder, message_counter) - SampleEnvironmentDataAddChannel(builder, channel) - SampleEnvironmentDataAddPacketTimestamp(builder, timestamp_unix_ns) - SampleEnvironmentDataAddValues(builder, value_offset) - SampleEnvironmentDataAddValuesType(builder, numpy_type_map[temp_values.dtype]) + se00_SampleEnvironmentDataStart(builder) + se00_SampleEnvironmentDataAddName(builder, name_offset) + se00_SampleEnvironmentDataAddTimeDelta(builder, sample_ts_delta) + se00_SampleEnvironmentDataAddTimestampLocation(builder, ts_location) + se00_SampleEnvironmentDataAddMessageCounter(builder, message_counter) + se00_SampleEnvironmentDataAddChannel(builder, channel) + se00_SampleEnvironmentDataAddPacketTimestamp(builder, timestamp_unix_ns) + se00_SampleEnvironmentDataAddValues(builder, value_offset) + se00_SampleEnvironmentDataAddValuesType(builder, numpy_type_map[temp_values.dtype]) if value_timestamps is not None: - 
SampleEnvironmentDataAddTimestamps(builder, timestamps_offset) + se00_SampleEnvironmentDataAddTimestamps(builder, timestamps_offset) - SE_Message = SampleEnvironmentDataEnd(builder) + SE_Message = se00_SampleEnvironmentDataEnd(builder) builder.Finish(SE_Message, file_identifier=FILE_IDENTIFIER) return bytes(builder.Output()) @@ -126,7 +126,7 @@ def serialise_se00( def deserialise_se00(buffer: Union[bytearray, bytes]) -> Response: check_schema_identifier(buffer, FILE_IDENTIFIER) - SE_data = SampleEnvironmentData.GetRootAsSampleEnvironmentData(buffer, 0) + SE_data = se00_SampleEnvironmentData.GetRootAsse00_SampleEnvironmentData(buffer, 0) value_timestamps = None if not SE_data.TimestampsIsNone(): diff --git a/streaming_data_types/fbschemas/array_1d_se00/DoubleArray.py b/streaming_data_types/fbschemas/array_1d_se00/DoubleArray.py index 36432c7..8ffcb60 100644 --- a/streaming_data_types/fbschemas/array_1d_se00/DoubleArray.py +++ b/streaming_data_types/fbschemas/array_1d_se00/DoubleArray.py @@ -1,15 +1,13 @@ # automatically generated by the FlatBuffers compiler, do not modify -# namespace: +# namespace: import flatbuffers from flatbuffers.compat import import_numpy - np = import_numpy() - class DoubleArray(object): - __slots__ = ["_tab"] + __slots__ = ['_tab'] @classmethod def GetRootAs(cls, buf, offset=0): @@ -22,12 +20,9 @@ def GetRootAs(cls, buf, offset=0): def GetRootAsDoubleArray(cls, buf, offset=0): """This method is deprecated. 
Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) - @classmethod def DoubleArrayBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier( - buf, offset, b"\x73\x65\x30\x30", size_prefixed=size_prefixed - ) + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x73\x65\x30\x30", size_prefixed=size_prefixed) # DoubleArray def Init(self, buf, pos): @@ -38,10 +33,7 @@ def Value(self, j): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) if o != 0: a = self._tab.Vector(o) - return self._tab.Get( - flatbuffers.number_types.Float64Flags, - a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8), - ) + return self._tab.Get(flatbuffers.number_types.Float64Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8)) return 0 # DoubleArray @@ -63,36 +55,15 @@ def ValueIsNone(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) return o == 0 - -def DoubleArrayStart(builder): - builder.StartObject(1) - - +def DoubleArrayStart(builder): builder.StartObject(1) def Start(builder): return DoubleArrayStart(builder) - - -def DoubleArrayAddValue(builder, value): - builder.PrependUOffsetTRelativeSlot( - 0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0 - ) - - +def DoubleArrayAddValue(builder, value): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0) def AddValue(builder, value): return DoubleArrayAddValue(builder, value) - - -def DoubleArrayStartValueVector(builder, numElems): - return builder.StartVector(8, numElems, 8) - - +def DoubleArrayStartValueVector(builder, numElems): return builder.StartVector(8, numElems, 8) def StartValueVector(builder, numElems): return DoubleArrayStartValueVector(builder, numElems) - - -def DoubleArrayEnd(builder): - return builder.EndObject() - - +def DoubleArrayEnd(builder): return builder.EndObject() def End(builder): - return DoubleArrayEnd(builder) + return 
DoubleArrayEnd(builder) \ No newline at end of file diff --git a/streaming_data_types/fbschemas/array_1d_se00/FloatArray.py b/streaming_data_types/fbschemas/array_1d_se00/FloatArray.py index ec2238f..2298eb1 100644 --- a/streaming_data_types/fbschemas/array_1d_se00/FloatArray.py +++ b/streaming_data_types/fbschemas/array_1d_se00/FloatArray.py @@ -1,15 +1,13 @@ # automatically generated by the FlatBuffers compiler, do not modify -# namespace: +# namespace: import flatbuffers from flatbuffers.compat import import_numpy - np = import_numpy() - class FloatArray(object): - __slots__ = ["_tab"] + __slots__ = ['_tab'] @classmethod def GetRootAs(cls, buf, offset=0): @@ -22,12 +20,9 @@ def GetRootAs(cls, buf, offset=0): def GetRootAsFloatArray(cls, buf, offset=0): """This method is deprecated. Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) - @classmethod def FloatArrayBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier( - buf, offset, b"\x73\x65\x30\x30", size_prefixed=size_prefixed - ) + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x73\x65\x30\x30", size_prefixed=size_prefixed) # FloatArray def Init(self, buf, pos): @@ -38,10 +33,7 @@ def Value(self, j): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) if o != 0: a = self._tab.Vector(o) - return self._tab.Get( - flatbuffers.number_types.Float32Flags, - a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4), - ) + return self._tab.Get(flatbuffers.number_types.Float32Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4)) return 0 # FloatArray @@ -63,36 +55,15 @@ def ValueIsNone(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) return o == 0 - -def FloatArrayStart(builder): - builder.StartObject(1) - - +def FloatArrayStart(builder): builder.StartObject(1) def Start(builder): return FloatArrayStart(builder) - - -def FloatArrayAddValue(builder, value): - 
builder.PrependUOffsetTRelativeSlot( - 0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0 - ) - - +def FloatArrayAddValue(builder, value): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0) def AddValue(builder, value): return FloatArrayAddValue(builder, value) - - -def FloatArrayStartValueVector(builder, numElems): - return builder.StartVector(4, numElems, 4) - - +def FloatArrayStartValueVector(builder, numElems): return builder.StartVector(4, numElems, 4) def StartValueVector(builder, numElems): return FloatArrayStartValueVector(builder, numElems) - - -def FloatArrayEnd(builder): - return builder.EndObject() - - +def FloatArrayEnd(builder): return builder.EndObject() def End(builder): - return FloatArrayEnd(builder) + return FloatArrayEnd(builder) \ No newline at end of file diff --git a/streaming_data_types/fbschemas/array_1d_se00/Int16Array.py b/streaming_data_types/fbschemas/array_1d_se00/Int16Array.py index 801d2ec..03df4ba 100644 --- a/streaming_data_types/fbschemas/array_1d_se00/Int16Array.py +++ b/streaming_data_types/fbschemas/array_1d_se00/Int16Array.py @@ -1,15 +1,13 @@ # automatically generated by the FlatBuffers compiler, do not modify -# namespace: +# namespace: import flatbuffers from flatbuffers.compat import import_numpy - np = import_numpy() - class Int16Array(object): - __slots__ = ["_tab"] + __slots__ = ['_tab'] @classmethod def GetRootAs(cls, buf, offset=0): @@ -22,12 +20,9 @@ def GetRootAs(cls, buf, offset=0): def GetRootAsInt16Array(cls, buf, offset=0): """This method is deprecated. 
Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) - @classmethod def Int16ArrayBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier( - buf, offset, b"\x73\x65\x30\x30", size_prefixed=size_prefixed - ) + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x73\x65\x30\x30", size_prefixed=size_prefixed) # Int16Array def Init(self, buf, pos): @@ -38,10 +33,7 @@ def Value(self, j): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) if o != 0: a = self._tab.Vector(o) - return self._tab.Get( - flatbuffers.number_types.Int16Flags, - a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 2), - ) + return self._tab.Get(flatbuffers.number_types.Int16Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 2)) return 0 # Int16Array @@ -63,36 +55,15 @@ def ValueIsNone(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) return o == 0 - -def Int16ArrayStart(builder): - builder.StartObject(1) - - +def Int16ArrayStart(builder): builder.StartObject(1) def Start(builder): return Int16ArrayStart(builder) - - -def Int16ArrayAddValue(builder, value): - builder.PrependUOffsetTRelativeSlot( - 0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0 - ) - - +def Int16ArrayAddValue(builder, value): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0) def AddValue(builder, value): return Int16ArrayAddValue(builder, value) - - -def Int16ArrayStartValueVector(builder, numElems): - return builder.StartVector(2, numElems, 2) - - +def Int16ArrayStartValueVector(builder, numElems): return builder.StartVector(2, numElems, 2) def StartValueVector(builder, numElems): return Int16ArrayStartValueVector(builder, numElems) - - -def Int16ArrayEnd(builder): - return builder.EndObject() - - +def Int16ArrayEnd(builder): return builder.EndObject() def End(builder): - return Int16ArrayEnd(builder) + return Int16ArrayEnd(builder) \ No 
newline at end of file diff --git a/streaming_data_types/fbschemas/array_1d_se00/Int32Array.py b/streaming_data_types/fbschemas/array_1d_se00/Int32Array.py index d716e81..27a8627 100644 --- a/streaming_data_types/fbschemas/array_1d_se00/Int32Array.py +++ b/streaming_data_types/fbschemas/array_1d_se00/Int32Array.py @@ -1,15 +1,13 @@ # automatically generated by the FlatBuffers compiler, do not modify -# namespace: +# namespace: import flatbuffers from flatbuffers.compat import import_numpy - np = import_numpy() - class Int32Array(object): - __slots__ = ["_tab"] + __slots__ = ['_tab'] @classmethod def GetRootAs(cls, buf, offset=0): @@ -22,12 +20,9 @@ def GetRootAs(cls, buf, offset=0): def GetRootAsInt32Array(cls, buf, offset=0): """This method is deprecated. Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) - @classmethod def Int32ArrayBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier( - buf, offset, b"\x73\x65\x30\x30", size_prefixed=size_prefixed - ) + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x73\x65\x30\x30", size_prefixed=size_prefixed) # Int32Array def Init(self, buf, pos): @@ -38,10 +33,7 @@ def Value(self, j): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) if o != 0: a = self._tab.Vector(o) - return self._tab.Get( - flatbuffers.number_types.Int32Flags, - a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4), - ) + return self._tab.Get(flatbuffers.number_types.Int32Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4)) return 0 # Int32Array @@ -63,36 +55,15 @@ def ValueIsNone(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) return o == 0 - -def Int32ArrayStart(builder): - builder.StartObject(1) - - +def Int32ArrayStart(builder): builder.StartObject(1) def Start(builder): return Int32ArrayStart(builder) - - -def Int32ArrayAddValue(builder, value): - builder.PrependUOffsetTRelativeSlot( - 0, 
flatbuffers.number_types.UOffsetTFlags.py_type(value), 0 - ) - - +def Int32ArrayAddValue(builder, value): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0) def AddValue(builder, value): return Int32ArrayAddValue(builder, value) - - -def Int32ArrayStartValueVector(builder, numElems): - return builder.StartVector(4, numElems, 4) - - +def Int32ArrayStartValueVector(builder, numElems): return builder.StartVector(4, numElems, 4) def StartValueVector(builder, numElems): return Int32ArrayStartValueVector(builder, numElems) - - -def Int32ArrayEnd(builder): - return builder.EndObject() - - +def Int32ArrayEnd(builder): return builder.EndObject() def End(builder): - return Int32ArrayEnd(builder) + return Int32ArrayEnd(builder) \ No newline at end of file diff --git a/streaming_data_types/fbschemas/array_1d_se00/Int64Array.py b/streaming_data_types/fbschemas/array_1d_se00/Int64Array.py index f31fc83..ece7e54 100644 --- a/streaming_data_types/fbschemas/array_1d_se00/Int64Array.py +++ b/streaming_data_types/fbschemas/array_1d_se00/Int64Array.py @@ -1,15 +1,13 @@ # automatically generated by the FlatBuffers compiler, do not modify -# namespace: +# namespace: import flatbuffers from flatbuffers.compat import import_numpy - np = import_numpy() - class Int64Array(object): - __slots__ = ["_tab"] + __slots__ = ['_tab'] @classmethod def GetRootAs(cls, buf, offset=0): @@ -22,12 +20,9 @@ def GetRootAs(cls, buf, offset=0): def GetRootAsInt64Array(cls, buf, offset=0): """This method is deprecated. 
Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) - @classmethod def Int64ArrayBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier( - buf, offset, b"\x73\x65\x30\x30", size_prefixed=size_prefixed - ) + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x73\x65\x30\x30", size_prefixed=size_prefixed) # Int64Array def Init(self, buf, pos): @@ -38,10 +33,7 @@ def Value(self, j): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) if o != 0: a = self._tab.Vector(o) - return self._tab.Get( - flatbuffers.number_types.Int64Flags, - a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8), - ) + return self._tab.Get(flatbuffers.number_types.Int64Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8)) return 0 # Int64Array @@ -63,36 +55,15 @@ def ValueIsNone(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) return o == 0 - -def Int64ArrayStart(builder): - builder.StartObject(1) - - +def Int64ArrayStart(builder): builder.StartObject(1) def Start(builder): return Int64ArrayStart(builder) - - -def Int64ArrayAddValue(builder, value): - builder.PrependUOffsetTRelativeSlot( - 0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0 - ) - - +def Int64ArrayAddValue(builder, value): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0) def AddValue(builder, value): return Int64ArrayAddValue(builder, value) - - -def Int64ArrayStartValueVector(builder, numElems): - return builder.StartVector(8, numElems, 8) - - +def Int64ArrayStartValueVector(builder, numElems): return builder.StartVector(8, numElems, 8) def StartValueVector(builder, numElems): return Int64ArrayStartValueVector(builder, numElems) - - -def Int64ArrayEnd(builder): - return builder.EndObject() - - +def Int64ArrayEnd(builder): return builder.EndObject() def End(builder): - return Int64ArrayEnd(builder) + return Int64ArrayEnd(builder) \ No 
newline at end of file diff --git a/streaming_data_types/fbschemas/array_1d_se00/Int8Array.py b/streaming_data_types/fbschemas/array_1d_se00/Int8Array.py index 06f283f..f30a9b1 100644 --- a/streaming_data_types/fbschemas/array_1d_se00/Int8Array.py +++ b/streaming_data_types/fbschemas/array_1d_se00/Int8Array.py @@ -1,15 +1,13 @@ # automatically generated by the FlatBuffers compiler, do not modify -# namespace: +# namespace: import flatbuffers from flatbuffers.compat import import_numpy - np = import_numpy() - class Int8Array(object): - __slots__ = ["_tab"] + __slots__ = ['_tab'] @classmethod def GetRootAs(cls, buf, offset=0): @@ -22,12 +20,9 @@ def GetRootAs(cls, buf, offset=0): def GetRootAsInt8Array(cls, buf, offset=0): """This method is deprecated. Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) - @classmethod def Int8ArrayBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier( - buf, offset, b"\x73\x65\x30\x30", size_prefixed=size_prefixed - ) + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x73\x65\x30\x30", size_prefixed=size_prefixed) # Int8Array def Init(self, buf, pos): @@ -38,10 +33,7 @@ def Value(self, j): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) if o != 0: a = self._tab.Vector(o) - return self._tab.Get( - flatbuffers.number_types.Int8Flags, - a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1), - ) + return self._tab.Get(flatbuffers.number_types.Int8Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1)) return 0 # Int8Array @@ -63,36 +55,15 @@ def ValueIsNone(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) return o == 0 - -def Int8ArrayStart(builder): - builder.StartObject(1) - - +def Int8ArrayStart(builder): builder.StartObject(1) def Start(builder): return Int8ArrayStart(builder) - - -def Int8ArrayAddValue(builder, value): - builder.PrependUOffsetTRelativeSlot( - 0, 
flatbuffers.number_types.UOffsetTFlags.py_type(value), 0 - ) - - +def Int8ArrayAddValue(builder, value): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0) def AddValue(builder, value): return Int8ArrayAddValue(builder, value) - - -def Int8ArrayStartValueVector(builder, numElems): - return builder.StartVector(1, numElems, 1) - - +def Int8ArrayStartValueVector(builder, numElems): return builder.StartVector(1, numElems, 1) def StartValueVector(builder, numElems): return Int8ArrayStartValueVector(builder, numElems) - - -def Int8ArrayEnd(builder): - return builder.EndObject() - - +def Int8ArrayEnd(builder): return builder.EndObject() def End(builder): - return Int8ArrayEnd(builder) + return Int8ArrayEnd(builder) \ No newline at end of file diff --git a/streaming_data_types/fbschemas/array_1d_se00/Location.py b/streaming_data_types/fbschemas/array_1d_se00/Location.py index 7af8fcf..99cf994 100644 --- a/streaming_data_types/fbschemas/array_1d_se00/Location.py +++ b/streaming_data_types/fbschemas/array_1d_se00/Location.py @@ -1,7 +1,6 @@ # automatically generated by the FlatBuffers compiler, do not modify -# namespace: - +# namespace: class Location(object): Unknown = 0 diff --git a/streaming_data_types/fbschemas/array_1d_se00/UInt16Array.py b/streaming_data_types/fbschemas/array_1d_se00/UInt16Array.py index 0d8a7a3..8fe1f77 100644 --- a/streaming_data_types/fbschemas/array_1d_se00/UInt16Array.py +++ b/streaming_data_types/fbschemas/array_1d_se00/UInt16Array.py @@ -1,15 +1,13 @@ # automatically generated by the FlatBuffers compiler, do not modify -# namespace: +# namespace: import flatbuffers from flatbuffers.compat import import_numpy - np = import_numpy() - class UInt16Array(object): - __slots__ = ["_tab"] + __slots__ = ['_tab'] @classmethod def GetRootAs(cls, buf, offset=0): @@ -22,12 +20,9 @@ def GetRootAs(cls, buf, offset=0): def GetRootAsUInt16Array(cls, buf, offset=0): """This method is deprecated. 
Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) - @classmethod def UInt16ArrayBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier( - buf, offset, b"\x73\x65\x30\x30", size_prefixed=size_prefixed - ) + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x73\x65\x30\x30", size_prefixed=size_prefixed) # UInt16Array def Init(self, buf, pos): @@ -38,10 +33,7 @@ def Value(self, j): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) if o != 0: a = self._tab.Vector(o) - return self._tab.Get( - flatbuffers.number_types.Uint16Flags, - a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 2), - ) + return self._tab.Get(flatbuffers.number_types.Uint16Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 2)) return 0 # UInt16Array @@ -63,36 +55,15 @@ def ValueIsNone(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) return o == 0 - -def UInt16ArrayStart(builder): - builder.StartObject(1) - - +def UInt16ArrayStart(builder): builder.StartObject(1) def Start(builder): return UInt16ArrayStart(builder) - - -def UInt16ArrayAddValue(builder, value): - builder.PrependUOffsetTRelativeSlot( - 0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0 - ) - - +def UInt16ArrayAddValue(builder, value): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0) def AddValue(builder, value): return UInt16ArrayAddValue(builder, value) - - -def UInt16ArrayStartValueVector(builder, numElems): - return builder.StartVector(2, numElems, 2) - - +def UInt16ArrayStartValueVector(builder, numElems): return builder.StartVector(2, numElems, 2) def StartValueVector(builder, numElems): return UInt16ArrayStartValueVector(builder, numElems) - - -def UInt16ArrayEnd(builder): - return builder.EndObject() - - +def UInt16ArrayEnd(builder): return builder.EndObject() def End(builder): - return UInt16ArrayEnd(builder) + return 
UInt16ArrayEnd(builder) \ No newline at end of file diff --git a/streaming_data_types/fbschemas/array_1d_se00/UInt32Array.py b/streaming_data_types/fbschemas/array_1d_se00/UInt32Array.py index a69431c..904ed86 100644 --- a/streaming_data_types/fbschemas/array_1d_se00/UInt32Array.py +++ b/streaming_data_types/fbschemas/array_1d_se00/UInt32Array.py @@ -1,15 +1,13 @@ # automatically generated by the FlatBuffers compiler, do not modify -# namespace: +# namespace: import flatbuffers from flatbuffers.compat import import_numpy - np = import_numpy() - class UInt32Array(object): - __slots__ = ["_tab"] + __slots__ = ['_tab'] @classmethod def GetRootAs(cls, buf, offset=0): @@ -22,12 +20,9 @@ def GetRootAs(cls, buf, offset=0): def GetRootAsUInt32Array(cls, buf, offset=0): """This method is deprecated. Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) - @classmethod def UInt32ArrayBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier( - buf, offset, b"\x73\x65\x30\x30", size_prefixed=size_prefixed - ) + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x73\x65\x30\x30", size_prefixed=size_prefixed) # UInt32Array def Init(self, buf, pos): @@ -38,10 +33,7 @@ def Value(self, j): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) if o != 0: a = self._tab.Vector(o) - return self._tab.Get( - flatbuffers.number_types.Uint32Flags, - a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4), - ) + return self._tab.Get(flatbuffers.number_types.Uint32Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4)) return 0 # UInt32Array @@ -63,36 +55,15 @@ def ValueIsNone(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) return o == 0 - -def UInt32ArrayStart(builder): - builder.StartObject(1) - - +def UInt32ArrayStart(builder): builder.StartObject(1) def Start(builder): return UInt32ArrayStart(builder) - - -def UInt32ArrayAddValue(builder, value): - 
builder.PrependUOffsetTRelativeSlot( - 0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0 - ) - - +def UInt32ArrayAddValue(builder, value): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0) def AddValue(builder, value): return UInt32ArrayAddValue(builder, value) - - -def UInt32ArrayStartValueVector(builder, numElems): - return builder.StartVector(4, numElems, 4) - - +def UInt32ArrayStartValueVector(builder, numElems): return builder.StartVector(4, numElems, 4) def StartValueVector(builder, numElems): return UInt32ArrayStartValueVector(builder, numElems) - - -def UInt32ArrayEnd(builder): - return builder.EndObject() - - +def UInt32ArrayEnd(builder): return builder.EndObject() def End(builder): - return UInt32ArrayEnd(builder) + return UInt32ArrayEnd(builder) \ No newline at end of file diff --git a/streaming_data_types/fbschemas/array_1d_se00/UInt64Array.py b/streaming_data_types/fbschemas/array_1d_se00/UInt64Array.py index c9af60b..cad65f6 100644 --- a/streaming_data_types/fbschemas/array_1d_se00/UInt64Array.py +++ b/streaming_data_types/fbschemas/array_1d_se00/UInt64Array.py @@ -1,15 +1,13 @@ # automatically generated by the FlatBuffers compiler, do not modify -# namespace: +# namespace: import flatbuffers from flatbuffers.compat import import_numpy - np = import_numpy() - class UInt64Array(object): - __slots__ = ["_tab"] + __slots__ = ['_tab'] @classmethod def GetRootAs(cls, buf, offset=0): @@ -22,12 +20,9 @@ def GetRootAs(cls, buf, offset=0): def GetRootAsUInt64Array(cls, buf, offset=0): """This method is deprecated. 
Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) - @classmethod def UInt64ArrayBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier( - buf, offset, b"\x73\x65\x30\x30", size_prefixed=size_prefixed - ) + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x73\x65\x30\x30", size_prefixed=size_prefixed) # UInt64Array def Init(self, buf, pos): @@ -38,10 +33,7 @@ def Value(self, j): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) if o != 0: a = self._tab.Vector(o) - return self._tab.Get( - flatbuffers.number_types.Uint64Flags, - a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8), - ) + return self._tab.Get(flatbuffers.number_types.Uint64Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8)) return 0 # UInt64Array @@ -63,36 +55,15 @@ def ValueIsNone(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) return o == 0 - -def UInt64ArrayStart(builder): - builder.StartObject(1) - - +def UInt64ArrayStart(builder): builder.StartObject(1) def Start(builder): return UInt64ArrayStart(builder) - - -def UInt64ArrayAddValue(builder, value): - builder.PrependUOffsetTRelativeSlot( - 0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0 - ) - - +def UInt64ArrayAddValue(builder, value): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0) def AddValue(builder, value): return UInt64ArrayAddValue(builder, value) - - -def UInt64ArrayStartValueVector(builder, numElems): - return builder.StartVector(8, numElems, 8) - - +def UInt64ArrayStartValueVector(builder, numElems): return builder.StartVector(8, numElems, 8) def StartValueVector(builder, numElems): return UInt64ArrayStartValueVector(builder, numElems) - - -def UInt64ArrayEnd(builder): - return builder.EndObject() - - +def UInt64ArrayEnd(builder): return builder.EndObject() def End(builder): - return UInt64ArrayEnd(builder) + return 
UInt64ArrayEnd(builder) \ No newline at end of file diff --git a/streaming_data_types/fbschemas/array_1d_se00/UInt8Array.py b/streaming_data_types/fbschemas/array_1d_se00/UInt8Array.py index fcd56eb..ba3d05e 100644 --- a/streaming_data_types/fbschemas/array_1d_se00/UInt8Array.py +++ b/streaming_data_types/fbschemas/array_1d_se00/UInt8Array.py @@ -1,15 +1,13 @@ # automatically generated by the FlatBuffers compiler, do not modify -# namespace: +# namespace: import flatbuffers from flatbuffers.compat import import_numpy - np = import_numpy() - class UInt8Array(object): - __slots__ = ["_tab"] + __slots__ = ['_tab'] @classmethod def GetRootAs(cls, buf, offset=0): @@ -22,12 +20,9 @@ def GetRootAs(cls, buf, offset=0): def GetRootAsUInt8Array(cls, buf, offset=0): """This method is deprecated. Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) - @classmethod def UInt8ArrayBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier( - buf, offset, b"\x73\x65\x30\x30", size_prefixed=size_prefixed - ) + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x73\x65\x30\x30", size_prefixed=size_prefixed) # UInt8Array def Init(self, buf, pos): @@ -38,10 +33,7 @@ def Value(self, j): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) if o != 0: a = self._tab.Vector(o) - return self._tab.Get( - flatbuffers.number_types.Uint8Flags, - a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1), - ) + return self._tab.Get(flatbuffers.number_types.Uint8Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1)) return 0 # UInt8Array @@ -63,36 +55,15 @@ def ValueIsNone(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) return o == 0 - -def UInt8ArrayStart(builder): - builder.StartObject(1) - - +def UInt8ArrayStart(builder): builder.StartObject(1) def Start(builder): return UInt8ArrayStart(builder) - - -def UInt8ArrayAddValue(builder, value): - 
builder.PrependUOffsetTRelativeSlot( - 0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0 - ) - - +def UInt8ArrayAddValue(builder, value): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0) def AddValue(builder, value): return UInt8ArrayAddValue(builder, value) - - -def UInt8ArrayStartValueVector(builder, numElems): - return builder.StartVector(1, numElems, 1) - - +def UInt8ArrayStartValueVector(builder, numElems): return builder.StartVector(1, numElems, 1) def StartValueVector(builder, numElems): return UInt8ArrayStartValueVector(builder, numElems) - - -def UInt8ArrayEnd(builder): - return builder.EndObject() - - +def UInt8ArrayEnd(builder): return builder.EndObject() def End(builder): - return UInt8ArrayEnd(builder) + return UInt8ArrayEnd(builder) \ No newline at end of file diff --git a/streaming_data_types/fbschemas/array_1d_se00/ValueUnion.py b/streaming_data_types/fbschemas/array_1d_se00/ValueUnion.py index 4dd1bf1..514ab29 100644 --- a/streaming_data_types/fbschemas/array_1d_se00/ValueUnion.py +++ b/streaming_data_types/fbschemas/array_1d_se00/ValueUnion.py @@ -1,7 +1,6 @@ # automatically generated by the FlatBuffers compiler, do not modify -# namespace: - +# namespace: class ValueUnion(object): NONE = 0 diff --git a/streaming_data_types/fbschemas/array_1d_se00/SampleEnvironmentData.py b/streaming_data_types/fbschemas/array_1d_se00/se00_SampleEnvironmentData.py similarity index 51% rename from streaming_data_types/fbschemas/array_1d_se00/SampleEnvironmentData.py rename to streaming_data_types/fbschemas/array_1d_se00/se00_SampleEnvironmentData.py index 1cfb476..34bc859 100644 --- a/streaming_data_types/fbschemas/array_1d_se00/SampleEnvironmentData.py +++ b/streaming_data_types/fbschemas/array_1d_se00/se00_SampleEnvironmentData.py @@ -1,228 +1,152 @@ # automatically generated by the FlatBuffers compiler, do not modify -# namespace: +# namespace: import flatbuffers from flatbuffers.compat import 
import_numpy - np = import_numpy() - -class SampleEnvironmentData(object): - __slots__ = ["_tab"] +class se00_SampleEnvironmentData(object): + __slots__ = ['_tab'] @classmethod def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = SampleEnvironmentData() + x = se00_SampleEnvironmentData() x.Init(buf, n + offset) return x @classmethod - def GetRootAsSampleEnvironmentData(cls, buf, offset=0): + def GetRootAsse00_SampleEnvironmentData(cls, buf, offset=0): """This method is deprecated. Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) - @classmethod - def SampleEnvironmentDataBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier( - buf, offset, b"\x73\x65\x30\x30", size_prefixed=size_prefixed - ) + def se00_SampleEnvironmentDataBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x73\x65\x30\x30", size_prefixed=size_prefixed) - # SampleEnvironmentData + # se00_SampleEnvironmentData def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) - # SampleEnvironmentData + # se00_SampleEnvironmentData def Name(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) if o != 0: return self._tab.String(o + self._tab.Pos) return None - # SampleEnvironmentData + # se00_SampleEnvironmentData def Channel(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) if o != 0: return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) return 0 - # SampleEnvironmentData + # se00_SampleEnvironmentData def PacketTimestamp(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) if o != 0: return self._tab.Get(flatbuffers.number_types.Int64Flags, o + self._tab.Pos) return 0 - # SampleEnvironmentData + # se00_SampleEnvironmentData def TimeDelta(self): o = 
flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) if o != 0: - return self._tab.Get( - flatbuffers.number_types.Float64Flags, o + self._tab.Pos - ) + return self._tab.Get(flatbuffers.number_types.Float64Flags, o + self._tab.Pos) return 0.0 - # SampleEnvironmentData + # se00_SampleEnvironmentData def TimestampLocation(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) if o != 0: return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) return 0 - # SampleEnvironmentData + # se00_SampleEnvironmentData def ValuesType(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) if o != 0: return self._tab.Get(flatbuffers.number_types.Uint8Flags, o + self._tab.Pos) return 0 - # SampleEnvironmentData + # se00_SampleEnvironmentData def Values(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16)) if o != 0: from flatbuffers.table import Table - obj = Table(bytearray(), 0) self._tab.Union(obj, o) return obj return None - # SampleEnvironmentData + # se00_SampleEnvironmentData def Timestamps(self, j): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(18)) if o != 0: a = self._tab.Vector(o) - return self._tab.Get( - flatbuffers.number_types.Int64Flags, - a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8), - ) + return self._tab.Get(flatbuffers.number_types.Int64Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8)) return 0 - # SampleEnvironmentData + # se00_SampleEnvironmentData def TimestampsAsNumpy(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(18)) if o != 0: return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int64Flags, o) return 0 - # SampleEnvironmentData + # se00_SampleEnvironmentData def TimestampsLength(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(18)) if o != 0: return self._tab.VectorLen(o) return 0 - # SampleEnvironmentData + # se00_SampleEnvironmentData 
def TimestampsIsNone(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(18)) return o == 0 - # SampleEnvironmentData + # se00_SampleEnvironmentData def MessageCounter(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(20)) if o != 0: return self._tab.Get(flatbuffers.number_types.Int64Flags, o + self._tab.Pos) return 0 - -def SampleEnvironmentDataStart(builder): - builder.StartObject(9) - - +def se00_SampleEnvironmentDataStart(builder): builder.StartObject(9) def Start(builder): - return SampleEnvironmentDataStart(builder) - - -def SampleEnvironmentDataAddName(builder, name): - builder.PrependUOffsetTRelativeSlot( - 0, flatbuffers.number_types.UOffsetTFlags.py_type(name), 0 - ) - - + return se00_SampleEnvironmentDataStart(builder) +def se00_SampleEnvironmentDataAddName(builder, name): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(name), 0) def AddName(builder, name): - return SampleEnvironmentDataAddName(builder, name) - - -def SampleEnvironmentDataAddChannel(builder, channel): - builder.PrependInt32Slot(1, channel, 0) - - + return se00_SampleEnvironmentDataAddName(builder, name) +def se00_SampleEnvironmentDataAddChannel(builder, channel): builder.PrependInt32Slot(1, channel, 0) def AddChannel(builder, channel): - return SampleEnvironmentDataAddChannel(builder, channel) - - -def SampleEnvironmentDataAddPacketTimestamp(builder, packetTimestamp): - builder.PrependInt64Slot(2, packetTimestamp, 0) - - + return se00_SampleEnvironmentDataAddChannel(builder, channel) +def se00_SampleEnvironmentDataAddPacketTimestamp(builder, packetTimestamp): builder.PrependInt64Slot(2, packetTimestamp, 0) def AddPacketTimestamp(builder, packetTimestamp): - return SampleEnvironmentDataAddPacketTimestamp(builder, packetTimestamp) - - -def SampleEnvironmentDataAddTimeDelta(builder, timeDelta): - builder.PrependFloat64Slot(3, timeDelta, 0.0) - - + return 
se00_SampleEnvironmentDataAddPacketTimestamp(builder, packetTimestamp) +def se00_SampleEnvironmentDataAddTimeDelta(builder, timeDelta): builder.PrependFloat64Slot(3, timeDelta, 0.0) def AddTimeDelta(builder, timeDelta): - return SampleEnvironmentDataAddTimeDelta(builder, timeDelta) - - -def SampleEnvironmentDataAddTimestampLocation(builder, timestampLocation): - builder.PrependInt8Slot(4, timestampLocation, 0) - - + return se00_SampleEnvironmentDataAddTimeDelta(builder, timeDelta) +def se00_SampleEnvironmentDataAddTimestampLocation(builder, timestampLocation): builder.PrependInt8Slot(4, timestampLocation, 0) def AddTimestampLocation(builder, timestampLocation): - return SampleEnvironmentDataAddTimestampLocation(builder, timestampLocation) - - -def SampleEnvironmentDataAddValuesType(builder, valuesType): - builder.PrependUint8Slot(5, valuesType, 0) - - + return se00_SampleEnvironmentDataAddTimestampLocation(builder, timestampLocation) +def se00_SampleEnvironmentDataAddValuesType(builder, valuesType): builder.PrependUint8Slot(5, valuesType, 0) def AddValuesType(builder, valuesType): - return SampleEnvironmentDataAddValuesType(builder, valuesType) - - -def SampleEnvironmentDataAddValues(builder, values): - builder.PrependUOffsetTRelativeSlot( - 6, flatbuffers.number_types.UOffsetTFlags.py_type(values), 0 - ) - - + return se00_SampleEnvironmentDataAddValuesType(builder, valuesType) +def se00_SampleEnvironmentDataAddValues(builder, values): builder.PrependUOffsetTRelativeSlot(6, flatbuffers.number_types.UOffsetTFlags.py_type(values), 0) def AddValues(builder, values): - return SampleEnvironmentDataAddValues(builder, values) - - -def SampleEnvironmentDataAddTimestamps(builder, timestamps): - builder.PrependUOffsetTRelativeSlot( - 7, flatbuffers.number_types.UOffsetTFlags.py_type(timestamps), 0 - ) - - + return se00_SampleEnvironmentDataAddValues(builder, values) +def se00_SampleEnvironmentDataAddTimestamps(builder, timestamps): builder.PrependUOffsetTRelativeSlot(7, 
flatbuffers.number_types.UOffsetTFlags.py_type(timestamps), 0) def AddTimestamps(builder, timestamps): - return SampleEnvironmentDataAddTimestamps(builder, timestamps) - - -def SampleEnvironmentDataStartTimestampsVector(builder, numElems): - return builder.StartVector(8, numElems, 8) - - + return se00_SampleEnvironmentDataAddTimestamps(builder, timestamps) +def se00_SampleEnvironmentDataStartTimestampsVector(builder, numElems): return builder.StartVector(8, numElems, 8) def StartTimestampsVector(builder, numElems): - return SampleEnvironmentDataStartTimestampsVector(builder, numElems) - - -def SampleEnvironmentDataAddMessageCounter(builder, messageCounter): - builder.PrependInt64Slot(8, messageCounter, 0) - - + return se00_SampleEnvironmentDataStartTimestampsVector(builder, numElems) +def se00_SampleEnvironmentDataAddMessageCounter(builder, messageCounter): builder.PrependInt64Slot(8, messageCounter, 0) def AddMessageCounter(builder, messageCounter): - return SampleEnvironmentDataAddMessageCounter(builder, messageCounter) - - -def SampleEnvironmentDataEnd(builder): - return builder.EndObject() - - + return se00_SampleEnvironmentDataAddMessageCounter(builder, messageCounter) +def se00_SampleEnvironmentDataEnd(builder): return builder.EndObject() def End(builder): - return SampleEnvironmentDataEnd(builder) + return se00_SampleEnvironmentDataEnd(builder) \ No newline at end of file From 798f77049c454c395c057d80d84f407e91e8b06a Mon Sep 17 00:00:00 2001 From: Kenan Muric Date: Wed, 7 Dec 2022 13:57:54 +0100 Subject: [PATCH 298/363] bumping version --- streaming_data_types/_version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/streaming_data_types/_version.py b/streaming_data_types/_version.py index 6bd697b..b008080 100644 --- a/streaming_data_types/_version.py +++ b/streaming_data_types/_version.py @@ -1,4 +1,4 @@ # Version is not directly defined in __init__ because that causes all # run time dependencies to become build-time dependencies when it 
is # imported in setup.py -version = "0.19.0" +version = "0.20.0" From 8b0d868ae3ceec5d7a734f7c965414cc15cb3ddb Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Mon, 12 Dec 2022 15:22:52 +0100 Subject: [PATCH 299/363] use pyenv instead of conda --- Jenkinsfile | 19 +++++++------------ requirements-dev.txt | 3 ++- tox.ini | 3 +-- 3 files changed, 10 insertions(+), 15 deletions(-) diff --git a/Jenkinsfile b/Jenkinsfile index e83e56f..314cd38 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -2,10 +2,8 @@ import ecdcpipeline.ContainerBuildNode import ecdcpipeline.PipelineBuilder -project = "python-streaming-data-types" - container_build_nodes = [ - 'centos7-release': ContainerBuildNode.getDefaultContainerBuildNode('centos7-gcc8'), + 'centos7': new ContainerBuildNode('dockerregistry.esss.dk/ecdc_group/build-node-images/centos7-build-node:10.0.2-dev', '/usr/bin/scl enable devtoolset-11 rh-python38 -- /bin/bash -e -x') ] // Define number of old builds to keep. @@ -36,29 +34,26 @@ builders = pipeline_builder.createBuilders { container -> pipeline_builder.stage("${container.key}: Dependencies") { container.sh """ - /opt/miniconda/bin/conda init bash - export PATH=/opt/miniconda/bin:$PATH - python --version which python - python -m pip install --user -r ${project}/requirements.txt - python -m pip install --user -r ${project}/requirements-dev.txt + python -m pip install --user -r ${pipeline_builder.project}/requirements-dev.txt """ } // stage pipeline_builder.stage("${container.key}: Test") { def test_output = "TestResults.xml" container.sh """ - export PATH=/opt/miniconda/bin:$PATH - cd ${project} + cd ${pipeline_builder.project} + pyenv local 3.7 3.8 3.9 + pyenv versions python -m tox -- --junitxml=${test_output} """ - container.copyFrom("${project}/${test_output}", ".") + container.copyFrom("${pipeline_builder.project}/${test_output}", ".") xunit thresholds: [failed(unstableThreshold: '0')], tools: [JUnit(deleteOutputFiles: true, pattern: '*.xml', skipNoTestFiles: false, 
stopProcessingIfError: true)] } // stage } // createBuilders node { - dir("${project}") { + dir("${pipeline_builder.project}") { scm_vars = checkout scm } diff --git a/requirements-dev.txt b/requirements-dev.txt index 0d111a1..563c3f2 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -5,6 +5,7 @@ isort==5.10.1 # Pinned to match pre-commit configblack flake8 pre-commit pytest -tox +tox==3.27.1 # tox 4 seems to be broken at the moment +tox-pyenv twine wheel diff --git a/tox.ini b/tox.ini index 04457fc..f55583f 100644 --- a/tox.ini +++ b/tox.ini @@ -1,6 +1,5 @@ [tox] -envlist = py36, py37, py38, flake8 -requires = tox-conda +envlist = py37, py38, py39, flake8 isolated_build = true skipsdist=true From 9f6f6739811863599b25dfa7da20363f9996ec50 Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Mon, 12 Dec 2022 15:26:11 +0100 Subject: [PATCH 300/363] flake8 was present twice --- requirements-dev.txt | 1 - 1 file changed, 1 deletion(-) diff --git a/requirements-dev.txt b/requirements-dev.txt index 563c3f2..aeb0477 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -2,7 +2,6 @@ black==22.3.0 # Pinned to match pre-commit config flake8==4.0.1 # Pinned to match pre-commit config isort==5.10.1 # Pinned to match pre-commit configblack -flake8 pre-commit pytest tox==3.27.1 # tox 4 seems to be broken at the moment From 70600a071df1721fd87b8ba8e76297cddd794a8b Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Thu, 22 Dec 2022 14:38:16 +0100 Subject: [PATCH 301/363] use built in complex numbers --- tests/test_f142.py | 2 +- tests/test_f144.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/test_f142.py b/tests/test_f142.py index e216c3e..983279c 100644 --- a/tests/test_f142.py +++ b/tests/test_f142.py @@ -190,7 +190,7 @@ def test_raises_not_implemented_error_when_trying_to_serialise_numpy_complex_num ): complex_log = { "source_name": "some_source", - "value": np.complex(3, 4), + "value": complex(3, 4), "timestamp_unix_ns": 
1585332414000000000, } with pytest.raises(NotImplementedError): diff --git a/tests/test_f144.py b/tests/test_f144.py index 078bb28..38983df 100644 --- a/tests/test_f144.py +++ b/tests/test_f144.py @@ -137,7 +137,7 @@ def test_raises_not_implemented_error_when_trying_to_serialise_numpy_complex_num ): complex_log = { "source_name": "some_source", - "value": np.complex(3, 4), + "value": complex(3, 4), "timestamp_unix_ns": 1585332414000000000, } with pytest.raises(NotImplementedError): From ec4a5c0637a97714a7471f143b3f082d4dcc64d5 Mon Sep 17 00:00:00 2001 From: Afonso Mukai <28659574+amues@users.noreply.github.com> Date: Thu, 22 Dec 2022 14:45:08 +0100 Subject: [PATCH 302/363] Update build node version (#79) --- Jenkinsfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Jenkinsfile b/Jenkinsfile index 314cd38..363011a 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -3,7 +3,7 @@ import ecdcpipeline.ContainerBuildNode import ecdcpipeline.PipelineBuilder container_build_nodes = [ - 'centos7': new ContainerBuildNode('dockerregistry.esss.dk/ecdc_group/build-node-images/centos7-build-node:10.0.2-dev', '/usr/bin/scl enable devtoolset-11 rh-python38 -- /bin/bash -e -x') + 'centos7': ContainerBuildNode.getDefaultContainerBuildNode('centos7-gcc11') ] // Define number of old builds to keep. 
From 1acf41966ad8b339b2b594e3887ba9c0d77f860d Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Mon, 2 Jan 2023 08:00:35 +0000 Subject: [PATCH 303/363] bump to latest flatbuffers --- requirements.txt | 2 +- streaming_data_types/area_detector_ADAr.py | 2 +- streaming_data_types/area_detector_NDAr.py | 4 ++-- streaming_data_types/forwarder_config_update_rf5k.py | 2 +- streaming_data_types/histogram_hs00.py | 2 +- streaming_data_types/histogram_hs01.py | 2 +- streaming_data_types/logdata_f142.py | 2 +- 7 files changed, 8 insertions(+), 8 deletions(-) diff --git a/requirements.txt b/requirements.txt index 2c75ce0..07b351b 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,2 +1,2 @@ -flatbuffers==1.12 +flatbuffers==22.11.23 numpy diff --git a/streaming_data_types/area_detector_ADAr.py b/streaming_data_types/area_detector_ADAr.py index 9ad1aad..b8374d9 100644 --- a/streaming_data_types/area_detector_ADAr.py +++ b/streaming_data_types/area_detector_ADAr.py @@ -107,7 +107,7 @@ def serialise_ADAr( ADArray.ADArrayStartAttributesVector(builder, len(attributes)) for item in reversed(temp_attributes): builder.PrependUOffsetTRelative(item) - attributes_offset = builder.EndVector(len(attributes)) + attributes_offset = builder.EndVector() # Build the actual buffer ADArray.ADArrayStart(builder) diff --git a/streaming_data_types/area_detector_NDAr.py b/streaming_data_types/area_detector_NDAr.py index 8e3a048..12ac809 100644 --- a/streaming_data_types/area_detector_NDAr.py +++ b/streaming_data_types/area_detector_NDAr.py @@ -25,14 +25,14 @@ def serialise_ndar( # FlatBuffers builds arrays backwards for s in reversed(dims): builder.PrependUint64(s) - dims_offset = builder.EndVector(len(dims)) + dims_offset = builder.EndVector() # Build data NDArray.NDArrayStartPDataVector(builder, len(data)) # FlatBuffers builds arrays backwards for s in reversed(data): builder.PrependUint8(s) - data_offset = builder.EndVector(len(data)) + data_offset = builder.EndVector() # Build the actual 
buffer NDArray.NDArrayStart(builder) diff --git a/streaming_data_types/forwarder_config_update_rf5k.py b/streaming_data_types/forwarder_config_update_rf5k.py index 5cc3b87..9638102 100644 --- a/streaming_data_types/forwarder_config_update_rf5k.py +++ b/streaming_data_types/forwarder_config_update_rf5k.py @@ -99,7 +99,7 @@ def serialise_rf5k(config_change: UpdateType, streams: List[StreamInfo]) -> byte ConfigUpdate.ConfigUpdateStartStreamsVector(builder, len(streams)) for stream_offset in stream_offsets: builder.PrependUOffsetTRelative(stream_offset) - streams_offset = builder.EndVector(len(streams)) + streams_offset = builder.EndVector() # Build the actual buffer ConfigUpdate.ConfigUpdateStart(builder) diff --git a/streaming_data_types/histogram_hs00.py b/streaming_data_types/histogram_hs00.py index 4c8700a..0121bbd 100644 --- a/streaming_data_types/histogram_hs00.py +++ b/streaming_data_types/histogram_hs00.py @@ -142,7 +142,7 @@ def serialise_hs00(histogram): # FlatBuffers builds arrays backwards for m in reversed(metadata): builder.PrependUOffsetTRelative(m) - metadata_vector = builder.EndVector(rank) + metadata_vector = builder.EndVector() # Build the data data_offset, data_type = _serialise_array(builder, histogram["data"]) diff --git a/streaming_data_types/histogram_hs01.py b/streaming_data_types/histogram_hs01.py index 09d7faf..160585f 100644 --- a/streaming_data_types/histogram_hs01.py +++ b/streaming_data_types/histogram_hs01.py @@ -142,7 +142,7 @@ def serialise_hs01(histogram): # FlatBuffers builds arrays backwards for m in reversed(metadata): builder.PrependUOffsetTRelative(m) - metadata_vector = builder.EndVector(rank) + metadata_vector = builder.EndVector() # Build the data data_offset, data_type = _serialise_array(builder, histogram["data"]) diff --git a/streaming_data_types/logdata_f142.py b/streaming_data_types/logdata_f142.py index b38e7fc..8186c63 100644 --- a/streaming_data_types/logdata_f142.py +++ b/streaming_data_types/logdata_f142.py @@ 
-399,7 +399,7 @@ def _serialise_stringarray(builder: flatbuffers.Builder, data: np.ndarray, sourc ArrayStringStartValueVector(builder, len(data)) for string_offset in string_offsets: builder.PrependSOffsetTRelative(string_offset) - string_array_offset = builder.EndVector(len(data)) + string_array_offset = builder.EndVector() ArrayStringStart(builder) ArrayStringAddValue(builder, string_array_offset) value_position = ArrayStringEnd(builder) From a41728658cbb74948c3e4bcb5d8cce924a914d0f Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Mon, 2 Jan 2023 09:51:09 +0100 Subject: [PATCH 304/363] bump version number --- setup.py | 2 +- streaming_data_types/_version.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/setup.py b/setup.py index 15c3afb..4209205 100644 --- a/setup.py +++ b/setup.py @@ -27,6 +27,6 @@ license="BSD 2-Clause License", packages=find_packages(exclude=["tests", "tests.*"]), python_requires=">=3.6.0", - install_requires=["flatbuffers==1.12", "numpy"], + install_requires=["flatbuffers>=22.11.23", "numpy"], extras_require={"dev": ["flake8", "pre-commit", "pytest", "tox"]}, ) diff --git a/streaming_data_types/_version.py b/streaming_data_types/_version.py index b008080..29368b1 100644 --- a/streaming_data_types/_version.py +++ b/streaming_data_types/_version.py @@ -1,4 +1,4 @@ # Version is not directly defined in __init__ because that causes all # run time dependencies to become build-time dependencies when it is # imported in setup.py -version = "0.20.0" +version = "0.21.0" From 6c0c157b0dbcc51fd3d260f269b9c34c1f4397fe Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Mon, 9 Jan 2023 08:51:36 +0100 Subject: [PATCH 305/363] clear pythonpath when using tox --- tox.ini | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tox.ini b/tox.ini index f55583f..ae641da 100644 --- a/tox.ini +++ b/tox.ini @@ -8,7 +8,7 @@ deps = -r{toxinidir}/requirements.txt -r{toxinidir}/requirements-dev.txt commands = - python -m pytest {posargs} + 
python -m pytest {posargs} -o pythonpath= [testenv:flake8] commands = From 96ef1fb53874e985a8eae151c53ecd5687f9dffd Mon Sep 17 00:00:00 2001 From: Subindev D <64056430+subindev-d@users.noreply.github.com> Date: Thu, 20 Apr 2023 14:28:42 +0100 Subject: [PATCH 306/363] Updated the maxtime for running in windows In windows , if the year parameter of the datetime.datetime class is greater than 3001 , it returns OSError: [Errno 22] Invalid argument for Windows 10 operating system. It works fine in Linux. It could be a bug in the python. Setting the year value for max_time to 3001 will fix this error for windows. --- streaming_data_types/area_detector_ADAr.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/streaming_data_types/area_detector_ADAr.py b/streaming_data_types/area_detector_ADAr.py index b8374d9..2ca7381 100644 --- a/streaming_data_types/area_detector_ADAr.py +++ b/streaming_data_types/area_detector_ADAr.py @@ -166,7 +166,7 @@ def deserialise_ADAr(buffer: Union[bytearray, bytes]) -> ADArray: ad_array = ADArray.ADArray.GetRootAsADArray(buffer, 0) unique_id = ad_array.Id() max_time = datetime( - year=9000, month=1, day=1, hour=0, minute=0, second=0 + year=3001, month=1, day=1, hour=0, minute=0, second=0 ).timestamp() used_timestamp = ad_array.Timestamp() / 1e9 if used_timestamp > max_time: From b3dd5a9686c708cab0563c035b352ce3c6ac8a65 Mon Sep 17 00:00:00 2001 From: Subindev D <64056430+subindev-d@users.noreply.github.com> Date: Fri, 21 Apr 2023 16:37:12 +0100 Subject: [PATCH 307/363] Replaced all the year 9000 with year 3001 for max_time. datetime class return os Error for timestamp() method in Window OS. 
--- streaming_data_types/action_response_answ.py | 2 +- streaming_data_types/sample_environment_senv.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/streaming_data_types/action_response_answ.py b/streaming_data_types/action_response_answ.py index c5e6546..5fc4706 100644 --- a/streaming_data_types/action_response_answ.py +++ b/streaming_data_types/action_response_answ.py @@ -64,7 +64,7 @@ def deserialise_answ(buffer: Union[bytearray, bytes]): check_schema_identifier(buffer, FILE_IDENTIFIER) answ_message = ActionResponse.ActionResponse.GetRootAsActionResponse(buffer, 0) max_time = datetime( - year=9000, month=1, day=1, hour=0, minute=0, second=0 + year=3001, month=1, day=1, hour=0, minute=0, second=0 ).timestamp() used_timestamp = answ_message.StopTime() / 1000 if used_timestamp > max_time: diff --git a/streaming_data_types/sample_environment_senv.py b/streaming_data_types/sample_environment_senv.py index 5635cc2..d39da76 100644 --- a/streaming_data_types/sample_environment_senv.py +++ b/streaming_data_types/sample_environment_senv.py @@ -104,7 +104,7 @@ def deserialise_senv(buffer: Union[bytearray, bytes]) -> Response: SE_data = SampleEnvironmentData.GetRootAsSampleEnvironmentData(buffer, 0) max_time = datetime( - year=9000, month=1, day=1, hour=0, minute=0, second=0 + year=3001, month=1, day=1, hour=0, minute=0, second=0 ).timestamp() used_timestamp = SE_data.PacketTimestamp() / 1e9 if used_timestamp > max_time: From 34b1e013d9c6309826e26f70a25c7037156c3ca0 Mon Sep 17 00:00:00 2001 From: Daniel Cacabelos Date: Mon, 24 Apr 2023 09:48:03 +0200 Subject: [PATCH 308/363] Chore: Bump isort --- .pre-commit-config.yaml | 2 +- requirements-dev.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index ad23bd9..c6ea035 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -9,7 +9,7 @@ repos: hooks: - id: flake8 - repo: https://github.com/pycqa/isort - rev: 5.10.1 + 
rev: 5.12.0 hooks: - id: isort args: ["--profile", "black"] diff --git a/requirements-dev.txt b/requirements-dev.txt index aeb0477..ab10fd1 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -1,7 +1,7 @@ -r requirements.txt black==22.3.0 # Pinned to match pre-commit config flake8==4.0.1 # Pinned to match pre-commit config -isort==5.10.1 # Pinned to match pre-commit configblack +isort==5.12.0 # Pinned to match pre-commit configblack pre-commit pytest tox==3.27.1 # tox 4 seems to be broken at the moment From 715054428a9d759f004e9a838c0bd8bb3ad480fd Mon Sep 17 00:00:00 2001 From: Daniel Cacabelos Date: Mon, 24 Apr 2023 09:55:40 +0200 Subject: [PATCH 309/363] Chore: Downgrade isort to maintain python 3.7 compatibility --- .pre-commit-config.yaml | 2 +- requirements-dev.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index c6ea035..c39bbc8 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -9,7 +9,7 @@ repos: hooks: - id: flake8 - repo: https://github.com/pycqa/isort - rev: 5.12.0 + rev: 5.11.5 hooks: - id: isort args: ["--profile", "black"] diff --git a/requirements-dev.txt b/requirements-dev.txt index ab10fd1..495564b 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -1,7 +1,7 @@ -r requirements.txt black==22.3.0 # Pinned to match pre-commit config flake8==4.0.1 # Pinned to match pre-commit config -isort==5.12.0 # Pinned to match pre-commit configblack +isort==5.11.5 # Pinned to match pre-commit configblack pre-commit pytest tox==3.27.1 # tox 4 seems to be broken at the moment From 1a7ac4e185e7dc03ed582e929682c6a1028f7aa3 Mon Sep 17 00:00:00 2001 From: Daniel Cacabelos Date: Fri, 21 Apr 2023 10:22:03 +0200 Subject: [PATCH 310/363] Minor: Add missing typing --- streaming_data_types/alarm_al00.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/streaming_data_types/alarm_al00.py b/streaming_data_types/alarm_al00.py index 
a44f394..f779052 100644 --- a/streaming_data_types/alarm_al00.py +++ b/streaming_data_types/alarm_al00.py @@ -34,7 +34,7 @@ class Severity(Enum): } -def deserialise_al00(buffer): +def deserialise_al00(buffer) -> AlarmInfo: check_schema_identifier(buffer, FILE_IDENTIFIER) alarm = Alarm.Alarm.GetRootAsAlarm(buffer, 0) @@ -46,7 +46,9 @@ def deserialise_al00(buffer): ) -def serialise_al00(source: str, timestamp_ns: int, severity: Severity, message: str): +def serialise_al00( + source: str, timestamp_ns: int, severity: Severity, message: str +) -> bytes: builder = flatbuffers.Builder(128) message_offset = builder.CreateString(message) From c5aca0ab9d738c5f57249692e34dc41904470762 Mon Sep 17 00:00:00 2001 From: Daniel Cacabelos Date: Mon, 24 Apr 2023 10:41:16 +0200 Subject: [PATCH 311/363] Bump version --- streaming_data_types/_version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/streaming_data_types/_version.py b/streaming_data_types/_version.py index 29368b1..fc505d7 100644 --- a/streaming_data_types/_version.py +++ b/streaming_data_types/_version.py @@ -1,4 +1,4 @@ # Version is not directly defined in __init__ because that causes all # run time dependencies to become build-time dependencies when it is # imported in setup.py -version = "0.21.0" +version = "0.21.1" From 337b3bffd0f1e1a7eacf4185314b5ab5891a9889 Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Tue, 9 May 2023 12:25:10 +0200 Subject: [PATCH 312/363] added json schema (#88) * added json schema * update readme * updated version number --- README.md | 49 +++++++------- README_DEV.md | 1 - streaming_data_types/__init__.py | 3 + streaming_data_types/_version.py | 2 +- .../fbschemas/json_json/JsonData.py | 67 +++++++++++++++++++ streaming_data_types/json_json.py | 24 +++++++ tests/test_json.py | 34 ++++++++++ 7 files changed, 154 insertions(+), 26 deletions(-) create mode 100644 streaming_data_types/fbschemas/json_json/JsonData.py create mode 100644 streaming_data_types/json_json.py 
create mode 100644 tests/test_json.py diff --git a/README.md b/README.md index 6d10b9e..2f1cdc4 100644 --- a/README.md +++ b/README.md @@ -6,30 +6,31 @@ https://github.com/ess-dmsc/streaming-data-types ## FlatBuffer Schemas -|name|description| -|----|-----------| -|hs00|Histogram schema (deprecated in favour of hs01)| -|hs01|Histogram schema| -|ns10|NICOS cache entry schema| -|pl72|Run start| -|6s4t|Run stop| -|f142|Log data (deprecated in favour of f144)| -|f144|Log data| -|ev42|Event data (deprecated in favour of ev44)| -|ev43|Event data from multiple pulses| -|ev44|Event data with signed data types| -|x5f2|Status messages| -|tdct|Timestamps| -|ep00|EPICS connection info (deprecated in favour of ep01)| -|ep01|EPICS connection info| -|rf5k|Forwarder configuration update| -|answ|File-writer command response| -|wrdn|File-writer finished writing| -|NDAr|**Deprecated**| -|ADAr|EPICS areaDetector data| -|al00|Alarm/status messages used by the Forwarder and NICOS| -|senv|**Deprecated**| -|se00|Arrays with optional timestamps, for example waveform data. Replaces _senv_. 
| +| name | description | +|------|------------------------------------------------------------------------------| +| hs00 | Histogram schema (deprecated in favour of hs01) | +| hs01 | Histogram schema | +| ns10 | NICOS cache entry schema | +| pl72 | Run start | +| 6s4t | Run stop | +| f142 | Log data (deprecated in favour of f144) | +| f144 | Log data | +| ev42 | Event data (deprecated in favour of ev44) | +| ev43 | Event data from multiple pulses | +| ev44 | Event data with signed data types | +| x5f2 | Status messages | +| tdct | Timestamps | +| ep00 | EPICS connection info (deprecated in favour of ep01) | +| ep01 | EPICS connection info | +| rf5k | Forwarder configuration update | +| answ | File-writer command response | +| wrdn | File-writer finished writing | +| NDAr | **Deprecated** | +| ADAr | EPICS areaDetector data | +| al00 | Alarm/status messages used by the Forwarder and NICOS | +| senv | **Deprecated** | +| json | Generic JSON data | +| se00 | Arrays with optional timestamps, for example waveform data. Replaces _senv_. | ### hs00 and hs01 Schema for histogram data. It is one of the more complicated to use schemas. diff --git a/README_DEV.md b/README_DEV.md index 2b99dca..2897ee5 100644 --- a/README_DEV.md +++ b/README_DEV.md @@ -25,7 +25,6 @@ This command can also be used to run the hooks manually. * Check whether the serialised data produced by the new code can be verified in C++ * There is a helper program in the [FlatBufferVerification](https://github.com/ess-dmsc/FlatBufferVerification) repository * Don't worry if it fails verification - it seems to be an inherent FlatBuffers issue -* Add the schema and verifiability result to the table of schemas in `README.md` ### Tox Tox allows the unit tests to be run against multiple versions of Python. 
diff --git a/streaming_data_types/__init__.py b/streaming_data_types/__init__.py index ec16dfb..ed11317 100644 --- a/streaming_data_types/__init__.py +++ b/streaming_data_types/__init__.py @@ -19,6 +19,7 @@ ) from streaming_data_types.histogram_hs00 import deserialise_hs00, serialise_hs00 from streaming_data_types.histogram_hs01 import deserialise_hs01, serialise_hs01 +from streaming_data_types.json_json import deserialise_json, serialise_json from streaming_data_types.logdata_f142 import deserialise_f142, serialise_f142 from streaming_data_types.logdata_f144 import deserialise_f144, serialise_f144 from streaming_data_types.nicos_cache_ns10 import deserialise_ns10, serialise_ns10 @@ -56,6 +57,7 @@ "se00": serialise_se00, "ADAr": serialise_ADAr, "al00": serialise_al00, + "json": serialise_json, } @@ -82,4 +84,5 @@ "se00": deserialise_se00, "ADAr": deserialise_ADAr, "al00": deserialise_al00, + "json": deserialise_json, } diff --git a/streaming_data_types/_version.py b/streaming_data_types/_version.py index fc505d7..1c586eb 100644 --- a/streaming_data_types/_version.py +++ b/streaming_data_types/_version.py @@ -1,4 +1,4 @@ # Version is not directly defined in __init__ because that causes all # run time dependencies to become build-time dependencies when it is # imported in setup.py -version = "0.21.1" +version = "0.22.0" diff --git a/streaming_data_types/fbschemas/json_json/JsonData.py b/streaming_data_types/fbschemas/json_json/JsonData.py new file mode 100644 index 0000000..cb4d4c6 --- /dev/null +++ b/streaming_data_types/fbschemas/json_json/JsonData.py @@ -0,0 +1,67 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers +from flatbuffers.compat import import_numpy + +np = import_numpy() + + +class JsonData(object): + __slots__ = ["_tab"] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = JsonData() + x.Init(buf, n + offset) + return 
x + + @classmethod + def GetRootAsJsonData(cls, buf, offset=0): + """This method is deprecated. Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + + @classmethod + def JsonDataBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier( + buf, offset, b"\x6A\x73\x6F\x6E", size_prefixed=size_prefixed + ) + + # JsonData + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # JsonData + def Json(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + +def JsonDataStart(builder): + builder.StartObject(1) + + +def Start(builder): + JsonDataStart(builder) + + +def JsonDataAddJson(builder, json): + builder.PrependUOffsetTRelativeSlot( + 0, flatbuffers.number_types.UOffsetTFlags.py_type(json), 0 + ) + + +def AddJson(builder: flatbuffers.Builder, json: int): + JsonDataAddJson(builder, json) + + +def JsonDataEnd(builder): + return builder.EndObject() + + +def End(builder): + return JsonDataEnd(builder) diff --git a/streaming_data_types/json_json.py b/streaming_data_types/json_json.py new file mode 100644 index 0000000..826b95c --- /dev/null +++ b/streaming_data_types/json_json.py @@ -0,0 +1,24 @@ +import flatbuffers + +import streaming_data_types.fbschemas.json_json.JsonData as JsonData +from streaming_data_types.utils import check_schema_identifier + +FILE_IDENTIFIER = b"json" + + +def deserialise_json(buffer) -> str: + check_schema_identifier(buffer, FILE_IDENTIFIER) + return JsonData.JsonData.GetRootAsJsonData(buffer, 0).Json().decode("utf-8") + + +def serialise_json(json_str) -> bytes: + builder = flatbuffers.Builder(128) + + offset = builder.CreateString(json_str) + + JsonData.JsonDataStart(builder) + JsonData.AddJson(builder, offset) + result = JsonData.JsonDataEnd(builder) + + builder.Finish(result, file_identifier=FILE_IDENTIFIER) + return bytes(builder.Output()) diff --git 
a/tests/test_json.py b/tests/test_json.py new file mode 100644 index 0000000..9b3e39e --- /dev/null +++ b/tests/test_json.py @@ -0,0 +1,34 @@ +import json + +import pytest + +from streaming_data_types import DESERIALISERS, SERIALISERS +from streaming_data_types.exceptions import WrongSchemaException +from streaming_data_types.json_json import deserialise_json, serialise_json + + +class TestSerialisationJson: + def test_serialises_and_deserialises_json_message_correctly(self): + """ + Round-trip to check what we serialise is what we get back. + """ + json_str = json.dumps(["foo", "bar"]) + buf = serialise_json(json_str) + entry = deserialise_json(buf) + + assert entry == json_str + + def test_if_buffer_has_wrong_id_then_throws(self): + json_str = json.dumps(["foo", "bar"]) + buf = serialise_json(json_str) + + # Manually hack the id + buf = bytearray(buf) + buf[4:8] = b"1234" + + with pytest.raises(WrongSchemaException): + deserialise_json(buf) + + def test_schema_type_is_in_global_serialisers_list(self): + assert "json" in SERIALISERS + assert "json" in DESERIALISERS From cb1c270dcb1db58b5c4bddf9ec6a91060fcbe66e Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Tue, 9 May 2023 12:36:58 +0200 Subject: [PATCH 313/363] added missing __init__ file --- streaming_data_types/fbschemas/json_json/__init__.py | 0 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 streaming_data_types/fbschemas/json_json/__init__.py diff --git a/streaming_data_types/fbschemas/json_json/__init__.py b/streaming_data_types/fbschemas/json_json/__init__.py new file mode 100644 index 0000000..e69de29 From 3c2564cd6d6365a9f466222d5569c7d06d0ba9cb Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Tue, 9 May 2023 12:38:20 +0200 Subject: [PATCH 314/363] bump version --- streaming_data_types/_version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/streaming_data_types/_version.py b/streaming_data_types/_version.py index 1c586eb..b1225ca 100644 --- 
a/streaming_data_types/_version.py +++ b/streaming_data_types/_version.py @@ -1,4 +1,4 @@ # Version is not directly defined in __init__ because that causes all # run time dependencies to become build-time dependencies when it is # imported in setup.py -version = "0.22.0" +version = "0.22.1" From 64c8c25dec9b9511e39fea9b924da75ed4879ec9 Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Fri, 8 Sep 2023 14:40:35 +0200 Subject: [PATCH 315/363] Deserialising ADAr populates the dimensions field (#90) --- streaming_data_types/_version.py | 2 +- streaming_data_types/area_detector_ADAr.py | 2 ++ tests/test_ADAr.py | 2 ++ 3 files changed, 5 insertions(+), 1 deletion(-) diff --git a/streaming_data_types/_version.py b/streaming_data_types/_version.py index b1225ca..2a84c92 100644 --- a/streaming_data_types/_version.py +++ b/streaming_data_types/_version.py @@ -1,4 +1,4 @@ # Version is not directly defined in __init__ because that causes all # run time dependencies to become build-time dependencies when it is # imported in setup.py -version = "0.22.1" +version = "0.23.0" diff --git a/streaming_data_types/area_detector_ADAr.py b/streaming_data_types/area_detector_ADAr.py index 2ca7381..3eba0e1 100644 --- a/streaming_data_types/area_detector_ADAr.py +++ b/streaming_data_types/area_detector_ADAr.py @@ -130,6 +130,7 @@ def serialise_ADAr( ("source_name", str), ("unique_id", int), ("timestamp", datetime), + ("dimensions", np.ndarray), ("data", np.ndarray), ("attributes", List[Attribute]), ), @@ -200,6 +201,7 @@ def deserialise_ADAr(buffer: Union[bytearray, bytes]) -> ADArray: source_name=ad_array.SourceName().decode(), unique_id=unique_id, timestamp=datetime.fromtimestamp(used_timestamp, tz=timezone.utc), + dimensions=ad_array.DimensionsAsNumpy(), data=data, attributes=attributes_list, ) diff --git a/tests/test_ADAr.py b/tests/test_ADAr.py index 1840dc0..7ffc7ca 100644 --- a/tests/test_ADAr.py +++ b/tests/test_ADAr.py @@ -36,6 +36,8 @@ def 
test_serialises_and_deserialises_ADAr_int_array(self): assert entry.unique_id == original_entry["unique_id"] assert entry.source_name == original_entry["source_name"] assert entry.timestamp == original_entry["timestamp"] + assert np.array_equal(entry.dimensions, original_entry["data"].shape) + assert np.array_equal(entry.data.shape, entry.dimensions) # Sanity check assert np.array_equal(entry.data, original_entry["data"]) assert entry.data.dtype == original_entry["data"].dtype assert len(entry.attributes) == len(original_entry["attributes"]) From 07c6fcb6436a6737c6146b567bd79aa0ed40d852 Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Fri, 8 Sep 2023 14:57:22 +0200 Subject: [PATCH 316/363] ADAr dimensions returned as a tuple --- streaming_data_types/_version.py | 2 +- streaming_data_types/area_detector_ADAr.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/streaming_data_types/_version.py b/streaming_data_types/_version.py index 2a84c92..c3b0831 100644 --- a/streaming_data_types/_version.py +++ b/streaming_data_types/_version.py @@ -1,4 +1,4 @@ # Version is not directly defined in __init__ because that causes all # run time dependencies to become build-time dependencies when it is # imported in setup.py -version = "0.23.0" +version = "0.23.1" diff --git a/streaming_data_types/area_detector_ADAr.py b/streaming_data_types/area_detector_ADAr.py index 3eba0e1..0819e98 100644 --- a/streaming_data_types/area_detector_ADAr.py +++ b/streaming_data_types/area_detector_ADAr.py @@ -201,7 +201,7 @@ def deserialise_ADAr(buffer: Union[bytearray, bytes]) -> ADArray: source_name=ad_array.SourceName().decode(), unique_id=unique_id, timestamp=datetime.fromtimestamp(used_timestamp, tz=timezone.utc), - dimensions=ad_array.DimensionsAsNumpy(), + dimensions=tuple(ad_array.DimensionsAsNumpy()), data=data, attributes=attributes_list, ) From 255546d75b7e6128ce487cab544093a996f31022 Mon Sep 17 00:00:00 2001 From: Daniel Cacabelos Date: Thu, 18 Jan 2024 08:45:01 +0100 
Subject: [PATCH 317/363] Tests: Update python versions --- Jenkinsfile | 2 +- tox.ini | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Jenkinsfile b/Jenkinsfile index 363011a..d88033a 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -43,7 +43,7 @@ builders = pipeline_builder.createBuilders { container -> def test_output = "TestResults.xml" container.sh """ cd ${pipeline_builder.project} - pyenv local 3.7 3.8 3.9 + pyenv local 3.8 3.9 3.10 3.11 3.12 pyenv versions python -m tox -- --junitxml=${test_output} """ diff --git a/tox.ini b/tox.ini index ae641da..773bb75 100644 --- a/tox.ini +++ b/tox.ini @@ -1,5 +1,5 @@ [tox] -envlist = py37, py38, py39, flake8 +envlist = py38, py39, py310, py311, py312, flake8 isolated_build = true skipsdist=true From ed4178da43352c368cf3f8c484c2c88817790ffa Mon Sep 17 00:00:00 2001 From: Daniel Cacabelos Date: Thu, 18 Jan 2024 08:45:31 +0100 Subject: [PATCH 318/363] CI: Use local environment in pre-commit --- .pre-commit-config.yaml | 33 ++++++++++++++++++++------------- requirements-dev.txt | 6 +++--- 2 files changed, 23 insertions(+), 16 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index c39bbc8..d9ad7bc 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,16 +1,23 @@ repos: -- repo: https://github.com/psf/black - rev: 22.3.0 +- repo: local hooks: - - id: black - language_version: python3 -- repo: https://github.com/pycqa/flake8 - rev: 4.0.1 - hooks: - - id: flake8 -- repo: https://github.com/pycqa/isort - rev: 5.11.5 - hooks: - - id: isort - args: ["--profile", "black"] + - id: black + name: black + entry: black + language: system + types_or: [python, pyi] + require_serial: true + - id: flake8 + name: flake8 + entry: flake8 + language: system + types: [python] + require_serial: true + - id: isort + name: isort + entry: isort + args: ["--profile", "black"] + language: system + types_or: [cython, pyi, python] + require_serial: true diff --git a/requirements-dev.txt 
b/requirements-dev.txt index 495564b..05210d8 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -1,7 +1,7 @@ -r requirements.txt -black==22.3.0 # Pinned to match pre-commit config -flake8==4.0.1 # Pinned to match pre-commit config -isort==5.11.5 # Pinned to match pre-commit configblack +black +flake8 +isort pre-commit pytest tox==3.27.1 # tox 4 seems to be broken at the moment From 43c94157b6d9852ba9bc1704f62bbafd8bd01b12 Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Fri, 12 Apr 2024 11:23:52 +0200 Subject: [PATCH 319/363] No longer need to push to anaconda --- README_DEV.md | 33 --------------------------------- 1 file changed, 33 deletions(-) diff --git a/README_DEV.md b/README_DEV.md index 2897ee5..7f826d0 100644 --- a/README_DEV.md +++ b/README_DEV.md @@ -106,36 +106,3 @@ After testing installing from test.pypi.org works, push to PyPI: twine upload dist/* ``` Finally, create a tag on the GitHub repository with the appropriate name, e.g. `v0.7.0`. - -### Build and upload conda package - -The conda package is used by ESS DMSC DRAM group for the Scipp library. -Please create the release version tag on github before creating the conda package as it gets the version number from the tag. - -Note: anecdotal evidence suggests that this works better on Linux than on MacOS. - -#### Steps - -You must first have a conda installation, for example `conda` via pip, or [miniconda](https://docs.conda.io/en/latest/miniconda.html). - -From the directory of the ess-streaming-data-types repository, build the package with -```sh -conda install -c conda-forge conda-build anaconda-client -conda build -c conda-forge ./conda -``` - -To upload the package, first login -```sh -anaconda login -``` -use the ESS-DMSC-ECDC account or personal account linked to ESS-DMSC organisation. 
- -Find the path for the built package using -```sh -conda build ./conda --output -``` - -Then upload -```sh -anaconda upload --user ESS-DMSC /path/to/package -``` From 21236544bfa0b29afed51ea9f0646ffe1e29b633 Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Thu, 18 Apr 2024 10:01:15 +0200 Subject: [PATCH 320/363] Added ad00 as a replacement for ADAR (#94) * first attempt for ad00 * bumped version number --- streaming_data_types/__init__.py | 3 + streaming_data_types/_version.py | 2 +- streaming_data_types/area_detector_ad00.py | 200 ++++++++++++++ .../fbschemas/area_detector_ad00/Attribute.py | 164 ++++++++++++ .../fbschemas/area_detector_ad00/DType.py | 17 ++ .../fbschemas/area_detector_ad00/__init__.py | 0 .../area_detector_ad00/ad00_ADArray.py | 252 ++++++++++++++++++ tests/test_ad00.py | 105 ++++++++ 8 files changed, 742 insertions(+), 1 deletion(-) create mode 100644 streaming_data_types/area_detector_ad00.py create mode 100644 streaming_data_types/fbschemas/area_detector_ad00/Attribute.py create mode 100644 streaming_data_types/fbschemas/area_detector_ad00/DType.py create mode 100644 streaming_data_types/fbschemas/area_detector_ad00/__init__.py create mode 100644 streaming_data_types/fbschemas/area_detector_ad00/ad00_ADArray.py create mode 100644 tests/test_ad00.py diff --git a/streaming_data_types/__init__.py b/streaming_data_types/__init__.py index ed11317..8e25c0b 100644 --- a/streaming_data_types/__init__.py +++ b/streaming_data_types/__init__.py @@ -1,6 +1,7 @@ from streaming_data_types._version import version from streaming_data_types.action_response_answ import deserialise_answ, serialise_answ from streaming_data_types.alarm_al00 import deserialise_al00, serialise_al00 +from streaming_data_types.area_detector_ad00 import deserialise_ad00, serialise_ad00 from streaming_data_types.area_detector_ADAr import deserialise_ADAr, serialise_ADAr from streaming_data_types.area_detector_NDAr import deserialise_ndar, serialise_ndar from 
streaming_data_types.array_1d_se00 import deserialise_se00, serialise_se00 @@ -58,6 +59,7 @@ "ADAr": serialise_ADAr, "al00": serialise_al00, "json": serialise_json, + "ad00": serialise_ad00, } @@ -85,4 +87,5 @@ "ADAr": deserialise_ADAr, "al00": deserialise_al00, "json": deserialise_json, + "ad00": deserialise_ad00, } diff --git a/streaming_data_types/_version.py b/streaming_data_types/_version.py index c3b0831..3ab01a1 100644 --- a/streaming_data_types/_version.py +++ b/streaming_data_types/_version.py @@ -1,4 +1,4 @@ # Version is not directly defined in __init__ because that causes all # run time dependencies to become build-time dependencies when it is # imported in setup.py -version = "0.23.1" +version = "0.24.0" diff --git a/streaming_data_types/area_detector_ad00.py b/streaming_data_types/area_detector_ad00.py new file mode 100644 index 0000000..cf1a734 --- /dev/null +++ b/streaming_data_types/area_detector_ad00.py @@ -0,0 +1,200 @@ +from struct import pack +from typing import List, NamedTuple, Union + +import flatbuffers +import numpy as np + +import streaming_data_types.fbschemas.area_detector_ad00.Attribute as ADArAttribute +from streaming_data_types.fbschemas.area_detector_ad00 import ad00_ADArray +from streaming_data_types.fbschemas.area_detector_ad00.DType import DType +from streaming_data_types.utils import check_schema_identifier + +FILE_IDENTIFIER = b"ad00" + + +class Attribute: + def __init__( + self, + name: str, + description: str, + source: str, + data: Union[np.ndarray, str, int, float], + ): + self.name = name + self.description = description + self.source = source + self.data = data + + def __eq__(self, other): + data_is_equal = type(self.data) == type(other.data) # noqa: E721 + if type(self.data) is np.ndarray: + data_is_equal = data_is_equal and np.array_equal(self.data, other.data) + else: + data_is_equal = data_is_equal and self.data == other.data + return ( + self.name == other.name + and self.description == other.description + and 
self.source == other.source + and data_is_equal + ) + + +def serialise_ad00( + source_name: str, + unique_id: int, + timestamp_ns: int, + data: Union[np.ndarray, str], + attributes: List[Attribute] = [], +) -> bytes: + builder = flatbuffers.Builder(1024) + builder.ForceDefaults(True) + + type_map = { + np.dtype("uint8"): DType.uint8, + np.dtype("int8"): DType.int8, + np.dtype("uint16"): DType.uint16, + np.dtype("int16"): DType.int16, + np.dtype("uint32"): DType.uint32, + np.dtype("int32"): DType.int32, + np.dtype("uint64"): DType.uint64, + np.dtype("int64"): DType.int64, + np.dtype("float32"): DType.float32, + np.dtype("float64"): DType.float64, + } + + if type(data) is str: + data = np.frombuffer(data.encode(), np.uint8) + data_type = DType.c_string + else: + data_type = type_map[data.dtype] + + # Build dims + dims_offset = builder.CreateNumpyVector(np.asarray(data.shape)) + + # Build data + data_offset = builder.CreateNumpyVector(data.flatten().view(np.uint8)) + + source_name_offset = builder.CreateString(source_name) + + temp_attributes = [] + for item in attributes: + if type(item.data) is np.ndarray: + attr_data_type = type_map[item.data.dtype] + attr_data = item.data + elif type(item.data) is str: + attr_data_type = DType.c_string + attr_data = np.frombuffer(item.data.encode(), np.uint8) + elif type(item.data) is int: + attr_data_type = DType.int64 + attr_data = np.frombuffer(pack("q", item.data), np.uint8) + elif type(item.data) is float: + attr_data_type = DType.float64 + attr_data = np.frombuffer(pack("d", item.data), np.uint8) + attr_name_offset = builder.CreateString(item.name) + attr_desc_offset = builder.CreateString(item.description) + attr_src_offset = builder.CreateString(item.source) + attr_data_offset = builder.CreateNumpyVector(attr_data.flatten().view(np.uint8)) + ADArAttribute.AttributeStart(builder) + ADArAttribute.AttributeAddName(builder, attr_name_offset) + ADArAttribute.AttributeAddDescription(builder, attr_desc_offset) + 
ADArAttribute.AttributeAddSource(builder, attr_src_offset) + ADArAttribute.AttributeAddDataType(builder, attr_data_type) + ADArAttribute.AttributeAddData(builder, attr_data_offset) + attr_offset = ADArAttribute.AttributeEnd(builder) + temp_attributes.append(attr_offset) + + ad00_ADArray.ad00_ADArrayStartAttributesVector(builder, len(attributes)) + for item in reversed(temp_attributes): + builder.PrependUOffsetTRelative(item) + attributes_offset = builder.EndVector() + + # Build the actual buffer + ad00_ADArray.ad00_ADArrayStart(builder) + ad00_ADArray.ad00_ADArrayAddSourceName(builder, source_name_offset) + ad00_ADArray.ad00_ADArrayAddDataType(builder, data_type) + ad00_ADArray.ad00_ADArrayAddDimensions(builder, dims_offset) + ad00_ADArray.ad00_ADArrayAddId(builder, unique_id) + ad00_ADArray.ad00_ADArrayAddData(builder, data_offset) + ad00_ADArray.ad00_ADArrayAddTimestamp(builder, timestamp_ns) + ad00_ADArray.ad00_ADArrayAddAttributes(builder, attributes_offset) + array_message = ad00_ADArray.ad00_ADArrayEnd(builder) + + builder.Finish(array_message, file_identifier=FILE_IDENTIFIER) + return bytes(builder.Output()) + + +ADArray = NamedTuple( + "ADArray", + ( + ("source_name", str), + ("unique_id", int), + ("timestamp_ns", int), + ("dimensions", np.ndarray), + ("data", np.ndarray), + ("attributes", List[Attribute]), + ), +) + + +def get_payload_data(fb_arr) -> np.ndarray: + return get_data(fb_arr).reshape(fb_arr.DimensionsAsNumpy()) + + +def get_data(fb_arr) -> np.ndarray: + """ + Converts the data array into the correct type. 
+ """ + raw_data = fb_arr.DataAsNumpy() + type_map = { + DType.uint8: np.uint8, + DType.int8: np.int8, + DType.uint16: np.uint16, + DType.int16: np.int16, + DType.uint32: np.uint32, + DType.int32: np.int32, + DType.uint64: np.uint64, + DType.int64: np.int64, + DType.float32: np.float32, + DType.float64: np.float64, + } + return raw_data.view(type_map[fb_arr.DataType()]) + + +def deserialise_ad00(buffer: Union[bytearray, bytes]) -> ADArray: + check_schema_identifier(buffer, FILE_IDENTIFIER) + + ad_array = ad00_ADArray.ad00_ADArray.GetRootAsad00_ADArray(buffer, 0) + unique_id = ad_array.Id() + if ad_array.DataType() == DType.c_string: + data = ad_array.DataAsNumpy().tobytes().decode() + else: + data = get_payload_data(ad_array) + + attributes_list = [] + for i in range(ad_array.AttributesLength()): + attribute_ptr = ad_array.Attributes(i) + if attribute_ptr.DataType() == DType.c_string: + attr_data = attribute_ptr.DataAsNumpy().tobytes().decode() + else: + attr_data = get_data(attribute_ptr) + temp_attribute = Attribute( + name=attribute_ptr.Name().decode(), + description=attribute_ptr.Description().decode(), + source=attribute_ptr.Source().decode(), + data=attr_data, + ) + if type(temp_attribute.data) is np.ndarray and len(temp_attribute.data) == 1: + if np.issubdtype(temp_attribute.data.dtype, np.floating): + temp_attribute.data = float(temp_attribute.data[0]) + elif np.issubdtype(temp_attribute.data.dtype, np.integer): + temp_attribute.data = int(temp_attribute.data[0]) + attributes_list.append(temp_attribute) + + return ADArray( + source_name=ad_array.SourceName().decode(), + unique_id=unique_id, + timestamp_ns=ad_array.Timestamp(), + dimensions=tuple(ad_array.DimensionsAsNumpy()), + data=data, + attributes=attributes_list, + ) diff --git a/streaming_data_types/fbschemas/area_detector_ad00/Attribute.py b/streaming_data_types/fbschemas/area_detector_ad00/Attribute.py new file mode 100644 index 0000000..11300bb --- /dev/null +++ 
b/streaming_data_types/fbschemas/area_detector_ad00/Attribute.py @@ -0,0 +1,164 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers +from flatbuffers.compat import import_numpy + +np = import_numpy() + + +class Attribute(object): + __slots__ = ["_tab"] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = Attribute() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsAttribute(cls, buf, offset=0): + """This method is deprecated. Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + + @classmethod + def AttributeBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier( + buf, offset, b"\x61\x64\x30\x30", size_prefixed=size_prefixed + ) + + # Attribute + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # Attribute + def Name(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + # Attribute + def Description(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + # Attribute + def Source(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + # Attribute + def DataType(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) + return 0 + + # Attribute + def Data(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get( + flatbuffers.number_types.Uint8Flags, + a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1), + ) + 
return 0 + + # Attribute + def DataAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint8Flags, o) + return 0 + + # Attribute + def DataLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # Attribute + def DataIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + return o == 0 + + +def AttributeStart(builder): + builder.StartObject(5) + + +def Start(builder): + AttributeStart(builder) + + +def AttributeAddName(builder, name): + builder.PrependUOffsetTRelativeSlot( + 0, flatbuffers.number_types.UOffsetTFlags.py_type(name), 0 + ) + + +def AddName(builder: flatbuffers.Builder, name: int): + AttributeAddName(builder, name) + + +def AttributeAddDescription(builder, description): + builder.PrependUOffsetTRelativeSlot( + 1, flatbuffers.number_types.UOffsetTFlags.py_type(description), 0 + ) + + +def AddDescription(builder: flatbuffers.Builder, description: int): + AttributeAddDescription(builder, description) + + +def AttributeAddSource(builder, source): + builder.PrependUOffsetTRelativeSlot( + 2, flatbuffers.number_types.UOffsetTFlags.py_type(source), 0 + ) + + +def AddSource(builder: flatbuffers.Builder, source: int): + AttributeAddSource(builder, source) + + +def AttributeAddDataType(builder, dataType): + builder.PrependInt8Slot(3, dataType, 0) + + +def AddDataType(builder: flatbuffers.Builder, dataType: int): + AttributeAddDataType(builder, dataType) + + +def AttributeAddData(builder, data): + builder.PrependUOffsetTRelativeSlot( + 4, flatbuffers.number_types.UOffsetTFlags.py_type(data), 0 + ) + + +def AddData(builder: flatbuffers.Builder, data: int): + AttributeAddData(builder, data) + + +def AttributeStartDataVector(builder, numElems): + return builder.StartVector(1, numElems, 1) + + +def StartDataVector(builder, numElems: 
int) -> int: + return AttributeStartDataVector(builder, numElems) + + +def AttributeEnd(builder): + return builder.EndObject() + + +def End(builder): + return AttributeEnd(builder) diff --git a/streaming_data_types/fbschemas/area_detector_ad00/DType.py b/streaming_data_types/fbschemas/area_detector_ad00/DType.py new file mode 100644 index 0000000..22098af --- /dev/null +++ b/streaming_data_types/fbschemas/area_detector_ad00/DType.py @@ -0,0 +1,17 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + + +class DType(object): + int8 = 0 + uint8 = 1 + int16 = 2 + uint16 = 3 + int32 = 4 + uint32 = 5 + int64 = 6 + uint64 = 7 + float32 = 8 + float64 = 9 + c_string = 10 diff --git a/streaming_data_types/fbschemas/area_detector_ad00/__init__.py b/streaming_data_types/fbschemas/area_detector_ad00/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/streaming_data_types/fbschemas/area_detector_ad00/ad00_ADArray.py b/streaming_data_types/fbschemas/area_detector_ad00/ad00_ADArray.py new file mode 100644 index 0000000..0d32e2b --- /dev/null +++ b/streaming_data_types/fbschemas/area_detector_ad00/ad00_ADArray.py @@ -0,0 +1,252 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers +from flatbuffers.compat import import_numpy + +np = import_numpy() + + +class ad00_ADArray(object): + __slots__ = ["_tab"] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = ad00_ADArray() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsad00_ADArray(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + + @classmethod + def ad00_ADArrayBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier( + buf, offset, b"\x61\x64\x30\x30", size_prefixed=size_prefixed + ) + + # ad00_ADArray + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # ad00_ADArray + def SourceName(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + # ad00_ADArray + def Id(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) + return 0 + + # ad00_ADArray + def Timestamp(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int64Flags, o + self._tab.Pos) + return 0 + + # ad00_ADArray + def Dimensions(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get( + flatbuffers.number_types.Int64Flags, + a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8), + ) + return 0 + + # ad00_ADArray + def DimensionsAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int64Flags, o) + return 0 + + # ad00_ADArray + def DimensionsLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # ad00_ADArray + def DimensionsIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + return o == 0 + + # ad00_ADArray + def DataType(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + if o != 0: + return 
self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) + return 0 + + # ad00_ADArray + def Data(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get( + flatbuffers.number_types.Uint8Flags, + a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1), + ) + return 0 + + # ad00_ADArray + def DataAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint8Flags, o) + return 0 + + # ad00_ADArray + def DataLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # ad00_ADArray + def DataIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) + return o == 0 + + # ad00_ADArray + def Attributes(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16)) + if o != 0: + x = self._tab.Vector(o) + x += flatbuffers.number_types.UOffsetTFlags.py_type(j) * 4 + x = self._tab.Indirect(x) + from .Attribute import Attribute + + obj = Attribute() + obj.Init(self._tab.Bytes, x) + return obj + return None + + # ad00_ADArray + def AttributesLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # ad00_ADArray + def AttributesIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16)) + return o == 0 + + +def ad00_ADArrayStart(builder): + builder.StartObject(7) + + +def Start(builder): + ad00_ADArrayStart(builder) + + +def ad00_ADArrayAddSourceName(builder, sourceName): + builder.PrependUOffsetTRelativeSlot( + 0, flatbuffers.number_types.UOffsetTFlags.py_type(sourceName), 0 + ) + + +def AddSourceName(builder: flatbuffers.Builder, sourceName: int): + ad00_ADArrayAddSourceName(builder, sourceName) + + +def 
ad00_ADArrayAddId(builder, id): + builder.PrependInt32Slot(1, id, 0) + + +def AddId(builder: flatbuffers.Builder, id: int): + ad00_ADArrayAddId(builder, id) + + +def ad00_ADArrayAddTimestamp(builder, timestamp): + builder.PrependInt64Slot(2, timestamp, 0) + + +def AddTimestamp(builder: flatbuffers.Builder, timestamp: int): + ad00_ADArrayAddTimestamp(builder, timestamp) + + +def ad00_ADArrayAddDimensions(builder, dimensions): + builder.PrependUOffsetTRelativeSlot( + 3, flatbuffers.number_types.UOffsetTFlags.py_type(dimensions), 0 + ) + + +def AddDimensions(builder: flatbuffers.Builder, dimensions: int): + ad00_ADArrayAddDimensions(builder, dimensions) + + +def ad00_ADArrayStartDimensionsVector(builder, numElems): + return builder.StartVector(8, numElems, 8) + + +def StartDimensionsVector(builder, numElems: int) -> int: + return ad00_ADArrayStartDimensionsVector(builder, numElems) + + +def ad00_ADArrayAddDataType(builder, dataType): + builder.PrependInt8Slot(4, dataType, 0) + + +def AddDataType(builder: flatbuffers.Builder, dataType: int): + ad00_ADArrayAddDataType(builder, dataType) + + +def ad00_ADArrayAddData(builder, data): + builder.PrependUOffsetTRelativeSlot( + 5, flatbuffers.number_types.UOffsetTFlags.py_type(data), 0 + ) + + +def AddData(builder: flatbuffers.Builder, data: int): + ad00_ADArrayAddData(builder, data) + + +def ad00_ADArrayStartDataVector(builder, numElems): + return builder.StartVector(1, numElems, 1) + + +def StartDataVector(builder, numElems: int) -> int: + return ad00_ADArrayStartDataVector(builder, numElems) + + +def ad00_ADArrayAddAttributes(builder, attributes): + builder.PrependUOffsetTRelativeSlot( + 6, flatbuffers.number_types.UOffsetTFlags.py_type(attributes), 0 + ) + + +def AddAttributes(builder: flatbuffers.Builder, attributes: int): + ad00_ADArrayAddAttributes(builder, attributes) + + +def ad00_ADArrayStartAttributesVector(builder, numElems): + return builder.StartVector(4, numElems, 4) + + +def StartAttributesVector(builder, 
numElems: int) -> int: + return ad00_ADArrayStartAttributesVector(builder, numElems) + + +def ad00_ADArrayEnd(builder): + return builder.EndObject() + + +def End(builder): + return ad00_ADArrayEnd(builder) diff --git a/tests/test_ad00.py b/tests/test_ad00.py new file mode 100644 index 0000000..cde2658 --- /dev/null +++ b/tests/test_ad00.py @@ -0,0 +1,105 @@ +import time + +import numpy as np +import pytest + +from streaming_data_types import DESERIALISERS, SERIALISERS +from streaming_data_types.area_detector_ad00 import ( + Attribute, + deserialise_ad00, + serialise_ad00, +) +from streaming_data_types.exceptions import WrongSchemaException + + +class TestSerialisationAD00: + def test_serialises_and_deserialises_ad00_int_array(self): + """ + Round-trip to check what we serialise is what we get back. + """ + original_entry = { + "source_name": "some source name", + "unique_id": 754, + "data": np.array([[1, 2, 3], [3, 4, 5]], dtype=np.uint64), + "timestamp_ns": time.time_ns(), + "attributes": [ + Attribute("name1", "desc1", "src1", "value"), + Attribute("name2", "desc2", "src2", 11), + Attribute("name3", "desc3", "src3", 3.14), + Attribute("name4", "desc4", "src4", np.linspace(0, 10)), + ], + } + + buf = serialise_ad00(**original_entry) + entry = deserialise_ad00(buf) + + assert entry.unique_id == original_entry["unique_id"] + assert entry.source_name == original_entry["source_name"] + assert entry.timestamp_ns == original_entry["timestamp_ns"] + assert np.array_equal(entry.dimensions, original_entry["data"].shape) + assert np.array_equal(entry.data.shape, entry.dimensions) # Sanity check + assert np.array_equal(entry.data, original_entry["data"]) + assert entry.data.dtype == original_entry["data"].dtype + assert len(entry.attributes) == len(original_entry["attributes"]) + for i in range(len(entry.attributes)): + assert entry.attributes[i] == original_entry["attributes"][i] + + def test_serialises_and_deserialises_ad00_float_array(self): + """ + Round-trip to check 
what we serialise is what we get back. + """ + original_entry = { + "source_name": "some other source name", + "unique_id": 789679, + "data": np.array([[1.1, 2.2, 3.3], [4.4, 5.5, 6.6]], dtype=np.float32), + "timestamp_ns": time.time_ns(), + } + + buf = serialise_ad00(**original_entry) + entry = deserialise_ad00(buf) + + assert entry.unique_id == original_entry["unique_id"] + assert entry.source_name == original_entry["source_name"] + assert entry.timestamp_ns == original_entry["timestamp_ns"] + assert np.array_equal(entry.data, original_entry["data"]) + assert entry.data.dtype == original_entry["data"].dtype + + def test_serialises_and_deserialises_ad00_string(self): + """ + Round-trip to check what we serialise is what we get back. + """ + original_entry = { + "source_name": "some source name", + "unique_id": 754, + "data": "hi, this is a string", + "timestamp_ns": time.time_ns(), + } + + buf = serialise_ad00(**original_entry) + entry = deserialise_ad00(buf) + + assert entry.unique_id == original_entry["unique_id"] + assert entry.source_name == original_entry["source_name"] + assert entry.timestamp_ns == original_entry["timestamp_ns"] + assert entry.data == original_entry["data"] + + def test_if_buffer_has_wrong_id_then_throws(self): + original_entry = { + "source_name": "some source name", + "unique_id": 754, + "data": np.array([[1, 2, 3], [3, 4, 5]], dtype=np.uint64), + "timestamp_ns": time.time_ns(), + } + + buf = serialise_ad00(**original_entry) + + # Manually hack the id + buf = bytearray(buf) + buf[4:8] = b"1234" + + with pytest.raises(WrongSchemaException): + deserialise_ad00(buf) + + def test_schema_type_is_in_global_serialisers_list(self): + assert "ad00" in SERIALISERS + assert "ad00" in DESERIALISERS From bff89dc5d762ae69341ba6d2ec750bc0c5f3ac43 Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Fri, 19 Apr 2024 10:30:11 +0200 Subject: [PATCH 321/363] remove unneeded import (#96) --- streaming_data_types/logdata_f144.py | 3 +-- 1 file changed, 1 
insertion(+), 2 deletions(-) diff --git a/streaming_data_types/logdata_f144.py b/streaming_data_types/logdata_f144.py index 10a0b65..4df2b3c 100644 --- a/streaming_data_types/logdata_f144.py +++ b/streaming_data_types/logdata_f144.py @@ -1,5 +1,4 @@ from collections import namedtuple -from datetime import datetime from typing import Any, NamedTuple, Union import flatbuffers @@ -277,7 +276,7 @@ def serialise_f144( ( ("source_name", str), ("value", Any), - ("timestamp_unix_ns", datetime), + ("timestamp_unix_ns", int), ), ) From 3e3cb292804fad13dfbc50982d6ed7ac9d02b96c Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Mon, 22 Apr 2024 08:39:11 +0200 Subject: [PATCH 322/363] Add da00 dataarray (#97) * Fixed import order? * Accept formatting changes * [Refactor] rename hm01 to hm00, since that draft has been dropped * [Add] draft schema: da00 and hm00 - hm00 was designed to combine features from hs00 and ADAr, including labeled histogram axes and histogram signal errors. Ultimately its implementation is likely overly complex and too specialized. - da00 is a simplified form of hm00: all arrays are handled the same way, so that, e.g., the 'signal' histogram, its 'errors' and all of its axes vectors are stored in one list of Variable objects. In addition to making the serialise/deserialise methods simpler, this adds flexibility to what can be stored in the buffer. For example, the axes do not _need_ to be bin-edges _nor_ vectors. The hope is that these objects can be passed through HDF5/NeXus structures more-easily into scipp data structures. * [Ref.] minor changes, following recent ADAr changes * [Refactor] Use new da00 schema * Remove (writer) config-related structure from da00 * Update generated da00 flatbuffer files The da00 no longer requires `axes` names on Variables, and uses a signed integer for timestamps. These changes have no immediate impact on the python serializer and deserializer methods. 
* [Remove] dropped draft hm00 schema * [Remove] straggler hm00 test file * Update streaming_data_types/dataarray_da00.py * Update streaming_data_types/dataarray_da00.py * use unix timestamps * use np * added test for no data * remove typing that isn't supported in 3.8 * missed some typing * fixed typing for reals * Update README.md --------- Co-authored-by: Gregory Tucker --- README.md | 1 + streaming_data_types/__init__.py | 3 + streaming_data_types/dataarray_da00.py | 229 +++++++++++++++ .../fbschemas/dataarray_da00/__init__.py | 0 .../dataarray_da00/da00_DataArray.py | 128 +++++++++ .../fbschemas/dataarray_da00/da00_Variable.py | 269 ++++++++++++++++++ .../fbschemas/dataarray_da00/da00_dtype.py | 18 ++ tests/test_da00.py | 182 ++++++++++++ 8 files changed, 830 insertions(+) create mode 100644 streaming_data_types/dataarray_da00.py create mode 100644 streaming_data_types/fbschemas/dataarray_da00/__init__.py create mode 100644 streaming_data_types/fbschemas/dataarray_da00/da00_DataArray.py create mode 100644 streaming_data_types/fbschemas/dataarray_da00/da00_Variable.py create mode 100644 streaming_data_types/fbschemas/dataarray_da00/da00_dtype.py create mode 100644 tests/test_da00.py diff --git a/README.md b/README.md index 2f1cdc4..7a9f9fb 100644 --- a/README.md +++ b/README.md @@ -31,6 +31,7 @@ https://github.com/ess-dmsc/streaming-data-types | senv | **Deprecated** | | json | Generic JSON data | | se00 | Arrays with optional timestamps, for example waveform data. Replaces _senv_. | +| da00 | Scipp-like data arrays, for histograms, etc. | ### hs00 and hs01 Schema for histogram data. It is one of the more complicated to use schemas. 
diff --git a/streaming_data_types/__init__.py b/streaming_data_types/__init__.py index 8e25c0b..d85bdeb 100644 --- a/streaming_data_types/__init__.py +++ b/streaming_data_types/__init__.py @@ -5,6 +5,7 @@ from streaming_data_types.area_detector_ADAr import deserialise_ADAr, serialise_ADAr from streaming_data_types.area_detector_NDAr import deserialise_ndar, serialise_ndar from streaming_data_types.array_1d_se00 import deserialise_se00, serialise_se00 +from streaming_data_types.dataarray_da00 import deserialise_da00, serialise_da00 from streaming_data_types.epics_connection_ep01 import deserialise_ep01, serialise_ep01 from streaming_data_types.epics_connection_info_ep00 import ( deserialise_ep00, @@ -60,6 +61,7 @@ "al00": serialise_al00, "json": serialise_json, "ad00": serialise_ad00, + "da00": serialise_da00, } @@ -88,4 +90,5 @@ "al00": deserialise_al00, "json": deserialise_json, "ad00": deserialise_ad00, + "da00": deserialise_da00, } diff --git a/streaming_data_types/dataarray_da00.py b/streaming_data_types/dataarray_da00.py new file mode 100644 index 0000000..b97ebda --- /dev/null +++ b/streaming_data_types/dataarray_da00.py @@ -0,0 +1,229 @@ +from dataclasses import dataclass +from struct import pack +from typing import List, NamedTuple, Tuple, Union + +import flatbuffers +import numpy as np + +import streaming_data_types.fbschemas.dataarray_da00.da00_Variable as VariableBuffer +from streaming_data_types.fbschemas.dataarray_da00 import da00_DataArray +from streaming_data_types.fbschemas.dataarray_da00.da00_dtype import da00_dtype +from streaming_data_types.utils import check_schema_identifier + +FILE_IDENTIFIER = b"da00" + + +def get_dtype(data: Union[np.ndarray, str, float, int]): + if isinstance(data, np.ndarray): + type_map = { + np.dtype(x): d + for x, d in ( + ("int8", da00_dtype.int8), + ("int16", da00_dtype.int16), + ("int32", da00_dtype.int32), + ("int64", da00_dtype.int64), + ("uint8", da00_dtype.uint8), + ("uint16", da00_dtype.uint16), + ("uint32", 
da00_dtype.uint32), + ("uint64", da00_dtype.uint64), + ("float32", da00_dtype.float32), + ("float64", da00_dtype.float64), + ) + } + return type_map[data.dtype] + if isinstance(data, str): + return da00_dtype.c_string + if isinstance(data, float): + return da00_dtype.float64 + if isinstance(data, int): + return da00_dtype.int64 + raise RuntimeError(f"Unsupported data type {type(data)} in get_dtype") + + +def to_buffer(data: Union[np.ndarray, str, float, int]): + if isinstance(data, np.ndarray): + return data + if isinstance(data, str): + return np.frombuffer(data.encode(), np.uint8) + if isinstance(data, int): + return np.frombuffer(pack("q", data), np.uint8) + if isinstance(data, float): + return np.frombuffer(pack("d", data), np.uint8) + raise RuntimeError(f"Unsupported data type {type(data)} in to_buffer") + + +def from_buffer(fb_array) -> np.ndarray: + """Convert a flatbuffer array into the correct type""" + raw_data = fb_array.DataAsNumpy() + type_map = { + d: np.dtype(x) + for x, d in ( + ("int8", da00_dtype.int8), + ("int16", da00_dtype.int16), + ("int32", da00_dtype.int32), + ("int64", da00_dtype.int64), + ("uint8", da00_dtype.uint8), + ("uint16", da00_dtype.uint16), + ("uint32", da00_dtype.uint32), + ("uint64", da00_dtype.uint64), + ("float32", da00_dtype.float32), + ("float64", da00_dtype.float64), + ) + } + dtype = fb_array.DataType() + if da00_dtype.c_string == dtype: + return raw_data.tobytes().decode() + return raw_data.view(type_map[fb_array.DataType()]) + + +def create_optional_string(builder, string: Union[str, None]): + return None if string is None else builder.CreateString(string) + + +@dataclass +class Variable: + name: str + data: Union[np.ndarray, str] + axes: Union[List[str], None] = None + shape: Union[Tuple[int, ...], None] = None + unit: Union[str, None] = None + label: Union[str, None] = None + source: Union[str, None] = None + + def __post_init__(self): + # Calculate the shape when used, e.g., interactively + # -- but allow to read it 
back from the buffered object too + if self.axes is None: + self.axes = [] + if self.shape is None: + self.shape = to_buffer(self.data).shape + + def __eq__(self, other): + if not isinstance(other, Variable): + return False + same_data = type(self.data) == type(other.data) # noqa: E721 + if isinstance(self.data, np.ndarray): + same_data &= np.array_equal(self.data, other.data) + else: + same_data &= self.data == other.data + same_axes = len(self.axes) == len(other.axes) and all( + a == b for a, b in zip(self.axes, other.axes) + ) + return ( + same_data + and same_axes + and self.name == other.name + and self.unit == other.unit + and self.label == other.label + and self.source == other.source + and self.shape == other.shape + ) + + def pack(self, builder): + source_offset = create_optional_string(builder, self.source) + label_offset = create_optional_string(builder, self.label) + unit_offset = create_optional_string(builder, self.unit) + name_offset = builder.CreateString(self.name) + buf = to_buffer(self.data) + shape_offset = builder.CreateNumpyVector(np.asarray(buf.shape)) + data_offset = builder.CreateNumpyVector(buf.flatten().view(np.uint8)) + + temp_axes = [builder.CreateString(x) for x in self.axes] + VariableBuffer.StartAxesVector(builder, len(temp_axes)) + for dim in reversed(temp_axes): + builder.PrependUOffsetTRelative(dim) + axes_offset = builder.EndVector() + + VariableBuffer.Start(builder) + VariableBuffer.AddName(builder, name_offset) + if unit_offset is not None: + VariableBuffer.AddUnit(builder, unit_offset) + if label_offset is not None: + VariableBuffer.AddLabel(builder, label_offset) + if source_offset is not None: + VariableBuffer.AddSource(builder, source_offset) + VariableBuffer.AddDataType(builder, get_dtype(self.data)) + VariableBuffer.AddAxes(builder, axes_offset) + VariableBuffer.AddShape(builder, shape_offset) + VariableBuffer.AddData(builder, data_offset) + return VariableBuffer.End(builder) + + @classmethod + def unpack(cls, b: 
VariableBuffer): + data = from_buffer(b) + axes = [b.Axes(i).decode() for i in range(b.AxesLength())] + if len(axes): + data = data.reshape(b.ShapeAsNumpy()) + elif b.DataType() != da00_dtype.c_string and np.prod(data.shape) == 1: + data = data.item() + + unit = None if b.Unit() is None else b.Unit().decode() + label = None if b.Label() is None else b.Label().decode() + source = None if b.Source() is None else b.Source().decode() + name = b.Name().decode() + # the buffered shape is NOT the shape of the numpy array in all cases + buffered_shape = tuple(b.ShapeAsNumpy()) + return cls( + name=name, + unit=unit, + label=label, + source=source, + axes=axes, + data=data, + shape=buffered_shape, + ) + + +def insert_variable_list(starter, builder, objects: List[Variable]): + temp = [obj.pack(builder) for obj in objects] + starter(builder, len(temp)) + for obj in reversed(temp): + builder.PrependUOffsetTRelative(obj) + return builder.EndVector() + + +def serialise_da00( + source_name: str, + timestamp_ns: int, + data: List[Variable], +) -> bytes: + if not data: + raise RuntimeError("data must contain at least one Variable") + builder = flatbuffers.Builder(1024) + builder.ForceDefaults(True) + + data_offset = insert_variable_list(da00_DataArray.StartDataVector, builder, data) + source_name_offset = builder.CreateString(source_name) + + # Build the actual buffer + da00_DataArray.Start(builder) + da00_DataArray.AddSourceName(builder, source_name_offset) + da00_DataArray.AddTimestamp(builder, timestamp_ns) + da00_DataArray.AddData(builder, data_offset) + array_message = da00_DataArray.End(builder) + + builder.Finish(array_message, file_identifier=FILE_IDENTIFIER) + return bytes(builder.Output()) + + +da00_DataArray_t = NamedTuple( + "da00_DataArray", + ( + ("source_name", str), + ("timestamp_ns", int), + ("data", List[Variable]), + ), +) + + +def deserialise_da00(buffer: Union[bytearray, bytes]) -> da00_DataArray: + check_schema_identifier(buffer, FILE_IDENTIFIER) + + da00 = 
da00_DataArray.da00_DataArray.GetRootAs(buffer, offset=0) + data = [Variable.unpack(da00.Data(j)) for j in range(da00.DataLength())] + + return da00_DataArray_t( + source_name=da00.SourceName().decode(), + timestamp_ns=da00.Timestamp(), + data=data, + ) diff --git a/streaming_data_types/fbschemas/dataarray_da00/__init__.py b/streaming_data_types/fbschemas/dataarray_da00/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/streaming_data_types/fbschemas/dataarray_da00/da00_DataArray.py b/streaming_data_types/fbschemas/dataarray_da00/da00_DataArray.py new file mode 100644 index 0000000..2cad228 --- /dev/null +++ b/streaming_data_types/fbschemas/dataarray_da00/da00_DataArray.py @@ -0,0 +1,128 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers +from flatbuffers.compat import import_numpy + +np = import_numpy() + + +class da00_DataArray(object): + __slots__ = ["_tab"] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = da00_DataArray() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsda00_DataArray(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + + @classmethod + def da00_DataArrayBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier( + buf, offset, b"\x64\x61\x30\x30", size_prefixed=size_prefixed + ) + + # da00_DataArray + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # da00_DataArray + def SourceName(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + # da00_DataArray + def Timestamp(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int64Flags, o + self._tab.Pos) + return 0 + + # da00_DataArray + def Data(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + x = self._tab.Vector(o) + x += flatbuffers.number_types.UOffsetTFlags.py_type(j) * 4 + x = self._tab.Indirect(x) + from streaming_data_types.fbschemas.dataarray_da00.da00_Variable import ( + da00_Variable, + ) + + obj = da00_Variable() + obj.Init(self._tab.Bytes, x) + return obj + return None + + # da00_DataArray + def DataLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # da00_DataArray + def DataIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + return o == 0 + + +def da00_DataArrayStart(builder): + builder.StartObject(3) + + +def Start(builder): + return da00_DataArrayStart(builder) + + +def da00_DataArrayAddSourceName(builder, sourceName): + builder.PrependUOffsetTRelativeSlot( + 0, flatbuffers.number_types.UOffsetTFlags.py_type(sourceName), 0 + ) + + +def AddSourceName(builder, sourceName): + return da00_DataArrayAddSourceName(builder, sourceName) + + +def da00_DataArrayAddTimestamp(builder, timestamp): + 
builder.PrependInt64Slot(1, timestamp, 0) + + +def AddTimestamp(builder, timestamp): + return da00_DataArrayAddTimestamp(builder, timestamp) + + +def da00_DataArrayAddData(builder, data): + builder.PrependUOffsetTRelativeSlot( + 2, flatbuffers.number_types.UOffsetTFlags.py_type(data), 0 + ) + + +def AddData(builder, data): + return da00_DataArrayAddData(builder, data) + + +def da00_DataArrayStartDataVector(builder, numElems): + return builder.StartVector(4, numElems, 4) + + +def StartDataVector(builder, numElems): + return da00_DataArrayStartDataVector(builder, numElems) + + +def da00_DataArrayEnd(builder): + return builder.EndObject() + + +def End(builder): + return da00_DataArrayEnd(builder) diff --git a/streaming_data_types/fbschemas/dataarray_da00/da00_Variable.py b/streaming_data_types/fbschemas/dataarray_da00/da00_Variable.py new file mode 100644 index 0000000..1d0ad66 --- /dev/null +++ b/streaming_data_types/fbschemas/dataarray_da00/da00_Variable.py @@ -0,0 +1,269 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers +from flatbuffers.compat import import_numpy + +np = import_numpy() + + +class da00_Variable(object): + __slots__ = ["_tab"] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = da00_Variable() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsda00_Variable(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + + @classmethod + def da00_VariableBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier( + buf, offset, b"\x64\x61\x30\x30", size_prefixed=size_prefixed + ) + + # da00_Variable + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # da00_Variable + def Name(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + # da00_Variable + def Unit(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + # da00_Variable + def Label(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + # da00_Variable + def Source(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + # da00_Variable + def DataType(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) + return 0 + + # da00_Variable + def Axes(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.String( + a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4) + ) + return "" + + # da00_Variable + def AxesLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # da00_Variable + def AxesIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) + return o == 0 + + # da00_Variable + def Shape(self, j): + o = 
flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get( + flatbuffers.number_types.Int64Flags, + a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8), + ) + return 0 + + # da00_Variable + def ShapeAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int64Flags, o) + return 0 + + # da00_Variable + def ShapeLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # da00_Variable + def ShapeIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16)) + return o == 0 + + # da00_Variable + def Data(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(18)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get( + flatbuffers.number_types.Uint8Flags, + a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1), + ) + return 0 + + # da00_Variable + def DataAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(18)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint8Flags, o) + return 0 + + # da00_Variable + def DataLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(18)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # da00_Variable + def DataIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(18)) + return o == 0 + + +def da00_VariableStart(builder): + builder.StartObject(8) + + +def Start(builder): + return da00_VariableStart(builder) + + +def da00_VariableAddName(builder, name): + builder.PrependUOffsetTRelativeSlot( + 0, flatbuffers.number_types.UOffsetTFlags.py_type(name), 0 + ) + + +def AddName(builder, name): + return da00_VariableAddName(builder, name) + + +def da00_VariableAddUnit(builder, 
unit): + builder.PrependUOffsetTRelativeSlot( + 1, flatbuffers.number_types.UOffsetTFlags.py_type(unit), 0 + ) + + +def AddUnit(builder, unit): + return da00_VariableAddUnit(builder, unit) + + +def da00_VariableAddLabel(builder, label): + builder.PrependUOffsetTRelativeSlot( + 2, flatbuffers.number_types.UOffsetTFlags.py_type(label), 0 + ) + + +def AddLabel(builder, label): + return da00_VariableAddLabel(builder, label) + + +def da00_VariableAddSource(builder, source): + builder.PrependUOffsetTRelativeSlot( + 3, flatbuffers.number_types.UOffsetTFlags.py_type(source), 0 + ) + + +def AddSource(builder, source): + return da00_VariableAddSource(builder, source) + + +def da00_VariableAddDataType(builder, dataType): + builder.PrependInt8Slot(4, dataType, 0) + + +def AddDataType(builder, dataType): + return da00_VariableAddDataType(builder, dataType) + + +def da00_VariableAddAxes(builder, axes): + builder.PrependUOffsetTRelativeSlot( + 5, flatbuffers.number_types.UOffsetTFlags.py_type(axes), 0 + ) + + +def AddAxes(builder, axes): + return da00_VariableAddAxes(builder, axes) + + +def da00_VariableStartAxesVector(builder, numElems): + return builder.StartVector(4, numElems, 4) + + +def StartAxesVector(builder, numElems): + return da00_VariableStartAxesVector(builder, numElems) + + +def da00_VariableAddShape(builder, shape): + builder.PrependUOffsetTRelativeSlot( + 6, flatbuffers.number_types.UOffsetTFlags.py_type(shape), 0 + ) + + +def AddShape(builder, shape): + return da00_VariableAddShape(builder, shape) + + +def da00_VariableStartShapeVector(builder, numElems): + return builder.StartVector(8, numElems, 8) + + +def StartShapeVector(builder, numElems): + return da00_VariableStartShapeVector(builder, numElems) + + +def da00_VariableAddData(builder, data): + builder.PrependUOffsetTRelativeSlot( + 7, flatbuffers.number_types.UOffsetTFlags.py_type(data), 0 + ) + + +def AddData(builder, data): + return da00_VariableAddData(builder, data) + + +def 
da00_VariableStartDataVector(builder, numElems): + return builder.StartVector(1, numElems, 1) + + +def StartDataVector(builder, numElems): + return da00_VariableStartDataVector(builder, numElems) + + +def da00_VariableEnd(builder): + return builder.EndObject() + + +def End(builder): + return da00_VariableEnd(builder) diff --git a/streaming_data_types/fbschemas/dataarray_da00/da00_dtype.py b/streaming_data_types/fbschemas/dataarray_da00/da00_dtype.py new file mode 100644 index 0000000..1caf3fe --- /dev/null +++ b/streaming_data_types/fbschemas/dataarray_da00/da00_dtype.py @@ -0,0 +1,18 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + + +class da00_dtype(object): + none = 0 + int8 = 1 + uint8 = 2 + int16 = 3 + uint16 = 4 + int32 = 5 + uint32 = 6 + int64 = 7 + uint64 = 8 + float32 = 9 + float64 = 10 + c_string = 11 diff --git a/tests/test_da00.py b/tests/test_da00.py new file mode 100644 index 0000000..64ca8c4 --- /dev/null +++ b/tests/test_da00.py @@ -0,0 +1,182 @@ +import time + +import numpy as np +import pytest + +from streaming_data_types import DESERIALISERS, SERIALISERS +from streaming_data_types.dataarray_da00 import ( + Variable, + deserialise_da00, + serialise_da00, +) +from streaming_data_types.exceptions import WrongSchemaException + + +def test_serialises_and_deserialises_da00_int_array(): + """ + Round-trip to check what we serialise is what we get back. 
+ """ + original_entry = { + "source_name": "some source name", + "timestamp_ns": time.time_ns(), + "data": [ + Variable( + name="data", + unit="counts", + axes=["time", "x", "y"], + data=np.array([[[1, 2, 3], [3, 4, 5]]], dtype=np.uint64), + ), + Variable( + name="time", + unit="hours", + label="elapsed clock time", + axes=["time"], + data=np.array([13, 21], dtype=np.float32), + ), + Variable( + name="x", + unit="m", + label="Position", + axes=["x"], + data=np.array([-1, 0, 1], dtype=np.float32), + ), + Variable( + name="y", + unit="m", + label="Position", + axes=["y"], + data=np.array([0, 2, 4, 6], dtype=np.float32), + ), + Variable(name="name1", data="value", label="desc1", source="src1"), + Variable(name="name2", data=11, label="desc2", source="src2"), + Variable(name="name3", data=3.14, label="desc3", source="src3"), + Variable( + name="name4", data=np.linspace(0, 10), label="desc4", source="src4" + ), + Variable( + name="name5", + data=np.array([[1, 2], [3, 4]]), + axes=["a", "b"], + label="desc5", + source="src5", + ), + ], + } + + buf = serialise_da00(**original_entry) + entry = deserialise_da00(buf) + + assert entry.source_name == original_entry["source_name"] + assert entry.timestamp_ns == original_entry["timestamp_ns"] + assert len(entry.data) == len(original_entry["data"]) + for a, b in zip(entry.data, original_entry["data"]): + assert a == b + + +def test_serialises_and_deserialises_da00_float_array(): + """ + Round-trip to check what we serialise is what we get back. 
+ """ + original_entry = { + "source_name": "some other source name", + "data": [ + Variable( + name="data", + axes=["x", "time", "y"], + data=np.array([[[1.1, 2.2, 3.3]], [[4.4, 5.5, 6.6]]], dtype=np.float32), + ), + Variable( + name="errors", axes=["y"], data=np.array([1, 2, 3], dtype=np.int8) + ), + Variable( + name="y", + unit="m", + label="Position", + axes=["y"], + data=np.array([0, 2, 4, 6], dtype=np.float64), + ), + Variable( + name="time", + unit="hours", + label="elapsed clock time", + axes=["time"], + data=np.array([13, 21], dtype=np.uint32), + ), + Variable( + name="x", + unit="m", + label="Position", + axes=["x"], + data=np.array([-1, 0, 1], dtype=np.int8), + ), + ], + "timestamp_ns": time.time_ns(), + } + + buf = serialise_da00(**original_entry) + entry = deserialise_da00(buf) + + assert entry.source_name == original_entry["source_name"] + assert entry.timestamp_ns == original_entry["timestamp_ns"] + assert len(entry.data) == len(original_entry["data"]) + for a, b in zip(entry.data, original_entry["data"]): + assert a == b + + +def test_serialises_and_deserialises_da00_string(): + """ + Round-trip to check what we serialise is what we get back. 
+ """ + original_entry = { + "source_name": "some source name", + "data": [Variable(data="hi, this is a string", axes=[], name="the_string")], + "timestamp_ns": time.time_ns(), + } + + buf = serialise_da00(**original_entry) + entry = deserialise_da00(buf) + + assert entry.source_name == original_entry["source_name"] + assert entry.timestamp_ns == original_entry["timestamp_ns"] + assert len(entry.data) == len(original_entry["data"]) + for a, b in zip(entry.data, original_entry["data"]): + assert a == b + + +def test_no_variables_throws(): + original_entry = { + "source_name": "some source name", + "data": [], + "timestamp_ns": time.time_ns(), + } + + with pytest.raises(RuntimeError): + serialise_da00(**original_entry) + + +def test_if_buffer_has_wrong_id_then_throws(): + original_entry = { + "source_name": "some source name", + "data": [ + Variable( + name="data", + axes=["x", "y"], + data=np.array([[1, 2, 3], [3, 4, 5]], dtype=np.uint64), + ) + ], + "timestamp_ns": time.time_ns(), + } + + buf = serialise_da00(**original_entry) + + # Manually hack the id + buf = bytearray(buf) + buf[4:8] = b"1234" + + with pytest.raises(WrongSchemaException): + deserialise_da00(buf) + + +def test_da00_schema_type_is_in_global_serialisers_list(): + assert "da00" in SERIALISERS + assert "da00" in DESERIALISERS From 5a48070e163eb8a64d15c12bd53e70a0a5da2a96 Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Mon, 22 Apr 2024 08:41:03 +0200 Subject: [PATCH 323/363] bump version --- streaming_data_types/_version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/streaming_data_types/_version.py b/streaming_data_types/_version.py index 3ab01a1..9f1f4cd 100644 --- a/streaming_data_types/_version.py +++ b/streaming_data_types/_version.py @@ -1,4 +1,4 @@ # Version is not directly defined in __init__ because that causes all # run time dependencies to become build-time dependencies when it is # imported in setup.py -version = "0.24.0" +version = "0.25.0" From 
6aba8c185a506ccdb28b02b10251e4d1e5cc69eb Mon Sep 17 00:00:00 2001 From: Jonas <43351280+hurvan@users.noreply.github.com> Date: Mon, 27 May 2024 11:24:32 +0200 Subject: [PATCH 324/363] new forwarder config schema (#98) * add fc00 schema for forwarder configuration * format --- README.md | 5 +- streaming_data_types/__init__.py | 6 + .../forwarder_config_update_fc00/Protocol.py | 8 ++ .../forwarder_config_update_fc00/Stream.py | 86 +++++++++++++ .../UpdateType.py | 9 ++ .../forwarder_config_update_fc00/__init__.py | 0 .../fc00_ConfigUpdate.py | 77 ++++++++++++ .../forwarder_config_update_fc00.py | 119 ++++++++++++++++++ tests/test_fc00.py | 61 +++++++++ 9 files changed, 369 insertions(+), 2 deletions(-) create mode 100644 streaming_data_types/fbschemas/forwarder_config_update_fc00/Protocol.py create mode 100644 streaming_data_types/fbschemas/forwarder_config_update_fc00/Stream.py create mode 100644 streaming_data_types/fbschemas/forwarder_config_update_fc00/UpdateType.py create mode 100644 streaming_data_types/fbschemas/forwarder_config_update_fc00/__init__.py create mode 100644 streaming_data_types/fbschemas/forwarder_config_update_fc00/fc00_ConfigUpdate.py create mode 100644 streaming_data_types/forwarder_config_update_fc00.py create mode 100644 tests/test_fc00.py diff --git a/README.md b/README.md index 7a9f9fb..3332453 100644 --- a/README.md +++ b/README.md @@ -22,7 +22,8 @@ https://github.com/ess-dmsc/streaming-data-types | tdct | Timestamps | | ep00 | EPICS connection info (deprecated in favour of ep01) | | ep01 | EPICS connection info | -| rf5k | Forwarder configuration update | +| rf5k | Forwarder configuration update (deprecated in favour of fc00) | +| fc00 | Forwarder configuration update | | answ | File-writer command response | | wrdn | File-writer finished writing | | NDAr | **Deprecated** | @@ -31,7 +32,7 @@ https://github.com/ess-dmsc/streaming-data-types | senv | **Deprecated** | | json | Generic JSON data | | se00 | Arrays with optional timestamps, 
for example waveform data. Replaces _senv_. | -| da00 | Scipp-like data arrays, for histograms, etc. | +| da00 | Scipp-like data arrays, for histograms, etc. | ### hs00 and hs01 Schema for histogram data. It is one of the more complicated to use schemas. diff --git a/streaming_data_types/__init__.py b/streaming_data_types/__init__.py index d85bdeb..9f33376 100644 --- a/streaming_data_types/__init__.py +++ b/streaming_data_types/__init__.py @@ -19,6 +19,10 @@ deserialise_rf5k, serialise_rf5k, ) +from streaming_data_types.forwarder_config_update_fc00 import ( + deserialise_fc00, + serialise_fc00, +) from streaming_data_types.histogram_hs00 import deserialise_hs00, serialise_hs00 from streaming_data_types.histogram_hs01 import deserialise_hs01, serialise_hs01 from streaming_data_types.json_json import deserialise_json, serialise_json @@ -52,6 +56,7 @@ "ep01": serialise_ep01, "tdct": serialise_tdct, "rf5k": serialise_rf5k, + "fc00": serialise_fc00, "answ": serialise_answ, "wrdn": serialise_wrdn, "NDAr": serialise_ndar, @@ -81,6 +86,7 @@ "ep01": deserialise_ep01, "tdct": deserialise_tdct, "rf5k": deserialise_rf5k, + "fc00": deserialise_fc00, "answ": deserialise_answ, "wrdn": deserialise_wrdn, "NDAr": deserialise_ndar, diff --git a/streaming_data_types/fbschemas/forwarder_config_update_fc00/Protocol.py b/streaming_data_types/fbschemas/forwarder_config_update_fc00/Protocol.py new file mode 100644 index 0000000..d3260a6 --- /dev/null +++ b/streaming_data_types/fbschemas/forwarder_config_update_fc00/Protocol.py @@ -0,0 +1,8 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +class Protocol(object): + PVA = 0 + CA = 1 + FAKE = 2 diff --git a/streaming_data_types/fbschemas/forwarder_config_update_fc00/Stream.py b/streaming_data_types/fbschemas/forwarder_config_update_fc00/Stream.py new file mode 100644 index 0000000..642420c --- /dev/null +++ b/streaming_data_types/fbschemas/forwarder_config_update_fc00/Stream.py @@ -0,0 +1,86 @@ +# 
automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers +from flatbuffers.compat import import_numpy +np = import_numpy() + +class Stream(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = Stream() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsStream(cls, buf, offset=0): + """This method is deprecated. Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def StreamBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x66\x63\x30\x30", size_prefixed=size_prefixed) + + # Stream + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # Stream + def Channel(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + # Stream + def Schema(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + # Stream + def Topic(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + # Stream + def Protocol(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Uint16Flags, o + self._tab.Pos) + return 0 + + # Stream + def Periodic(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) + return 0 + +def StreamStart(builder): builder.StartObject(5) +def Start(builder): + return StreamStart(builder) +def StreamAddChannel(builder, channel): builder.PrependUOffsetTRelativeSlot(0, 
flatbuffers.number_types.UOffsetTFlags.py_type(channel), 0) +def AddChannel(builder, channel): + return StreamAddChannel(builder, channel) +def StreamAddSchema(builder, schema): builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(schema), 0) +def AddSchema(builder, schema): + return StreamAddSchema(builder, schema) +def StreamAddTopic(builder, topic): builder.PrependUOffsetTRelativeSlot(2, flatbuffers.number_types.UOffsetTFlags.py_type(topic), 0) +def AddTopic(builder, topic): + return StreamAddTopic(builder, topic) +def StreamAddProtocol(builder, protocol): builder.PrependUint16Slot(3, protocol, 0) +def AddProtocol(builder, protocol): + return StreamAddProtocol(builder, protocol) +def StreamAddPeriodic(builder, periodic): builder.PrependInt32Slot(4, periodic, 0) +def AddPeriodic(builder, periodic): + return StreamAddPeriodic(builder, periodic) +def StreamEnd(builder): return builder.EndObject() +def End(builder): + return StreamEnd(builder) \ No newline at end of file diff --git a/streaming_data_types/fbschemas/forwarder_config_update_fc00/UpdateType.py b/streaming_data_types/fbschemas/forwarder_config_update_fc00/UpdateType.py new file mode 100644 index 0000000..b349dbb --- /dev/null +++ b/streaming_data_types/fbschemas/forwarder_config_update_fc00/UpdateType.py @@ -0,0 +1,9 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +class UpdateType(object): + ADD = 0 + REMOVE = 1 + REMOVEALL = 2 + REPLACE = 3 diff --git a/streaming_data_types/fbschemas/forwarder_config_update_fc00/__init__.py b/streaming_data_types/fbschemas/forwarder_config_update_fc00/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/streaming_data_types/fbschemas/forwarder_config_update_fc00/fc00_ConfigUpdate.py b/streaming_data_types/fbschemas/forwarder_config_update_fc00/fc00_ConfigUpdate.py new file mode 100644 index 0000000..dbc6f6c --- /dev/null +++ 
b/streaming_data_types/fbschemas/forwarder_config_update_fc00/fc00_ConfigUpdate.py @@ -0,0 +1,77 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers +from flatbuffers.compat import import_numpy +np = import_numpy() + +class fc00_ConfigUpdate(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = fc00_ConfigUpdate() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsfc00_ConfigUpdate(cls, buf, offset=0): + """This method is deprecated. Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def fc00_ConfigUpdateBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x66\x63\x30\x30", size_prefixed=size_prefixed) + + # fc00_ConfigUpdate + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # fc00_ConfigUpdate + def ConfigChange(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Uint16Flags, o + self._tab.Pos) + return 0 + + # fc00_ConfigUpdate + def Streams(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + x = self._tab.Vector(o) + x += flatbuffers.number_types.UOffsetTFlags.py_type(j) * 4 + x = self._tab.Indirect(x) + from .Stream import Stream + obj = Stream() + obj.Init(self._tab.Bytes, x) + return obj + return None + + # fc00_ConfigUpdate + def StreamsLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # fc00_ConfigUpdate + def StreamsIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + return o == 0 + +def fc00_ConfigUpdateStart(builder): builder.StartObject(2) +def Start(builder): + return 
fc00_ConfigUpdateStart(builder) +def fc00_ConfigUpdateAddConfigChange(builder, configChange): builder.PrependUint16Slot(0, configChange, 0) +def AddConfigChange(builder, configChange): + return fc00_ConfigUpdateAddConfigChange(builder, configChange) +def fc00_ConfigUpdateAddStreams(builder, streams): builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(streams), 0) +def AddStreams(builder, streams): + return fc00_ConfigUpdateAddStreams(builder, streams) +def fc00_ConfigUpdateStartStreamsVector(builder, numElems): return builder.StartVector(4, numElems, 4) +def StartStreamsVector(builder, numElems): + return fc00_ConfigUpdateStartStreamsVector(builder, numElems) +def fc00_ConfigUpdateEnd(builder): return builder.EndObject() +def End(builder): + return fc00_ConfigUpdateEnd(builder) \ No newline at end of file diff --git a/streaming_data_types/forwarder_config_update_fc00.py b/streaming_data_types/forwarder_config_update_fc00.py new file mode 100644 index 0000000..8d1ff84 --- /dev/null +++ b/streaming_data_types/forwarder_config_update_fc00.py @@ -0,0 +1,119 @@ +from collections import namedtuple +from typing import List, Union + +import flatbuffers +from flatbuffers.packer import struct as flatbuffer_struct + +from streaming_data_types.fbschemas.forwarder_config_update_fc00 import ( + Protocol, + Stream, + UpdateType, + fc00_ConfigUpdate, +) +from streaming_data_types.utils import check_schema_identifier + +FILE_IDENTIFIER = b"fc00" + +ConfigurationUpdate = namedtuple("ConfigurationUpdate", ("config_change", "streams")) + +StreamInfo = namedtuple( + "StreamInfo", ("channel", "schema", "topic", "protocol", "periodic") +) + + +def deserialise_fc00(buffer: Union[bytearray, bytes]) -> ConfigurationUpdate: + """ + Deserialise FlatBuffer fc00. + + :param buffer: The FlatBuffers buffer. + :return: The deserialised data. 
+ """ + check_schema_identifier(buffer, FILE_IDENTIFIER) + + config_message = fc00_ConfigUpdate.fc00_ConfigUpdate.GetRootAsfc00_ConfigUpdate( + buffer, 0 + ) + + streams = [] + try: + for i in range(config_message.StreamsLength()): + stream_message = config_message.Streams(i) + streams.append( + StreamInfo( + stream_message.Channel().decode("utf-8") + if stream_message.Channel() + else "", + stream_message.Schema().decode("utf-8") + if stream_message.Schema() + else "", + stream_message.Topic().decode("utf-8") + if stream_message.Topic() + else "", + stream_message.Protocol(), + int(stream_message.Periodic().decode("utf-8")) + if stream_message.Periodic() + else 0, + ) + ) + except flatbuffer_struct.error: + pass # No streams in buffer + + return ConfigurationUpdate(config_message.ConfigChange(), streams) + + +def serialise_stream( + builder: flatbuffers.Builder, + protocol: Protocol, + channel_offset: int, + schema_offset: int, + topic_offset: int, +) -> int: + Stream.StreamStart(builder) + Stream.StreamAddProtocol(builder, protocol) + Stream.StreamAddTopic(builder, topic_offset) + Stream.StreamAddSchema(builder, schema_offset) + Stream.StreamAddChannel(builder, channel_offset) + return Stream.StreamEnd(builder) + + +def serialise_fc00(config_change: UpdateType, streams: List[StreamInfo]) -> bytes: + """ + Serialise config update message as an fc00 FlatBuffers message. 
+ + :param config_change: + :param streams: channel, schema and output topic configurations + :return: + """ + builder = flatbuffers.Builder(1024) + builder.ForceDefaults(True) + + if streams: + # We have to use multiple loops/list comprehensions here because we cannot create strings after we have + # called StreamStart and cannot create streams after we have called StartVector + stream_field_offsets = [ + ( + builder.CreateString(stream.channel), + builder.CreateString(stream.schema), + builder.CreateString(stream.topic), + ) + for stream in streams + ] + stream_offsets = [ + serialise_stream(builder, stream.protocol, *stream_fields) + for stream, stream_fields in zip(streams, stream_field_offsets) + ] + + fc00_ConfigUpdate.fc00_ConfigUpdateStartStreamsVector(builder, len(streams)) + for stream_offset in stream_offsets: + builder.PrependUOffsetTRelative(stream_offset) + streams_offset = builder.EndVector() + + # Build the actual buffer + fc00_ConfigUpdate.fc00_ConfigUpdateStart(builder) + if streams: + fc00_ConfigUpdate.fc00_ConfigUpdateAddStreams(builder, streams_offset) + fc00_ConfigUpdate.fc00_ConfigUpdateAddConfigChange(builder, config_change) + data = fc00_ConfigUpdate.fc00_ConfigUpdateEnd(builder) + + builder.Finish(data, file_identifier=FILE_IDENTIFIER) + return bytes(builder.Output()) diff --git a/tests/test_fc00.py b/tests/test_fc00.py new file mode 100644 index 0000000..4ce5abc --- /dev/null +++ b/tests/test_fc00.py @@ -0,0 +1,61 @@ +import pytest + +from streaming_data_types import DESERIALISERS, SERIALISERS +from streaming_data_types.exceptions import WrongSchemaException +from streaming_data_types.fbschemas.forwarder_config_update_fc00.UpdateType import ( + UpdateType, +) +from streaming_data_types.forwarder_config_update_fc00 import ( + Protocol, + StreamInfo, + deserialise_fc00, + serialise_fc00, +) + + +class TestSerialisationRf5k: + def test_serialises_and_deserialises_fc00_message_with_streams_correctly(self): + """ + Round-trip to check what we 
serialise is what we get back. + """ + stream_1 = StreamInfo("channel1", "f144", "topic1", Protocol.Protocol.PVA, 0) + stream_2 = StreamInfo("channel2", "TdcTime", "topic2", Protocol.Protocol.CA, 0) + stream_3 = StreamInfo("channel3", "f144", "topic3", Protocol.Protocol.PVA, 1) + original_entry = { + "config_change": UpdateType.ADD, + "streams": [stream_1, stream_2, stream_3], + } + + buf = serialise_fc00(**original_entry) + entry = deserialise_fc00(buf) + + assert entry.config_change == original_entry["config_change"] + assert stream_1 in entry.streams + assert stream_2 in entry.streams + + def test_serialises_and_deserialises_fc00_message_without_streams_correctly(self): + """ + Round-trip to check what we serialise is what we get back. + """ + original_entry = {"config_change": UpdateType.REMOVEALL, "streams": []} + + buf = serialise_fc00(**original_entry) + entry = deserialise_fc00(buf) + + assert entry.config_change == original_entry["config_change"] + + def test_if_buffer_has_wrong_id_then_throws(self): + original_entry = {"config_change": UpdateType.REMOVEALL, "streams": []} + + buf = serialise_fc00(**original_entry) + + # Manually hack the id + buf = bytearray(buf) + buf[4:8] = b"1234" + + with pytest.raises(WrongSchemaException): + deserialise_fc00(buf) + + def test_schema_type_is_in_global_serialisers_list(self): + assert "fc00" in SERIALISERS + assert "fc00" in DESERIALISERS From 8002f7c50f3725cf4b4d9f44f4528d0b05c6e27d Mon Sep 17 00:00:00 2001 From: Jonas <43351280+hurvan@users.noreply.github.com> Date: Mon, 27 May 2024 13:40:40 +0200 Subject: [PATCH 325/363] pump the version (#99) --- streaming_data_types/_version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/streaming_data_types/_version.py b/streaming_data_types/_version.py index 9f1f4cd..0416758 100644 --- a/streaming_data_types/_version.py +++ b/streaming_data_types/_version.py @@ -1,4 +1,4 @@ # Version is not directly defined in __init__ because that causes all # run time 
dependencies to become build-time dependencies when it is # imported in setup.py -version = "0.25.0" +version = "0.26.0" From 4a6a8a14a711a6ea2eb5e0187cfd507de484f73c Mon Sep 17 00:00:00 2001 From: Jonas <43351280+hurvan@users.noreply.github.com> Date: Tue, 28 May 2024 12:37:14 +0200 Subject: [PATCH 326/363] Periodic value was always default (#100) * The periodic was not actually sent as it should, only the default value was serialized. fixing test to cover this * bump version for bug fix --- streaming_data_types/_version.py | 2 +- .../forwarder_config_update_fc00.py | 32 +++++++++++-------- tests/test_fc00.py | 1 + 3 files changed, 21 insertions(+), 14 deletions(-) diff --git a/streaming_data_types/_version.py b/streaming_data_types/_version.py index 0416758..c012f41 100644 --- a/streaming_data_types/_version.py +++ b/streaming_data_types/_version.py @@ -1,4 +1,4 @@ # Version is not directly defined in __init__ because that causes all # run time dependencies to become build-time dependencies when it is # imported in setup.py -version = "0.26.0" +version = "0.26.1" diff --git a/streaming_data_types/forwarder_config_update_fc00.py b/streaming_data_types/forwarder_config_update_fc00.py index 8d1ff84..2c2d355 100644 --- a/streaming_data_types/forwarder_config_update_fc00.py +++ b/streaming_data_types/forwarder_config_update_fc00.py @@ -40,19 +40,23 @@ def deserialise_fc00(buffer: Union[bytearray, bytes]) -> ConfigurationUpdate: stream_message = config_message.Streams(i) streams.append( StreamInfo( - stream_message.Channel().decode("utf-8") - if stream_message.Channel() - else "", - stream_message.Schema().decode("utf-8") - if stream_message.Schema() - else "", - stream_message.Topic().decode("utf-8") - if stream_message.Topic() - else "", + ( + stream_message.Channel().decode("utf-8") + if stream_message.Channel() + else "" + ), + ( + stream_message.Schema().decode("utf-8") + if stream_message.Schema() + else "" + ), + ( + stream_message.Topic().decode("utf-8") + if 
stream_message.Topic() + else "" + ), stream_message.Protocol(), - int(stream_message.Periodic().decode("utf-8")) - if stream_message.Periodic() - else 0, + stream_message.Periodic() if stream_message.Periodic() else 0, ) ) except flatbuffer_struct.error: @@ -67,12 +71,14 @@ def serialise_stream( channel_offset: int, schema_offset: int, topic_offset: int, + periodic_offset: int, ) -> int: Stream.StreamStart(builder) Stream.StreamAddProtocol(builder, protocol) Stream.StreamAddTopic(builder, topic_offset) Stream.StreamAddSchema(builder, schema_offset) Stream.StreamAddChannel(builder, channel_offset) + Stream.StreamAddPeriodic(builder, periodic_offset) return Stream.StreamEnd(builder) @@ -99,7 +105,7 @@ def serialise_fc00(config_change: UpdateType, streams: List[StreamInfo]) -> byte for stream in streams ] stream_offsets = [ - serialise_stream(builder, stream.protocol, *stream_fields) + serialise_stream(builder, stream.protocol, *stream_fields, stream.periodic) for stream, stream_fields in zip(streams, stream_field_offsets) ] diff --git a/tests/test_fc00.py b/tests/test_fc00.py index 4ce5abc..49127ce 100644 --- a/tests/test_fc00.py +++ b/tests/test_fc00.py @@ -32,6 +32,7 @@ def test_serialises_and_deserialises_fc00_message_with_streams_correctly(self): assert entry.config_change == original_entry["config_change"] assert stream_1 in entry.streams assert stream_2 in entry.streams + assert stream_3 in entry.streams def test_serialises_and_deserialises_fc00_message_without_streams_correctly(self): """ From 6f460234d3be20373e3c351de0a64e57e33a0d4d Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Thu, 18 Jul 2024 10:00:43 +0200 Subject: [PATCH 327/363] pl72 defaults broker to empty string (#101) * pl72 defaults broker to empty string * pin numpy version to below 2.0 --- requirements.txt | 2 +- streaming_data_types/run_start_pl72.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements.txt b/requirements.txt index 07b351b..943ea9f 100644 --- 
a/requirements.txt +++ b/requirements.txt @@ -1,2 +1,2 @@ flatbuffers==22.11.23 -numpy +numpy<2.0.0 diff --git a/streaming_data_types/run_start_pl72.py b/streaming_data_types/run_start_pl72.py index cbb2d8a..d3f20ee 100644 --- a/streaming_data_types/run_start_pl72.py +++ b/streaming_data_types/run_start_pl72.py @@ -34,7 +34,7 @@ def serialise_pl72( nexus_structure: str = "{}", service_id: str = "", instrument_name: str = "TEST", - broker: str = "localhost:9092", + broker: str = "", metadata: str = "{}", detector_spectrum_map: Optional[DetectorSpectrumMap] = None, control_topic: str = "", From c7c879ca0f5ecda2a85d62de1ab5244066b0ff63 Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Thu, 3 Oct 2024 13:56:04 +0200 Subject: [PATCH 328/363] Remove confusing default value --- streaming_data_types/run_start_pl72.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/streaming_data_types/run_start_pl72.py b/streaming_data_types/run_start_pl72.py index d3f20ee..bcc50ab 100644 --- a/streaming_data_types/run_start_pl72.py +++ b/streaming_data_types/run_start_pl72.py @@ -33,7 +33,7 @@ def serialise_pl72( run_name: str = "test_run", nexus_structure: str = "{}", service_id: str = "", - instrument_name: str = "TEST", + instrument_name: str = "", broker: str = "", metadata: str = "{}", detector_spectrum_map: Optional[DetectorSpectrumMap] = None, From 8e5fa4768b0ca232987e6244c55b812086d6d4b3 Mon Sep 17 00:00:00 2001 From: Gregory Tucker Date: Tue, 26 Nov 2024 17:30:53 +0100 Subject: [PATCH 329/363] [Add] AR51 readout data (de)serialiser --- streaming_data_types/__init__.py | 11 +- .../readout_data_ar51/RawReadoutMessage.py | 130 ++++++++++++++++++ .../fbschemas/readout_data_ar51/__init__.py | 0 streaming_data_types/readout_data_ar51.py | 66 +++++++++ tests/test_ar51.py | 80 +++++++++++ 5 files changed, 283 insertions(+), 4 deletions(-) create mode 100644 streaming_data_types/fbschemas/readout_data_ar51/RawReadoutMessage.py create mode 100644 
streaming_data_types/fbschemas/readout_data_ar51/__init__.py create mode 100644 streaming_data_types/readout_data_ar51.py create mode 100644 tests/test_ar51.py diff --git a/streaming_data_types/__init__.py b/streaming_data_types/__init__.py index 9f33376..cd7ab8b 100644 --- a/streaming_data_types/__init__.py +++ b/streaming_data_types/__init__.py @@ -15,20 +15,21 @@ from streaming_data_types.eventdata_ev43 import deserialise_ev43, serialise_ev43 from streaming_data_types.eventdata_ev44 import deserialise_ev44, serialise_ev44 from streaming_data_types.finished_writing_wrdn import deserialise_wrdn, serialise_wrdn -from streaming_data_types.forwarder_config_update_rf5k import ( - deserialise_rf5k, - serialise_rf5k, -) from streaming_data_types.forwarder_config_update_fc00 import ( deserialise_fc00, serialise_fc00, ) +from streaming_data_types.forwarder_config_update_rf5k import ( + deserialise_rf5k, + serialise_rf5k, +) from streaming_data_types.histogram_hs00 import deserialise_hs00, serialise_hs00 from streaming_data_types.histogram_hs01 import deserialise_hs01, serialise_hs01 from streaming_data_types.json_json import deserialise_json, serialise_json from streaming_data_types.logdata_f142 import deserialise_f142, serialise_f142 from streaming_data_types.logdata_f144 import deserialise_f144, serialise_f144 from streaming_data_types.nicos_cache_ns10 import deserialise_ns10, serialise_ns10 +from streaming_data_types.readout_data_ar51 import deserialise_ar51, serialise_ar51 from streaming_data_types.run_start_pl72 import deserialise_pl72, serialise_pl72 from streaming_data_types.run_stop_6s4t import deserialise_6s4t, serialise_6s4t from streaming_data_types.sample_environment_senv import ( @@ -67,6 +68,7 @@ "json": serialise_json, "ad00": serialise_ad00, "da00": serialise_da00, + "ar51": serialise_ar51, } @@ -97,4 +99,5 @@ "json": deserialise_json, "ad00": deserialise_ad00, "da00": deserialise_da00, + "ar51": deserialise_ar51, } diff --git 
a/streaming_data_types/fbschemas/readout_data_ar51/RawReadoutMessage.py b/streaming_data_types/fbschemas/readout_data_ar51/RawReadoutMessage.py new file mode 100644 index 0000000..c91261c --- /dev/null +++ b/streaming_data_types/fbschemas/readout_data_ar51/RawReadoutMessage.py @@ -0,0 +1,130 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers +from flatbuffers.compat import import_numpy + +np = import_numpy() + + +class RawReadoutMessage(object): + __slots__ = ["_tab"] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = RawReadoutMessage() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsRawReadoutMessage(cls, buf, offset=0): + """This method is deprecated. Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + + @classmethod + def RawReadoutMessageBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier( + buf, offset, b"\x61\x72\x35\x31", size_prefixed=size_prefixed + ) + + # RawReadoutMessage + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # RawReadoutMessage + def SourceName(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + # RawReadoutMessage + def MessageId(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int64Flags, o + self._tab.Pos) + return 0 + + # RawReadoutMessage + def RawData(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get( + flatbuffers.number_types.Uint8Flags, + a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1), + ) + return 0 + + # RawReadoutMessage + def RawDataAsNumpy(self): + o = 
flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint8Flags, o) + return 0 + + # RawReadoutMessage + def RawDataLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # RawReadoutMessage + def RawDataIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + return o == 0 + + +def RawReadoutMessageStart(builder): + builder.StartObject(3) + + +def Start(builder): + RawReadoutMessageStart(builder) + + +def RawReadoutMessageAddSourceName(builder, sourceName): + builder.PrependUOffsetTRelativeSlot( + 0, flatbuffers.number_types.UOffsetTFlags.py_type(sourceName), 0 + ) + + +def AddSourceName(builder, sourceName): + RawReadoutMessageAddSourceName(builder, sourceName) + + +def RawReadoutMessageAddMessageId(builder, messageId): + builder.PrependInt64Slot(1, messageId, 0) + + +def AddMessageId(builder, messageId): + RawReadoutMessageAddMessageId(builder, messageId) + + +def RawReadoutMessageAddRawData(builder, rawData): + builder.PrependUOffsetTRelativeSlot( + 2, flatbuffers.number_types.UOffsetTFlags.py_type(rawData), 0 + ) + + +def AddRawData(builder, rawData): + RawReadoutMessageAddRawData(builder, rawData) + + +def RawReadoutMessageStartRawDataVector(builder, numElems): + return builder.StartVector(1, numElems, 1) + + +def StartRawDataVector(builder, numElems: int) -> int: + return RawReadoutMessageStartRawDataVector(builder, numElems) + + +def RawReadoutMessageEnd(builder): + return builder.EndObject() + + +def End(builder): + return RawReadoutMessageEnd(builder) diff --git a/streaming_data_types/fbschemas/readout_data_ar51/__init__.py b/streaming_data_types/fbschemas/readout_data_ar51/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/streaming_data_types/readout_data_ar51.py b/streaming_data_types/readout_data_ar51.py new file mode 
100644 index 0000000..bdb09f0 --- /dev/null +++ b/streaming_data_types/readout_data_ar51.py @@ -0,0 +1,66 @@ +from collections import namedtuple + +import flatbuffers +import numpy as np + +import streaming_data_types.fbschemas.readout_data_ar51.RawReadoutMessage as RawReadoutMessage +from streaming_data_types.utils import check_schema_identifier + +FILE_IDENTIFIER = b"ar51" + + +RawReadoutData = namedtuple( + "RawReadoutData", + ( + "source_name", + "message_id", + "raw_data", + ), +) + + +def deserialise_ar51(buffer): + """ + Deserialize FlatBuffer ar51. + + :param buffer: The FlatBuffers buffer. + :return: The deserialized data. + """ + check_schema_identifier(buffer, FILE_IDENTIFIER) + + event = RawReadoutMessage.RawReadoutMessage.GetRootAs(buffer, 0) + + return RawReadoutData( + event.SourceName().decode("utf-8"), + event.MessageId(), + event.RawDataAsNumpy(), + ) + + +def serialise_ar51( + source_name, + message_id, + raw_data, +): + """ + Serialize data as an ar51 FlatBuffers message. 
+ + :param source_name: + :param message_id: + :param raw_data: + :return: + """ + builder = flatbuffers.Builder(1024) + builder.ForceDefaults(True) + + source = builder.CreateString(source_name) + raw_data_data = builder.CreateNumpyVector(np.asarray(raw_data).astype(np.ubyte)) + RawReadoutMessage.RawReadoutMessageStart(builder) + RawReadoutMessage.RawReadoutMessageAddRawData(builder, raw_data_data) + RawReadoutMessage.RawReadoutMessageAddMessageId(builder, message_id) + RawReadoutMessage.RawReadoutMessageAddSourceName(builder, source) + + data = RawReadoutMessage.RawReadoutMessageEnd(builder) + builder.Finish(data, file_identifier=FILE_IDENTIFIER) + + return bytes(builder.Output()) diff --git a/tests/test_ar51.py b/tests/test_ar51.py new file mode 100644 index 0000000..532fbcc --- /dev/null +++ b/tests/test_ar51.py @@ -0,0 +1,80 @@ +import numpy as np +import pytest + +from streaming_data_types import DESERIALISERS, SERIALISERS +from streaming_data_types.exceptions import WrongSchemaException +from streaming_data_types.readout_data_ar51 import deserialise_ar51, serialise_ar51 + + +class TestSerialisationAR51: + def test_serialises_and_deserialises_ar51_message_correctly(self): + """ + Round-trip to check what we serialise is what we get back. + """ + original_entry = { + "source_name": "some_source", + "message_id": 123456, + "raw_data": bytearray( + [ + 0, + 1, + 2, + 3, + 4, + 5, + 6, + 7, + 8, + ] + ), + } + + buf = serialise_ar51(**original_entry) + entry = deserialise_ar51(buf) + + assert entry.source_name == original_entry["source_name"] + assert entry.message_id == original_entry["message_id"] + assert np.array_equal(entry.raw_data, original_entry["raw_data"]) + + def test_serialises_and_deserialises_ar51_message_correctly_for_numpy_arrays(self): + """ + Round-trip to check what we serialise is what we get back. 
+ """ + original_entry = { + "source_name": "some_source", + "message_id": 123456, + "raw_data": np.array([100, 200, 30, 40, 50, 60, 70, 80, 90]), + } + + buf = serialise_ar51(**original_entry) + entry = deserialise_ar51(buf) + + assert entry.source_name == original_entry["source_name"] + assert entry.message_id == original_entry["message_id"] + assert np.array_equal(entry.raw_data, original_entry["raw_data"]) + + def test_if_buffer_has_wrong_id_then_throws(self): + original_entry = { + "source_name": "some_source", + "message_id": 123456, + "raw_data": np.array([100, 200, 300, 400, 500, 600, 700, 800, 900]), + } + + buf = serialise_ar51(**original_entry) + + # Manually introduce error in id. + buf = bytearray(buf) + buf[4:8] = b"1234" + + with pytest.raises(WrongSchemaException): + deserialise_ar51(buf) + + def test_schema_type_is_in_global_serialisers_list(self): + assert "ar51" in SERIALISERS + assert "ar51" in DESERIALISERS + + +if __name__ == "__main__": + import unittest + + unittest.main() From 598f7d88a7a067374ca9701cbc057efafd614f40 Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Fri, 29 Nov 2024 08:16:46 +0100 Subject: [PATCH 330/363] conda isn't needed anymore (#104) --- conda/meta.yaml | 26 -------------------------- 1 file changed, 26 deletions(-) delete mode 100644 conda/meta.yaml diff --git a/conda/meta.yaml b/conda/meta.yaml deleted file mode 100644 index e320107..0000000 --- a/conda/meta.yaml +++ /dev/null @@ -1,26 +0,0 @@ -package: - name: ess-streaming-data-types - version: {{ GIT_DESCRIBE_TAG }} - -source: - path: .. - -build: - number: {{ environ.get('GIT_DESCRIBE_NUMBER', 0) }} - noarch: python - script: python -m pip install --target={{ environ.get('PREFIX') }}/lib/python/site-packages --ignore-installed --no-deps -vv . 
- -requirements: - run: - - python-flatbuffers ==1.12 - - numpy - - python >=3.6 - -test: - imports: - - streaming_data_types - -about: - home: https://github.com/ess-dmsc/python-streaming-data-types - summary: Python utilities for handling ESS streamed data - license: BSD-2-Clause From bc83325d63b3a7812dd9e9f63c8c7aec31134fe6 Mon Sep 17 00:00:00 2001 From: Matt Clarke Date: Tue, 7 Jan 2025 18:34:42 +0100 Subject: [PATCH 331/363] Add ansto ev44 alternative (#105) * added an44 * updated readme --- README.md | 1 + streaming_data_types/__init__.py | 11 +- streaming_data_types/_version.py | 2 +- streaming_data_types/eventdata_an44.py | 95 ++++++ .../eventdata_an44/AN44EventMessage.py | 322 ++++++++++++++++++ .../fbschemas/eventdata_an44/__init__.py | 0 tests/test_an44.py | 118 +++++++ 7 files changed, 544 insertions(+), 5 deletions(-) create mode 100644 streaming_data_types/eventdata_an44.py create mode 100644 streaming_data_types/fbschemas/eventdata_an44/AN44EventMessage.py create mode 100644 streaming_data_types/fbschemas/eventdata_an44/__init__.py create mode 100644 tests/test_an44.py diff --git a/README.md b/README.md index 3332453..00437df 100644 --- a/README.md +++ b/README.md @@ -18,6 +18,7 @@ https://github.com/ess-dmsc/streaming-data-types | ev42 | Event data (deprecated in favour of ev44) | | ev43 | Event data from multiple pulses | | ev44 | Event data with signed data types | +| an44 | ev44 with ANSTO specific changes | | x5f2 | Status messages | | tdct | Timestamps | | ep00 | EPICS connection info (deprecated in favour of ep01) | diff --git a/streaming_data_types/__init__.py b/streaming_data_types/__init__.py index 9f33376..b2624ae 100644 --- a/streaming_data_types/__init__.py +++ b/streaming_data_types/__init__.py @@ -11,18 +11,19 @@ deserialise_ep00, serialise_ep00, ) +from streaming_data_types.eventdata_an44 import deserialise_an44, serialise_an44 from streaming_data_types.eventdata_ev42 import deserialise_ev42, serialise_ev42 from 
streaming_data_types.eventdata_ev43 import deserialise_ev43, serialise_ev43 from streaming_data_types.eventdata_ev44 import deserialise_ev44, serialise_ev44 from streaming_data_types.finished_writing_wrdn import deserialise_wrdn, serialise_wrdn -from streaming_data_types.forwarder_config_update_rf5k import ( - deserialise_rf5k, - serialise_rf5k, -) from streaming_data_types.forwarder_config_update_fc00 import ( deserialise_fc00, serialise_fc00, ) +from streaming_data_types.forwarder_config_update_rf5k import ( + deserialise_rf5k, + serialise_rf5k, +) from streaming_data_types.histogram_hs00 import deserialise_hs00, serialise_hs00 from streaming_data_types.histogram_hs01 import deserialise_hs01, serialise_hs01 from streaming_data_types.json_json import deserialise_json, serialise_json @@ -41,6 +42,7 @@ __version__ = version SERIALISERS = { + "an44": serialise_an44, "ev42": serialise_ev42, "ev43": serialise_ev43, "ev44": serialise_ev44, @@ -71,6 +73,7 @@ DESERIALISERS = { + "an44": deserialise_an44, "ev42": deserialise_ev42, "ev43": deserialise_ev43, "ev44": deserialise_ev44, diff --git a/streaming_data_types/_version.py b/streaming_data_types/_version.py index c012f41..83ee1a0 100644 --- a/streaming_data_types/_version.py +++ b/streaming_data_types/_version.py @@ -1,4 +1,4 @@ # Version is not directly defined in __init__ because that causes all # run time dependencies to become build-time dependencies when it is # imported in setup.py -version = "0.26.1" +version = "0.27.0" diff --git a/streaming_data_types/eventdata_an44.py b/streaming_data_types/eventdata_an44.py new file mode 100644 index 0000000..0e41e8d --- /dev/null +++ b/streaming_data_types/eventdata_an44.py @@ -0,0 +1,95 @@ +from collections import namedtuple + +import flatbuffers +import numpy as np + +import streaming_data_types.fbschemas.eventdata_an44.AN44EventMessage as AN44EventMessage +from streaming_data_types.utils import check_schema_identifier + +FILE_IDENTIFIER = b"an44" + + +EventData = 
namedtuple( + "EventData", + ( + "source_name", + "message_id", + "reference_time", + "reference_time_index", + "time_of_flight", + "pixel_id", + "weight", + ), +) + + +def deserialise_an44(buffer): + """ + Deserialise FlatBuffer an44. + + :param buffer: The FlatBuffers buffer. + :return: The deserialised data. + """ + check_schema_identifier(buffer, FILE_IDENTIFIER) + + event = AN44EventMessage.AN44EventMessage.GetRootAs(buffer, 0) + + return EventData( + event.SourceName().decode("utf-8"), + event.MessageId(), + event.ReferenceTimeAsNumpy(), + event.ReferenceTimeIndexAsNumpy(), + event.TimeOfFlightAsNumpy(), + event.PixelIdAsNumpy(), + event.WeightAsNumpy(), + ) + + +def serialise_an44( + source_name, + message_id, + reference_time, + reference_time_index, + time_of_flight, + pixel_id, + weight, +): + """ + Serialise event data as an an44 FlatBuffers message. + + :param source_name: + :param message_id: + :param reference_time: + :param reference_time_index: + :param time_of_flight: + :param pixel_id: + :param weight: + :return: + """ + builder = flatbuffers.Builder(1024) + builder.ForceDefaults(True) + + source = builder.CreateString(source_name) + ref_time_data = builder.CreateNumpyVector( + np.asarray(reference_time).astype(np.int64) + ) + ref_time_index_data = builder.CreateNumpyVector( + np.asarray(reference_time_index).astype(np.int32) + ) + tof_data = builder.CreateNumpyVector(np.asarray(time_of_flight).astype(np.int32)) + pixel_id_data = builder.CreateNumpyVector(np.asarray(pixel_id).astype(np.int32)) + weight_data = builder.CreateNumpyVector(np.asarray(weight).astype(np.int16)) + + AN44EventMessage.AN44EventMessageStart(builder) + AN44EventMessage.AN44EventMessageAddReferenceTime(builder, ref_time_data) + AN44EventMessage.AN44EventMessageAddReferenceTimeIndex(builder, ref_time_index_data) + AN44EventMessage.AN44EventMessageAddTimeOfFlight(builder, tof_data) + AN44EventMessage.AN44EventMessageAddPixelId(builder, pixel_id_data) + 
AN44EventMessage.AN44EventMessageAddWeight(builder, weight_data) + AN44EventMessage.AN44EventMessageAddMessageId(builder, message_id) + AN44EventMessage.AN44EventMessageAddSourceName(builder, source) + + data = AN44EventMessage.AN44EventMessageEnd(builder) + builder.Finish(data, file_identifier=FILE_IDENTIFIER) + + return bytes(builder.Output()) diff --git a/streaming_data_types/fbschemas/eventdata_an44/AN44EventMessage.py b/streaming_data_types/fbschemas/eventdata_an44/AN44EventMessage.py new file mode 100644 index 0000000..a107bca --- /dev/null +++ b/streaming_data_types/fbschemas/eventdata_an44/AN44EventMessage.py @@ -0,0 +1,322 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers +from flatbuffers.compat import import_numpy + +np = import_numpy() + + +class AN44EventMessage(object): + __slots__ = ["_tab"] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = AN44EventMessage() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsAN44EventMessage(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + + @classmethod + def AN44EventMessageBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier( + buf, offset, b"\x61\x6E\x34\x34", size_prefixed=size_prefixed + ) + + # AN44EventMessage + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # AN44EventMessage + def SourceName(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + # AN44EventMessage + def MessageId(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int64Flags, o + self._tab.Pos) + return 0 + + # AN44EventMessage + def ReferenceTime(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get( + flatbuffers.number_types.Int64Flags, + a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8), + ) + return 0 + + # AN44EventMessage + def ReferenceTimeAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int64Flags, o) + return 0 + + # AN44EventMessage + def ReferenceTimeLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # AN44EventMessage + def ReferenceTimeIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + return o == 0 + + # AN44EventMessage + def ReferenceTimeIndex(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get( + flatbuffers.number_types.Int32Flags, + a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4), + ) + return 0 + + # AN44EventMessage + 
def ReferenceTimeIndexAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int32Flags, o) + return 0 + + # AN44EventMessage + def ReferenceTimeIndexLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # AN44EventMessage + def ReferenceTimeIndexIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + return o == 0 + + # AN44EventMessage + def TimeOfFlight(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get( + flatbuffers.number_types.Int32Flags, + a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4), + ) + return 0 + + # AN44EventMessage + def TimeOfFlightAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int32Flags, o) + return 0 + + # AN44EventMessage + def TimeOfFlightLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # AN44EventMessage + def TimeOfFlightIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + return o == 0 + + # AN44EventMessage + def PixelId(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get( + flatbuffers.number_types.Int32Flags, + a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4), + ) + return 0 + + # AN44EventMessage + def PixelIdAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int32Flags, o) + return 0 + + # AN44EventMessage + def PixelIdLength(self): 
+ o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # AN44EventMessage + def PixelIdIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) + return o == 0 + + # AN44EventMessage + def Weight(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get( + flatbuffers.number_types.Int16Flags, + a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 2), + ) + return 0 + + # AN44EventMessage + def WeightAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int16Flags, o) + return 0 + + # AN44EventMessage + def WeightLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # AN44EventMessage + def WeightIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16)) + return o == 0 + + +def AN44EventMessageStart(builder): + builder.StartObject(7) + + +def Start(builder): + AN44EventMessageStart(builder) + + +def AN44EventMessageAddSourceName(builder, sourceName): + builder.PrependUOffsetTRelativeSlot( + 0, flatbuffers.number_types.UOffsetTFlags.py_type(sourceName), 0 + ) + + +def AddSourceName(builder, sourceName): + AN44EventMessageAddSourceName(builder, sourceName) + + +def AN44EventMessageAddMessageId(builder, messageId): + builder.PrependInt64Slot(1, messageId, 0) + + +def AddMessageId(builder, messageId): + AN44EventMessageAddMessageId(builder, messageId) + + +def AN44EventMessageAddReferenceTime(builder, referenceTime): + builder.PrependUOffsetTRelativeSlot( + 2, flatbuffers.number_types.UOffsetTFlags.py_type(referenceTime), 0 + ) + + +def AddReferenceTime(builder, referenceTime): + AN44EventMessageAddReferenceTime(builder, referenceTime) + + 
+def AN44EventMessageStartReferenceTimeVector(builder, numElems): + return builder.StartVector(8, numElems, 8) + + +def StartReferenceTimeVector(builder, numElems): + return AN44EventMessageStartReferenceTimeVector(builder, numElems) + + +def AN44EventMessageAddReferenceTimeIndex(builder, referenceTimeIndex): + builder.PrependUOffsetTRelativeSlot( + 3, flatbuffers.number_types.UOffsetTFlags.py_type(referenceTimeIndex), 0 + ) + + +def AddReferenceTimeIndex(builder, referenceTimeIndex): + AN44EventMessageAddReferenceTimeIndex(builder, referenceTimeIndex) + + +def AN44EventMessageStartReferenceTimeIndexVector(builder, numElems): + return builder.StartVector(4, numElems, 4) + + +def StartReferenceTimeIndexVector(builder, numElems): + return AN44EventMessageStartReferenceTimeIndexVector(builder, numElems) + + +def AN44EventMessageAddTimeOfFlight(builder, timeOfFlight): + builder.PrependUOffsetTRelativeSlot( + 4, flatbuffers.number_types.UOffsetTFlags.py_type(timeOfFlight), 0 + ) + + +def AddTimeOfFlight(builder, timeOfFlight): + AN44EventMessageAddTimeOfFlight(builder, timeOfFlight) + + +def AN44EventMessageStartTimeOfFlightVector(builder, numElems): + return builder.StartVector(4, numElems, 4) + + +def StartTimeOfFlightVector(builder, numElems): + return AN44EventMessageStartTimeOfFlightVector(builder, numElems) + + +def AN44EventMessageAddPixelId(builder, pixelId): + builder.PrependUOffsetTRelativeSlot( + 5, flatbuffers.number_types.UOffsetTFlags.py_type(pixelId), 0 + ) + + +def AddPixelId(builder, pixelId): + AN44EventMessageAddPixelId(builder, pixelId) + + +def AN44EventMessageStartPixelIdVector(builder, numElems): + return builder.StartVector(4, numElems, 4) + + +def StartPixelIdVector(builder, numElems): + return AN44EventMessageStartPixelIdVector(builder, numElems) + + +def AN44EventMessageAddWeight(builder, weight): + builder.PrependUOffsetTRelativeSlot( + 6, flatbuffers.number_types.UOffsetTFlags.py_type(weight), 0 + ) + + +def AddWeight(builder, weight): + 
AN44EventMessageAddWeight(builder, weight) + + +def AN44EventMessageStartWeightVector(builder, numElems): + return builder.StartVector(2, numElems, 2) + + +def StartWeightVector(builder, numElems): + return AN44EventMessageStartWeightVector(builder, numElems) + + +def AN44EventMessageEnd(builder): + return builder.EndObject() + + +def End(builder): + return AN44EventMessageEnd(builder) diff --git a/streaming_data_types/fbschemas/eventdata_an44/__init__.py b/streaming_data_types/fbschemas/eventdata_an44/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/test_an44.py b/tests/test_an44.py new file mode 100644 index 0000000..cdd8cf8 --- /dev/null +++ b/tests/test_an44.py @@ -0,0 +1,118 @@ +import numpy as np +import pytest + +from streaming_data_types import DESERIALISERS, SERIALISERS +from streaming_data_types.eventdata_an44 import deserialise_an44, serialise_an44 +from streaming_data_types.exceptions import WrongSchemaException + + +class TestSerialisationEv44: + def test_serialises_and_deserialises_an44_message_correctly(self): + """ + Round-trip to check what we serialise is what we get back. 
+ """ + original_entry = { + "source_name": "some_source", + "message_id": 123456, + "reference_time": [ + 1618573589123781958, + 1618573590133830371, + 1618573593677164112, + 1618573594185190549, + 1618573596217316066, + 1618573596725363109, + 1618573601295720976, + 1618573601799761445, + 1618573607354064836, + ], + "reference_time_index": [2, 4, 5, 7], + "time_of_flight": [100, 200, 300, 400, 500, 600, 700, 800, 900], + "pixel_id": [10, 20, 30, 40, 50, 60, 70, 80, 90], + "weight": [51, 52, 53, 54, 55, 56, 57, 58, 59], + } + + buf = serialise_an44(**original_entry) + entry = deserialise_an44(buf) + + assert entry.source_name == original_entry["source_name"] + assert entry.message_id == original_entry["message_id"] + assert np.array_equal(entry.reference_time, original_entry["reference_time"]) + assert np.array_equal( + entry.reference_time_index, original_entry["reference_time_index"] + ) + assert np.array_equal(entry.time_of_flight, original_entry["time_of_flight"]) + assert np.array_equal(entry.pixel_id, original_entry["pixel_id"]) + assert np.array_equal(entry.weight, original_entry["weight"]) + + def test_serialises_and_deserialises_an44_message_correctly_for_numpy_arrays(self): + """ + Round-trip to check what we serialise is what we get back. 
+ """ + original_entry = { + "source_name": "some_source", + "message_id": 123456, + "reference_time": np.array( + [ + 1618573589123781958, + 1618573590133830371, + 1618573593677164112, + 1618573594185190549, + 1618573596217316066, + 1618573596725363109, + 1618573601295720976, + 1618573601799761445, + 1618573607354064836, + ] + ), + "reference_time_index": np.array([2, 4, 5, 7]), + "time_of_flight": np.array([100, 200, 300, 400, 500, 600, 700, 800, 900]), + "pixel_id": np.array([10, 20, 30, 40, 50, 60, 70, 80, 90]), + "weight": np.array([51, 52, 53, 54, 55, 56, 57, 58, 59]), + } + + buf = serialise_an44(**original_entry) + entry = deserialise_an44(buf) + + assert entry.source_name == original_entry["source_name"] + assert entry.message_id == original_entry["message_id"] + assert np.array_equal(entry.reference_time, original_entry["reference_time"]) + assert np.array_equal( + entry.reference_time_index, original_entry["reference_time_index"] + ) + assert np.array_equal(entry.time_of_flight, original_entry["time_of_flight"]) + assert np.array_equal(entry.pixel_id, original_entry["pixel_id"]) + + def test_if_buffer_has_wrong_id_then_throws(self): + original_entry = { + "source_name": "some_source", + "message_id": 123456, + "reference_time": np.array( + [ + 1618573589123781958, + 1618573590133830371, + 1618573593677164112, + 1618573594185190549, + 1618573596217316066, + 1618573596725363109, + 1618573601295720976, + 1618573601799761445, + 1618573607354064836, + ] + ), + "reference_time_index": np.array([2, 4, 5, 7]), + "time_of_flight": np.array([100, 200, 300, 400, 500, 600, 700, 800, 900]), + "pixel_id": np.array([10, 20, 30, 40, 50, 60, 70, 80, 90]), + "weight": np.array([51, 52, 53, 54, 55, 56, 57, 58, 59]), + } + buf = serialise_an44(**original_entry) + + # Manually introduce error in id. 
+ buf = bytearray(buf) + buf[4:8] = b"1234" + + with pytest.raises(WrongSchemaException): + deserialise_an44(buf) + + def test_schema_type_is_in_global_serialisers_list(self): + assert "an44" in SERIALISERS + assert "an44" in DESERIALISERS From 76206f5fe1137b2b30a7137b93ca3aecb39462e6 Mon Sep 17 00:00:00 2001 From: George O'Neill <133203284+ggoneiESS@users.noreply.github.com> Date: Mon, 24 Mar 2025 18:09:41 +0100 Subject: [PATCH 332/363] Update requirements.txt updated to include flatbuffers for imports --- requirements.txt | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 943ea9f..74d3917 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,2 +1,3 @@ -flatbuffers==22.11.23 +flatbuffers +python-flatbuffers numpy<2.0.0 From 60492567475992a4821d78dc073fc66a31ab9ae5 Mon Sep 17 00:00:00 2001 From: George O'Neill Date: Mon, 24 Mar 2025 18:23:53 +0100 Subject: [PATCH 333/363] Revert "Update requirements.txt" This reverts commit 76206f5fe1137b2b30a7137b93ca3aecb39462e6. 
--- requirements.txt | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/requirements.txt b/requirements.txt index 74d3917..943ea9f 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,3 +1,2 @@ -flatbuffers -python-flatbuffers +flatbuffers==22.11.23 numpy<2.0.0 From 893d1fac1165cb575bf27574443c31cf7a29d0fe Mon Sep 17 00:00:00 2001 From: George O'Neill Date: Mon, 24 Mar 2025 18:27:29 +0100 Subject: [PATCH 334/363] updated setup with requirements --- requirements.txt | 2 +- setup.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements.txt b/requirements.txt index 943ea9f..38f5b4f 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,2 +1,2 @@ -flatbuffers==22.11.23 +flatbuffers numpy<2.0.0 diff --git a/setup.py b/setup.py index 4209205..96bcc1f 100644 --- a/setup.py +++ b/setup.py @@ -27,6 +27,6 @@ license="BSD 2-Clause License", packages=find_packages(exclude=["tests", "tests.*"]), python_requires=">=3.6.0", - install_requires=["flatbuffers>=22.11.23", "numpy"], + install_requires=["flatbuffers", "numpy"], extras_require={"dev": ["flake8", "pre-commit", "pytest", "tox"]}, ) From d17e73e3b2b32d170363a1b0e5a03ca59c99d77f Mon Sep 17 00:00:00 2001 From: Milosz Nowak Date: Fri, 25 Apr 2025 10:51:18 +0200 Subject: [PATCH 335/363] remove Jenkinsfile --- Jenkinsfile | 68 ----------------------------------------------------- 1 file changed, 68 deletions(-) delete mode 100644 Jenkinsfile diff --git a/Jenkinsfile b/Jenkinsfile deleted file mode 100644 index d88033a..0000000 --- a/Jenkinsfile +++ /dev/null @@ -1,68 +0,0 @@ -@Library('ecdc-pipeline') -import ecdcpipeline.ContainerBuildNode -import ecdcpipeline.PipelineBuilder - -container_build_nodes = [ - 'centos7': ContainerBuildNode.getDefaultContainerBuildNode('centos7-gcc11') -] - -// Define number of old builds to keep. -num_artifacts_to_keep = '1' - -// Set number of old builds to keep. 
-properties([[ - $class: 'BuildDiscarderProperty', - strategy: [ - $class: 'LogRotator', - artifactDaysToKeepStr: '', - artifactNumToKeepStr: num_artifacts_to_keep, - daysToKeepStr: '', - numToKeepStr: num_artifacts_to_keep - ] -]]); - -pipeline_builder = new PipelineBuilder(this, container_build_nodes) -pipeline_builder.activateEmailFailureNotifications() - -builders = pipeline_builder.createBuilders { container -> - pipeline_builder.stage("${container.key}: Checkout") { - dir(pipeline_builder.project) { - scm_vars = checkout scm - } - container.copyTo(pipeline_builder.project, pipeline_builder.project) - } // stage - - pipeline_builder.stage("${container.key}: Dependencies") { - container.sh """ - which python - python -m pip install --user -r ${pipeline_builder.project}/requirements-dev.txt - """ - } // stage - - pipeline_builder.stage("${container.key}: Test") { - def test_output = "TestResults.xml" - container.sh """ - cd ${pipeline_builder.project} - pyenv local 3.8 3.9 3.10 3.11 3.12 - pyenv versions - python -m tox -- --junitxml=${test_output} - """ - container.copyFrom("${pipeline_builder.project}/${test_output}", ".") - xunit thresholds: [failed(unstableThreshold: '0')], tools: [JUnit(deleteOutputFiles: true, pattern: '*.xml', skipNoTestFiles: false, stopProcessingIfError: true)] - } // stage -} // createBuilders - -node { - dir("${pipeline_builder.project}") { - scm_vars = checkout scm - } - - try { - parallel builders - } catch (e) { - throw e - } - - // Delete workspace when build is done - cleanWs() -} From bbd08e7ad8e91a4b079decb388562e2d9170dee5 Mon Sep 17 00:00:00 2001 From: Jack Harper Date: Fri, 20 Jun 2025 15:31:03 +0100 Subject: [PATCH 336/363] add un00 - engineering units --- streaming_data_types/__init__.py | 4 + .../fbschemas/units_un00/Units.py | 80 +++++++++++++++++++ .../fbschemas/units_un00/__init__.py | 0 streaming_data_types/units_un00.py | 42 ++++++++++ tests/test_un00.py | 43 ++++++++++ 5 files changed, 169 insertions(+) create mode 
100644 streaming_data_types/fbschemas/units_un00/Units.py create mode 100644 streaming_data_types/fbschemas/units_un00/__init__.py create mode 100644 streaming_data_types/units_un00.py create mode 100644 tests/test_un00.py diff --git a/streaming_data_types/__init__.py b/streaming_data_types/__init__.py index 52e29d9..2757dfe 100644 --- a/streaming_data_types/__init__.py +++ b/streaming_data_types/__init__.py @@ -42,6 +42,8 @@ __version__ = version +from streaming_data_types.units_un00 import serialise_un00, deserialise_un00 + SERIALISERS = { "an44": serialise_an44, "ev42": serialise_ev42, @@ -71,6 +73,7 @@ "ad00": serialise_ad00, "da00": serialise_da00, "ar51": serialise_ar51, + "un00": serialise_un00, } @@ -103,4 +106,5 @@ "ad00": deserialise_ad00, "da00": deserialise_da00, "ar51": deserialise_ar51, + "un00": deserialise_un00, } diff --git a/streaming_data_types/fbschemas/units_un00/Units.py b/streaming_data_types/fbschemas/units_un00/Units.py new file mode 100644 index 0000000..c2316d0 --- /dev/null +++ b/streaming_data_types/fbschemas/units_un00/Units.py @@ -0,0 +1,80 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers +from flatbuffers.compat import import_numpy +np = import_numpy() + +class Units(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = Units() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsUnits(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def UnitsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x75\x6E\x30\x30", size_prefixed=size_prefixed) + + # Units + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # Units + def SourceName(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + # Units + def Timestamp(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int64Flags, o + self._tab.Pos) + return 0 + + # Units + def Units(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + +def UnitsStart(builder): + builder.StartObject(3) + +def Start(builder): + UnitsStart(builder) + +def UnitsAddSourceName(builder, sourceName): + builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(sourceName), 0) + +def AddSourceName(builder, sourceName): + UnitsAddSourceName(builder, sourceName) + +def UnitsAddTimestamp(builder, timestamp): + builder.PrependInt64Slot(1, timestamp, 0) + +def AddTimestamp(builder, timestamp): + UnitsAddTimestamp(builder, timestamp) + +def UnitsAddUnits(builder, units): + builder.PrependUOffsetTRelativeSlot(2, flatbuffers.number_types.UOffsetTFlags.py_type(units), 0) + +def AddUnits(builder, units): + UnitsAddUnits(builder, units) + +def UnitsEnd(builder): + return builder.EndObject() + +def End(builder): + return UnitsEnd(builder) diff --git a/streaming_data_types/fbschemas/units_un00/__init__.py b/streaming_data_types/fbschemas/units_un00/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/streaming_data_types/units_un00.py b/streaming_data_types/units_un00.py new file mode 
100644 index 0000000..de91bf3 --- /dev/null +++ b/streaming_data_types/units_un00.py @@ -0,0 +1,42 @@ +from collections import namedtuple +from enum import Enum +from typing import Optional + +import flatbuffers + +from streaming_data_types.fbschemas.units_un00 import Units +from streaming_data_types.utils import check_schema_identifier + +FILE_IDENTIFIER = b"un00" + +UnitInfo = namedtuple("UnitInfo", ("source", "timestamp_ns", "units")) + + +def deserialise_un00(buffer) -> UnitInfo: + check_schema_identifier(buffer, FILE_IDENTIFIER) + units = Units.Units.GetRootAsUnits(buffer, 0) + + return UnitInfo( + units.SourceName().decode("utf-8") if units.SourceName() else "", + units.Timestamp(), + units.Units().decode("utf-8") if units.Units() is not None else None, + ) + + +def serialise_un00( + source: str, timestamp_ns: int, units: Optional[str] +) -> bytes: + builder = flatbuffers.Builder(128) + if units is not None: + units_offset = builder.CreateString(units) + source_offset = builder.CreateString(source) + + Units.UnitsStart(builder) + Units.UnitsAddSourceName(builder, source_offset) + Units.UnitsAddTimestamp(builder, timestamp_ns) + if units is not None: + Units.UnitsAddUnits(builder, units_offset) + _units = Units.UnitsEnd(builder) + + builder.Finish(_units, file_identifier=FILE_IDENTIFIER) + return bytes(builder.Output()) diff --git a/tests/test_un00.py b/tests/test_un00.py new file mode 100644 index 0000000..c59f688 --- /dev/null +++ b/tests/test_un00.py @@ -0,0 +1,43 @@ +import pytest + +from streaming_data_types import DESERIALISERS, SERIALISERS +from streaming_data_types.exceptions import WrongSchemaException +from streaming_data_types.units_un00 import deserialise_un00, serialise_un00 + + +class TestSerialisationUn00: + def test_serialises_and_deserialises_un00_message_correctly(self): + """ + Round-trip to check what we serialise is what we get back. 
+ """ + buf = serialise_un00("some_source", 1234567890, "Some unit") + entry = deserialise_un00(buf) + + assert entry.source == "some_source" + assert entry.timestamp_ns == 1234567890 + assert entry.units == "Some unit" + + def test_serialises_and_deserialises_un00_message_correctly_with_none_as_unit(self): + """ + Round-trip to check what we serialise is what we get back with None specified as a unit. + """ + buf = serialise_un00("some_source", 1234567890, None) + entry = deserialise_un00(buf) + + assert entry.source == "some_source" + assert entry.timestamp_ns == 1234567890 + assert entry.units is None + + def test_if_buffer_has_wrong_id_then_throws(self): + buf = serialise_un00("some_source", 1234567890, "Some unit") + + # Manually hack the id + buf = bytearray(buf) + buf[4:8] = b"1234" + + with pytest.raises(WrongSchemaException): + deserialise_un00(buf) + + def test_schema_type_is_in_global_serialisers_list(self): + assert "un00" in SERIALISERS + assert "un00" in DESERIALISERS From d804b4a84527eeb1749c6240371ee922261c151c Mon Sep 17 00:00:00 2001 From: Jack Harper Date: Tue, 24 Jun 2025 14:23:23 +0100 Subject: [PATCH 337/363] remove unused enum import, add pyproject.toml for build requires --- streaming_data_types/units_un00.py | 1 - 1 file changed, 1 deletion(-) diff --git a/streaming_data_types/units_un00.py b/streaming_data_types/units_un00.py index de91bf3..746f1d8 100644 --- a/streaming_data_types/units_un00.py +++ b/streaming_data_types/units_un00.py @@ -1,5 +1,4 @@ from collections import namedtuple -from enum import Enum from typing import Optional import flatbuffers From c9936c593aacf88d28317625a22b458f351cb57d Mon Sep 17 00:00:00 2001 From: Freddie Akeroyd Date: Mon, 6 Oct 2025 01:57:19 +0100 Subject: [PATCH 338/363] Add stub --- schemas/pu00_pulse_metadata.fbs | 27 +++++++++++++++++++++++++++ 1 file changed, 27 insertions(+) create mode 100644 schemas/pu00_pulse_metadata.fbs diff --git a/schemas/pu00_pulse_metadata.fbs 
b/schemas/pu00_pulse_metadata.fbs new file mode 100644 index 0000000..fb90a88 --- /dev/null +++ b/schemas/pu00_pulse_metadata.fbs @@ -0,0 +1,27 @@ +// Schema for neutron pulse information + +file_identifier "pu00"; + +table Pu00Message { + message_id : long; // Consecutive numbers, to detect missing or unordered messages. + reference_time : [long] (required); // Nanoseconds since Unix epoch (1 Jan 1970) + // If pulse times are available in the aquisition system, this field holds + // those timestamps. Holds wall time otherwise. + +// old is84 bits + period_number : uint; + proton_charge : float; + + +// however more general alternatie woudl be + name : [string] + value : [some union] + +// or + + name : [ soem agrreed enum list] + value : [ some union ] + +} + +root_type Pu00Message; From df728968e9110a334a0686299c5f299af095410f Mon Sep 17 00:00:00 2001 From: Jack Harper Date: Tue, 20 Jan 2026 14:29:17 +0000 Subject: [PATCH 339/363] create isis-streaming-data-types --- .flake8 | 9 -- .github/workflows/Lint-and-test.yml | 48 ++++++++ .github/workflows/release.yml | 92 ++++++++++++++++ .gitignore | 2 + .pre-commit-config.yaml | 23 ---- MANIFEST.in | 4 - Makefile | 5 - pyproject.toml | 103 ++++++++++++++++++ requirements-dev.txt | 10 -- requirements.txt | 2 - setup.py | 32 ------ .../streaming_data_types}/__init__.py | 2 +- .../action_response_answ.py | 0 .../streaming_data_types}/alarm_al00.py | 0 .../area_detector_ADAr.py | 0 .../area_detector_NDAr.py | 0 .../area_detector_ad00.py | 0 .../streaming_data_types}/array_1d_se00.py | 0 .../streaming_data_types}/dataarray_da00.py | 0 .../epics_connection_ep01.py | 0 .../epics_connection_info_ep00.py | 0 .../streaming_data_types}/eventdata_an44.py | 0 .../streaming_data_types}/eventdata_ev42.py | 0 .../streaming_data_types}/eventdata_ev43.py | 0 .../streaming_data_types}/eventdata_ev44.py | 0 .../streaming_data_types}/exceptions.py | 0 .../fbschemas/ADAr_ADArray_schema/ADArray.py | 0 .../ADAr_ADArray_schema/Attribute.py | 
0 .../fbschemas/ADAr_ADArray_schema/DType.py | 0 .../fbschemas/ADAr_ADArray_schema/__init__.py | 0 .../fbschemas/NDAr_NDArray_schema/DType.py | 0 .../fbschemas/NDAr_NDArray_schema/NDArray.py | 0 .../NDAr_NDArray_schema/NDAttribute.py | 0 .../fbschemas/NDAr_NDArray_schema/__init__.py | 0 .../NDAr_NDArray_schema/epicsTimeStamp.py | 0 .../fbschemas/__init__.py | 0 .../action_response_answ/ActionOutcome.py | 0 .../action_response_answ/ActionResponse.py | 0 .../action_response_answ/ActionType.py | 0 .../action_response_answ/__init__.py | 0 .../fbschemas/alarm_al00/Alarm.py | 0 .../fbschemas/alarm_al00/Severity.py | 0 .../fbschemas/alarm_al00/__init__.py | 0 .../fbschemas/area_detector_ad00/Attribute.py | 0 .../fbschemas/area_detector_ad00/DType.py | 0 .../fbschemas/area_detector_ad00/__init__.py | 0 .../area_detector_ad00/ad00_ADArray.py | 0 .../fbschemas/array_1d_se00/DoubleArray.py | 0 .../fbschemas/array_1d_se00/FloatArray.py | 0 .../fbschemas/array_1d_se00/Int16Array.py | 0 .../fbschemas/array_1d_se00/Int32Array.py | 0 .../fbschemas/array_1d_se00/Int64Array.py | 0 .../fbschemas/array_1d_se00/Int8Array.py | 0 .../fbschemas/array_1d_se00/Location.py | 0 .../fbschemas/array_1d_se00/UInt16Array.py | 0 .../fbschemas/array_1d_se00/UInt32Array.py | 0 .../fbschemas/array_1d_se00/UInt64Array.py | 0 .../fbschemas/array_1d_se00/UInt8Array.py | 0 .../fbschemas/array_1d_se00/ValueUnion.py | 0 .../fbschemas/array_1d_se00/__init__.py | 0 .../se00_SampleEnvironmentData.py | 0 .../fbschemas/dataarray_da00/__init__.py | 0 .../dataarray_da00/da00_DataArray.py | 0 .../fbschemas/dataarray_da00/da00_Variable.py | 0 .../fbschemas/dataarray_da00/da00_dtype.py | 0 .../epics_connection_ep01/ConnectionInfo.py | 0 .../EpicsPVConnectionInfo.py | 0 .../epics_connection_ep01/__init__.py | 0 .../EpicsConnectionInfo.py | 0 .../epics_connection_info_ep00/EventType.py | 0 .../epics_connection_info_ep00/__init__.py | 0 .../eventdata_an44/AN44EventMessage.py | 0 .../fbschemas/eventdata_an44/__init__.py 
| 0 .../fbschemas/eventdata_ev42/EventMessage.py | 0 .../fbschemas/eventdata_ev42/FacilityData.py | 0 .../fbschemas/eventdata_ev42/__init__.py | 0 .../eventdata_ev43/Event43Message.py | 0 .../fbschemas/eventdata_ev43/__init__.py | 0 .../eventdata_ev44/Event44Message.py | 0 .../fbschemas/eventdata_ev44/__init__.py | 0 .../finished_writing_wrdn/FinishedWriting.py | 0 .../finished_writing_wrdn/__init__.py | 0 .../forwarder_config_update_fc00/Protocol.py | 0 .../forwarder_config_update_fc00/Stream.py | 0 .../UpdateType.py | 0 .../forwarder_config_update_fc00/__init__.py | 0 .../fc00_ConfigUpdate.py | 0 .../ConfigUpdate.py | 0 .../forwarder_config_update_rf5k/Protocol.py | 0 .../forwarder_config_update_rf5k/Stream.py | 0 .../UpdateType.py | 0 .../forwarder_config_update_rf5k/__init__.py | 0 .../fbschemas/histogram_hs00/Array.py | 0 .../fbschemas/histogram_hs00/ArrayDouble.py | 0 .../fbschemas/histogram_hs00/ArrayFloat.py | 0 .../fbschemas/histogram_hs00/ArrayUInt.py | 0 .../fbschemas/histogram_hs00/ArrayULong.py | 0 .../histogram_hs00/DimensionMetaData.py | 0 .../histogram_hs00/EventHistogram.py | 0 .../fbschemas/histogram_hs00/__init__.py | 0 .../fbschemas/histogram_hs01/Array.py | 0 .../fbschemas/histogram_hs01/ArrayDouble.py | 0 .../fbschemas/histogram_hs01/ArrayFloat.py | 0 .../fbschemas/histogram_hs01/ArrayInt.py | 0 .../fbschemas/histogram_hs01/ArrayLong.py | 0 .../histogram_hs01/DimensionMetaData.py | 0 .../histogram_hs01/EventHistogram.py | 0 .../fbschemas/histogram_hs01/__init__.py | 0 .../isis_event_info_is84/ISISData.py | 0 .../isis_event_info_is84/RunState.py | 0 .../isis_event_info_is84/__init__.py | 0 .../fbschemas/json_json/JsonData.py | 0 .../fbschemas/json_json/__init__.py | 0 .../fbschemas/logdata_f142/AlarmSeverity.py | 0 .../fbschemas/logdata_f142/AlarmStatus.py | 0 .../fbschemas/logdata_f142/ArrayByte.py | 0 .../fbschemas/logdata_f142/ArrayDouble.py | 0 .../fbschemas/logdata_f142/ArrayFloat.py | 0 .../fbschemas/logdata_f142/ArrayInt.py | 0 
.../fbschemas/logdata_f142/ArrayLong.py | 0 .../fbschemas/logdata_f142/ArrayShort.py | 0 .../fbschemas/logdata_f142/ArrayString.py | 0 .../fbschemas/logdata_f142/ArrayUByte.py | 0 .../fbschemas/logdata_f142/ArrayUInt.py | 0 .../fbschemas/logdata_f142/ArrayULong.py | 0 .../fbschemas/logdata_f142/ArrayUShort.py | 0 .../fbschemas/logdata_f142/Byte.py | 0 .../fbschemas/logdata_f142/Double.py | 0 .../fbschemas/logdata_f142/Float.py | 0 .../fbschemas/logdata_f142/Int.py | 0 .../fbschemas/logdata_f142/LogData.py | 0 .../fbschemas/logdata_f142/Long.py | 0 .../fbschemas/logdata_f142/Short.py | 0 .../fbschemas/logdata_f142/String.py | 0 .../fbschemas/logdata_f142/UByte.py | 0 .../fbschemas/logdata_f142/UInt.py | 0 .../fbschemas/logdata_f142/ULong.py | 0 .../fbschemas/logdata_f142/UShort.py | 0 .../fbschemas/logdata_f142/Value.py | 0 .../fbschemas/logdata_f142/__init__.py | 0 .../fbschemas/logdata_f144/ArrayByte.py | 0 .../fbschemas/logdata_f144/ArrayDouble.py | 0 .../fbschemas/logdata_f144/ArrayFloat.py | 0 .../fbschemas/logdata_f144/ArrayInt.py | 0 .../fbschemas/logdata_f144/ArrayLong.py | 0 .../fbschemas/logdata_f144/ArrayShort.py | 0 .../fbschemas/logdata_f144/ArrayUByte.py | 0 .../fbschemas/logdata_f144/ArrayUInt.py | 0 .../fbschemas/logdata_f144/ArrayULong.py | 0 .../fbschemas/logdata_f144/ArrayUShort.py | 0 .../fbschemas/logdata_f144/Byte.py | 0 .../fbschemas/logdata_f144/Double.py | 0 .../fbschemas/logdata_f144/Float.py | 0 .../fbschemas/logdata_f144/Int.py | 0 .../fbschemas/logdata_f144/Long.py | 0 .../fbschemas/logdata_f144/Short.py | 0 .../fbschemas/logdata_f144/UByte.py | 0 .../fbschemas/logdata_f144/UInt.py | 0 .../fbschemas/logdata_f144/ULong.py | 0 .../fbschemas/logdata_f144/UShort.py | 0 .../fbschemas/logdata_f144/Value.py | 0 .../fbschemas/logdata_f144/__init__.py | 0 .../fbschemas/logdata_f144/f144_LogData.py | 0 .../fbschemas/nicos_cache_ns10/CacheEntry.py | 0 .../fbschemas/nicos_cache_ns10/__init__.py | 0 .../fbschemas/nmx_mo01/__init__.py | 0 
.../readout_data_ar51/RawReadoutMessage.py | 0 .../fbschemas/readout_data_ar51/__init__.py | 0 .../fbschemas/run_start_pl72/RunStart.py | 0 .../run_start_pl72/SpectraDetectorMapping.py | 0 .../fbschemas/run_start_pl72/__init__.py | 0 .../fbschemas/run_stop_6s4t/RunStop.py | 0 .../fbschemas/run_stop_6s4t/__init__.py | 0 .../sample_environment_senv/Int16Array.py | 0 .../sample_environment_senv/Int32Array.py | 0 .../sample_environment_senv/Int64Array.py | 0 .../sample_environment_senv/Int8Array.py | 0 .../sample_environment_senv/Location.py | 0 .../SampleEnvironmentData.py | 0 .../sample_environment_senv/UInt16Array.py | 0 .../sample_environment_senv/UInt32Array.py | 0 .../sample_environment_senv/UInt64Array.py | 0 .../sample_environment_senv/UInt8Array.py | 0 .../sample_environment_senv/ValueUnion.py | 0 .../sample_environment_senv/__init__.py | 0 .../fbschemas/status_x5f2/Status.py | 0 .../fbschemas/status_x5f2/__init__.py | 0 .../fbschemas/timestamps_tdct/__init__.py | 0 .../fbschemas/timestamps_tdct/timestamp.py | 0 .../fbschemas/units_un00/Units.py | 0 .../fbschemas/units_un00/__init__.py | 0 .../finished_writing_wrdn.py | 0 .../forwarder_config_update_fc00.py | 0 .../forwarder_config_update_rf5k.py | 0 .../streaming_data_types}/histogram_hs00.py | 0 .../streaming_data_types}/histogram_hs01.py | 0 .../streaming_data_types}/json_json.py | 0 .../streaming_data_types}/logdata_f142.py | 0 .../streaming_data_types}/logdata_f144.py | 0 .../streaming_data_types}/nicos_cache_ns10.py | 0 .../readout_data_ar51.py | 0 .../streaming_data_types}/run_start_pl72.py | 0 .../streaming_data_types}/run_stop_6s4t.py | 0 .../sample_environment_senv.py | 0 .../streaming_data_types}/status_x5f2.py | 0 .../streaming_data_types}/timestamps_tdct.py | 0 .../streaming_data_types}/units_un00.py | 0 .../streaming_data_types}/utils.py | 0 streaming_data_types/_version.py | 4 - tox.ini | 15 --- 210 files changed, 246 insertions(+), 105 deletions(-) delete mode 100644 .flake8 create mode 100644 
.github/workflows/Lint-and-test.yml create mode 100644 .github/workflows/release.yml delete mode 100644 .pre-commit-config.yaml delete mode 100644 MANIFEST.in delete mode 100644 Makefile create mode 100644 pyproject.toml delete mode 100644 requirements-dev.txt delete mode 100644 requirements.txt delete mode 100644 setup.py rename {streaming_data_types => src/streaming_data_types}/__init__.py (100%) rename {streaming_data_types => src/streaming_data_types}/action_response_answ.py (100%) rename {streaming_data_types => src/streaming_data_types}/alarm_al00.py (100%) rename {streaming_data_types => src/streaming_data_types}/area_detector_ADAr.py (100%) rename {streaming_data_types => src/streaming_data_types}/area_detector_NDAr.py (100%) rename {streaming_data_types => src/streaming_data_types}/area_detector_ad00.py (100%) rename {streaming_data_types => src/streaming_data_types}/array_1d_se00.py (100%) rename {streaming_data_types => src/streaming_data_types}/dataarray_da00.py (100%) rename {streaming_data_types => src/streaming_data_types}/epics_connection_ep01.py (100%) rename {streaming_data_types => src/streaming_data_types}/epics_connection_info_ep00.py (100%) rename {streaming_data_types => src/streaming_data_types}/eventdata_an44.py (100%) rename {streaming_data_types => src/streaming_data_types}/eventdata_ev42.py (100%) rename {streaming_data_types => src/streaming_data_types}/eventdata_ev43.py (100%) rename {streaming_data_types => src/streaming_data_types}/eventdata_ev44.py (100%) rename {streaming_data_types => src/streaming_data_types}/exceptions.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/ADAr_ADArray_schema/ADArray.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/ADAr_ADArray_schema/Attribute.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/ADAr_ADArray_schema/DType.py (100%) rename {streaming_data_types => 
src/streaming_data_types}/fbschemas/ADAr_ADArray_schema/__init__.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/NDAr_NDArray_schema/DType.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/NDAr_NDArray_schema/NDArray.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/NDAr_NDArray_schema/NDAttribute.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/NDAr_NDArray_schema/__init__.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/NDAr_NDArray_schema/epicsTimeStamp.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/__init__.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/action_response_answ/ActionOutcome.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/action_response_answ/ActionResponse.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/action_response_answ/ActionType.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/action_response_answ/__init__.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/alarm_al00/Alarm.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/alarm_al00/Severity.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/alarm_al00/__init__.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/area_detector_ad00/Attribute.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/area_detector_ad00/DType.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/area_detector_ad00/__init__.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/area_detector_ad00/ad00_ADArray.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/array_1d_se00/DoubleArray.py (100%) rename {streaming_data_types => 
src/streaming_data_types}/fbschemas/array_1d_se00/FloatArray.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/array_1d_se00/Int16Array.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/array_1d_se00/Int32Array.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/array_1d_se00/Int64Array.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/array_1d_se00/Int8Array.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/array_1d_se00/Location.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/array_1d_se00/UInt16Array.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/array_1d_se00/UInt32Array.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/array_1d_se00/UInt64Array.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/array_1d_se00/UInt8Array.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/array_1d_se00/ValueUnion.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/array_1d_se00/__init__.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/array_1d_se00/se00_SampleEnvironmentData.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/dataarray_da00/__init__.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/dataarray_da00/da00_DataArray.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/dataarray_da00/da00_Variable.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/dataarray_da00/da00_dtype.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/epics_connection_ep01/ConnectionInfo.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/epics_connection_ep01/EpicsPVConnectionInfo.py (100%) rename 
{streaming_data_types => src/streaming_data_types}/fbschemas/epics_connection_ep01/__init__.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/epics_connection_info_ep00/EpicsConnectionInfo.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/epics_connection_info_ep00/EventType.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/epics_connection_info_ep00/__init__.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/eventdata_an44/AN44EventMessage.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/eventdata_an44/__init__.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/eventdata_ev42/EventMessage.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/eventdata_ev42/FacilityData.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/eventdata_ev42/__init__.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/eventdata_ev43/Event43Message.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/eventdata_ev43/__init__.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/eventdata_ev44/Event44Message.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/eventdata_ev44/__init__.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/finished_writing_wrdn/FinishedWriting.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/finished_writing_wrdn/__init__.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/forwarder_config_update_fc00/Protocol.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/forwarder_config_update_fc00/Stream.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/forwarder_config_update_fc00/UpdateType.py (100%) rename 
{streaming_data_types => src/streaming_data_types}/fbschemas/forwarder_config_update_fc00/__init__.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/forwarder_config_update_fc00/fc00_ConfigUpdate.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/forwarder_config_update_rf5k/ConfigUpdate.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/forwarder_config_update_rf5k/Protocol.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/forwarder_config_update_rf5k/Stream.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/forwarder_config_update_rf5k/UpdateType.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/forwarder_config_update_rf5k/__init__.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/histogram_hs00/Array.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/histogram_hs00/ArrayDouble.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/histogram_hs00/ArrayFloat.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/histogram_hs00/ArrayUInt.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/histogram_hs00/ArrayULong.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/histogram_hs00/DimensionMetaData.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/histogram_hs00/EventHistogram.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/histogram_hs00/__init__.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/histogram_hs01/Array.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/histogram_hs01/ArrayDouble.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/histogram_hs01/ArrayFloat.py (100%) rename {streaming_data_types => 
src/streaming_data_types}/fbschemas/histogram_hs01/ArrayInt.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/histogram_hs01/ArrayLong.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/histogram_hs01/DimensionMetaData.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/histogram_hs01/EventHistogram.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/histogram_hs01/__init__.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/isis_event_info_is84/ISISData.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/isis_event_info_is84/RunState.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/isis_event_info_is84/__init__.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/json_json/JsonData.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/json_json/__init__.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/logdata_f142/AlarmSeverity.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/logdata_f142/AlarmStatus.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/logdata_f142/ArrayByte.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/logdata_f142/ArrayDouble.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/logdata_f142/ArrayFloat.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/logdata_f142/ArrayInt.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/logdata_f142/ArrayLong.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/logdata_f142/ArrayShort.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/logdata_f142/ArrayString.py (100%) rename {streaming_data_types => 
src/streaming_data_types}/fbschemas/logdata_f142/ArrayUByte.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/logdata_f142/ArrayUInt.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/logdata_f142/ArrayULong.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/logdata_f142/ArrayUShort.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/logdata_f142/Byte.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/logdata_f142/Double.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/logdata_f142/Float.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/logdata_f142/Int.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/logdata_f142/LogData.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/logdata_f142/Long.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/logdata_f142/Short.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/logdata_f142/String.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/logdata_f142/UByte.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/logdata_f142/UInt.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/logdata_f142/ULong.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/logdata_f142/UShort.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/logdata_f142/Value.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/logdata_f142/__init__.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/logdata_f144/ArrayByte.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/logdata_f144/ArrayDouble.py (100%) rename {streaming_data_types => 
src/streaming_data_types}/fbschemas/logdata_f144/ArrayFloat.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/logdata_f144/ArrayInt.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/logdata_f144/ArrayLong.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/logdata_f144/ArrayShort.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/logdata_f144/ArrayUByte.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/logdata_f144/ArrayUInt.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/logdata_f144/ArrayULong.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/logdata_f144/ArrayUShort.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/logdata_f144/Byte.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/logdata_f144/Double.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/logdata_f144/Float.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/logdata_f144/Int.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/logdata_f144/Long.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/logdata_f144/Short.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/logdata_f144/UByte.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/logdata_f144/UInt.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/logdata_f144/ULong.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/logdata_f144/UShort.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/logdata_f144/Value.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/logdata_f144/__init__.py (100%) rename {streaming_data_types => 
src/streaming_data_types}/fbschemas/logdata_f144/f144_LogData.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/nicos_cache_ns10/CacheEntry.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/nicos_cache_ns10/__init__.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/nmx_mo01/__init__.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/readout_data_ar51/RawReadoutMessage.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/readout_data_ar51/__init__.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/run_start_pl72/RunStart.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/run_start_pl72/SpectraDetectorMapping.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/run_start_pl72/__init__.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/run_stop_6s4t/RunStop.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/run_stop_6s4t/__init__.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/sample_environment_senv/Int16Array.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/sample_environment_senv/Int32Array.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/sample_environment_senv/Int64Array.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/sample_environment_senv/Int8Array.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/sample_environment_senv/Location.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/sample_environment_senv/SampleEnvironmentData.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/sample_environment_senv/UInt16Array.py (100%) rename {streaming_data_types => 
src/streaming_data_types}/fbschemas/sample_environment_senv/UInt32Array.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/sample_environment_senv/UInt64Array.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/sample_environment_senv/UInt8Array.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/sample_environment_senv/ValueUnion.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/sample_environment_senv/__init__.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/status_x5f2/Status.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/status_x5f2/__init__.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/timestamps_tdct/__init__.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/timestamps_tdct/timestamp.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/units_un00/Units.py (100%) rename {streaming_data_types => src/streaming_data_types}/fbschemas/units_un00/__init__.py (100%) rename {streaming_data_types => src/streaming_data_types}/finished_writing_wrdn.py (100%) rename {streaming_data_types => src/streaming_data_types}/forwarder_config_update_fc00.py (100%) rename {streaming_data_types => src/streaming_data_types}/forwarder_config_update_rf5k.py (100%) rename {streaming_data_types => src/streaming_data_types}/histogram_hs00.py (100%) rename {streaming_data_types => src/streaming_data_types}/histogram_hs01.py (100%) rename {streaming_data_types => src/streaming_data_types}/json_json.py (100%) rename {streaming_data_types => src/streaming_data_types}/logdata_f142.py (100%) rename {streaming_data_types => src/streaming_data_types}/logdata_f144.py (100%) rename {streaming_data_types => src/streaming_data_types}/nicos_cache_ns10.py (100%) rename {streaming_data_types => src/streaming_data_types}/readout_data_ar51.py (100%) rename 
{streaming_data_types => src/streaming_data_types}/run_start_pl72.py (100%) rename {streaming_data_types => src/streaming_data_types}/run_stop_6s4t.py (100%) rename {streaming_data_types => src/streaming_data_types}/sample_environment_senv.py (100%) rename {streaming_data_types => src/streaming_data_types}/status_x5f2.py (100%) rename {streaming_data_types => src/streaming_data_types}/timestamps_tdct.py (100%) rename {streaming_data_types => src/streaming_data_types}/units_un00.py (100%) rename {streaming_data_types => src/streaming_data_types}/utils.py (100%) delete mode 100644 streaming_data_types/_version.py delete mode 100644 tox.ini diff --git a/.flake8 b/.flake8 deleted file mode 100644 index 00da88c..0000000 --- a/.flake8 +++ /dev/null @@ -1,9 +0,0 @@ -[flake8] -ignore = E501, E203, E402, W503, Q000 -# E501 & E203: Formatting handled by Black -# E402 complains about imports not being at the top -# W503 complains about splitting if across lines which conflicts with Black -# Q000 complains about using "" and not '' which conflicts with Black -exclude = - fbschemas - README.md diff --git a/.github/workflows/Lint-and-test.yml b/.github/workflows/Lint-and-test.yml new file mode 100644 index 0000000..02697cc --- /dev/null +++ b/.github/workflows/Lint-and-test.yml @@ -0,0 +1,48 @@ +# This workflow will install Python dependencies, run tests and lint with a single version of Python +# For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-python + +name: Python application + +on: + workflow_call: + push: + branches: [ "main" ] + pull_request: + branches: [ "main" ] + +jobs: + call-workflow: + uses: ISISComputingGroup/reusable-workflows/.github/workflows/linters.yml@main + with: + compare-branch: origin/main + python-ver: '3.13' + runs-on: 'ubuntu-latest' + tests: + strategy: + matrix: + version: ['3.12', '3.13', '3.14'] + os: ["ubuntu-latest", "windows-latest"] + runs-on: ${{ matrix.os }} + steps: + - uses: 
actions/checkout@v6 + - name: Install uv and set the python version + uses: astral-sh/setup-uv@v7 + with: + python-version: ${{ matrix.runs-on }} + - name: Install dependencies + run: uv sync --all-extras --dev + - name: Test with pytest + run: uv run pytest tests + results: + if: ${{ always() }} + runs-on: ubuntu-latest + name: Final Results + needs: [tests, call-workflow] + steps: + - run: exit 1 + # see https://stackoverflow.com/a/67532120/4907315 + if: >- + ${{ + contains(needs.*.result, 'failure') + || contains(needs.*.result, 'cancelled') + }} diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml new file mode 100644 index 0000000..2d8134e --- /dev/null +++ b/.github/workflows/release.yml @@ -0,0 +1,92 @@ +name: Publish Python distribution to PyPI +on: push +jobs: + lint-and-test: + if: github.ref_type == 'tag' + name: Run linter and tests + uses: ./.github/workflows/Lint-and-test.yml + build: + needs: lint-and-test + if: github.ref_type == 'tag' + name: build distribution + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v6 + - name: Set up Python + uses: actions/setup-python@v6 + with: + python-version: "3.13" + - name: Install pypa/build + run: >- + python3 -m + pip install + build + --user + - name: Build a binary wheel and a source tarball + run: python3 -m build + - name: Store the distribution packages + uses: actions/upload-artifact@v6 + with: + name: python-package-distributions + path: dist/ + publish-to-pypi: + name: >- + Publish Python distribution to PyPI + if: github.ref_type == 'tag' + needs: [lint-and-test, build] + runs-on: ubuntu-latest + environment: + name: release + url: https://pypi.org/p/isis-streaming-data-types + permissions: + id-token: write # IMPORTANT: mandatory for trusted publishing + steps: + - name: Download all the dists + uses: actions/download-artifact@v7 + with: + name: python-package-distributions + path: dist/ + - name: Publish distribution to PyPI + uses: 
pypa/gh-action-pypi-publish@release/v1 + github-release: + name: >- + Sign the Python distribution with Sigstore + and upload them to GitHub Release + needs: [lint-and-test, build, publish-to-pypi] + runs-on: ubuntu-latest + + permissions: + contents: write # IMPORTANT: mandatory for making GitHub Releases + id-token: write # IMPORTANT: mandatory for sigstore + + steps: + - name: Download all the dists + uses: actions/download-artifact@v7 + with: + name: python-package-distributions + path: dist/ + - name: Sign the dists with Sigstore + uses: sigstore/gh-action-sigstore-python@v3.2.0 + with: + inputs: >- + ./dist/*.tar.gz + ./dist/*.whl + - name: Create GitHub Release + env: + GITHUB_TOKEN: ${{ github.token }} + run: >- + gh release create + '${{ github.ref_name }}' + --repo '${{ github.repository }}' + --notes "" + - name: Upload artifact signatures to GitHub Release + env: + GITHUB_TOKEN: ${{ github.token }} + # Upload to GitHub Release using the `gh` CLI. + # `dist/` contains the built packages, and the + # sigstore-produced signatures and certificates. 
+ run: >- + gh release upload + '${{ github.ref_name }}' dist/** + --repo '${{ github.repository }}' diff --git a/.gitignore b/.gitignore index 7ba83bc..a8b9804 100644 --- a/.gitignore +++ b/.gitignore @@ -136,3 +136,5 @@ dmypy.json # VSCode .vscode + +src/streaming_data_types/_version.py diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml deleted file mode 100644 index d9ad7bc..0000000 --- a/.pre-commit-config.yaml +++ /dev/null @@ -1,23 +0,0 @@ -repos: -- repo: local - hooks: - - id: black - name: black - entry: black - language: system - types_or: [python, pyi] - require_serial: true - - id: flake8 - name: flake8 - entry: flake8 - language: system - types: [python] - require_serial: true - - id: isort - name: isort - entry: isort - args: ["--profile", "black"] - language: system - types_or: [cython, pyi, python] - require_serial: true - diff --git a/MANIFEST.in b/MANIFEST.in deleted file mode 100644 index 5dd9038..0000000 --- a/MANIFEST.in +++ /dev/null @@ -1,4 +0,0 @@ -include LICENSE -include requirements*.* -include Makefile -include README.md diff --git a/Makefile b/Makefile deleted file mode 100644 index ab4c425..0000000 --- a/Makefile +++ /dev/null @@ -1,5 +0,0 @@ -init: - pip install -r requirements.txt - -test: - pytest tests diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..e147999 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,103 @@ +[build-system] +requires = ["setuptools", "setuptools_scm>=8"] +build-backend = "setuptools.build_meta" + + +[project] +name = "isis_streaming_data_types" +dynamic = ["version"] +description = "Python utilities for handling ISIS streamed data" +readme = "README.md" +requires-python = ">=3.12" +license-files = ["LICENSE"] + +authors = [ + {name = "ISIS Experiment Controls", email = "ISISExperimentControls@stfc.ac.uk" } +] +maintainers = [ + {name = "ISIS Experiment Controls", email = "ISISExperimentControls@stfc.ac.uk" } +] + +# Classifiers help users find your project by 
categorizing it. +# +# For a list of valid classifiers, see https://pypi.org/classifiers/ +classifiers = [ + # How mature is this project? Common values are + # 3 - Alpha + # 4 - Beta + # 5 - Production/Stable + "Development Status :: 3 - Alpha", + "Intended Audience :: Developers", + + # Specify the Python versions you support here. In particular, ensure + # that you indicate you support Python 3. These classifiers are *not* + # checked by "pip install". See instead "requires-python" key in this file. + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", + "Programming Language :: Python :: 3.14", + "Programming Language :: Python :: 3 :: Only", +] + +dependencies = [ + "flatbuffers", + "numpy<2", # Pinned to <2 due to f142 np.unicode +] + +[project.optional-dependencies] +dev = [ + "ruff>=0.8", + "pyright", + "pytest", + "pytest-cov", +] + +[project.urls] +"Homepage" = "https://github.com/isiscomputinggroup/isis_streaming_data_types" +"Bug Reports" = "https://github.com/isiscomputinggroup/isis_streaming_data_types/issues" +"Source" = "https://github.com/isiscomputinggroup/isis_streaming_data_types" + +[tool.pytest.ini_options] +testpaths = "tests" +asyncio_mode = "auto" +addopts = "--cov --cov-report=html -vv" +filterwarnings = [ + 'ignore:FigureCanvasAgg is non-interactive, and thus cannot be shown:UserWarning', + 'error:Using UFloat objects with std_dev==0 may give unexpected results.:UserWarning', +] + +[tool.coverage.run] +branch = true +source = ["src"] + +[tool.coverage.report] +fail_under = 100 +exclude_lines = [ + "pragma: no cover", + "if TYPE_CHECKING:", + "if typing.TYPE_CHECKING:", + "@abstractmethod", +] + +[tool.coverage.html] +directory = "coverage_html_report" + +[tool.pyright] +include = ["src", "tests"] +reportConstantRedefinition = true +reportDeprecated = true +reportInconsistentConstructor = true +reportMissingParameterType = true +reportMissingTypeArgument = true 
+reportUnnecessaryCast = true +reportUnnecessaryComparison = true +reportUnnecessaryContains = true +reportUnnecessaryIsInstance = true +reportUntypedBaseClass = true +reportUntypedClassDecorator = true +reportUntypedFunctionDecorator = true + +[tool.setuptools_scm] +version_file = "src/streaming_data_types/_version.py" + + diff --git a/requirements-dev.txt b/requirements-dev.txt deleted file mode 100644 index 05210d8..0000000 --- a/requirements-dev.txt +++ /dev/null @@ -1,10 +0,0 @@ --r requirements.txt -black -flake8 -isort -pre-commit -pytest -tox==3.27.1 # tox 4 seems to be broken at the moment -tox-pyenv -twine -wheel diff --git a/requirements.txt b/requirements.txt deleted file mode 100644 index 38f5b4f..0000000 --- a/requirements.txt +++ /dev/null @@ -1,2 +0,0 @@ -flatbuffers -numpy<2.0.0 diff --git a/setup.py b/setup.py deleted file mode 100644 index 96bcc1f..0000000 --- a/setup.py +++ /dev/null @@ -1,32 +0,0 @@ -import os - -from setuptools import find_packages, setup - -from streaming_data_types._version import version - -DESCRIPTION = "Python utilities for handling ESS streamed data" - -here = os.path.abspath(os.path.dirname(__file__)) - -# Import the README and use it as the long-description. 
-try: - with open(os.path.join(here, "README.md"), encoding="utf-8") as f: - LONG_DESCRIPTION = "\n" + f.read() -except Exception as error: - print("COULD NOT GET LONG DESC: {}".format(error)) - LONG_DESCRIPTION = DESCRIPTION - -setup( - name="ess_streaming_data_types", - version=version, - description=DESCRIPTION, - long_description=LONG_DESCRIPTION, - long_description_content_type="text/markdown", - author="ScreamingUdder", - url="https://github.com/ess-dmsc/python-streaming-data-types", - license="BSD 2-Clause License", - packages=find_packages(exclude=["tests", "tests.*"]), - python_requires=">=3.6.0", - install_requires=["flatbuffers", "numpy"], - extras_require={"dev": ["flake8", "pre-commit", "pytest", "tox"]}, -) diff --git a/streaming_data_types/__init__.py b/src/streaming_data_types/__init__.py similarity index 100% rename from streaming_data_types/__init__.py rename to src/streaming_data_types/__init__.py index 2757dfe..b163503 100644 --- a/streaming_data_types/__init__.py +++ b/src/streaming_data_types/__init__.py @@ -39,10 +39,10 @@ ) from streaming_data_types.status_x5f2 import deserialise_x5f2, serialise_x5f2 from streaming_data_types.timestamps_tdct import deserialise_tdct, serialise_tdct +from streaming_data_types.units_un00 import serialise_un00, deserialise_un00 __version__ = version -from streaming_data_types.units_un00 import serialise_un00, deserialise_un00 SERIALISERS = { "an44": serialise_an44, diff --git a/streaming_data_types/action_response_answ.py b/src/streaming_data_types/action_response_answ.py similarity index 100% rename from streaming_data_types/action_response_answ.py rename to src/streaming_data_types/action_response_answ.py diff --git a/streaming_data_types/alarm_al00.py b/src/streaming_data_types/alarm_al00.py similarity index 100% rename from streaming_data_types/alarm_al00.py rename to src/streaming_data_types/alarm_al00.py diff --git a/streaming_data_types/area_detector_ADAr.py 
b/src/streaming_data_types/area_detector_ADAr.py similarity index 100% rename from streaming_data_types/area_detector_ADAr.py rename to src/streaming_data_types/area_detector_ADAr.py diff --git a/streaming_data_types/area_detector_NDAr.py b/src/streaming_data_types/area_detector_NDAr.py similarity index 100% rename from streaming_data_types/area_detector_NDAr.py rename to src/streaming_data_types/area_detector_NDAr.py diff --git a/streaming_data_types/area_detector_ad00.py b/src/streaming_data_types/area_detector_ad00.py similarity index 100% rename from streaming_data_types/area_detector_ad00.py rename to src/streaming_data_types/area_detector_ad00.py diff --git a/streaming_data_types/array_1d_se00.py b/src/streaming_data_types/array_1d_se00.py similarity index 100% rename from streaming_data_types/array_1d_se00.py rename to src/streaming_data_types/array_1d_se00.py diff --git a/streaming_data_types/dataarray_da00.py b/src/streaming_data_types/dataarray_da00.py similarity index 100% rename from streaming_data_types/dataarray_da00.py rename to src/streaming_data_types/dataarray_da00.py diff --git a/streaming_data_types/epics_connection_ep01.py b/src/streaming_data_types/epics_connection_ep01.py similarity index 100% rename from streaming_data_types/epics_connection_ep01.py rename to src/streaming_data_types/epics_connection_ep01.py diff --git a/streaming_data_types/epics_connection_info_ep00.py b/src/streaming_data_types/epics_connection_info_ep00.py similarity index 100% rename from streaming_data_types/epics_connection_info_ep00.py rename to src/streaming_data_types/epics_connection_info_ep00.py diff --git a/streaming_data_types/eventdata_an44.py b/src/streaming_data_types/eventdata_an44.py similarity index 100% rename from streaming_data_types/eventdata_an44.py rename to src/streaming_data_types/eventdata_an44.py diff --git a/streaming_data_types/eventdata_ev42.py b/src/streaming_data_types/eventdata_ev42.py similarity index 100% rename from 
streaming_data_types/eventdata_ev42.py rename to src/streaming_data_types/eventdata_ev42.py diff --git a/streaming_data_types/eventdata_ev43.py b/src/streaming_data_types/eventdata_ev43.py similarity index 100% rename from streaming_data_types/eventdata_ev43.py rename to src/streaming_data_types/eventdata_ev43.py diff --git a/streaming_data_types/eventdata_ev44.py b/src/streaming_data_types/eventdata_ev44.py similarity index 100% rename from streaming_data_types/eventdata_ev44.py rename to src/streaming_data_types/eventdata_ev44.py diff --git a/streaming_data_types/exceptions.py b/src/streaming_data_types/exceptions.py similarity index 100% rename from streaming_data_types/exceptions.py rename to src/streaming_data_types/exceptions.py diff --git a/streaming_data_types/fbschemas/ADAr_ADArray_schema/ADArray.py b/src/streaming_data_types/fbschemas/ADAr_ADArray_schema/ADArray.py similarity index 100% rename from streaming_data_types/fbschemas/ADAr_ADArray_schema/ADArray.py rename to src/streaming_data_types/fbschemas/ADAr_ADArray_schema/ADArray.py diff --git a/streaming_data_types/fbschemas/ADAr_ADArray_schema/Attribute.py b/src/streaming_data_types/fbschemas/ADAr_ADArray_schema/Attribute.py similarity index 100% rename from streaming_data_types/fbschemas/ADAr_ADArray_schema/Attribute.py rename to src/streaming_data_types/fbschemas/ADAr_ADArray_schema/Attribute.py diff --git a/streaming_data_types/fbschemas/ADAr_ADArray_schema/DType.py b/src/streaming_data_types/fbschemas/ADAr_ADArray_schema/DType.py similarity index 100% rename from streaming_data_types/fbschemas/ADAr_ADArray_schema/DType.py rename to src/streaming_data_types/fbschemas/ADAr_ADArray_schema/DType.py diff --git a/streaming_data_types/fbschemas/ADAr_ADArray_schema/__init__.py b/src/streaming_data_types/fbschemas/ADAr_ADArray_schema/__init__.py similarity index 100% rename from streaming_data_types/fbschemas/ADAr_ADArray_schema/__init__.py rename to 
src/streaming_data_types/fbschemas/ADAr_ADArray_schema/__init__.py diff --git a/streaming_data_types/fbschemas/NDAr_NDArray_schema/DType.py b/src/streaming_data_types/fbschemas/NDAr_NDArray_schema/DType.py similarity index 100% rename from streaming_data_types/fbschemas/NDAr_NDArray_schema/DType.py rename to src/streaming_data_types/fbschemas/NDAr_NDArray_schema/DType.py diff --git a/streaming_data_types/fbschemas/NDAr_NDArray_schema/NDArray.py b/src/streaming_data_types/fbschemas/NDAr_NDArray_schema/NDArray.py similarity index 100% rename from streaming_data_types/fbschemas/NDAr_NDArray_schema/NDArray.py rename to src/streaming_data_types/fbschemas/NDAr_NDArray_schema/NDArray.py diff --git a/streaming_data_types/fbschemas/NDAr_NDArray_schema/NDAttribute.py b/src/streaming_data_types/fbschemas/NDAr_NDArray_schema/NDAttribute.py similarity index 100% rename from streaming_data_types/fbschemas/NDAr_NDArray_schema/NDAttribute.py rename to src/streaming_data_types/fbschemas/NDAr_NDArray_schema/NDAttribute.py diff --git a/streaming_data_types/fbschemas/NDAr_NDArray_schema/__init__.py b/src/streaming_data_types/fbschemas/NDAr_NDArray_schema/__init__.py similarity index 100% rename from streaming_data_types/fbschemas/NDAr_NDArray_schema/__init__.py rename to src/streaming_data_types/fbschemas/NDAr_NDArray_schema/__init__.py diff --git a/streaming_data_types/fbschemas/NDAr_NDArray_schema/epicsTimeStamp.py b/src/streaming_data_types/fbschemas/NDAr_NDArray_schema/epicsTimeStamp.py similarity index 100% rename from streaming_data_types/fbschemas/NDAr_NDArray_schema/epicsTimeStamp.py rename to src/streaming_data_types/fbschemas/NDAr_NDArray_schema/epicsTimeStamp.py diff --git a/streaming_data_types/fbschemas/__init__.py b/src/streaming_data_types/fbschemas/__init__.py similarity index 100% rename from streaming_data_types/fbschemas/__init__.py rename to src/streaming_data_types/fbschemas/__init__.py diff --git 
a/streaming_data_types/fbschemas/action_response_answ/ActionOutcome.py b/src/streaming_data_types/fbschemas/action_response_answ/ActionOutcome.py similarity index 100% rename from streaming_data_types/fbschemas/action_response_answ/ActionOutcome.py rename to src/streaming_data_types/fbschemas/action_response_answ/ActionOutcome.py diff --git a/streaming_data_types/fbschemas/action_response_answ/ActionResponse.py b/src/streaming_data_types/fbschemas/action_response_answ/ActionResponse.py similarity index 100% rename from streaming_data_types/fbschemas/action_response_answ/ActionResponse.py rename to src/streaming_data_types/fbschemas/action_response_answ/ActionResponse.py diff --git a/streaming_data_types/fbschemas/action_response_answ/ActionType.py b/src/streaming_data_types/fbschemas/action_response_answ/ActionType.py similarity index 100% rename from streaming_data_types/fbschemas/action_response_answ/ActionType.py rename to src/streaming_data_types/fbschemas/action_response_answ/ActionType.py diff --git a/streaming_data_types/fbschemas/action_response_answ/__init__.py b/src/streaming_data_types/fbschemas/action_response_answ/__init__.py similarity index 100% rename from streaming_data_types/fbschemas/action_response_answ/__init__.py rename to src/streaming_data_types/fbschemas/action_response_answ/__init__.py diff --git a/streaming_data_types/fbschemas/alarm_al00/Alarm.py b/src/streaming_data_types/fbschemas/alarm_al00/Alarm.py similarity index 100% rename from streaming_data_types/fbschemas/alarm_al00/Alarm.py rename to src/streaming_data_types/fbschemas/alarm_al00/Alarm.py diff --git a/streaming_data_types/fbschemas/alarm_al00/Severity.py b/src/streaming_data_types/fbschemas/alarm_al00/Severity.py similarity index 100% rename from streaming_data_types/fbschemas/alarm_al00/Severity.py rename to src/streaming_data_types/fbschemas/alarm_al00/Severity.py diff --git a/streaming_data_types/fbschemas/alarm_al00/__init__.py 
b/src/streaming_data_types/fbschemas/alarm_al00/__init__.py similarity index 100% rename from streaming_data_types/fbschemas/alarm_al00/__init__.py rename to src/streaming_data_types/fbschemas/alarm_al00/__init__.py diff --git a/streaming_data_types/fbschemas/area_detector_ad00/Attribute.py b/src/streaming_data_types/fbschemas/area_detector_ad00/Attribute.py similarity index 100% rename from streaming_data_types/fbschemas/area_detector_ad00/Attribute.py rename to src/streaming_data_types/fbschemas/area_detector_ad00/Attribute.py diff --git a/streaming_data_types/fbschemas/area_detector_ad00/DType.py b/src/streaming_data_types/fbschemas/area_detector_ad00/DType.py similarity index 100% rename from streaming_data_types/fbschemas/area_detector_ad00/DType.py rename to src/streaming_data_types/fbschemas/area_detector_ad00/DType.py diff --git a/streaming_data_types/fbschemas/area_detector_ad00/__init__.py b/src/streaming_data_types/fbschemas/area_detector_ad00/__init__.py similarity index 100% rename from streaming_data_types/fbschemas/area_detector_ad00/__init__.py rename to src/streaming_data_types/fbschemas/area_detector_ad00/__init__.py diff --git a/streaming_data_types/fbschemas/area_detector_ad00/ad00_ADArray.py b/src/streaming_data_types/fbschemas/area_detector_ad00/ad00_ADArray.py similarity index 100% rename from streaming_data_types/fbschemas/area_detector_ad00/ad00_ADArray.py rename to src/streaming_data_types/fbschemas/area_detector_ad00/ad00_ADArray.py diff --git a/streaming_data_types/fbschemas/array_1d_se00/DoubleArray.py b/src/streaming_data_types/fbschemas/array_1d_se00/DoubleArray.py similarity index 100% rename from streaming_data_types/fbschemas/array_1d_se00/DoubleArray.py rename to src/streaming_data_types/fbschemas/array_1d_se00/DoubleArray.py diff --git a/streaming_data_types/fbschemas/array_1d_se00/FloatArray.py b/src/streaming_data_types/fbschemas/array_1d_se00/FloatArray.py similarity index 100% rename from 
streaming_data_types/fbschemas/array_1d_se00/FloatArray.py rename to src/streaming_data_types/fbschemas/array_1d_se00/FloatArray.py diff --git a/streaming_data_types/fbschemas/array_1d_se00/Int16Array.py b/src/streaming_data_types/fbschemas/array_1d_se00/Int16Array.py similarity index 100% rename from streaming_data_types/fbschemas/array_1d_se00/Int16Array.py rename to src/streaming_data_types/fbschemas/array_1d_se00/Int16Array.py diff --git a/streaming_data_types/fbschemas/array_1d_se00/Int32Array.py b/src/streaming_data_types/fbschemas/array_1d_se00/Int32Array.py similarity index 100% rename from streaming_data_types/fbschemas/array_1d_se00/Int32Array.py rename to src/streaming_data_types/fbschemas/array_1d_se00/Int32Array.py diff --git a/streaming_data_types/fbschemas/array_1d_se00/Int64Array.py b/src/streaming_data_types/fbschemas/array_1d_se00/Int64Array.py similarity index 100% rename from streaming_data_types/fbschemas/array_1d_se00/Int64Array.py rename to src/streaming_data_types/fbschemas/array_1d_se00/Int64Array.py diff --git a/streaming_data_types/fbschemas/array_1d_se00/Int8Array.py b/src/streaming_data_types/fbschemas/array_1d_se00/Int8Array.py similarity index 100% rename from streaming_data_types/fbschemas/array_1d_se00/Int8Array.py rename to src/streaming_data_types/fbschemas/array_1d_se00/Int8Array.py diff --git a/streaming_data_types/fbschemas/array_1d_se00/Location.py b/src/streaming_data_types/fbschemas/array_1d_se00/Location.py similarity index 100% rename from streaming_data_types/fbschemas/array_1d_se00/Location.py rename to src/streaming_data_types/fbschemas/array_1d_se00/Location.py diff --git a/streaming_data_types/fbschemas/array_1d_se00/UInt16Array.py b/src/streaming_data_types/fbschemas/array_1d_se00/UInt16Array.py similarity index 100% rename from streaming_data_types/fbschemas/array_1d_se00/UInt16Array.py rename to src/streaming_data_types/fbschemas/array_1d_se00/UInt16Array.py diff --git 
a/streaming_data_types/fbschemas/array_1d_se00/UInt32Array.py b/src/streaming_data_types/fbschemas/array_1d_se00/UInt32Array.py similarity index 100% rename from streaming_data_types/fbschemas/array_1d_se00/UInt32Array.py rename to src/streaming_data_types/fbschemas/array_1d_se00/UInt32Array.py diff --git a/streaming_data_types/fbschemas/array_1d_se00/UInt64Array.py b/src/streaming_data_types/fbschemas/array_1d_se00/UInt64Array.py similarity index 100% rename from streaming_data_types/fbschemas/array_1d_se00/UInt64Array.py rename to src/streaming_data_types/fbschemas/array_1d_se00/UInt64Array.py diff --git a/streaming_data_types/fbschemas/array_1d_se00/UInt8Array.py b/src/streaming_data_types/fbschemas/array_1d_se00/UInt8Array.py similarity index 100% rename from streaming_data_types/fbschemas/array_1d_se00/UInt8Array.py rename to src/streaming_data_types/fbschemas/array_1d_se00/UInt8Array.py diff --git a/streaming_data_types/fbschemas/array_1d_se00/ValueUnion.py b/src/streaming_data_types/fbschemas/array_1d_se00/ValueUnion.py similarity index 100% rename from streaming_data_types/fbschemas/array_1d_se00/ValueUnion.py rename to src/streaming_data_types/fbschemas/array_1d_se00/ValueUnion.py diff --git a/streaming_data_types/fbschemas/array_1d_se00/__init__.py b/src/streaming_data_types/fbschemas/array_1d_se00/__init__.py similarity index 100% rename from streaming_data_types/fbschemas/array_1d_se00/__init__.py rename to src/streaming_data_types/fbschemas/array_1d_se00/__init__.py diff --git a/streaming_data_types/fbschemas/array_1d_se00/se00_SampleEnvironmentData.py b/src/streaming_data_types/fbschemas/array_1d_se00/se00_SampleEnvironmentData.py similarity index 100% rename from streaming_data_types/fbschemas/array_1d_se00/se00_SampleEnvironmentData.py rename to src/streaming_data_types/fbschemas/array_1d_se00/se00_SampleEnvironmentData.py diff --git a/streaming_data_types/fbschemas/dataarray_da00/__init__.py 
b/src/streaming_data_types/fbschemas/dataarray_da00/__init__.py similarity index 100% rename from streaming_data_types/fbschemas/dataarray_da00/__init__.py rename to src/streaming_data_types/fbschemas/dataarray_da00/__init__.py diff --git a/streaming_data_types/fbschemas/dataarray_da00/da00_DataArray.py b/src/streaming_data_types/fbschemas/dataarray_da00/da00_DataArray.py similarity index 100% rename from streaming_data_types/fbschemas/dataarray_da00/da00_DataArray.py rename to src/streaming_data_types/fbschemas/dataarray_da00/da00_DataArray.py diff --git a/streaming_data_types/fbschemas/dataarray_da00/da00_Variable.py b/src/streaming_data_types/fbschemas/dataarray_da00/da00_Variable.py similarity index 100% rename from streaming_data_types/fbschemas/dataarray_da00/da00_Variable.py rename to src/streaming_data_types/fbschemas/dataarray_da00/da00_Variable.py diff --git a/streaming_data_types/fbschemas/dataarray_da00/da00_dtype.py b/src/streaming_data_types/fbschemas/dataarray_da00/da00_dtype.py similarity index 100% rename from streaming_data_types/fbschemas/dataarray_da00/da00_dtype.py rename to src/streaming_data_types/fbschemas/dataarray_da00/da00_dtype.py diff --git a/streaming_data_types/fbschemas/epics_connection_ep01/ConnectionInfo.py b/src/streaming_data_types/fbschemas/epics_connection_ep01/ConnectionInfo.py similarity index 100% rename from streaming_data_types/fbschemas/epics_connection_ep01/ConnectionInfo.py rename to src/streaming_data_types/fbschemas/epics_connection_ep01/ConnectionInfo.py diff --git a/streaming_data_types/fbschemas/epics_connection_ep01/EpicsPVConnectionInfo.py b/src/streaming_data_types/fbschemas/epics_connection_ep01/EpicsPVConnectionInfo.py similarity index 100% rename from streaming_data_types/fbschemas/epics_connection_ep01/EpicsPVConnectionInfo.py rename to src/streaming_data_types/fbschemas/epics_connection_ep01/EpicsPVConnectionInfo.py diff --git a/streaming_data_types/fbschemas/epics_connection_ep01/__init__.py 
b/src/streaming_data_types/fbschemas/epics_connection_ep01/__init__.py similarity index 100% rename from streaming_data_types/fbschemas/epics_connection_ep01/__init__.py rename to src/streaming_data_types/fbschemas/epics_connection_ep01/__init__.py diff --git a/streaming_data_types/fbschemas/epics_connection_info_ep00/EpicsConnectionInfo.py b/src/streaming_data_types/fbschemas/epics_connection_info_ep00/EpicsConnectionInfo.py similarity index 100% rename from streaming_data_types/fbschemas/epics_connection_info_ep00/EpicsConnectionInfo.py rename to src/streaming_data_types/fbschemas/epics_connection_info_ep00/EpicsConnectionInfo.py diff --git a/streaming_data_types/fbschemas/epics_connection_info_ep00/EventType.py b/src/streaming_data_types/fbschemas/epics_connection_info_ep00/EventType.py similarity index 100% rename from streaming_data_types/fbschemas/epics_connection_info_ep00/EventType.py rename to src/streaming_data_types/fbschemas/epics_connection_info_ep00/EventType.py diff --git a/streaming_data_types/fbschemas/epics_connection_info_ep00/__init__.py b/src/streaming_data_types/fbschemas/epics_connection_info_ep00/__init__.py similarity index 100% rename from streaming_data_types/fbschemas/epics_connection_info_ep00/__init__.py rename to src/streaming_data_types/fbschemas/epics_connection_info_ep00/__init__.py diff --git a/streaming_data_types/fbschemas/eventdata_an44/AN44EventMessage.py b/src/streaming_data_types/fbschemas/eventdata_an44/AN44EventMessage.py similarity index 100% rename from streaming_data_types/fbschemas/eventdata_an44/AN44EventMessage.py rename to src/streaming_data_types/fbschemas/eventdata_an44/AN44EventMessage.py diff --git a/streaming_data_types/fbschemas/eventdata_an44/__init__.py b/src/streaming_data_types/fbschemas/eventdata_an44/__init__.py similarity index 100% rename from streaming_data_types/fbschemas/eventdata_an44/__init__.py rename to src/streaming_data_types/fbschemas/eventdata_an44/__init__.py diff --git 
a/streaming_data_types/fbschemas/eventdata_ev42/EventMessage.py b/src/streaming_data_types/fbschemas/eventdata_ev42/EventMessage.py similarity index 100% rename from streaming_data_types/fbschemas/eventdata_ev42/EventMessage.py rename to src/streaming_data_types/fbschemas/eventdata_ev42/EventMessage.py diff --git a/streaming_data_types/fbschemas/eventdata_ev42/FacilityData.py b/src/streaming_data_types/fbschemas/eventdata_ev42/FacilityData.py similarity index 100% rename from streaming_data_types/fbschemas/eventdata_ev42/FacilityData.py rename to src/streaming_data_types/fbschemas/eventdata_ev42/FacilityData.py diff --git a/streaming_data_types/fbschemas/eventdata_ev42/__init__.py b/src/streaming_data_types/fbschemas/eventdata_ev42/__init__.py similarity index 100% rename from streaming_data_types/fbschemas/eventdata_ev42/__init__.py rename to src/streaming_data_types/fbschemas/eventdata_ev42/__init__.py diff --git a/streaming_data_types/fbschemas/eventdata_ev43/Event43Message.py b/src/streaming_data_types/fbschemas/eventdata_ev43/Event43Message.py similarity index 100% rename from streaming_data_types/fbschemas/eventdata_ev43/Event43Message.py rename to src/streaming_data_types/fbschemas/eventdata_ev43/Event43Message.py diff --git a/streaming_data_types/fbschemas/eventdata_ev43/__init__.py b/src/streaming_data_types/fbschemas/eventdata_ev43/__init__.py similarity index 100% rename from streaming_data_types/fbschemas/eventdata_ev43/__init__.py rename to src/streaming_data_types/fbschemas/eventdata_ev43/__init__.py diff --git a/streaming_data_types/fbschemas/eventdata_ev44/Event44Message.py b/src/streaming_data_types/fbschemas/eventdata_ev44/Event44Message.py similarity index 100% rename from streaming_data_types/fbschemas/eventdata_ev44/Event44Message.py rename to src/streaming_data_types/fbschemas/eventdata_ev44/Event44Message.py diff --git a/streaming_data_types/fbschemas/eventdata_ev44/__init__.py b/src/streaming_data_types/fbschemas/eventdata_ev44/__init__.py 
similarity index 100% rename from streaming_data_types/fbschemas/eventdata_ev44/__init__.py rename to src/streaming_data_types/fbschemas/eventdata_ev44/__init__.py diff --git a/streaming_data_types/fbschemas/finished_writing_wrdn/FinishedWriting.py b/src/streaming_data_types/fbschemas/finished_writing_wrdn/FinishedWriting.py similarity index 100% rename from streaming_data_types/fbschemas/finished_writing_wrdn/FinishedWriting.py rename to src/streaming_data_types/fbschemas/finished_writing_wrdn/FinishedWriting.py diff --git a/streaming_data_types/fbschemas/finished_writing_wrdn/__init__.py b/src/streaming_data_types/fbschemas/finished_writing_wrdn/__init__.py similarity index 100% rename from streaming_data_types/fbschemas/finished_writing_wrdn/__init__.py rename to src/streaming_data_types/fbschemas/finished_writing_wrdn/__init__.py diff --git a/streaming_data_types/fbschemas/forwarder_config_update_fc00/Protocol.py b/src/streaming_data_types/fbschemas/forwarder_config_update_fc00/Protocol.py similarity index 100% rename from streaming_data_types/fbschemas/forwarder_config_update_fc00/Protocol.py rename to src/streaming_data_types/fbschemas/forwarder_config_update_fc00/Protocol.py diff --git a/streaming_data_types/fbschemas/forwarder_config_update_fc00/Stream.py b/src/streaming_data_types/fbschemas/forwarder_config_update_fc00/Stream.py similarity index 100% rename from streaming_data_types/fbschemas/forwarder_config_update_fc00/Stream.py rename to src/streaming_data_types/fbschemas/forwarder_config_update_fc00/Stream.py diff --git a/streaming_data_types/fbschemas/forwarder_config_update_fc00/UpdateType.py b/src/streaming_data_types/fbschemas/forwarder_config_update_fc00/UpdateType.py similarity index 100% rename from streaming_data_types/fbschemas/forwarder_config_update_fc00/UpdateType.py rename to src/streaming_data_types/fbschemas/forwarder_config_update_fc00/UpdateType.py diff --git a/streaming_data_types/fbschemas/forwarder_config_update_fc00/__init__.py 
b/src/streaming_data_types/fbschemas/forwarder_config_update_fc00/__init__.py similarity index 100% rename from streaming_data_types/fbschemas/forwarder_config_update_fc00/__init__.py rename to src/streaming_data_types/fbschemas/forwarder_config_update_fc00/__init__.py diff --git a/streaming_data_types/fbschemas/forwarder_config_update_fc00/fc00_ConfigUpdate.py b/src/streaming_data_types/fbschemas/forwarder_config_update_fc00/fc00_ConfigUpdate.py similarity index 100% rename from streaming_data_types/fbschemas/forwarder_config_update_fc00/fc00_ConfigUpdate.py rename to src/streaming_data_types/fbschemas/forwarder_config_update_fc00/fc00_ConfigUpdate.py diff --git a/streaming_data_types/fbschemas/forwarder_config_update_rf5k/ConfigUpdate.py b/src/streaming_data_types/fbschemas/forwarder_config_update_rf5k/ConfigUpdate.py similarity index 100% rename from streaming_data_types/fbschemas/forwarder_config_update_rf5k/ConfigUpdate.py rename to src/streaming_data_types/fbschemas/forwarder_config_update_rf5k/ConfigUpdate.py diff --git a/streaming_data_types/fbschemas/forwarder_config_update_rf5k/Protocol.py b/src/streaming_data_types/fbschemas/forwarder_config_update_rf5k/Protocol.py similarity index 100% rename from streaming_data_types/fbschemas/forwarder_config_update_rf5k/Protocol.py rename to src/streaming_data_types/fbschemas/forwarder_config_update_rf5k/Protocol.py diff --git a/streaming_data_types/fbschemas/forwarder_config_update_rf5k/Stream.py b/src/streaming_data_types/fbschemas/forwarder_config_update_rf5k/Stream.py similarity index 100% rename from streaming_data_types/fbschemas/forwarder_config_update_rf5k/Stream.py rename to src/streaming_data_types/fbschemas/forwarder_config_update_rf5k/Stream.py diff --git a/streaming_data_types/fbschemas/forwarder_config_update_rf5k/UpdateType.py b/src/streaming_data_types/fbschemas/forwarder_config_update_rf5k/UpdateType.py similarity index 100% rename from 
streaming_data_types/fbschemas/forwarder_config_update_rf5k/UpdateType.py rename to src/streaming_data_types/fbschemas/forwarder_config_update_rf5k/UpdateType.py diff --git a/streaming_data_types/fbschemas/forwarder_config_update_rf5k/__init__.py b/src/streaming_data_types/fbschemas/forwarder_config_update_rf5k/__init__.py similarity index 100% rename from streaming_data_types/fbschemas/forwarder_config_update_rf5k/__init__.py rename to src/streaming_data_types/fbschemas/forwarder_config_update_rf5k/__init__.py diff --git a/streaming_data_types/fbschemas/histogram_hs00/Array.py b/src/streaming_data_types/fbschemas/histogram_hs00/Array.py similarity index 100% rename from streaming_data_types/fbschemas/histogram_hs00/Array.py rename to src/streaming_data_types/fbschemas/histogram_hs00/Array.py diff --git a/streaming_data_types/fbschemas/histogram_hs00/ArrayDouble.py b/src/streaming_data_types/fbschemas/histogram_hs00/ArrayDouble.py similarity index 100% rename from streaming_data_types/fbschemas/histogram_hs00/ArrayDouble.py rename to src/streaming_data_types/fbschemas/histogram_hs00/ArrayDouble.py diff --git a/streaming_data_types/fbschemas/histogram_hs00/ArrayFloat.py b/src/streaming_data_types/fbschemas/histogram_hs00/ArrayFloat.py similarity index 100% rename from streaming_data_types/fbschemas/histogram_hs00/ArrayFloat.py rename to src/streaming_data_types/fbschemas/histogram_hs00/ArrayFloat.py diff --git a/streaming_data_types/fbschemas/histogram_hs00/ArrayUInt.py b/src/streaming_data_types/fbschemas/histogram_hs00/ArrayUInt.py similarity index 100% rename from streaming_data_types/fbschemas/histogram_hs00/ArrayUInt.py rename to src/streaming_data_types/fbschemas/histogram_hs00/ArrayUInt.py diff --git a/streaming_data_types/fbschemas/histogram_hs00/ArrayULong.py b/src/streaming_data_types/fbschemas/histogram_hs00/ArrayULong.py similarity index 100% rename from streaming_data_types/fbschemas/histogram_hs00/ArrayULong.py rename to 
src/streaming_data_types/fbschemas/histogram_hs00/ArrayULong.py diff --git a/streaming_data_types/fbschemas/histogram_hs00/DimensionMetaData.py b/src/streaming_data_types/fbschemas/histogram_hs00/DimensionMetaData.py similarity index 100% rename from streaming_data_types/fbschemas/histogram_hs00/DimensionMetaData.py rename to src/streaming_data_types/fbschemas/histogram_hs00/DimensionMetaData.py diff --git a/streaming_data_types/fbschemas/histogram_hs00/EventHistogram.py b/src/streaming_data_types/fbschemas/histogram_hs00/EventHistogram.py similarity index 100% rename from streaming_data_types/fbschemas/histogram_hs00/EventHistogram.py rename to src/streaming_data_types/fbschemas/histogram_hs00/EventHistogram.py diff --git a/streaming_data_types/fbschemas/histogram_hs00/__init__.py b/src/streaming_data_types/fbschemas/histogram_hs00/__init__.py similarity index 100% rename from streaming_data_types/fbschemas/histogram_hs00/__init__.py rename to src/streaming_data_types/fbschemas/histogram_hs00/__init__.py diff --git a/streaming_data_types/fbschemas/histogram_hs01/Array.py b/src/streaming_data_types/fbschemas/histogram_hs01/Array.py similarity index 100% rename from streaming_data_types/fbschemas/histogram_hs01/Array.py rename to src/streaming_data_types/fbschemas/histogram_hs01/Array.py diff --git a/streaming_data_types/fbschemas/histogram_hs01/ArrayDouble.py b/src/streaming_data_types/fbschemas/histogram_hs01/ArrayDouble.py similarity index 100% rename from streaming_data_types/fbschemas/histogram_hs01/ArrayDouble.py rename to src/streaming_data_types/fbschemas/histogram_hs01/ArrayDouble.py diff --git a/streaming_data_types/fbschemas/histogram_hs01/ArrayFloat.py b/src/streaming_data_types/fbschemas/histogram_hs01/ArrayFloat.py similarity index 100% rename from streaming_data_types/fbschemas/histogram_hs01/ArrayFloat.py rename to src/streaming_data_types/fbschemas/histogram_hs01/ArrayFloat.py diff --git a/streaming_data_types/fbschemas/histogram_hs01/ArrayInt.py 
b/src/streaming_data_types/fbschemas/histogram_hs01/ArrayInt.py similarity index 100% rename from streaming_data_types/fbschemas/histogram_hs01/ArrayInt.py rename to src/streaming_data_types/fbschemas/histogram_hs01/ArrayInt.py diff --git a/streaming_data_types/fbschemas/histogram_hs01/ArrayLong.py b/src/streaming_data_types/fbschemas/histogram_hs01/ArrayLong.py similarity index 100% rename from streaming_data_types/fbschemas/histogram_hs01/ArrayLong.py rename to src/streaming_data_types/fbschemas/histogram_hs01/ArrayLong.py diff --git a/streaming_data_types/fbschemas/histogram_hs01/DimensionMetaData.py b/src/streaming_data_types/fbschemas/histogram_hs01/DimensionMetaData.py similarity index 100% rename from streaming_data_types/fbschemas/histogram_hs01/DimensionMetaData.py rename to src/streaming_data_types/fbschemas/histogram_hs01/DimensionMetaData.py diff --git a/streaming_data_types/fbschemas/histogram_hs01/EventHistogram.py b/src/streaming_data_types/fbschemas/histogram_hs01/EventHistogram.py similarity index 100% rename from streaming_data_types/fbschemas/histogram_hs01/EventHistogram.py rename to src/streaming_data_types/fbschemas/histogram_hs01/EventHistogram.py diff --git a/streaming_data_types/fbschemas/histogram_hs01/__init__.py b/src/streaming_data_types/fbschemas/histogram_hs01/__init__.py similarity index 100% rename from streaming_data_types/fbschemas/histogram_hs01/__init__.py rename to src/streaming_data_types/fbschemas/histogram_hs01/__init__.py diff --git a/streaming_data_types/fbschemas/isis_event_info_is84/ISISData.py b/src/streaming_data_types/fbschemas/isis_event_info_is84/ISISData.py similarity index 100% rename from streaming_data_types/fbschemas/isis_event_info_is84/ISISData.py rename to src/streaming_data_types/fbschemas/isis_event_info_is84/ISISData.py diff --git a/streaming_data_types/fbschemas/isis_event_info_is84/RunState.py b/src/streaming_data_types/fbschemas/isis_event_info_is84/RunState.py similarity index 100% rename from 
streaming_data_types/fbschemas/isis_event_info_is84/RunState.py rename to src/streaming_data_types/fbschemas/isis_event_info_is84/RunState.py diff --git a/streaming_data_types/fbschemas/isis_event_info_is84/__init__.py b/src/streaming_data_types/fbschemas/isis_event_info_is84/__init__.py similarity index 100% rename from streaming_data_types/fbschemas/isis_event_info_is84/__init__.py rename to src/streaming_data_types/fbschemas/isis_event_info_is84/__init__.py diff --git a/streaming_data_types/fbschemas/json_json/JsonData.py b/src/streaming_data_types/fbschemas/json_json/JsonData.py similarity index 100% rename from streaming_data_types/fbschemas/json_json/JsonData.py rename to src/streaming_data_types/fbschemas/json_json/JsonData.py diff --git a/streaming_data_types/fbschemas/json_json/__init__.py b/src/streaming_data_types/fbschemas/json_json/__init__.py similarity index 100% rename from streaming_data_types/fbschemas/json_json/__init__.py rename to src/streaming_data_types/fbschemas/json_json/__init__.py diff --git a/streaming_data_types/fbschemas/logdata_f142/AlarmSeverity.py b/src/streaming_data_types/fbschemas/logdata_f142/AlarmSeverity.py similarity index 100% rename from streaming_data_types/fbschemas/logdata_f142/AlarmSeverity.py rename to src/streaming_data_types/fbschemas/logdata_f142/AlarmSeverity.py diff --git a/streaming_data_types/fbschemas/logdata_f142/AlarmStatus.py b/src/streaming_data_types/fbschemas/logdata_f142/AlarmStatus.py similarity index 100% rename from streaming_data_types/fbschemas/logdata_f142/AlarmStatus.py rename to src/streaming_data_types/fbschemas/logdata_f142/AlarmStatus.py diff --git a/streaming_data_types/fbschemas/logdata_f142/ArrayByte.py b/src/streaming_data_types/fbschemas/logdata_f142/ArrayByte.py similarity index 100% rename from streaming_data_types/fbschemas/logdata_f142/ArrayByte.py rename to src/streaming_data_types/fbschemas/logdata_f142/ArrayByte.py diff --git 
a/streaming_data_types/fbschemas/logdata_f142/ArrayDouble.py b/src/streaming_data_types/fbschemas/logdata_f142/ArrayDouble.py similarity index 100% rename from streaming_data_types/fbschemas/logdata_f142/ArrayDouble.py rename to src/streaming_data_types/fbschemas/logdata_f142/ArrayDouble.py diff --git a/streaming_data_types/fbschemas/logdata_f142/ArrayFloat.py b/src/streaming_data_types/fbschemas/logdata_f142/ArrayFloat.py similarity index 100% rename from streaming_data_types/fbschemas/logdata_f142/ArrayFloat.py rename to src/streaming_data_types/fbschemas/logdata_f142/ArrayFloat.py diff --git a/streaming_data_types/fbschemas/logdata_f142/ArrayInt.py b/src/streaming_data_types/fbschemas/logdata_f142/ArrayInt.py similarity index 100% rename from streaming_data_types/fbschemas/logdata_f142/ArrayInt.py rename to src/streaming_data_types/fbschemas/logdata_f142/ArrayInt.py diff --git a/streaming_data_types/fbschemas/logdata_f142/ArrayLong.py b/src/streaming_data_types/fbschemas/logdata_f142/ArrayLong.py similarity index 100% rename from streaming_data_types/fbschemas/logdata_f142/ArrayLong.py rename to src/streaming_data_types/fbschemas/logdata_f142/ArrayLong.py diff --git a/streaming_data_types/fbschemas/logdata_f142/ArrayShort.py b/src/streaming_data_types/fbschemas/logdata_f142/ArrayShort.py similarity index 100% rename from streaming_data_types/fbschemas/logdata_f142/ArrayShort.py rename to src/streaming_data_types/fbschemas/logdata_f142/ArrayShort.py diff --git a/streaming_data_types/fbschemas/logdata_f142/ArrayString.py b/src/streaming_data_types/fbschemas/logdata_f142/ArrayString.py similarity index 100% rename from streaming_data_types/fbschemas/logdata_f142/ArrayString.py rename to src/streaming_data_types/fbschemas/logdata_f142/ArrayString.py diff --git a/streaming_data_types/fbschemas/logdata_f142/ArrayUByte.py b/src/streaming_data_types/fbschemas/logdata_f142/ArrayUByte.py similarity index 100% rename from 
streaming_data_types/fbschemas/logdata_f142/ArrayUByte.py rename to src/streaming_data_types/fbschemas/logdata_f142/ArrayUByte.py diff --git a/streaming_data_types/fbschemas/logdata_f142/ArrayUInt.py b/src/streaming_data_types/fbschemas/logdata_f142/ArrayUInt.py similarity index 100% rename from streaming_data_types/fbschemas/logdata_f142/ArrayUInt.py rename to src/streaming_data_types/fbschemas/logdata_f142/ArrayUInt.py diff --git a/streaming_data_types/fbschemas/logdata_f142/ArrayULong.py b/src/streaming_data_types/fbschemas/logdata_f142/ArrayULong.py similarity index 100% rename from streaming_data_types/fbschemas/logdata_f142/ArrayULong.py rename to src/streaming_data_types/fbschemas/logdata_f142/ArrayULong.py diff --git a/streaming_data_types/fbschemas/logdata_f142/ArrayUShort.py b/src/streaming_data_types/fbschemas/logdata_f142/ArrayUShort.py similarity index 100% rename from streaming_data_types/fbschemas/logdata_f142/ArrayUShort.py rename to src/streaming_data_types/fbschemas/logdata_f142/ArrayUShort.py diff --git a/streaming_data_types/fbschemas/logdata_f142/Byte.py b/src/streaming_data_types/fbschemas/logdata_f142/Byte.py similarity index 100% rename from streaming_data_types/fbschemas/logdata_f142/Byte.py rename to src/streaming_data_types/fbschemas/logdata_f142/Byte.py diff --git a/streaming_data_types/fbschemas/logdata_f142/Double.py b/src/streaming_data_types/fbschemas/logdata_f142/Double.py similarity index 100% rename from streaming_data_types/fbschemas/logdata_f142/Double.py rename to src/streaming_data_types/fbschemas/logdata_f142/Double.py diff --git a/streaming_data_types/fbschemas/logdata_f142/Float.py b/src/streaming_data_types/fbschemas/logdata_f142/Float.py similarity index 100% rename from streaming_data_types/fbschemas/logdata_f142/Float.py rename to src/streaming_data_types/fbschemas/logdata_f142/Float.py diff --git a/streaming_data_types/fbschemas/logdata_f142/Int.py b/src/streaming_data_types/fbschemas/logdata_f142/Int.py similarity 
index 100% rename from streaming_data_types/fbschemas/logdata_f142/Int.py rename to src/streaming_data_types/fbschemas/logdata_f142/Int.py diff --git a/streaming_data_types/fbschemas/logdata_f142/LogData.py b/src/streaming_data_types/fbschemas/logdata_f142/LogData.py similarity index 100% rename from streaming_data_types/fbschemas/logdata_f142/LogData.py rename to src/streaming_data_types/fbschemas/logdata_f142/LogData.py diff --git a/streaming_data_types/fbschemas/logdata_f142/Long.py b/src/streaming_data_types/fbschemas/logdata_f142/Long.py similarity index 100% rename from streaming_data_types/fbschemas/logdata_f142/Long.py rename to src/streaming_data_types/fbschemas/logdata_f142/Long.py diff --git a/streaming_data_types/fbschemas/logdata_f142/Short.py b/src/streaming_data_types/fbschemas/logdata_f142/Short.py similarity index 100% rename from streaming_data_types/fbschemas/logdata_f142/Short.py rename to src/streaming_data_types/fbschemas/logdata_f142/Short.py diff --git a/streaming_data_types/fbschemas/logdata_f142/String.py b/src/streaming_data_types/fbschemas/logdata_f142/String.py similarity index 100% rename from streaming_data_types/fbschemas/logdata_f142/String.py rename to src/streaming_data_types/fbschemas/logdata_f142/String.py diff --git a/streaming_data_types/fbschemas/logdata_f142/UByte.py b/src/streaming_data_types/fbschemas/logdata_f142/UByte.py similarity index 100% rename from streaming_data_types/fbschemas/logdata_f142/UByte.py rename to src/streaming_data_types/fbschemas/logdata_f142/UByte.py diff --git a/streaming_data_types/fbschemas/logdata_f142/UInt.py b/src/streaming_data_types/fbschemas/logdata_f142/UInt.py similarity index 100% rename from streaming_data_types/fbschemas/logdata_f142/UInt.py rename to src/streaming_data_types/fbschemas/logdata_f142/UInt.py diff --git a/streaming_data_types/fbschemas/logdata_f142/ULong.py b/src/streaming_data_types/fbschemas/logdata_f142/ULong.py similarity index 100% rename from 
streaming_data_types/fbschemas/logdata_f142/ULong.py rename to src/streaming_data_types/fbschemas/logdata_f142/ULong.py diff --git a/streaming_data_types/fbschemas/logdata_f142/UShort.py b/src/streaming_data_types/fbschemas/logdata_f142/UShort.py similarity index 100% rename from streaming_data_types/fbschemas/logdata_f142/UShort.py rename to src/streaming_data_types/fbschemas/logdata_f142/UShort.py diff --git a/streaming_data_types/fbschemas/logdata_f142/Value.py b/src/streaming_data_types/fbschemas/logdata_f142/Value.py similarity index 100% rename from streaming_data_types/fbschemas/logdata_f142/Value.py rename to src/streaming_data_types/fbschemas/logdata_f142/Value.py diff --git a/streaming_data_types/fbschemas/logdata_f142/__init__.py b/src/streaming_data_types/fbschemas/logdata_f142/__init__.py similarity index 100% rename from streaming_data_types/fbschemas/logdata_f142/__init__.py rename to src/streaming_data_types/fbschemas/logdata_f142/__init__.py diff --git a/streaming_data_types/fbschemas/logdata_f144/ArrayByte.py b/src/streaming_data_types/fbschemas/logdata_f144/ArrayByte.py similarity index 100% rename from streaming_data_types/fbschemas/logdata_f144/ArrayByte.py rename to src/streaming_data_types/fbschemas/logdata_f144/ArrayByte.py diff --git a/streaming_data_types/fbschemas/logdata_f144/ArrayDouble.py b/src/streaming_data_types/fbschemas/logdata_f144/ArrayDouble.py similarity index 100% rename from streaming_data_types/fbschemas/logdata_f144/ArrayDouble.py rename to src/streaming_data_types/fbschemas/logdata_f144/ArrayDouble.py diff --git a/streaming_data_types/fbschemas/logdata_f144/ArrayFloat.py b/src/streaming_data_types/fbschemas/logdata_f144/ArrayFloat.py similarity index 100% rename from streaming_data_types/fbschemas/logdata_f144/ArrayFloat.py rename to src/streaming_data_types/fbschemas/logdata_f144/ArrayFloat.py diff --git a/streaming_data_types/fbschemas/logdata_f144/ArrayInt.py 
b/src/streaming_data_types/fbschemas/logdata_f144/ArrayInt.py similarity index 100% rename from streaming_data_types/fbschemas/logdata_f144/ArrayInt.py rename to src/streaming_data_types/fbschemas/logdata_f144/ArrayInt.py diff --git a/streaming_data_types/fbschemas/logdata_f144/ArrayLong.py b/src/streaming_data_types/fbschemas/logdata_f144/ArrayLong.py similarity index 100% rename from streaming_data_types/fbschemas/logdata_f144/ArrayLong.py rename to src/streaming_data_types/fbschemas/logdata_f144/ArrayLong.py diff --git a/streaming_data_types/fbschemas/logdata_f144/ArrayShort.py b/src/streaming_data_types/fbschemas/logdata_f144/ArrayShort.py similarity index 100% rename from streaming_data_types/fbschemas/logdata_f144/ArrayShort.py rename to src/streaming_data_types/fbschemas/logdata_f144/ArrayShort.py diff --git a/streaming_data_types/fbschemas/logdata_f144/ArrayUByte.py b/src/streaming_data_types/fbschemas/logdata_f144/ArrayUByte.py similarity index 100% rename from streaming_data_types/fbschemas/logdata_f144/ArrayUByte.py rename to src/streaming_data_types/fbschemas/logdata_f144/ArrayUByte.py diff --git a/streaming_data_types/fbschemas/logdata_f144/ArrayUInt.py b/src/streaming_data_types/fbschemas/logdata_f144/ArrayUInt.py similarity index 100% rename from streaming_data_types/fbschemas/logdata_f144/ArrayUInt.py rename to src/streaming_data_types/fbschemas/logdata_f144/ArrayUInt.py diff --git a/streaming_data_types/fbschemas/logdata_f144/ArrayULong.py b/src/streaming_data_types/fbschemas/logdata_f144/ArrayULong.py similarity index 100% rename from streaming_data_types/fbschemas/logdata_f144/ArrayULong.py rename to src/streaming_data_types/fbschemas/logdata_f144/ArrayULong.py diff --git a/streaming_data_types/fbschemas/logdata_f144/ArrayUShort.py b/src/streaming_data_types/fbschemas/logdata_f144/ArrayUShort.py similarity index 100% rename from streaming_data_types/fbschemas/logdata_f144/ArrayUShort.py rename to 
src/streaming_data_types/fbschemas/logdata_f144/ArrayUShort.py diff --git a/streaming_data_types/fbschemas/logdata_f144/Byte.py b/src/streaming_data_types/fbschemas/logdata_f144/Byte.py similarity index 100% rename from streaming_data_types/fbschemas/logdata_f144/Byte.py rename to src/streaming_data_types/fbschemas/logdata_f144/Byte.py diff --git a/streaming_data_types/fbschemas/logdata_f144/Double.py b/src/streaming_data_types/fbschemas/logdata_f144/Double.py similarity index 100% rename from streaming_data_types/fbschemas/logdata_f144/Double.py rename to src/streaming_data_types/fbschemas/logdata_f144/Double.py diff --git a/streaming_data_types/fbschemas/logdata_f144/Float.py b/src/streaming_data_types/fbschemas/logdata_f144/Float.py similarity index 100% rename from streaming_data_types/fbschemas/logdata_f144/Float.py rename to src/streaming_data_types/fbschemas/logdata_f144/Float.py diff --git a/streaming_data_types/fbschemas/logdata_f144/Int.py b/src/streaming_data_types/fbschemas/logdata_f144/Int.py similarity index 100% rename from streaming_data_types/fbschemas/logdata_f144/Int.py rename to src/streaming_data_types/fbschemas/logdata_f144/Int.py diff --git a/streaming_data_types/fbschemas/logdata_f144/Long.py b/src/streaming_data_types/fbschemas/logdata_f144/Long.py similarity index 100% rename from streaming_data_types/fbschemas/logdata_f144/Long.py rename to src/streaming_data_types/fbschemas/logdata_f144/Long.py diff --git a/streaming_data_types/fbschemas/logdata_f144/Short.py b/src/streaming_data_types/fbschemas/logdata_f144/Short.py similarity index 100% rename from streaming_data_types/fbschemas/logdata_f144/Short.py rename to src/streaming_data_types/fbschemas/logdata_f144/Short.py diff --git a/streaming_data_types/fbschemas/logdata_f144/UByte.py b/src/streaming_data_types/fbschemas/logdata_f144/UByte.py similarity index 100% rename from streaming_data_types/fbschemas/logdata_f144/UByte.py rename to 
src/streaming_data_types/fbschemas/logdata_f144/UByte.py diff --git a/streaming_data_types/fbschemas/logdata_f144/UInt.py b/src/streaming_data_types/fbschemas/logdata_f144/UInt.py similarity index 100% rename from streaming_data_types/fbschemas/logdata_f144/UInt.py rename to src/streaming_data_types/fbschemas/logdata_f144/UInt.py diff --git a/streaming_data_types/fbschemas/logdata_f144/ULong.py b/src/streaming_data_types/fbschemas/logdata_f144/ULong.py similarity index 100% rename from streaming_data_types/fbschemas/logdata_f144/ULong.py rename to src/streaming_data_types/fbschemas/logdata_f144/ULong.py diff --git a/streaming_data_types/fbschemas/logdata_f144/UShort.py b/src/streaming_data_types/fbschemas/logdata_f144/UShort.py similarity index 100% rename from streaming_data_types/fbschemas/logdata_f144/UShort.py rename to src/streaming_data_types/fbschemas/logdata_f144/UShort.py diff --git a/streaming_data_types/fbschemas/logdata_f144/Value.py b/src/streaming_data_types/fbschemas/logdata_f144/Value.py similarity index 100% rename from streaming_data_types/fbschemas/logdata_f144/Value.py rename to src/streaming_data_types/fbschemas/logdata_f144/Value.py diff --git a/streaming_data_types/fbschemas/logdata_f144/__init__.py b/src/streaming_data_types/fbschemas/logdata_f144/__init__.py similarity index 100% rename from streaming_data_types/fbschemas/logdata_f144/__init__.py rename to src/streaming_data_types/fbschemas/logdata_f144/__init__.py diff --git a/streaming_data_types/fbschemas/logdata_f144/f144_LogData.py b/src/streaming_data_types/fbschemas/logdata_f144/f144_LogData.py similarity index 100% rename from streaming_data_types/fbschemas/logdata_f144/f144_LogData.py rename to src/streaming_data_types/fbschemas/logdata_f144/f144_LogData.py diff --git a/streaming_data_types/fbschemas/nicos_cache_ns10/CacheEntry.py b/src/streaming_data_types/fbschemas/nicos_cache_ns10/CacheEntry.py similarity index 100% rename from 
streaming_data_types/fbschemas/nicos_cache_ns10/CacheEntry.py rename to src/streaming_data_types/fbschemas/nicos_cache_ns10/CacheEntry.py diff --git a/streaming_data_types/fbschemas/nicos_cache_ns10/__init__.py b/src/streaming_data_types/fbschemas/nicos_cache_ns10/__init__.py similarity index 100% rename from streaming_data_types/fbschemas/nicos_cache_ns10/__init__.py rename to src/streaming_data_types/fbschemas/nicos_cache_ns10/__init__.py diff --git a/streaming_data_types/fbschemas/nmx_mo01/__init__.py b/src/streaming_data_types/fbschemas/nmx_mo01/__init__.py similarity index 100% rename from streaming_data_types/fbschemas/nmx_mo01/__init__.py rename to src/streaming_data_types/fbschemas/nmx_mo01/__init__.py diff --git a/streaming_data_types/fbschemas/readout_data_ar51/RawReadoutMessage.py b/src/streaming_data_types/fbschemas/readout_data_ar51/RawReadoutMessage.py similarity index 100% rename from streaming_data_types/fbschemas/readout_data_ar51/RawReadoutMessage.py rename to src/streaming_data_types/fbschemas/readout_data_ar51/RawReadoutMessage.py diff --git a/streaming_data_types/fbschemas/readout_data_ar51/__init__.py b/src/streaming_data_types/fbschemas/readout_data_ar51/__init__.py similarity index 100% rename from streaming_data_types/fbschemas/readout_data_ar51/__init__.py rename to src/streaming_data_types/fbschemas/readout_data_ar51/__init__.py diff --git a/streaming_data_types/fbschemas/run_start_pl72/RunStart.py b/src/streaming_data_types/fbschemas/run_start_pl72/RunStart.py similarity index 100% rename from streaming_data_types/fbschemas/run_start_pl72/RunStart.py rename to src/streaming_data_types/fbschemas/run_start_pl72/RunStart.py diff --git a/streaming_data_types/fbschemas/run_start_pl72/SpectraDetectorMapping.py b/src/streaming_data_types/fbschemas/run_start_pl72/SpectraDetectorMapping.py similarity index 100% rename from streaming_data_types/fbschemas/run_start_pl72/SpectraDetectorMapping.py rename to 
src/streaming_data_types/fbschemas/run_start_pl72/SpectraDetectorMapping.py diff --git a/streaming_data_types/fbschemas/run_start_pl72/__init__.py b/src/streaming_data_types/fbschemas/run_start_pl72/__init__.py similarity index 100% rename from streaming_data_types/fbschemas/run_start_pl72/__init__.py rename to src/streaming_data_types/fbschemas/run_start_pl72/__init__.py diff --git a/streaming_data_types/fbschemas/run_stop_6s4t/RunStop.py b/src/streaming_data_types/fbschemas/run_stop_6s4t/RunStop.py similarity index 100% rename from streaming_data_types/fbschemas/run_stop_6s4t/RunStop.py rename to src/streaming_data_types/fbschemas/run_stop_6s4t/RunStop.py diff --git a/streaming_data_types/fbschemas/run_stop_6s4t/__init__.py b/src/streaming_data_types/fbschemas/run_stop_6s4t/__init__.py similarity index 100% rename from streaming_data_types/fbschemas/run_stop_6s4t/__init__.py rename to src/streaming_data_types/fbschemas/run_stop_6s4t/__init__.py diff --git a/streaming_data_types/fbschemas/sample_environment_senv/Int16Array.py b/src/streaming_data_types/fbschemas/sample_environment_senv/Int16Array.py similarity index 100% rename from streaming_data_types/fbschemas/sample_environment_senv/Int16Array.py rename to src/streaming_data_types/fbschemas/sample_environment_senv/Int16Array.py diff --git a/streaming_data_types/fbschemas/sample_environment_senv/Int32Array.py b/src/streaming_data_types/fbschemas/sample_environment_senv/Int32Array.py similarity index 100% rename from streaming_data_types/fbschemas/sample_environment_senv/Int32Array.py rename to src/streaming_data_types/fbschemas/sample_environment_senv/Int32Array.py diff --git a/streaming_data_types/fbschemas/sample_environment_senv/Int64Array.py b/src/streaming_data_types/fbschemas/sample_environment_senv/Int64Array.py similarity index 100% rename from streaming_data_types/fbschemas/sample_environment_senv/Int64Array.py rename to src/streaming_data_types/fbschemas/sample_environment_senv/Int64Array.py diff 
--git a/streaming_data_types/fbschemas/sample_environment_senv/Int8Array.py b/src/streaming_data_types/fbschemas/sample_environment_senv/Int8Array.py similarity index 100% rename from streaming_data_types/fbschemas/sample_environment_senv/Int8Array.py rename to src/streaming_data_types/fbschemas/sample_environment_senv/Int8Array.py diff --git a/streaming_data_types/fbschemas/sample_environment_senv/Location.py b/src/streaming_data_types/fbschemas/sample_environment_senv/Location.py similarity index 100% rename from streaming_data_types/fbschemas/sample_environment_senv/Location.py rename to src/streaming_data_types/fbschemas/sample_environment_senv/Location.py diff --git a/streaming_data_types/fbschemas/sample_environment_senv/SampleEnvironmentData.py b/src/streaming_data_types/fbschemas/sample_environment_senv/SampleEnvironmentData.py similarity index 100% rename from streaming_data_types/fbschemas/sample_environment_senv/SampleEnvironmentData.py rename to src/streaming_data_types/fbschemas/sample_environment_senv/SampleEnvironmentData.py diff --git a/streaming_data_types/fbschemas/sample_environment_senv/UInt16Array.py b/src/streaming_data_types/fbschemas/sample_environment_senv/UInt16Array.py similarity index 100% rename from streaming_data_types/fbschemas/sample_environment_senv/UInt16Array.py rename to src/streaming_data_types/fbschemas/sample_environment_senv/UInt16Array.py diff --git a/streaming_data_types/fbschemas/sample_environment_senv/UInt32Array.py b/src/streaming_data_types/fbschemas/sample_environment_senv/UInt32Array.py similarity index 100% rename from streaming_data_types/fbschemas/sample_environment_senv/UInt32Array.py rename to src/streaming_data_types/fbschemas/sample_environment_senv/UInt32Array.py diff --git a/streaming_data_types/fbschemas/sample_environment_senv/UInt64Array.py b/src/streaming_data_types/fbschemas/sample_environment_senv/UInt64Array.py similarity index 100% rename from 
streaming_data_types/fbschemas/sample_environment_senv/UInt64Array.py rename to src/streaming_data_types/fbschemas/sample_environment_senv/UInt64Array.py diff --git a/streaming_data_types/fbschemas/sample_environment_senv/UInt8Array.py b/src/streaming_data_types/fbschemas/sample_environment_senv/UInt8Array.py similarity index 100% rename from streaming_data_types/fbschemas/sample_environment_senv/UInt8Array.py rename to src/streaming_data_types/fbschemas/sample_environment_senv/UInt8Array.py diff --git a/streaming_data_types/fbschemas/sample_environment_senv/ValueUnion.py b/src/streaming_data_types/fbschemas/sample_environment_senv/ValueUnion.py similarity index 100% rename from streaming_data_types/fbschemas/sample_environment_senv/ValueUnion.py rename to src/streaming_data_types/fbschemas/sample_environment_senv/ValueUnion.py diff --git a/streaming_data_types/fbschemas/sample_environment_senv/__init__.py b/src/streaming_data_types/fbschemas/sample_environment_senv/__init__.py similarity index 100% rename from streaming_data_types/fbschemas/sample_environment_senv/__init__.py rename to src/streaming_data_types/fbschemas/sample_environment_senv/__init__.py diff --git a/streaming_data_types/fbschemas/status_x5f2/Status.py b/src/streaming_data_types/fbschemas/status_x5f2/Status.py similarity index 100% rename from streaming_data_types/fbschemas/status_x5f2/Status.py rename to src/streaming_data_types/fbschemas/status_x5f2/Status.py diff --git a/streaming_data_types/fbschemas/status_x5f2/__init__.py b/src/streaming_data_types/fbschemas/status_x5f2/__init__.py similarity index 100% rename from streaming_data_types/fbschemas/status_x5f2/__init__.py rename to src/streaming_data_types/fbschemas/status_x5f2/__init__.py diff --git a/streaming_data_types/fbschemas/timestamps_tdct/__init__.py b/src/streaming_data_types/fbschemas/timestamps_tdct/__init__.py similarity index 100% rename from streaming_data_types/fbschemas/timestamps_tdct/__init__.py rename to 
src/streaming_data_types/fbschemas/timestamps_tdct/__init__.py diff --git a/streaming_data_types/fbschemas/timestamps_tdct/timestamp.py b/src/streaming_data_types/fbschemas/timestamps_tdct/timestamp.py similarity index 100% rename from streaming_data_types/fbschemas/timestamps_tdct/timestamp.py rename to src/streaming_data_types/fbschemas/timestamps_tdct/timestamp.py diff --git a/streaming_data_types/fbschemas/units_un00/Units.py b/src/streaming_data_types/fbschemas/units_un00/Units.py similarity index 100% rename from streaming_data_types/fbschemas/units_un00/Units.py rename to src/streaming_data_types/fbschemas/units_un00/Units.py diff --git a/streaming_data_types/fbschemas/units_un00/__init__.py b/src/streaming_data_types/fbschemas/units_un00/__init__.py similarity index 100% rename from streaming_data_types/fbschemas/units_un00/__init__.py rename to src/streaming_data_types/fbschemas/units_un00/__init__.py diff --git a/streaming_data_types/finished_writing_wrdn.py b/src/streaming_data_types/finished_writing_wrdn.py similarity index 100% rename from streaming_data_types/finished_writing_wrdn.py rename to src/streaming_data_types/finished_writing_wrdn.py diff --git a/streaming_data_types/forwarder_config_update_fc00.py b/src/streaming_data_types/forwarder_config_update_fc00.py similarity index 100% rename from streaming_data_types/forwarder_config_update_fc00.py rename to src/streaming_data_types/forwarder_config_update_fc00.py diff --git a/streaming_data_types/forwarder_config_update_rf5k.py b/src/streaming_data_types/forwarder_config_update_rf5k.py similarity index 100% rename from streaming_data_types/forwarder_config_update_rf5k.py rename to src/streaming_data_types/forwarder_config_update_rf5k.py diff --git a/streaming_data_types/histogram_hs00.py b/src/streaming_data_types/histogram_hs00.py similarity index 100% rename from streaming_data_types/histogram_hs00.py rename to src/streaming_data_types/histogram_hs00.py diff --git 
a/streaming_data_types/histogram_hs01.py b/src/streaming_data_types/histogram_hs01.py similarity index 100% rename from streaming_data_types/histogram_hs01.py rename to src/streaming_data_types/histogram_hs01.py diff --git a/streaming_data_types/json_json.py b/src/streaming_data_types/json_json.py similarity index 100% rename from streaming_data_types/json_json.py rename to src/streaming_data_types/json_json.py diff --git a/streaming_data_types/logdata_f142.py b/src/streaming_data_types/logdata_f142.py similarity index 100% rename from streaming_data_types/logdata_f142.py rename to src/streaming_data_types/logdata_f142.py diff --git a/streaming_data_types/logdata_f144.py b/src/streaming_data_types/logdata_f144.py similarity index 100% rename from streaming_data_types/logdata_f144.py rename to src/streaming_data_types/logdata_f144.py diff --git a/streaming_data_types/nicos_cache_ns10.py b/src/streaming_data_types/nicos_cache_ns10.py similarity index 100% rename from streaming_data_types/nicos_cache_ns10.py rename to src/streaming_data_types/nicos_cache_ns10.py diff --git a/streaming_data_types/readout_data_ar51.py b/src/streaming_data_types/readout_data_ar51.py similarity index 100% rename from streaming_data_types/readout_data_ar51.py rename to src/streaming_data_types/readout_data_ar51.py diff --git a/streaming_data_types/run_start_pl72.py b/src/streaming_data_types/run_start_pl72.py similarity index 100% rename from streaming_data_types/run_start_pl72.py rename to src/streaming_data_types/run_start_pl72.py diff --git a/streaming_data_types/run_stop_6s4t.py b/src/streaming_data_types/run_stop_6s4t.py similarity index 100% rename from streaming_data_types/run_stop_6s4t.py rename to src/streaming_data_types/run_stop_6s4t.py diff --git a/streaming_data_types/sample_environment_senv.py b/src/streaming_data_types/sample_environment_senv.py similarity index 100% rename from streaming_data_types/sample_environment_senv.py rename to 
src/streaming_data_types/sample_environment_senv.py diff --git a/streaming_data_types/status_x5f2.py b/src/streaming_data_types/status_x5f2.py similarity index 100% rename from streaming_data_types/status_x5f2.py rename to src/streaming_data_types/status_x5f2.py diff --git a/streaming_data_types/timestamps_tdct.py b/src/streaming_data_types/timestamps_tdct.py similarity index 100% rename from streaming_data_types/timestamps_tdct.py rename to src/streaming_data_types/timestamps_tdct.py diff --git a/streaming_data_types/units_un00.py b/src/streaming_data_types/units_un00.py similarity index 100% rename from streaming_data_types/units_un00.py rename to src/streaming_data_types/units_un00.py diff --git a/streaming_data_types/utils.py b/src/streaming_data_types/utils.py similarity index 100% rename from streaming_data_types/utils.py rename to src/streaming_data_types/utils.py diff --git a/streaming_data_types/_version.py b/streaming_data_types/_version.py deleted file mode 100644 index 83ee1a0..0000000 --- a/streaming_data_types/_version.py +++ /dev/null @@ -1,4 +0,0 @@ -# Version is not directly defined in __init__ because that causes all -# run time dependencies to become build-time dependencies when it is -# imported in setup.py -version = "0.27.0" diff --git a/tox.ini b/tox.ini deleted file mode 100644 index 773bb75..0000000 --- a/tox.ini +++ /dev/null @@ -1,15 +0,0 @@ -[tox] -envlist = py38, py39, py310, py311, py312, flake8 -isolated_build = true -skipsdist=true - -[testenv] -deps = - -r{toxinidir}/requirements.txt - -r{toxinidir}/requirements-dev.txt -commands = - python -m pytest {posargs} -o pythonpath= - -[testenv:flake8] -commands = - python -m flake8 tests streaming_data_types From 0c2a86e4b5e60cd19b4ce8633adc3fd4751e8367 Mon Sep 17 00:00:00 2001 From: Jack Harper Date: Tue, 20 Jan 2026 16:21:05 +0000 Subject: [PATCH 340/363] use numpy 2 --- pyproject.toml | 2 +- src/streaming_data_types/logdata_f142.py | 10 +++++----- 2 files changed, 6 insertions(+), 6 
deletions(-) diff --git a/pyproject.toml b/pyproject.toml index e147999..6079d6b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -41,7 +41,7 @@ classifiers = [ dependencies = [ "flatbuffers", - "numpy<2", # Pinned to <2 due to f142 np.unicode + "numpy>2" ] [project.optional-dependencies] diff --git a/src/streaming_data_types/logdata_f142.py b/src/streaming_data_types/logdata_f142.py index 8186c63..ce35e88 100644 --- a/src/streaming_data_types/logdata_f142.py +++ b/src/streaming_data_types/logdata_f142.py @@ -490,8 +490,8 @@ def _serialise_value( ): # We can use a dictionary to map most numpy types to one of the types defined in the flatbuffer schema # but we have to handle strings separately as there are many subtypes - if np.issubdtype(value.dtype, np.unicode_) or np.issubdtype( - value.dtype, np.string_ + if np.issubdtype(value.dtype, np.str_) or np.issubdtype( + value.dtype, np.bytes_ ): string_serialiser(builder, value, source) else: @@ -501,7 +501,7 @@ def _serialise_value( # There are a few numpy types we don't try to handle, for example complex numbers raise NotImplementedError( f"Cannot serialise data of type {value.dtype}, must use one of " - f"{list(_map_scalar_type_to_serialiser.keys()) + [np.unicode_]}" + f"{list(_map_scalar_type_to_serialiser.keys()) + [np.str_]}" ) @@ -539,8 +539,8 @@ def _serialise_value( def _decode_if_scalar_string(value: np.ndarray) -> Union[str, np.ndarray]: if value.ndim == 0 and ( - np.issubdtype(value.dtype, np.unicode_) - or np.issubdtype(value.dtype, np.string_) + np.issubdtype(value.dtype, np.str_) + or np.issubdtype(value.dtype, np.bytes_) ): return value.item().decode() return value From 3ac1ecb5a1d29a7b7288e835dfa490e087b455cd Mon Sep 17 00:00:00 2001 From: Jack Harper Date: Wed, 4 Mar 2026 09:26:09 +0000 Subject: [PATCH 341/363] review comments --- .github/dependabot.yml | 10 ++++++++++ .github/workflows/Lint-and-test.yml | 2 +- .github/workflows/lint-and-test-nightly.yml | 9 +++++++++ pyproject.toml | 4 ---- 4 
files changed, 20 insertions(+), 5 deletions(-) create mode 100644 .github/dependabot.yml create mode 100644 .github/workflows/lint-and-test-nightly.yml diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 0000000..b728efb --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,10 @@ +version: 2 +updates: + - package-ecosystem: "pip" + directory: "/" + schedule: + interval: "daily" + - package-ecosystem: "github-actions" + directory: "/" + schedule: + interval: "daily" diff --git a/.github/workflows/Lint-and-test.yml b/.github/workflows/Lint-and-test.yml index 02697cc..d329f10 100644 --- a/.github/workflows/Lint-and-test.yml +++ b/.github/workflows/Lint-and-test.yml @@ -28,7 +28,7 @@ jobs: - name: Install uv and set the python version uses: astral-sh/setup-uv@v7 with: - python-version: ${{ matrix.runs-on }} + python-version: ${{ matrix.version }} - name: Install dependencies run: uv sync --all-extras --dev - name: Test with pytest diff --git a/.github/workflows/lint-and-test-nightly.yml b/.github/workflows/lint-and-test-nightly.yml new file mode 100644 index 0000000..5119ad4 --- /dev/null +++ b/.github/workflows/lint-and-test-nightly.yml @@ -0,0 +1,9 @@ +name: lint-and-test-nightly +on: + schedule: + - cron: "0 0 * * *" + workflow_dispatch: + +jobs: + lint-and-test-nightly: + uses: ./.github/workflows/Lint-and-test.yml diff --git a/pyproject.toml b/pyproject.toml index 6079d6b..4cb4fb0 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -61,10 +61,6 @@ dev = [ testpaths = "tests" asyncio_mode = "auto" addopts = "--cov --cov-report=html -vv" -filterwarnings = [ - 'ignore:FigureCanvasAgg is non-interactive, and thus cannot be shown:UserWarning', - 'error:Using UFloat objects with std_dev==0 may give unexpected results.:UserWarning', -] [tool.coverage.run] branch = true From 19e3dbd468ddb783b17fef39e36a00bfda26d7da Mon Sep 17 00:00:00 2001 From: Tom Willemsen Date: Wed, 4 Mar 2026 10:32:04 +0000 Subject: [PATCH 342/363] tidy --- 
schemas/pu00_pulse_metadata.fbs | 16 ++-------------- 1 file changed, 2 insertions(+), 14 deletions(-) diff --git a/schemas/pu00_pulse_metadata.fbs b/schemas/pu00_pulse_metadata.fbs index fb90a88..84ec36d 100644 --- a/schemas/pu00_pulse_metadata.fbs +++ b/schemas/pu00_pulse_metadata.fbs @@ -8,20 +8,8 @@ table Pu00Message { // If pulse times are available in the aquisition system, this field holds // those timestamps. Holds wall time otherwise. -// old is84 bits - period_number : uint; - proton_charge : float; - - -// however more general alternatie woudl be - name : [string] - value : [some union] - -// or - - name : [ soem agrreed enum list] - value : [ some union ] - + period_number : uint; // Period number into which this pulse was collected + proton_charge : float; // Proton charge for this frame } root_type Pu00Message; From cfc8d8d8846ad7ad64ac67d80d3f187dfaa0ff3b Mon Sep 17 00:00:00 2001 From: Tom Willemsen Date: Wed, 4 Mar 2026 13:00:57 +0000 Subject: [PATCH 343/363] Reference time not an array --- schemas/pu00_pulse_metadata.fbs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/schemas/pu00_pulse_metadata.fbs b/schemas/pu00_pulse_metadata.fbs index 84ec36d..f63fdc5 100644 --- a/schemas/pu00_pulse_metadata.fbs +++ b/schemas/pu00_pulse_metadata.fbs @@ -4,7 +4,7 @@ file_identifier "pu00"; table Pu00Message { message_id : long; // Consecutive numbers, to detect missing or unordered messages. - reference_time : [long] (required); // Nanoseconds since Unix epoch (1 Jan 1970) + reference_time : long (required); // Nanoseconds since Unix epoch (1 Jan 1970) // If pulse times are available in the aquisition system, this field holds // those timestamps. Holds wall time otherwise. 
From 911678a1f66ebd5bc1303ba5b5515fd87ef2d490 Mon Sep 17 00:00:00 2001 From: Tom Willemsen Date: Wed, 4 Mar 2026 13:26:25 +0000 Subject: [PATCH 344/363] Add units --- schemas/pu00_pulse_metadata.fbs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/schemas/pu00_pulse_metadata.fbs b/schemas/pu00_pulse_metadata.fbs index f63fdc5..391e6e8 100644 --- a/schemas/pu00_pulse_metadata.fbs +++ b/schemas/pu00_pulse_metadata.fbs @@ -9,7 +9,7 @@ table Pu00Message { // those timestamps. Holds wall time otherwise. period_number : uint; // Period number into which this pulse was collected - proton_charge : float; // Proton charge for this frame + proton_charge : float; // Proton charge for this frame (uAh per frame) } root_type Pu00Message; From 90603ba4df23d580b23e287e5d9f7ebb0e05842b Mon Sep 17 00:00:00 2001 From: Tom Willemsen Date: Wed, 4 Mar 2026 13:36:05 +0000 Subject: [PATCH 345/363] vetos --- schemas/pu00_pulse_metadata.fbs | 1 + 1 file changed, 1 insertion(+) diff --git a/schemas/pu00_pulse_metadata.fbs b/schemas/pu00_pulse_metadata.fbs index 391e6e8..2760fdc 100644 --- a/schemas/pu00_pulse_metadata.fbs +++ b/schemas/pu00_pulse_metadata.fbs @@ -8,6 +8,7 @@ table Pu00Message { // If pulse times are available in the aquisition system, this field holds // those timestamps. Holds wall time otherwise. 
+ vetos : uint; // Vetos for this frame period_number : uint; // Period number into which this pulse was collected proton_charge : float; // Proton charge for this frame (uAh per frame) } From 667b2c8925c89a86e894881a180fbd61e000ee04 Mon Sep 17 00:00:00 2001 From: Tom Willemsen Date: Wed, 4 Mar 2026 14:02:52 +0000 Subject: [PATCH 346/363] Add source name --- schemas/pu00_pulse_metadata.fbs | 1 + 1 file changed, 1 insertion(+) diff --git a/schemas/pu00_pulse_metadata.fbs b/schemas/pu00_pulse_metadata.fbs index 2760fdc..69241b8 100644 --- a/schemas/pu00_pulse_metadata.fbs +++ b/schemas/pu00_pulse_metadata.fbs @@ -3,6 +3,7 @@ file_identifier "pu00"; table Pu00Message { + source_name : string (required); // Field identifying the producer type, for example detector type message_id : long; // Consecutive numbers, to detect missing or unordered messages. reference_time : long (required); // Nanoseconds since Unix epoch (1 Jan 1970) // If pulse times are available in the aquisition system, this field holds From 8f5c35855e9ae05145a53bfd3f267574deec2695 Mon Sep 17 00:00:00 2001 From: Tom Willemsen Date: Wed, 4 Mar 2026 14:11:22 +0000 Subject: [PATCH 347/363] doc --- schemas/pu00_pulse_metadata.fbs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/schemas/pu00_pulse_metadata.fbs b/schemas/pu00_pulse_metadata.fbs index 69241b8..ce26e27 100644 --- a/schemas/pu00_pulse_metadata.fbs +++ b/schemas/pu00_pulse_metadata.fbs @@ -9,7 +9,7 @@ table Pu00Message { // If pulse times are available in the aquisition system, this field holds // those timestamps. Holds wall time otherwise. 
- vetos : uint; // Vetos for this frame + vetos : uint; // Veto bitmask for this frame period_number : uint; // Period number into which this pulse was collected proton_charge : float; // Proton charge for this frame (uAh per frame) } From 7ea0bbbf05f232017076253e6997e7b3f244d3d0 Mon Sep 17 00:00:00 2001 From: Tom Willemsen Date: Mon, 9 Mar 2026 14:10:26 +0000 Subject: [PATCH 348/363] Update reference_time field Removed 'required' constraint from reference_time field. --- schemas/pu00_pulse_metadata.fbs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/schemas/pu00_pulse_metadata.fbs b/schemas/pu00_pulse_metadata.fbs index ce26e27..28ce258 100644 --- a/schemas/pu00_pulse_metadata.fbs +++ b/schemas/pu00_pulse_metadata.fbs @@ -5,7 +5,7 @@ file_identifier "pu00"; table Pu00Message { source_name : string (required); // Field identifying the producer type, for example detector type message_id : long; // Consecutive numbers, to detect missing or unordered messages. - reference_time : long (required); // Nanoseconds since Unix epoch (1 Jan 1970) + reference_time : long; // Nanoseconds since Unix epoch (1 Jan 1970) // If pulse times are available in the aquisition system, this field holds // those timestamps. Holds wall time otherwise. From 92c7add4b1a395dc8d0f64d1306026b869b29a9a Mon Sep 17 00:00:00 2001 From: Tom Willemsen Date: Wed, 11 Mar 2026 18:52:36 +0000 Subject: [PATCH 349/363] Make vetos/periods/ppp optional --- schemas/pu00_pulse_metadata.fbs | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/schemas/pu00_pulse_metadata.fbs b/schemas/pu00_pulse_metadata.fbs index ce26e27..2d975f0 100644 --- a/schemas/pu00_pulse_metadata.fbs +++ b/schemas/pu00_pulse_metadata.fbs @@ -5,13 +5,13 @@ file_identifier "pu00"; table Pu00Message { source_name : string (required); // Field identifying the producer type, for example detector type message_id : long; // Consecutive numbers, to detect missing or unordered messages. 
- reference_time : long (required); // Nanoseconds since Unix epoch (1 Jan 1970) + reference_time : long; // Nanoseconds since Unix epoch (1 Jan 1970) // If pulse times are available in the aquisition system, this field holds // those timestamps. Holds wall time otherwise. - vetos : uint; // Veto bitmask for this frame - period_number : uint; // Period number into which this pulse was collected - proton_charge : float; // Proton charge for this frame (uAh per frame) + vetos : uint = null; // Veto bitmask for this frame, if present + period_number : uint = null; // Period number into which this pulse was collected, if present + proton_charge : float = null; // Proton charge for this frame (uAh per frame), if present } root_type Pu00Message; From 9f8bfe5bd5f4decc2b12f4f3e7dfdb9d6aeb505a Mon Sep 17 00:00:00 2001 From: Tom Willemsen Date: Wed, 18 Mar 2026 09:01:12 +0000 Subject: [PATCH 350/363] delete c++ build tooling --- .gitignore | 1 + CMakeLists.txt | 37 ------------------------------------- conanfile.txt | 8 -------- 3 files changed, 1 insertion(+), 45 deletions(-) delete mode 100644 CMakeLists.txt delete mode 100644 conanfile.txt diff --git a/.gitignore b/.gitignore index 9e6822f..a0dc4ef 100644 --- a/.gitignore +++ b/.gitignore @@ -1,2 +1,3 @@ #directories build/ +.idea diff --git a/CMakeLists.txt b/CMakeLists.txt deleted file mode 100644 index 70b6e15..0000000 --- a/CMakeLists.txt +++ /dev/null @@ -1,37 +0,0 @@ -# Just a cmake example of generating the flatbuffer headers. -# Personally, I like to generate these headers from the projects -# which use them. - -# Tries to locate flatc with find_program. 
- -cmake_minimum_required(VERSION 2.8.11) -project(streaming-data-types) - -if(EXISTS "${CMAKE_BINARY_DIR}/conanbuildinfo.cmake") - include("${CMAKE_BINARY_DIR}/conanbuildinfo.cmake") - conan_basic_setup(SKIP_RPATH NO_OUTPUT_DIRS) -endif() - -find_program(FLATC flatc) -message("** Found flatc: ${FLATC}") - -set(flatbuffers_generated_headers "") -set(schemas_subdir "schemas") -file(MAKE_DIRECTORY "${CMAKE_CURRENT_BINARY_DIR}/${schemas_subdir}") -file(GLOB_RECURSE flatbuffers_schemata RELATIVE "${CMAKE_CURRENT_SOURCE_DIR}/schemas" "schemas/*.fbs") - -foreach (f0 ${flatbuffers_schemata}) - string(REGEX REPLACE "\\.fbs$" "" s0 ${f0}) - set(fbs "${schemas_subdir}/${s0}.fbs") - set(fbh "${schemas_subdir}/${s0}_generated.h") - add_custom_command( - OUTPUT "${fbh}" - COMMAND ${FLATC} --cpp --gen-mutable --gen-name-strings --scoped-enums "${CMAKE_CURRENT_SOURCE_DIR}/${fbs}" - DEPENDS "${fbs}" - WORKING_DIRECTORY "${CMAKE_CURRENT_BINARY_DIR}/${schemas_subdir}" - COMMENT "Process ${fbs} using ${FLATC}" - ) - list(APPEND flatbuffers_generated_headers "${CMAKE_CURRENT_BINARY_DIR}/${fbh}") -endforeach() - -add_custom_target(flatbuffers_generate ALL DEPENDS ${flatbuffers_generated_headers}) diff --git a/conanfile.txt b/conanfile.txt deleted file mode 100644 index 243fd63..0000000 --- a/conanfile.txt +++ /dev/null @@ -1,8 +0,0 @@ -[requires] -flatbuffers/1.12.0 - -[build_requires] -flatc/1.12.0 - -[generators] -virtualrunenv From 2ebf59d1373bb498f8f29c3ce0f6baa2d0dc3e04 Mon Sep 17 00:00:00 2001 From: Tom Willemsen Date: Wed, 18 Mar 2026 15:18:54 +0000 Subject: [PATCH 351/363] Delete schemas not used at ISIS and add rust bindings --- .gitignore | 1 + README.md | 1 - rust/Cargo.lock | 64 ++++++++++ rust/Cargo.toml | 11 ++ rust/build.rs | 59 +++++++++ rust/src/lib.rs | 121 ++++++++++++++++++ schemas/ADAr_area_detector_array.fbs | 26 ---- schemas/NDAr_NDArray_schema.fbs | 45 ------- schemas/amo0_psi_sinq.fbs | 11 -- schemas/an44_events.fbs | 23 ---- schemas/ar51_readout_data.fbs | 12 
-- schemas/ba57_run_info.fbs | 23 ---- schemas/dtdb_adc_pulse_debug.fbs | 14 --- schemas/ep00_epics_connection_info.fbs | 23 ---- schemas/ev42_events.fbs | 22 ---- schemas/ev43_events.fbs | 18 --- schemas/f140_general.fbs | 164 ------------------------- schemas/f141_epics_nt.fbs | 84 ------------- schemas/f142_logdata.fbs | 98 --------------- schemas/f143_structure.fbs | 77 ------------ schemas/fwdi_forwarder_internal.fbs | 15 --- schemas/hs00_event_histogram.fbs | 49 -------- schemas/is84_isis_events.fbs | 11 -- schemas/mo01_nmx.fbs | 57 --------- schemas/ns10_cache_entry.fbs | 15 --- schemas/ns11_typed_cache_entry.fbs | 48 -------- schemas/rf5k_forwarder_config.fbs | 40 ------ schemas/senv_data.fbs | 42 ------- schemas/tdct_timestamps.fbs | 9 -- 29 files changed, 256 insertions(+), 927 deletions(-) create mode 100644 rust/Cargo.lock create mode 100644 rust/Cargo.toml create mode 100644 rust/build.rs create mode 100644 rust/src/lib.rs delete mode 100644 schemas/ADAr_area_detector_array.fbs delete mode 100644 schemas/NDAr_NDArray_schema.fbs delete mode 100644 schemas/amo0_psi_sinq.fbs delete mode 100644 schemas/an44_events.fbs delete mode 100644 schemas/ar51_readout_data.fbs delete mode 100644 schemas/ba57_run_info.fbs delete mode 100644 schemas/dtdb_adc_pulse_debug.fbs delete mode 100644 schemas/ep00_epics_connection_info.fbs delete mode 100644 schemas/ev42_events.fbs delete mode 100644 schemas/ev43_events.fbs delete mode 100644 schemas/f140_general.fbs delete mode 100644 schemas/f141_epics_nt.fbs delete mode 100644 schemas/f142_logdata.fbs delete mode 100644 schemas/f143_structure.fbs delete mode 100644 schemas/fwdi_forwarder_internal.fbs delete mode 100644 schemas/hs00_event_histogram.fbs delete mode 100644 schemas/is84_isis_events.fbs delete mode 100644 schemas/mo01_nmx.fbs delete mode 100644 schemas/ns10_cache_entry.fbs delete mode 100644 schemas/ns11_typed_cache_entry.fbs delete mode 100644 schemas/rf5k_forwarder_config.fbs delete mode 100644 
schemas/senv_data.fbs delete mode 100644 schemas/tdct_timestamps.fbs diff --git a/.gitignore b/.gitignore index d836711..80feb90 100644 --- a/.gitignore +++ b/.gitignore @@ -137,3 +137,4 @@ dmypy.json .vscode python/src/streaming_data_types/_version.py +rust/src/flatbuffers_generated \ No newline at end of file diff --git a/README.md b/README.md index 3a24ff1..77ed465 100644 --- a/README.md +++ b/README.md @@ -99,5 +99,4 @@ and work with the flat buffers union data type in your root element. ## Useful information: -- [Have CMake download and compile schema](documentation/cmakeCompileSchema.md) - [Time formats we use and how to convert between them](documentation/timestamps.md) diff --git a/rust/Cargo.lock b/rust/Cargo.lock new file mode 100644 index 0000000..726725e --- /dev/null +++ b/rust/Cargo.lock @@ -0,0 +1,64 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. +version = 4 + +[[package]] +name = "anyhow" +version = "1.0.102" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f202df86484c868dbad7eaa557ef785d5c66295e41b460ef922eca0723b842c" + +[[package]] +name = "bitflags" +version = "2.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "843867be96c8daad0d758b57df9392b6d8d271134fce549de6ce169ff98a92af" + +[[package]] +name = "flatbuffers" +version = "25.12.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "35f6839d7b3b98adde531effaf34f0c2badc6f4735d26fe74709d8e513a96ef3" +dependencies = [ + "bitflags", + "rustc_version", +] + +[[package]] +name = "flatc-rust" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "57e61227926ef5b237af48bee74394cc4a5a221ebd10c5147a98e612f207851d" +dependencies = [ + "log", +] + +[[package]] +name = "isis_streaming_data_types" +version = "0.1.0" +dependencies = [ + "anyhow", + "flatbuffers", + "flatc-rust", +] + +[[package]] +name = "log" +version = 
"0.4.29" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5e5032e24019045c762d3c0f28f5b6b8bbf38563a65908389bf7978758920897" + +[[package]] +name = "rustc_version" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cfcb3a22ef46e85b45de6ee7e79d063319ebb6594faafcf1c225ea92ab6e9b92" +dependencies = [ + "semver", +] + +[[package]] +name = "semver" +version = "1.0.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d767eb0aabc880b29956c35734170f26ed551a859dbd361d140cdbeca61ab1e2" diff --git a/rust/Cargo.toml b/rust/Cargo.toml new file mode 100644 index 0000000..918d3b7 --- /dev/null +++ b/rust/Cargo.toml @@ -0,0 +1,11 @@ +[package] +name = "isis_streaming_data_types" +version = "0.1.0" +edition = "2024" + +[build-dependencies] +flatc-rust = "0.2.0" + +[dependencies] +flatbuffers = "*" +anyhow = "*" \ No newline at end of file diff --git a/rust/build.rs b/rust/build.rs new file mode 100644 index 0000000..d5bdf0b --- /dev/null +++ b/rust/build.rs @@ -0,0 +1,59 @@ +use std::fs; +use std::fs::{File, OpenOptions, create_dir_all}; +use std::io::Write; +use std::path::Path; + +fn mod_name_from_stem(name: &str) -> String { + format!("{}_{}", name.get(5..).unwrap(), name.get(0..4).unwrap()).to_owned() +} + +fn main() { + create_dir_all("src/flatbuffers_generated") + .expect("Unable to create src/flatbuffers_generated"); + File::create("src/flatbuffers_generated/mod.rs") + .expect("Failed to create src/flatbuffers_generated/mod.rs"); + + let mut mod_file = OpenOptions::new() + .append(true) + .open("src/flatbuffers_generated/mod.rs") + .expect("Could not open src/flatbuffers_generated/mod.rs"); + + fs::read_dir("../schemas") + .expect("Could not read schemas directory") + .filter_map(|e| e.ok()) + .for_each(|entry| { + println!( + "cargo:rerun-if-changed={}", + entry.path().as_path().to_str().unwrap() + ); + flatc_rust::run(flatc_rust::Args { + inputs: 
&[entry.path().as_path()], + out_dir: Path::new("src/flatbuffers_generated/"), + extra: &[ + "--include-prefix", + "flatbuffers_generated", + "--filename-suffix", + "", + "--gen-all", + ], + ..Default::default() + }) + .expect("cannot find flatc compiler"); + + let path = entry.path(); + let stem = path + .file_stem() + .expect("Can't get file stem") + .to_str() + .expect("Can't convert file stem to str"); + let rust_name = mod_name_from_stem(stem); + + writeln!( + mod_file, + "#[path = \"{stem}.rs\"] +pub mod {rust_name}; +" + ) + .expect("Could not write to src/flatbuffers_generated/mod.rs"); + }) +} diff --git a/rust/src/lib.rs b/rust/src/lib.rs new file mode 100644 index 0000000..4d555b9 --- /dev/null +++ b/rust/src/lib.rs @@ -0,0 +1,121 @@ +use crate::flatbuffers_generated::action_response_answ::{ActionResponse, root_as_action_response}; +use crate::flatbuffers_generated::alarm_al00::{Alarm, root_as_alarm}; +use crate::flatbuffers_generated::area_detector_array_ad00::{ad00_ADArray, root_as_ad_00_adarray}; +use crate::flatbuffers_generated::data_se00::{ + root_as_se_00_sample_environment_data, se00_SampleEnvironmentData, +}; +use crate::flatbuffers_generated::dataarray_da00::{da00_DataArray, root_as_da_00_data_array}; +use crate::flatbuffers_generated::det_spec_map_df12::{ + SpectraDetectorMapping, root_as_spectra_detector_mapping, +}; +use crate::flatbuffers_generated::epics_connection_ep01::{ + EpicsPVConnectionInfo, root_as_epics_pvconnection_info, +}; +use crate::flatbuffers_generated::event_histogram_hs01::{EventHistogram, root_as_event_histogram}; +use crate::flatbuffers_generated::events_ev44::{Event44Message, root_as_event_44_message}; +use crate::flatbuffers_generated::finished_writing_wrdn::{ + FinishedWriting, root_as_finished_writing, +}; +use crate::flatbuffers_generated::forwarder_config_fc00::{ + fc00_ConfigUpdate, root_as_fc_00_config_update, +}; +use crate::flatbuffers_generated::json_json::{JsonData, root_as_json_data}; +use 
crate::flatbuffers_generated::logdata_f144::{f144_LogData, root_as_f_144_log_data}; +use crate::flatbuffers_generated::run_start_pl72::{RunStart, root_as_run_start}; +use crate::flatbuffers_generated::run_stop_6s4t::{RunStop, root_as_run_stop}; +use crate::flatbuffers_generated::status_x5f2::{Status, root_as_status}; +use flatbuffers::InvalidFlatbuffer; + +#[allow(clippy::all)] +#[rustfmt::skip] +#[allow(dead_code, unused, non_snake_case, non_camel_case_types, non_upper_case_globals)] +pub mod flatbuffers_generated; + +/// Enum containing all possible messages currently supported by +/// `deserialize_message`. +pub enum DeserializedMessage<'a> { + EventDataEv44(Event44Message<'a>), + AreaDetectorAd00(ad00_ADArray<'a>), + RunStartPl72(RunStart<'a>), + RunStop6s4t(RunStop<'a>), + LogDataF144(f144_LogData<'a>), + DetSpecMapDf12(SpectraDetectorMapping<'a>), + SenvSe00(se00_SampleEnvironmentData<'a>), + HistogramHs01(EventHistogram<'a>), + EpicsConnectionEp01(EpicsPVConnectionInfo<'a>), + JsonDataJson(JsonData<'a>), + ActionResponseAnsw(ActionResponse<'a>), + FinishedWritingWrdn(FinishedWriting<'a>), + StatusX5f2(Status<'a>), + ForwarderConfigFc00(fc00_ConfigUpdate<'a>), + AlarmAl00(Alarm<'a>), + DataArrayDa00(da00_DataArray<'a>), +} + +/// Error raised from `deserialize_message` describing why a message +/// cannot be deserialized +pub enum DeserializationError { + UnsupportedSchema(String), + InvalidFlatbuffer(InvalidFlatbuffer), +} + +impl From for DeserializationError { + fn from(value: InvalidFlatbuffer) -> Self { + DeserializationError::InvalidFlatbuffer(value) + } +} + +/// Get the schema ID from a message. +pub fn get_schema_id(data: &[u8]) -> Option<&[u8]> { + data.get(4..8) +} + +/// Deserialize an arbitrary message from Kafka. +/// +/// Returns `Ok(DeserializedMessage)` if the message type is understood by +/// this function and the message deserialized correctly, or `Err` otherwise. 
+pub fn deserialize_message(data: &[u8]) -> Result, DeserializationError> { + match get_schema_id(data) { + Some(b"ev44") => Ok(DeserializedMessage::EventDataEv44( + root_as_event_44_message(data)?, + )), + Some(b"ad00") => Ok(DeserializedMessage::AreaDetectorAd00( + root_as_ad_00_adarray(data)?, + )), + Some(b"pl72") => Ok(DeserializedMessage::RunStartPl72(root_as_run_start(data)?)), + Some(b"6s4t") => Ok(DeserializedMessage::RunStop6s4t(root_as_run_stop(data)?)), + Some(b"f144") => Ok(DeserializedMessage::LogDataF144(root_as_f_144_log_data( + data, + )?)), + Some(b"df12") => Ok(DeserializedMessage::DetSpecMapDf12( + root_as_spectra_detector_mapping(data)?, + )), + Some(b"se00") => Ok(DeserializedMessage::SenvSe00( + root_as_se_00_sample_environment_data(data)?, + )), + Some(b"hs01") => Ok(DeserializedMessage::HistogramHs01(root_as_event_histogram( + data, + )?)), + Some(b"ep01") => Ok(DeserializedMessage::EpicsConnectionEp01( + root_as_epics_pvconnection_info(data)?, + )), + Some(b"json") => Ok(DeserializedMessage::JsonDataJson(root_as_json_data(data)?)), + Some(b"answ") => Ok(DeserializedMessage::ActionResponseAnsw( + root_as_action_response(data)?, + )), + Some(b"wrdn") => Ok(DeserializedMessage::FinishedWritingWrdn( + root_as_finished_writing(data)?, + )), + Some(b"x5f2") => Ok(DeserializedMessage::StatusX5f2(root_as_status(data)?)), + Some(b"fc00") => Ok(DeserializedMessage::ForwarderConfigFc00( + root_as_fc_00_config_update(data)?, + )), + Some(b"al00") => Ok(DeserializedMessage::AlarmAl00(root_as_alarm(data)?)), + Some(b"da00") => Ok(DeserializedMessage::DataArrayDa00( + root_as_da_00_data_array(data)?, + )), + _ => Err(DeserializationError::UnsupportedSchema( + "Unknown message type passed to deserialize".to_owned(), + )), + } +} diff --git a/schemas/ADAr_area_detector_array.fbs b/schemas/ADAr_area_detector_array.fbs deleted file mode 100644 index e9ec1cc..0000000 --- a/schemas/ADAr_area_detector_array.fbs +++ /dev/null @@ -1,26 +0,0 @@ - -// A flatbuffer 
schema for holding EPICS area detector updates - -file_identifier "ADAr"; - -enum DType:byte { int8, uint8, int16, uint16, int32, uint32, int64, uint64, float32, float64, c_string } - -table Attribute { - name: string (required); // Name of attribute - description: string; // Description of attribute - source: string; // EPICS PV name or DRV_INFO string of attribute - data_type: DType; // The type of the data (value) in this attribute - data: [ubyte] (required); // The data/value of the attribute -} - -table ADArray { - source_name: string (required); // Source name of array - id: int; // Unique id to this particular NDArray - timestamp: ulong; // Timestamp in nanoseconds since UNIX epoch - dimensions: [ulong] (required); // Dimensions of the array - data_type: DType; // The type of the data stored in the array - data: [ubyte] (required); // Elements in the array - attributes: [Attribute]; // Extra metadata about the array -} - -root_type ADArray; diff --git a/schemas/NDAr_NDArray_schema.fbs b/schemas/NDAr_NDArray_schema.fbs deleted file mode 100644 index 8f6e078..0000000 --- a/schemas/NDAr_NDArray_schema.fbs +++ /dev/null @@ -1,45 +0,0 @@ - -// NOTE: THIS SCHEMA HAS BEEN DEPRECATED AND WILL BE REMOVED SOON - -namespace FB_Tables; - -file_identifier "NDAr"; - -enum DType : byte { Int8, Uint8, Int16, Uint16, Int32, Uint32, Int64, Uint64, Float32, Float64, c_string } - -struct epicsTimeStamp { - secPastEpoch : int; - nsec : int; -} - -table NDAttribute { -pName: - string; -pDescription: - string; -pSource: - string; -dataType: - DType; -pData: - [ubyte]; -} - -table NDArray { -id: - int; -timeStamp: - double; -epicsTS: - epicsTimeStamp; -dims: - [ulong]; -dataType: - DType; -pData: - [ubyte]; -pAttributeList: - [NDAttribute]; -} - -root_type NDArray; diff --git a/schemas/amo0_psi_sinq.fbs b/schemas/amo0_psi_sinq.fbs deleted file mode 100644 index f0e59e8..0000000 --- a/schemas/amo0_psi_sinq.fbs +++ /dev/null @@ -1,11 +0,0 @@ - -file_identifier "amo0"; - -table 
EventMessage { - source_name : string; // used to be htype - message_id : ulong; // pid - pulse_time : ulong; // ts - time_of_flight : [uint]; // timestamp - detector_id : [uint]; // data -} -root_type EventMessage; diff --git a/schemas/an44_events.fbs b/schemas/an44_events.fbs deleted file mode 100644 index 00a792f..0000000 --- a/schemas/an44_events.fbs +++ /dev/null @@ -1,23 +0,0 @@ -// Schema for ansto variant of neutron detection event data - -file_identifier "an44"; - -table an44_EventMessage { - source_name : string (required); // Field identifying the producer type, for example detector type - message_id : long; // Consecutive numbers, to detect missing or unordered messages - reference_time : [long] (required); // Nanoseconds since Unix epoch (1 Jan 1970) - // If pulse times are available in the acquisition system, this field holds - // those timestamps. Holds wall time otherwise. - reference_time_index : [int] (required); // Index into the time_of_flight array for the start of the neutron events linked - // to the corresponding pulse/reference time. - // reference_time_index and reference_time are the same length. - time_of_flight : [int]; // Nanoseconds - // Time of flight for each event if pulse time is available. If not, a - // (positive) offset from the wall time stored in the `reference_time`. - // Cannot be empty if events are being sent. - pixel_id : [int]; // Identifiers that represent the positions of the events in the detector(s). - // Can be empty even when events are sent if the pixel_id is implicit (e.g. single-pixel beam monitor). 
- weight : [short]; // Records a weight for the matching neutron event if present, otherwise is empty -} - -root_type an44_EventMessage; diff --git a/schemas/ar51_readout_data.fbs b/schemas/ar51_readout_data.fbs deleted file mode 100644 index c565120..0000000 --- a/schemas/ar51_readout_data.fbs +++ /dev/null @@ -1,12 +0,0 @@ -// Schema for arbitrary binary buffer data -// Developed for streaming raw ESS Readout Payload - -file_identifier "ar51"; - -table RawReadoutMessage { - source_name : string (required); // Field identifying the producer type, for example detector type - message_id : long; // Consecutive numbers, to detect missing or unordered messages - raw_data : [ubyte]; // UDP payload buffer, unsigned bytes -} - -root_type RawReadoutMessage; diff --git a/schemas/ba57_run_info.fbs b/schemas/ba57_run_info.fbs deleted file mode 100644 index 1b1df8e..0000000 --- a/schemas/ba57_run_info.fbs +++ /dev/null @@ -1,23 +0,0 @@ -// Run start/stop information for Mantid - -file_identifier "ba57"; - -table RunStart { - start_time : ulong; // nanoseconds since Unix epoch (1 Jan 1970) - run_number : int; // ID for the run - instrument_name : string; // Name of the instrument - n_periods : int; // Number of periods (ISIS only) -} - -table RunStop { - stop_time : ulong; // nanoseconds since Unix epoch (1 Jan 1970) - run_number : int; // ID for the run -} - -union InfoTypes { RunStart, RunStop } - -table RunInfo { - info_type : InfoTypes; -} - -root_type RunInfo; diff --git a/schemas/dtdb_adc_pulse_debug.fbs b/schemas/dtdb_adc_pulse_debug.fbs deleted file mode 100644 index a591243..0000000 --- a/schemas/dtdb_adc_pulse_debug.fbs +++ /dev/null @@ -1,14 +0,0 @@ -// Schema for transmitting additonal (debug) event information. -// All fields are optional. - -file_identifier "dtdb"; - -table AdcPulseDebug { - amplitude : [uint32]; // Amplitude of the pulse above bkg. - peak_area : [uint32]; // Area under the curve of the pulse. 
- background : [uint32]; // Background level of pulses. - threshold_time : [uint64]; // Timestamp in (ns) UNIX epoch when the pulse - // passed the threshold on the rising edge - peak_time : [uint64]; // Timestamp in (ns) UNIX epoch when the pulse - // reached its peak value -} diff --git a/schemas/ep00_epics_connection_info.fbs b/schemas/ep00_epics_connection_info.fbs deleted file mode 100644 index 188b11f..0000000 --- a/schemas/ep00_epics_connection_info.fbs +++ /dev/null @@ -1,23 +0,0 @@ -// Represent events about the underlying EPICS connection. - -file_identifier "ep00"; - -enum EventType: ushort { - UNKNOWN, - NEVER_CONNECTED, - CONNECTED, - DISCONNECTED, - DESTROYED, -} - -table EpicsConnectionInfo { - // Nanoseconds since UNIX epoch - timestamp: ulong; - type: EventType; - // The channel name, called `source_name` to stay in sync with `f142` - source_name: string; - // Identifies the client which has observed the event - service_id: string; -} - -root_type EpicsConnectionInfo; diff --git a/schemas/ev42_events.fbs b/schemas/ev42_events.fbs deleted file mode 100644 index e18e21f..0000000 --- a/schemas/ev42_events.fbs +++ /dev/null @@ -1,22 +0,0 @@ -// Schema for neutron detection event data - -include "is84_isis_events.fbs"; -include "dtdb_adc_pulse_debug.fbs"; - -file_identifier "ev42"; - -union FacilityData { ISISData, AdcPulseDebug } - -table EventMessage { - source_name : string; // optional field identifying the producer type, for example detector type - message_id : ulong; // consecutive numbers, to detect missing or unordered messages - pulse_time : ulong; // Nanoseconds since Unix epoch (1 Jan 1970) - // If a pulse time is available in the aquisition system, this field holds - // that timestamp. Holds wall time otherwise. - time_of_flight : [uint]; // Nanoseconds - // Time of flight for each event if pulse time is available. If not, a - // (positive) offset from the wall time stored in the `pulse_time`. 
- detector_id : [uint]; // Identifiers that represent the positions of the events in the detector(s). - facility_specific_data : FacilityData; // optional field -} -root_type EventMessage; diff --git a/schemas/ev43_events.fbs b/schemas/ev43_events.fbs deleted file mode 100644 index 6caa531..0000000 --- a/schemas/ev43_events.fbs +++ /dev/null @@ -1,18 +0,0 @@ -// Schema for neutron detection event data with multiple pulse events - -file_identifier "ev43"; - -table Event43Message { - source_name : string; // optional field identifying the producer type, for example detector type - message_id : ulong; // consecutive numbers, to detect missing or unordered messages - pulse_time : [ulong]; // Nanoseconds since Unix epoch (1 Jan 1970) - // If pulse times are available in the aquisition system, this field holds - // those timestamps. Holds wall time otherwise. - pulse_index : [uint]; // Index into the array for the start of the neutron events linked to the - // corresponding pulse time. Pulse index and pulse time are the same length. - time_of_flight : [uint]; // Nanoseconds - // Time of flight for each event if pulse time is available. If not, a - // (positive) offset from the wall time stored in the `pulse_time`. - detector_id : [uint]; // Identifiers that represent the positions of the events in the detector(s). -} -root_type Event43Message; diff --git a/schemas/f140_general.fbs b/schemas/f140_general.fbs deleted file mode 100644 index fb82515..0000000 --- a/schemas/f140_general.fbs +++ /dev/null @@ -1,164 +0,0 @@ -// General schema which allows any PVStructure to be forwarded as a flatbuffer. -// Generality comes at a price: More overhead during construction in terms of space -// and cpu, more work for the receiver of the flatbuffer to access. 
- -// file_identifier "\\xf1\\x40"; - -file_identifier "f140"; - -namespace BrightnESS.FlatBufs.f140_general; - -table pvByte { - v: byte; -} - -table pvUByte { - v: ubyte; -} - -table pvShort { - v: short; -} - -table pvUShort { - v: ushort; -} - -table pvInt { - v: int; -} - -table pvUInt { - v: uint; -} - -table pvLong { - v: long; -} - -table pvULong { - v: ulong; -} - -table pvFloat { - v: float; -} - -table pvDouble { - v: double; -} - -table pvString { - v: string; -} - - - - -table pvByte_a { - v: [byte]; -} - -table pvShort_a { - v: [short]; -} - -table pvInt_a { - v: [int]; -} - -table pvLong_a { - v: [long]; -} - -table pvUByte_a { - v: [ubyte]; -} - -table pvUShort_a { - v: [ushort]; -} - -table pvUInt_a { - v: [uint]; -} - -table pvULong_a { - v: [ulong]; -} - -table pvFloat_a { - v: [float]; -} - -table pvDouble_a { - v: [double]; -} - -table pvString_a { - v: [string]; -} - - -union F { - pvByte, - pvShort, - pvInt, - pvLong, - pvUByte, - pvUShort, - pvUInt, - pvULong, - - pvFloat, - pvDouble, - - pvString, - - pvByte_a, - pvShort_a, - pvInt_a, - pvLong_a, - pvUByte_a, - pvUShort_a, - pvUInt_a, - pvULong_a, - - pvFloat_a, - pvDouble_a, - - pvString_a, - - Obj, - Obj_a, -} - - -table ObjM { - k: string; - v: F; -} - -table Obj { - ms: [ObjM]; -} - -table Obj_a { - v: [Obj]; -} - -struct fwdinfo_t { - seq: ulong; - ts_data: ulong; - ts_fwd: ulong; - fwdix: ubyte; -} - -table PV { - n: string; - v: F; - fwdinfo: fwdinfo_t; -} - -// Root must be a table -root_type PV; diff --git a/schemas/f141_epics_nt.fbs b/schemas/f141_epics_nt.fbs deleted file mode 100644 index 6bbca00..0000000 --- a/schemas/f141_epics_nt.fbs +++ /dev/null @@ -1,84 +0,0 @@ -file_identifier "f141"; - -namespace BrightnESS.FlatBufs.f141_epics_nt; - -struct timeStamp_t { - secondsPastEpoch: ulong; - nanoseconds: int; -} - -table NTScalarByte { value: byte; } -table NTScalarUByte { value: ubyte; } -table NTScalarShort { value: short; } -table NTScalarUShort { value: ushort; } -table 
NTScalarInt { value: int; } -table NTScalarUInt { value: uint; } -table NTScalarLong { value: long; } -table NTScalarULong { value: ulong; } -table NTScalarFloat { value: float; } -table NTScalarDouble { value: double; } - -table NTScalarArrayByte { value: [ byte]; } -table NTScalarArrayUByte { value: [ubyte]; } -table NTScalarArrayShort { value: [ short]; } -table NTScalarArrayUShort { value: [ushort]; } -table NTScalarArrayInt { value: [ int]; } -table NTScalarArrayUInt { value: [uint]; } -table NTScalarArrayLong { value: [ long]; } -table NTScalarArrayULong { value: [ulong]; } -table NTScalarArrayFloat { value: [ float]; } -table NTScalarArrayDouble { value: [ double]; } - -union PV { - NTScalarByte, - NTScalarUByte, - NTScalarShort, - NTScalarUShort, - NTScalarInt, - NTScalarUInt, - NTScalarLong, - NTScalarULong, - NTScalarFloat, - NTScalarDouble, - NTScalarArrayByte, - NTScalarArrayUByte, - NTScalarArrayShort, - NTScalarArrayUShort, - NTScalarArrayInt, - NTScalarArrayUInt, - NTScalarArrayLong, - NTScalarArrayULong, - NTScalarArrayFloat, - NTScalarArrayDouble -} - -struct fwdinfo_t { - seq: ulong; - ts_data: ulong; - ts_fwd: ulong; - fwdix: ubyte; - teamid: ulong; -} - -table fwdinfo_2_t { - seq_data: ulong; - seq_fwd: ulong; - ts_data: ulong; - ts_fwd: ulong; - fwdix: uint; - teamid: ulong; -} - -union fwdinfo_u { - fwdinfo_2_t, -} - -table EpicsPV { - name: string; - pv: PV; - timeStamp: timeStamp_t; - fwdinfo: fwdinfo_t; - fwdinfo2: fwdinfo_u; -} - -root_type EpicsPV; diff --git a/schemas/f142_logdata.fbs b/schemas/f142_logdata.fbs deleted file mode 100644 index 6c883f5..0000000 --- a/schemas/f142_logdata.fbs +++ /dev/null @@ -1,98 +0,0 @@ -// Log data, for example "slow" sample environment measurements -// -// Typical producers and consumers: -// Produced by EPICS forwarder from EPICS PV -// Produced by NeXus-Streamer from NXlogs -// Consumed by NeXus file writer -> NXLog -// Consumed by Mantid -> Workspace log - -file_identifier "f142"; - -table Byte { 
value: byte; } -table UByte { value: ubyte; } -table Short { value: short; } -table UShort { value: ushort; } -table Int { value: int; } -table UInt { value: uint; } -table Long { value: long; } -table ULong { value: ulong; } -table Float { value: float; } -table Double { value: double; } - -table ArrayByte { value: [ byte]; } -table ArrayUByte { value: [ubyte]; } -table ArrayShort { value: [ short]; } -table ArrayUShort { value: [ushort]; } -table ArrayInt { value: [ int]; } -table ArrayUInt { value: [uint]; } -table ArrayLong { value: [ long]; } -table ArrayULong { value: [ulong]; } -table ArrayFloat { value: [ float]; } -table ArrayDouble { value: [ double]; } - -union Value { - Byte, - UByte, - Short, - UShort, - Int, - UInt, - Long, - ULong, - Float, - Double, - ArrayByte, - ArrayUByte, - ArrayShort, - ArrayUShort, - ArrayInt, - ArrayUInt, - ArrayLong, - ArrayULong, - ArrayFloat, - ArrayDouble, -} - -enum AlarmStatus: ushort { - NO_ALARM, - READ, - WRITE, - HIHI, - HIGH, - LOLO, - LOW, - STATE, - COS, - COMM, - TIMED, - HWLIMIT, - CALC, - SCAN, - LINK, - SOFT, - BAD_SUB, - UDF, - DISABLE, - SIMM, - READ_ACCESS, - WRITE_ACCESS, - NO_CHANGE -} - -enum AlarmSeverity: ushort { - MINOR, - MAJOR, - NO_ALARM, - INVALID, - NO_CHANGE -} - -table LogData { - source_name: string; // identify source on multiplexed topics, e.g. 
PV name if from EPICS - value: Value; // may be scalar or array - timestamp: ulong; // nanoseconds past epoch (1 Jan 1970), zero reserved for invalid timestamp - status: AlarmStatus = NO_CHANGE; // details of EPICS alarm, default being NO_CHANGE: file writer only records changes - severity: AlarmSeverity = NO_CHANGE; // severity of current EPICS alarm status, default of NO_CHANGE should be used if status has value of NO_CHANGE -} - -root_type LogData; diff --git a/schemas/f143_structure.fbs b/schemas/f143_structure.fbs deleted file mode 100644 index a3af0bb..0000000 --- a/schemas/f143_structure.fbs +++ /dev/null @@ -1,77 +0,0 @@ -// General schema which allows any EPICS structure to be forwarded as a flatbuffer. -// Generality comes at a price: More overhead during construction in terms of space -// and cpu, more work for the receiver of the flatbuffer to access. - -include "fwdi_forwarder_internal.fbs"; - -file_identifier "f143"; - -namespace f143_structure; - -table Byte { value: byte; } -table UByte { value: ubyte; } -table Short { value: short; } -table UShort { value: ushort; } -table Int { value: int; } -table UInt { value: uint; } -table Long { value: long; } -table ULong { value: ulong; } -table Float { value: float; } -table Double { value: double; } -table String { value: string; } - -table ArrayByte { value: [ byte]; } -table ArrayUByte { value: [ubyte]; } -table ArrayShort { value: [ short]; } -table ArrayUShort { value: [ushort]; } -table ArrayInt { value: [ int]; } -table ArrayUInt { value: [uint]; } -table ArrayLong { value: [ long]; } -table ArrayULong { value: [ulong]; } -table ArrayFloat { value: [ float]; } -table ArrayDouble { value: [ double]; } -table ArrayString { value: [ string]; } - -union Value { - Byte, - Short, - Int, - Long, - UByte, - UShort, - UInt, - ULong, - Float, - Double, - String, - Obj, - ArrayByte, - ArrayShort, - ArrayInt, - ArrayLong, - ArrayUByte, - ArrayUShort, - ArrayUInt, - ArrayULong, - ArrayFloat, - ArrayDouble, - 
ArrayString, - ArrayObj, -} - -table ObjM { - k: string; - v: Value; -} - -table Obj { value: [ObjM]; } -table ArrayObj { value: [Obj]; } - -table Structure { - name: string; - value: Value; - timestamp: ulong; - fwdinfo: forwarder_internal; -} - -root_type Structure; diff --git a/schemas/fwdi_forwarder_internal.fbs b/schemas/fwdi_forwarder_internal.fbs deleted file mode 100644 index 2ddf61f..0000000 --- a/schemas/fwdi_forwarder_internal.fbs +++ /dev/null @@ -1,15 +0,0 @@ -file_identifier "fwdi"; - -// optional, currently only used by forwarder -table fwdinfo_1_t { - seq_data: ulong; - seq_fwd: ulong; - ts_data: ulong; - ts_fwd: ulong; - fwdix: uint; - teamid: ulong; -} - -union forwarder_internal { - fwdinfo_1_t, -} diff --git a/schemas/hs00_event_histogram.fbs b/schemas/hs00_event_histogram.fbs deleted file mode 100644 index 01d6463..0000000 --- a/schemas/hs00_event_histogram.fbs +++ /dev/null @@ -1,49 +0,0 @@ -// General schema for histogram - -file_identifier "hs00"; - -table ArrayUInt { value: [uint]; } -table ArrayULong { value: [ulong]; } -table ArrayDouble { value: [double]; } -table ArrayFloat { value: [float]; } - -// Union of allowed data types for the arrays -union Array { - ArrayUInt, - ArrayULong, - ArrayDouble, - ArrayFloat, -} - -// Meta information for one dimension -table DimensionMetaData { - length: uint; // Length of the full histogram along this dimension - unit: string; // Unit - label: string; // Label - bin_boundaries: Array; // Boundary information (should be of length: DimensionMetaData.length+1) -} - -// Represents a n-dimensional histogram -// Subsets of histogram are also supported -table EventHistogram { - source: string; // Source name - timestamp: ulong; // Timestamp (in ns, after unix epoch) - dim_metadata: [DimensionMetaData]; // Meta data for each dimension - last_metadata_timestamp: ulong; // Timestamp (ns, after unix epoch) when the last metadata information was written - current_shape: [uint] (required); // Shape of the 
current data in each dimension - offset: [uint]; // Offset giving the starting index in each dimension - data: Array; // Data represented in RowMajor order (C Style), filled with 0 if missing - errors: Array; // Errors in calculation of histogram data (same size as data) - info: string; // Additional information (Integrated/Processed) -} - -// The "current_shape" and "offset" fields can be used to define a slice of a -// larger histogram. This allows breaking a large histogram into multiple messages. -// For example the dim_metadata could look like this: -// dim_metadata=[DimensionMetaData(label="x", length=10, ...), DimensionMetaData(label="y", length=10, ...)] -// and each row could be sent as a separate message by using: -// current_shape=[10, 1] and offset=[0, 0] in the 1st message -// current_shape=[10, 1] and offset=[0, 1] in the 2nd message -// and so on. - -root_type EventHistogram; diff --git a/schemas/is84_isis_events.fbs b/schemas/is84_isis_events.fbs deleted file mode 100644 index 81afb65..0000000 --- a/schemas/is84_isis_events.fbs +++ /dev/null @@ -1,11 +0,0 @@ -// Schema for ISIS specific fields to be added to neutron event messages - -file_identifier "is84"; - -enum RunState : byte { SETUP=0, RUNNING=1 } - -table ISISData { - period_number : uint; - run_state : RunState; // current instrument run state - proton_charge : float; // at ESS this will likely come through EPICS forwarder instead -} diff --git a/schemas/mo01_nmx.fbs b/schemas/mo01_nmx.fbs deleted file mode 100644 index d65a3bb..0000000 --- a/schemas/mo01_nmx.fbs +++ /dev/null @@ -1,57 +0,0 @@ -// Schema for event-formation-unit detector monitoring, such as ADC and -// channel histograms, particle tracks or 'Hits'. -// Sent periodically (typically once per second or so) by the -// detector pipelines to Kafka and consumed by Daquiri for visualisation. -// Useful for debugging, commissioning and testing but not necessarily an -// essential service provided when ESS becomes fully operational. 
- -file_identifier "mo01"; - -// GEMHist is used by gdgem, sonde, multiblade for adc and channel histograms in HistSerializer.cpp -// https://github.com/ess-dmsc/event-formation-unit/blob/master/prototype2/common/HistSerializer.cpp -// GEMTrack is used by gdgem for particle tracks in TrackSerializer.cpp -// https://github.com/ess-dmsc/event-formation-unit/blob/master/prototype2/gdgem/nmx/TrackSerializer.cpp -// MONHit is used by multigrid for streaming readouts in ReadoutSerializer.cpp -// https://github.com/ess-dmsc/event-formation-unit/blob/master/prototype2/common/ReadoutSerializer.cpp - - -union DataField { GEMHist, GEMTrack, MONHit } - -// used for GEMTrack -table pos { - time : ushort; // Arbitrary units, could be ns or clock ticks - strip: ushort; // An index along one axis - adc : ushort; // ADC value from digitiser or other intensity/weight -} - -// -table MONHit { - plane : [ushort]; // A coordinate dimension (x, y, z) or detector dimension (wires, strips) - time : [uint]; // Arbitrary units, could be ns or clock ticks - channel : [ushort]; // a channels representing a 'position' along the plane - adc : [ushort]; // ADC value from digitiser or other intensity/weight -} - -table GEMTrack { - time_offset : ulong; // - xtrack : [pos]; // Particle track projection on x - ytrack : [pos]; // Particle track projection on y - xpos : double; // Calculated neutron entry position, x-coord - ypos : double; // Calculated neutron entry position, y-coord -} - -table GEMHist { - xstrips : [uint]; // Histogram counts along x-coord - ystrips : [uint]; // Histogram counts along y-coord - xspectrum : [uint]; - yspectrum : [uint]; - cluster_spectrum : [uint]; - bin_width : uint; -} - -table MonitorMessage { - source_name : string; - data : DataField; -} - -root_type MonitorMessage; diff --git a/schemas/ns10_cache_entry.fbs b/schemas/ns10_cache_entry.fbs deleted file mode 100644 index b03dd1d..0000000 --- a/schemas/ns10_cache_entry.fbs +++ /dev/null @@ -1,15 +0,0 @@ - 
-file_identifier "ns10"; - -/// pylint: skip-file -table CacheEntry { - key:string; // key for this entry (usually nicos/device/parameter) - time:double; // time (in seconds after epoch) when this entry was set - ttl:double; // time to live (in seconds after time field of this entry) - expired:bool = false; // already expired (manually or using ttl), supersedes ttl - // Value for the key. - // The value can be numerical types, strings, list, tuple, dictionaries, sets - value:string; -} - -root_type CacheEntry; diff --git a/schemas/ns11_typed_cache_entry.fbs b/schemas/ns11_typed_cache_entry.fbs deleted file mode 100644 index 207f336..0000000 --- a/schemas/ns11_typed_cache_entry.fbs +++ /dev/null @@ -1,48 +0,0 @@ -file_identifier "ns11"; - -table Bool { value: bool; } -table Long { value: long; } -table Double { value: double; } -table String { value: string; } -table Object { value: string; } // Python object represented as string - -enum ArrayType : byte { - ListType = 0, - TupleType, - SetType -} - -union Value { - Object, - Bool, - Long, - Double, - String, - Dict, - Array -} - -table DictMapping { - k: Value; - v: Value; -} - -table Dict { value: [DictMapping]; } - -table ArrayElement { v: Value; } - -table Array { - value: [ArrayElement]; - array_type: ArrayType; -} - -/// pylint: skip-file -table TypedCacheEntry { - key: string; // key for this entry (usually nicos/device/parameter) - time: double; // time (in seconds after epoch) when this entry was set - ttl: double; // time to live (in seconds after time field of this entry). NOT TO BE USED OUTSIDE OF NICOS! - expired: bool = false; // already expired (manually or using ttl), supersedes ttl. NOT TO BE USED OUTSIDE OF NICOS! 
- value: Value; -} - -root_type TypedCacheEntry; diff --git a/schemas/rf5k_forwarder_config.fbs b/schemas/rf5k_forwarder_config.fbs deleted file mode 100644 index 2c5c29e..0000000 --- a/schemas/rf5k_forwarder_config.fbs +++ /dev/null @@ -1,40 +0,0 @@ -// Forwarder Configuration Update -// Add or remove channels from a Forwarder configuration -// -// Typical producers and consumers: -// Produced by NICOS -// Consumed by Forwarder - -file_identifier "rf5k"; - -enum UpdateType: ushort { - ADD, - REMOVE, - REMOVEALL -} - -enum Protocol: ushort { - PVA, // EPICS PV access - CA, // EPICS channel access - FAKE // Forwarder generates fake updates, frequency configurable with command line argument -} - -table Stream { - // If config_change=ADD then all fields of Stream must be populated. - // If config_change=REMOVE then at least one of the string fields must be populated, - // and the Forwarder will remove any streams which match all of the populated string fields. - // "populated" here means supplying a non-empty string for the field. - // Wildcards '?' (single character) and '*' (multi-character) can be used to match topic or channel name, - // Wildcards cannot be used to match schema as they are valid characters in schema identifiers. - channel: string; // Name of the EPICS channel/pv (e.g. "MYIOC:VALUE1") - schema: string; // Identify the output format for updates from the named channel (e.g. "f142" or "tdct") - topic: string; // Name of the output topic for updates from the named channel (e.g. 
"LOKI_motionControl") - protocol: Protocol = PVA; // Protocol for channel, EPICS PV access by default -} - -table ConfigUpdate { - config_change: UpdateType; // Type of config change, add streams, remove streams or remove all streams - streams: [Stream]; // Details what should be forwarded where, empty if config_change=REMOVEALL -} - -root_type ConfigUpdate; diff --git a/schemas/senv_data.fbs b/schemas/senv_data.fbs deleted file mode 100644 index 82af240..0000000 --- a/schemas/senv_data.fbs +++ /dev/null @@ -1,42 +0,0 @@ -//Used to transmit fast sample environment data -// NOTE: THIS SCHEMA HAS BEEN DEPRECATED - -file_identifier "senv"; - -enum Location : byte { Unknown = 0, Start, Middle, End } - -table Int8Array { value: [ byte] (required); } -table UInt8Array { value: [ubyte] (required); } -table Int16Array { value: [ short] (required); } -table UInt16Array { value: [ushort] (required); } -table Int32Array { value: [ int] (required); } -table UInt32Array { value: [uint] (required); } -table Int64Array { value: [ long] (required); } -table UInt64Array { value: [ulong] (required); } - -union ValueUnion { - Int8Array, - UInt8Array, - Int16Array, - UInt16Array, - Int32Array, - UInt32Array, - Int64Array, - UInt64Array -} - -table SampleEnvironmentData { - Name: string (required); // Name of the device/source of the data. - Channel: int; // Can be used to store the ADC channel number. Should be set to -1 if not used. - PacketTimestamp: ulong; // The timestamp (in nanoseconds since UNIX epoch) of the first sample in the value vector. - TimeDelta: double; // Time in nanoseconds between samples. Available for "compression" of the schema. Should - // be set to <= 0 if not used. - TimestampLocation: Location; // Relevant when the delta time between two consecutive timestamps is long in comparison - // to the resolution of the timestamp. For example, when using oversampling. - // middle or end of the samples that were summed to produce each oversampled sample. 
- Values: ValueUnion (required); // The sample values. - Timestamps: [ulong]; // OPTIONAL (nanosecond) timestamps of each individual sample. - MessageCounter: ulong; // Monotonically increasing counter. -} - -root_type SampleEnvironmentData; diff --git a/schemas/tdct_timestamps.fbs b/schemas/tdct_timestamps.fbs deleted file mode 100644 index 3e30061..0000000 --- a/schemas/tdct_timestamps.fbs +++ /dev/null @@ -1,9 +0,0 @@ -file_identifier "tdct"; - -table timestamp { - name: string (required); // Name of the device (e.g. "Chopper_3"). - timestamps: [ulong] (required); // Timestamps in the form of nano seconds since UNIX epoch. - sequence_counter: ulong; // Monotonically increasing counter. -} - -root_type timestamp; From 8a388416642add1dcf2986119a0c88109761a460 Mon Sep 17 00:00:00 2001 From: Tom Willemsen Date: Wed, 18 Mar 2026 15:22:30 +0000 Subject: [PATCH 352/363] Correct readme --- README.md | 81 ++----------------------------------------------------- 1 file changed, 2 insertions(+), 79 deletions(-) diff --git a/README.md b/README.md index 77ed465..0d50e6a 100644 --- a/README.md +++ b/README.md @@ -1,102 +1,25 @@ # Streaming Data Types -[![DOI](https://zenodo.org/badge/81330954.svg)](https://zenodo.org/badge/latestdoi/81330954) - -FlatBuffers is the format chosen for the ESS messaging system. - -We would like to be able to read any message in the system at any time, -therefore: - -All schemas that we use for the transmission of data are collected in this -repository. - -The names of the schema files in this repository are prefixed by their unique -4-character `file_identifier`. This `file_identifier` must be set in the -schema definition file as: -``` -file_identifier = "abcd"; -``` - -The file identifiers (also called "schema id") must be unique on the network. -The `root_type` should include the schema version number. 
For example, the f144 schema has -the `root_type`: -``` -root_type f144_LogData; -``` - -The naming convention for new identifiers and a table of existing identifiers follows later in this README. -Please add your own (new schema) with file identifier to that table. - - -## Backwards compatibility - -Please, avoid changes which break binary compatibility. FlatBuffers documentation contains good information about how to maintain binary compatibility. If you need to make breaking changes to schemas that are not under development, acquire a new schema id. - -Schemas that are under development should be clearly marked as such in the schema file and in the **Schema ids** below to warn users of possible loss of backwards compatibility. - -## Not enough file identifiers available? - -If you feel that you may need a lot of schema ids, you can use a single schema -and work with the flat buffers union data type in your root element. - - -## Schema coding standard - -* Completely new schemas should have an ID comprising of two characters plus 00, e.g. hs00 -* When updating an existing schema with a breaking change then the new schema should have the same ID but with the number incremented, e.g. hs00 -> hs01 - * For older schema which don't end with two numbers, propose a new name which matches the convention. -* Prefix your schema files in this repository with the chosen schema id to more easily prevent id collision. -* Tables should use *UpperCamelCase*. -* Fields should use *snake_case*. -* Try to keep names consistent with equivalent fields in existing schema, e.g.: - * `timestamp` for timestamp - * `source_name` for a string indicating origin/source of data in flatbuffer - * `service_id` for a string indicating the name of the service that created the flatbuffer -* Do not use unsigned integers unless required for your application. 
- +FlatBuffers is the format chosen for the ISIS data streaming system, derived from the +[ESS messaging system](https://github.com/ess-dmsc/streaming-data-types). ## Schema ids | ID | File name | Description | |------|----------------------------------|-----------------------------------------------------------------------------------------------| -| f140 | `f140_general.fbs ` | [OBSOLETE] Can encode an arbitrary EPICS PV | -| f141 | `f141_ntarraydouble.fbs ` | [OBSOLETE] A simple array of double, testing file writing | -| f142 | `f142_logdata.fbs ` | (DEPRECATED) For log data, for example forwarded EPICS PV update [superseded by f144] | -| f143 | `f143_structure.fbs ` | [OBSOLETE] Arbitrary nested data | | f144 | `f144_logdata.fbs ` | Controls related log data, typically from EPICS or NICOS. Note: not to be used for array data | -| ev42 | `ev42_events.fbs ` | Multi-institution neutron event data for a single pulse | -| ev43 | `ev43_events.fbs ` | Multi-institution neutron event data from multiple pulses | | ev44 | `ev44_events.fbs ` | Multi-institution neutron event data for both single and multiple pulses | -| an44 | `an44_events.fbs ` | ANSTO-specific variant of ev44 | -| ar51 | `ar51_readout_data.fbs ` | Streaming raw ESS detector readout data | -| is84 | `is84_isis_events.fbs ` | ISIS specific addition to event messages | -| ba57 | `ba57_run_info.fbs ` | [OBSOLETE] Run start/stop information for Mantid [superseded by pl72] | | df12 | `df12_det_spec_map.fbs ` | Detector-spectrum map for Mantid | -| senv | `senv_data.fbs ` | (DEPRECATED) Used for storing for waveforms from DG ADC readout system. | | se00 | `se00_data.fbs ` | Used for storing arrays with optional timestamps, for example waveform data. Replaces _senv_. 
| -| NDAr | `NDAr_NDArray_schema.fbs ` | (DEPRECATED) Holds binary blob of data with n dimensions | -| ADAr | `ADAr_area_detector_array.fbs ` | (DEPRECATED) EPICS area detector array data [superseded by ad00] | | ad00 | `ad00_area_detector_array.fbs ` | EPICS area detector array data | -| mo01 | `mo01_nmx.fbs ` | Daquiri monitor data: pre-binned histograms, raw hits and NMX tracks | -| ns10 | `ns10_cache_entry.fbs ` | (DEPRECATED) NICOS cache entry | -| ns11 | `ns11_typed_cache_entry.fbs ` | (DEPRECATED) NICOS cache entry with typed data (not used) | -| hs00 | `hs00_event_histogram.fbs ` | (DEPRECATED) Event histogram stored in n dim array | | hs01 | `hs01_event_histogram.fbs ` | Event histogram stored in n dim array | -| dtdb | `dtdb_adc_pulse_debug.fbs ` | Debug fields that can be added to the ev42 schema | -| ep00 | `ep00_epics_connection_info.fbs` | (DEPRECATED) Status of the EPICS connection | | ep01 | `ep01_epics_connection.fbs ` | Status or event of EPICS connection. Replaces _ep00_ | | json | `json_json.fbs ` | Carries a JSON payload | -| tdct | `tdct_timestamps.fbs ` | Timestamps from a device (e.g. a chopper) | | pl72 | `pl72_run_start.fbs ` | File writing, run start message for file writer and Mantid | | 6s4t | `6s4t_run_stop.fbs ` | File writing, run stop message for file writer and Mantid | | answ | `answ_action_response.fbs ` | Holds the result of a command to the filewriter | | wrdn | `wrdn_finished_writing.fbs ` | Message from the filewriter when it is done writing a file | | x5f2 | `x5f2_status.fbs ` | Status update and heartbeat message for any software | -| rf5k | `rf5k_forwarder_config.fbs ` | (DEPRECATED) Configuration update for Forwarder [superseded by fc00] | | fc00 | `fc00_forwarder_config.fbs ` | Configuration update for Forwarder | | al00 | `al00_alarm.fbs ` | Generic alarm schema for EPICS, NICOS, etc. 
| | da00 | `da00_dataarray.fbs ` | Pseudo-scipp DataArray with time-dependent and constant Variables | | un00 | `un00_units.fbs ` | Engineering units update | - -## Useful information: - -- [Time formats we use and how to convert between them](documentation/timestamps.md) From b1bc8e93b16f57b02a00f0571b75e7c1f5422520 Mon Sep 17 00:00:00 2001 From: Tom Willemsen Date: Wed, 18 Mar 2026 15:50:34 +0000 Subject: [PATCH 353/363] python: delete unused/obsolete schemas that will never be used at ISIS --- PULL_REQUEST_TEMPLATE.md | 18 - README.md | 22 + python/README | 77 --- python/README_DEV.md | 108 ---- python/pyproject.toml | 3 +- python/src/streaming_data_types/__init__.py | 51 -- .../area_detector_ADAr.py | 207 ------- .../area_detector_NDAr.py | 94 --- .../src/streaming_data_types/array_1d_se00.py | 149 ----- .../epics_connection_info_ep00.py | 62 -- .../streaming_data_types/eventdata_an44.py | 95 --- .../streaming_data_types/eventdata_ev42.py | 107 ---- .../streaming_data_types/eventdata_ev43.py | 82 --- .../fbschemas/ADAr_ADArray_schema/ADArray.py | 201 ------ .../ADAr_ADArray_schema/Attribute.py | 127 ---- .../fbschemas/ADAr_ADArray_schema/DType.py | 17 - .../fbschemas/ADAr_ADArray_schema/__init__.py | 0 .../fbschemas/NDAr_NDArray_schema/DType.py | 17 - .../fbschemas/NDAr_NDArray_schema/NDArray.py | 182 ------ .../NDAr_NDArray_schema/NDAttribute.py | 113 ---- .../fbschemas/NDAr_NDArray_schema/__init__.py | 0 .../NDAr_NDArray_schema/epicsTimeStamp.py | 34 -- .../fbschemas/array_1d_se00/DoubleArray.py | 69 --- .../fbschemas/array_1d_se00/FloatArray.py | 69 --- .../fbschemas/array_1d_se00/Int16Array.py | 69 --- .../fbschemas/array_1d_se00/Int32Array.py | 69 --- .../fbschemas/array_1d_se00/Int64Array.py | 69 --- .../fbschemas/array_1d_se00/Int8Array.py | 69 --- .../fbschemas/array_1d_se00/Location.py | 9 - .../fbschemas/array_1d_se00/UInt16Array.py | 69 --- .../fbschemas/array_1d_se00/UInt32Array.py | 69 --- .../fbschemas/array_1d_se00/UInt64Array.py | 69 --- 
.../fbschemas/array_1d_se00/UInt8Array.py | 69 --- .../fbschemas/array_1d_se00/ValueUnion.py | 16 - .../fbschemas/array_1d_se00/__init__.py | 0 .../se00_SampleEnvironmentData.py | 152 ----- .../EpicsConnectionInfo.py | 80 --- .../epics_connection_info_ep00/EventType.py | 11 - .../epics_connection_info_ep00/__init__.py | 0 .../eventdata_an44/AN44EventMessage.py | 322 ---------- .../fbschemas/eventdata_an44/__init__.py | 0 .../fbschemas/eventdata_ev42/EventMessage.py | 165 ----- .../fbschemas/eventdata_ev42/FacilityData.py | 9 - .../fbschemas/eventdata_ev42/__init__.py | 0 .../eventdata_ev43/Event43Message.py | 223 ------- .../fbschemas/eventdata_ev43/__init__.py | 0 .../ConfigUpdate.py | 72 --- .../forwarder_config_update_rf5k/Protocol.py | 9 - .../forwarder_config_update_rf5k/Stream.py | 80 --- .../UpdateType.py | 9 - .../forwarder_config_update_rf5k/__init__.py | 0 .../fbschemas/histogram_hs00/Array.py | 11 - .../fbschemas/histogram_hs00/ArrayDouble.py | 63 -- .../fbschemas/histogram_hs00/ArrayFloat.py | 63 -- .../fbschemas/histogram_hs00/ArrayUInt.py | 63 -- .../fbschemas/histogram_hs00/ArrayULong.py | 63 -- .../histogram_hs00/DimensionMetaData.py | 95 --- .../histogram_hs00/EventHistogram.py | 237 ------- .../fbschemas/histogram_hs00/__init__.py | 0 .../isis_event_info_is84/ISISData.py | 65 -- .../isis_event_info_is84/RunState.py | 8 - .../isis_event_info_is84/__init__.py | 0 .../fbschemas/logdata_f142/AlarmSeverity.py | 11 - .../fbschemas/logdata_f142/AlarmStatus.py | 29 - .../fbschemas/logdata_f142/ArrayByte.py | 63 -- .../fbschemas/logdata_f142/ArrayDouble.py | 63 -- .../fbschemas/logdata_f142/ArrayFloat.py | 63 -- .../fbschemas/logdata_f142/ArrayInt.py | 63 -- .../fbschemas/logdata_f142/ArrayLong.py | 63 -- .../fbschemas/logdata_f142/ArrayShort.py | 63 -- .../fbschemas/logdata_f142/ArrayString.py | 55 -- .../fbschemas/logdata_f142/ArrayUByte.py | 63 -- .../fbschemas/logdata_f142/ArrayUInt.py | 63 -- .../fbschemas/logdata_f142/ArrayULong.py | 63 -- 
.../fbschemas/logdata_f142/ArrayUShort.py | 63 -- .../fbschemas/logdata_f142/Byte.py | 39 -- .../fbschemas/logdata_f142/Double.py | 41 -- .../fbschemas/logdata_f142/Float.py | 41 -- .../fbschemas/logdata_f142/Int.py | 39 -- .../fbschemas/logdata_f142/LogData.py | 108 ---- .../fbschemas/logdata_f142/Long.py | 39 -- .../fbschemas/logdata_f142/Short.py | 39 -- .../fbschemas/logdata_f142/String.py | 41 -- .../fbschemas/logdata_f142/UByte.py | 39 -- .../fbschemas/logdata_f142/UInt.py | 41 -- .../fbschemas/logdata_f142/ULong.py | 41 -- .../fbschemas/logdata_f142/UShort.py | 41 -- .../fbschemas/logdata_f142/Value.py | 29 - .../fbschemas/logdata_f142/__init__.py | 0 .../fbschemas/nicos_cache_ns10/CacheEntry.py | 94 --- .../fbschemas/nicos_cache_ns10/__init__.py | 0 .../fbschemas/nmx_mo01/__init__.py | 0 .../readout_data_ar51/RawReadoutMessage.py | 130 ---- .../fbschemas/readout_data_ar51/__init__.py | 0 .../sample_environment_senv/Int16Array.py | 77 --- .../sample_environment_senv/Int32Array.py | 77 --- .../sample_environment_senv/Int64Array.py | 77 --- .../sample_environment_senv/Int8Array.py | 77 --- .../sample_environment_senv/Location.py | 10 - .../SampleEnvironmentData.py | 179 ------ .../sample_environment_senv/UInt16Array.py | 77 --- .../sample_environment_senv/UInt32Array.py | 77 --- .../sample_environment_senv/UInt64Array.py | 77 --- .../sample_environment_senv/UInt8Array.py | 77 --- .../sample_environment_senv/ValueUnion.py | 15 - .../sample_environment_senv/__init__.py | 0 .../fbschemas/timestamps_tdct/__init__.py | 0 .../fbschemas/timestamps_tdct/timestamp.py | 89 --- .../forwarder_config_update_rf5k.py | 112 ---- .../streaming_data_types/histogram_hs00.py | 231 ------- .../src/streaming_data_types/logdata_f142.py | 577 ------------------ .../streaming_data_types/nicos_cache_ns10.py | 45 -- .../streaming_data_types/readout_data_ar51.py | 66 -- .../sample_environment_senv.py | 153 ----- .../streaming_data_types/timestamps_tdct.py | 58 -- 
python/tests/test_ADAr.py | 113 ---- python/tests/test_NDAr.py | 64 -- python/tests/test_an44.py | 118 ---- python/tests/test_ar51.py | 80 --- python/tests/test_ep00.py | 41 -- python/tests/test_ev42.py | 118 ---- python/tests/test_ev43.py | 76 --- python/tests/test_f142.py | 211 ------- python/tests/test_hs00.py | 365 ----------- python/tests/test_ns10.py | 49 -- python/tests/test_rf5k.py | 60 -- python/tests/test_se00.py | 73 --- python/tests/test_senv.py | 57 -- python/tests/test_tdct.py | 59 -- 129 files changed, 24 insertions(+), 9419 deletions(-) delete mode 100644 PULL_REQUEST_TEMPLATE.md delete mode 100644 python/README delete mode 100644 python/README_DEV.md delete mode 100644 python/src/streaming_data_types/area_detector_ADAr.py delete mode 100644 python/src/streaming_data_types/area_detector_NDAr.py delete mode 100644 python/src/streaming_data_types/array_1d_se00.py delete mode 100644 python/src/streaming_data_types/epics_connection_info_ep00.py delete mode 100644 python/src/streaming_data_types/eventdata_an44.py delete mode 100644 python/src/streaming_data_types/eventdata_ev42.py delete mode 100644 python/src/streaming_data_types/eventdata_ev43.py delete mode 100644 python/src/streaming_data_types/fbschemas/ADAr_ADArray_schema/ADArray.py delete mode 100644 python/src/streaming_data_types/fbschemas/ADAr_ADArray_schema/Attribute.py delete mode 100644 python/src/streaming_data_types/fbschemas/ADAr_ADArray_schema/DType.py delete mode 100644 python/src/streaming_data_types/fbschemas/ADAr_ADArray_schema/__init__.py delete mode 100644 python/src/streaming_data_types/fbschemas/NDAr_NDArray_schema/DType.py delete mode 100644 python/src/streaming_data_types/fbschemas/NDAr_NDArray_schema/NDArray.py delete mode 100644 python/src/streaming_data_types/fbschemas/NDAr_NDArray_schema/NDAttribute.py delete mode 100644 python/src/streaming_data_types/fbschemas/NDAr_NDArray_schema/__init__.py delete mode 100644 
python/src/streaming_data_types/fbschemas/NDAr_NDArray_schema/epicsTimeStamp.py delete mode 100644 python/src/streaming_data_types/fbschemas/array_1d_se00/DoubleArray.py delete mode 100644 python/src/streaming_data_types/fbschemas/array_1d_se00/FloatArray.py delete mode 100644 python/src/streaming_data_types/fbschemas/array_1d_se00/Int16Array.py delete mode 100644 python/src/streaming_data_types/fbschemas/array_1d_se00/Int32Array.py delete mode 100644 python/src/streaming_data_types/fbschemas/array_1d_se00/Int64Array.py delete mode 100644 python/src/streaming_data_types/fbschemas/array_1d_se00/Int8Array.py delete mode 100644 python/src/streaming_data_types/fbschemas/array_1d_se00/Location.py delete mode 100644 python/src/streaming_data_types/fbschemas/array_1d_se00/UInt16Array.py delete mode 100644 python/src/streaming_data_types/fbschemas/array_1d_se00/UInt32Array.py delete mode 100644 python/src/streaming_data_types/fbschemas/array_1d_se00/UInt64Array.py delete mode 100644 python/src/streaming_data_types/fbschemas/array_1d_se00/UInt8Array.py delete mode 100644 python/src/streaming_data_types/fbschemas/array_1d_se00/ValueUnion.py delete mode 100644 python/src/streaming_data_types/fbschemas/array_1d_se00/__init__.py delete mode 100644 python/src/streaming_data_types/fbschemas/array_1d_se00/se00_SampleEnvironmentData.py delete mode 100644 python/src/streaming_data_types/fbschemas/epics_connection_info_ep00/EpicsConnectionInfo.py delete mode 100644 python/src/streaming_data_types/fbschemas/epics_connection_info_ep00/EventType.py delete mode 100644 python/src/streaming_data_types/fbschemas/epics_connection_info_ep00/__init__.py delete mode 100644 python/src/streaming_data_types/fbschemas/eventdata_an44/AN44EventMessage.py delete mode 100644 python/src/streaming_data_types/fbschemas/eventdata_an44/__init__.py delete mode 100644 python/src/streaming_data_types/fbschemas/eventdata_ev42/EventMessage.py delete mode 100644 
python/src/streaming_data_types/fbschemas/eventdata_ev42/FacilityData.py delete mode 100644 python/src/streaming_data_types/fbschemas/eventdata_ev42/__init__.py delete mode 100644 python/src/streaming_data_types/fbschemas/eventdata_ev43/Event43Message.py delete mode 100644 python/src/streaming_data_types/fbschemas/eventdata_ev43/__init__.py delete mode 100644 python/src/streaming_data_types/fbschemas/forwarder_config_update_rf5k/ConfigUpdate.py delete mode 100644 python/src/streaming_data_types/fbschemas/forwarder_config_update_rf5k/Protocol.py delete mode 100644 python/src/streaming_data_types/fbschemas/forwarder_config_update_rf5k/Stream.py delete mode 100644 python/src/streaming_data_types/fbschemas/forwarder_config_update_rf5k/UpdateType.py delete mode 100644 python/src/streaming_data_types/fbschemas/forwarder_config_update_rf5k/__init__.py delete mode 100644 python/src/streaming_data_types/fbschemas/histogram_hs00/Array.py delete mode 100644 python/src/streaming_data_types/fbschemas/histogram_hs00/ArrayDouble.py delete mode 100644 python/src/streaming_data_types/fbschemas/histogram_hs00/ArrayFloat.py delete mode 100644 python/src/streaming_data_types/fbschemas/histogram_hs00/ArrayUInt.py delete mode 100644 python/src/streaming_data_types/fbschemas/histogram_hs00/ArrayULong.py delete mode 100644 python/src/streaming_data_types/fbschemas/histogram_hs00/DimensionMetaData.py delete mode 100644 python/src/streaming_data_types/fbschemas/histogram_hs00/EventHistogram.py delete mode 100644 python/src/streaming_data_types/fbschemas/histogram_hs00/__init__.py delete mode 100644 python/src/streaming_data_types/fbschemas/isis_event_info_is84/ISISData.py delete mode 100644 python/src/streaming_data_types/fbschemas/isis_event_info_is84/RunState.py delete mode 100644 python/src/streaming_data_types/fbschemas/isis_event_info_is84/__init__.py delete mode 100644 python/src/streaming_data_types/fbschemas/logdata_f142/AlarmSeverity.py delete mode 100644 
python/src/streaming_data_types/fbschemas/logdata_f142/AlarmStatus.py delete mode 100644 python/src/streaming_data_types/fbschemas/logdata_f142/ArrayByte.py delete mode 100644 python/src/streaming_data_types/fbschemas/logdata_f142/ArrayDouble.py delete mode 100644 python/src/streaming_data_types/fbschemas/logdata_f142/ArrayFloat.py delete mode 100644 python/src/streaming_data_types/fbschemas/logdata_f142/ArrayInt.py delete mode 100644 python/src/streaming_data_types/fbschemas/logdata_f142/ArrayLong.py delete mode 100644 python/src/streaming_data_types/fbschemas/logdata_f142/ArrayShort.py delete mode 100644 python/src/streaming_data_types/fbschemas/logdata_f142/ArrayString.py delete mode 100644 python/src/streaming_data_types/fbschemas/logdata_f142/ArrayUByte.py delete mode 100644 python/src/streaming_data_types/fbschemas/logdata_f142/ArrayUInt.py delete mode 100644 python/src/streaming_data_types/fbschemas/logdata_f142/ArrayULong.py delete mode 100644 python/src/streaming_data_types/fbschemas/logdata_f142/ArrayUShort.py delete mode 100644 python/src/streaming_data_types/fbschemas/logdata_f142/Byte.py delete mode 100644 python/src/streaming_data_types/fbschemas/logdata_f142/Double.py delete mode 100644 python/src/streaming_data_types/fbschemas/logdata_f142/Float.py delete mode 100644 python/src/streaming_data_types/fbschemas/logdata_f142/Int.py delete mode 100644 python/src/streaming_data_types/fbschemas/logdata_f142/LogData.py delete mode 100644 python/src/streaming_data_types/fbschemas/logdata_f142/Long.py delete mode 100644 python/src/streaming_data_types/fbschemas/logdata_f142/Short.py delete mode 100644 python/src/streaming_data_types/fbschemas/logdata_f142/String.py delete mode 100644 python/src/streaming_data_types/fbschemas/logdata_f142/UByte.py delete mode 100644 python/src/streaming_data_types/fbschemas/logdata_f142/UInt.py delete mode 100644 python/src/streaming_data_types/fbschemas/logdata_f142/ULong.py delete mode 100644 
python/src/streaming_data_types/fbschemas/logdata_f142/UShort.py delete mode 100644 python/src/streaming_data_types/fbschemas/logdata_f142/Value.py delete mode 100644 python/src/streaming_data_types/fbschemas/logdata_f142/__init__.py delete mode 100644 python/src/streaming_data_types/fbschemas/nicos_cache_ns10/CacheEntry.py delete mode 100644 python/src/streaming_data_types/fbschemas/nicos_cache_ns10/__init__.py delete mode 100644 python/src/streaming_data_types/fbschemas/nmx_mo01/__init__.py delete mode 100644 python/src/streaming_data_types/fbschemas/readout_data_ar51/RawReadoutMessage.py delete mode 100644 python/src/streaming_data_types/fbschemas/readout_data_ar51/__init__.py delete mode 100644 python/src/streaming_data_types/fbschemas/sample_environment_senv/Int16Array.py delete mode 100644 python/src/streaming_data_types/fbschemas/sample_environment_senv/Int32Array.py delete mode 100644 python/src/streaming_data_types/fbschemas/sample_environment_senv/Int64Array.py delete mode 100644 python/src/streaming_data_types/fbschemas/sample_environment_senv/Int8Array.py delete mode 100644 python/src/streaming_data_types/fbschemas/sample_environment_senv/Location.py delete mode 100644 python/src/streaming_data_types/fbschemas/sample_environment_senv/SampleEnvironmentData.py delete mode 100644 python/src/streaming_data_types/fbschemas/sample_environment_senv/UInt16Array.py delete mode 100644 python/src/streaming_data_types/fbschemas/sample_environment_senv/UInt32Array.py delete mode 100644 python/src/streaming_data_types/fbschemas/sample_environment_senv/UInt64Array.py delete mode 100644 python/src/streaming_data_types/fbschemas/sample_environment_senv/UInt8Array.py delete mode 100644 python/src/streaming_data_types/fbschemas/sample_environment_senv/ValueUnion.py delete mode 100644 python/src/streaming_data_types/fbschemas/sample_environment_senv/__init__.py delete mode 100644 python/src/streaming_data_types/fbschemas/timestamps_tdct/__init__.py delete mode 100644 
python/src/streaming_data_types/fbschemas/timestamps_tdct/timestamp.py delete mode 100644 python/src/streaming_data_types/forwarder_config_update_rf5k.py delete mode 100644 python/src/streaming_data_types/histogram_hs00.py delete mode 100644 python/src/streaming_data_types/logdata_f142.py delete mode 100644 python/src/streaming_data_types/nicos_cache_ns10.py delete mode 100644 python/src/streaming_data_types/readout_data_ar51.py delete mode 100644 python/src/streaming_data_types/sample_environment_senv.py delete mode 100644 python/src/streaming_data_types/timestamps_tdct.py delete mode 100644 python/tests/test_ADAr.py delete mode 100644 python/tests/test_NDAr.py delete mode 100644 python/tests/test_an44.py delete mode 100644 python/tests/test_ar51.py delete mode 100644 python/tests/test_ep00.py delete mode 100644 python/tests/test_ev42.py delete mode 100644 python/tests/test_ev43.py delete mode 100644 python/tests/test_f142.py delete mode 100644 python/tests/test_hs00.py delete mode 100644 python/tests/test_ns10.py delete mode 100644 python/tests/test_rf5k.py delete mode 100644 python/tests/test_se00.py delete mode 100644 python/tests/test_senv.py delete mode 100644 python/tests/test_tdct.py diff --git a/PULL_REQUEST_TEMPLATE.md b/PULL_REQUEST_TEMPLATE.md deleted file mode 100644 index 6a5a132..0000000 --- a/PULL_REQUEST_TEMPLATE.md +++ /dev/null @@ -1,18 +0,0 @@ -### Description of Work - -*Add a description of the changes here. 
The aim is provide information to help the approvers review and approve the PR.* - -### Issue - -*If there is an associated issue, write 'Closes #XXX'* - -### Developer Checklist - -- [ ] If there are new schema in this PR I have added them to the list in README.md -- [ ] If there are breaking changes to a schema, I have used a new file identifier and updated the list in README.md -- [ ] There is some documentation here or in the flat buffer file on the use case for this data, including which component is intended to send the data and/or which is the intended receiver. - -## Approval Criteria - -This PR should not be merged until the ECDC Group Leader (acting or permanent) has given their explicit approval in the comments section. -SCIPP/DRAM should also be consulted on changes which may affect them. diff --git a/README.md b/README.md index 0d50e6a..eaa490b 100644 --- a/README.md +++ b/README.md @@ -23,3 +23,25 @@ FlatBuffers is the format chosen for the ISIS data streaming system, derived fro | al00 | `al00_alarm.fbs ` | Generic alarm schema for EPICS, NICOS, etc. | | da00 | `da00_dataarray.fbs ` | Pseudo-scipp DataArray with time-dependent and constant Variables | | un00 | `un00_units.fbs ` | Engineering units update | + + +## Adding new schemas + +### Add `.fbs` file to `schemas directory + +Check `ess-streaming-data-types` first; attempt not to diverge without reason. + +### Python bindings + +Python bindings have low-level code (autogenerated by `flatc`) in the `fbschemas` directory, but **also** +manually-written convenience serializers and deserializers in the top-level of the python module. + +When adding or modifying a schema, these manually-written serializers & deserializers will need to be updated, +and added to the lists in `__init__.py`. + +### Rust bindings + +Rust bindings have low-level code (autogenerated by `flatc` in a `build.rs` script), and a small high-level wrapper +to deserialize any message. 
+ +When adding a new schema, the wrapper (defined in `lib.rs`) will need to be updated with the new schema. diff --git a/python/README b/python/README deleted file mode 100644 index 00437df..0000000 --- a/python/README +++ /dev/null @@ -1,77 +0,0 @@ -# Python Streaming Data Types -Utilities for working with the FlatBuffers schemas used at the European -Spallation Source ERIC for data transport. - -https://github.com/ess-dmsc/streaming-data-types - -## FlatBuffer Schemas - -| name | description | -|------|------------------------------------------------------------------------------| -| hs00 | Histogram schema (deprecated in favour of hs01) | -| hs01 | Histogram schema | -| ns10 | NICOS cache entry schema | -| pl72 | Run start | -| 6s4t | Run stop | -| f142 | Log data (deprecated in favour of f144) | -| f144 | Log data | -| ev42 | Event data (deprecated in favour of ev44) | -| ev43 | Event data from multiple pulses | -| ev44 | Event data with signed data types | -| an44 | ev44 with ANSTO specific changes | -| x5f2 | Status messages | -| tdct | Timestamps | -| ep00 | EPICS connection info (deprecated in favour of ep01) | -| ep01 | EPICS connection info | -| rf5k | Forwarder configuration update (deprecated in favour of fc00) | -| fc00 | Forwarder configuration update | -| answ | File-writer command response | -| wrdn | File-writer finished writing | -| NDAr | **Deprecated** | -| ADAr | EPICS areaDetector data | -| al00 | Alarm/status messages used by the Forwarder and NICOS | -| senv | **Deprecated** | -| json | Generic JSON data | -| se00 | Arrays with optional timestamps, for example waveform data. Replaces _senv_. | -| da00 | Scipp-like data arrays, for histograms, etc. | - -### hs00 and hs01 -Schema for histogram data. It is one of the more complicated to use schemas. -It takes a Python dictionary as its input; this dictionary needs to have correctly -named fields. 
- -The input histogram data for serialisation and the output deserialisation data -have the same dictionary "layout". -Example for a 2-D histogram: -```json -hist = { - "source": "some_source", - "timestamp": 123456, - "current_shape": [2, 5], - "dim_metadata": [ - { - "length": 2, - "unit": "a", - "label": "x", - "bin_boundaries": np.array([10, 11, 12]), - }, - { - "length": 5, - "unit": "b", - "label": "y", - "bin_boundaries": np.array([0, 1, 2, 3, 4, 5]), - }, - ], - "last_metadata_timestamp": 123456, - "data": np.array([[1, 2, 3, 4, 5], [6, 7, 8, 9, 10]]), - "errors": np.array([[5, 4, 3, 2, 1], [10, 9, 8, 7, 6]]), - "info": "info_string", -} -``` -The arrays passed in for `data`, `errors` and `bin_boundaries` can be NumPy arrays -or regular lists, but on deserialisation they will be NumPy arrays. - - -## Developer documentation - -See [README_DEV.md](README_DEV.md) diff --git a/python/README_DEV.md b/python/README_DEV.md deleted file mode 100644 index 7f826d0..0000000 --- a/python/README_DEV.md +++ /dev/null @@ -1,108 +0,0 @@ -# Python Streaming Data Types -## For developers - -### Install the commit hooks (important) -There are commit hooks for Black and Flake8. - -The commit hooks are handled using [pre-commit](https://pre-commit.com). - -To install the hooks for this project run: -``` -pre-commit install -``` - -To test the hooks run: -``` -pre-commit run --all-files -``` -This command can also be used to run the hooks manually. 
- -### Adding new schemas checklist (important) -* Generate Python bindings for the schema using FlatBuffers' `flatc` executable -* Add the generated bindings to the project -* Add unit-tests (see existing tests for an example) -* Update `fbschemas.__init__.py` to include the new serialiser and deserialiser -* Check whether the serialised data produced by the new code can be verified in C++ - * There is a helper program in the [FlatBufferVerification](https://github.com/ess-dmsc/FlatBufferVerification) repository - * Don't worry if it fails verification - it seems to be an inherent FlatBuffers issue - -### Tox -Tox allows the unit tests to be run against multiple versions of Python. -See the tox.ini file for which versions are supported. -From the top directory: -``` -tox -``` - -### Installing the development version locally -First, uninstall any existing versions of the Python streaming data types package: - -``` -pip uninstall ess-streaming-data-types -``` -Then, from the _python-streaming-data-types_ root directory, run the following command: - -``` -pip install --user -e ./ -``` - -### Building the package locally and deploying it to PyPI - -#### Requirements -* A [PyPi](https://pypi.org/) account -* A [TestPyPi](https://test.pypi.org/) account (this is separate to the PyPi account) -* Permission to push to the ess-streaming-data-types project on TestPyPi and PyPi -* Installed all requirements in `requirements-dev.txt` - -#### Steps - -***First update the __version__ number in streaming_data_types/__init__.py and push the update to the repository.*** - -Uninstall streaming_data_types if you have previously installed it from PyPi: -``` -pip uninstall ess_streaming_data_types -``` - -Delete any old builds you may have (IMPORTANT!): -``` -rm -rf build dist -``` - -Build it locally: -``` -python setup.py sdist bdist_wheel -``` - -Check dist files: -``` -twine check dist/* -``` - -Push to test.pypi.org for testing: -``` -twine upload --repository-url 
https://test.pypi.org/legacy/ dist/* -``` - -The new module can then be installed from test.pypi.org like so: -``` -pip uninstall ess-streaming-data-types # Remove old version if present -pip install -i https://test.pypi.org/simple/ ess-streaming-data-types -``` -Unfortunately, flatbuffers is not on test.pypi.org so the following error may occur: -``` -ERROR: Could not find a version that satisfies the requirement flatbuffers -``` -The workaround is to install flatbuffers manually first using `pip install flatbuffers` and then rerun the previous command. - -Test the module using the existing test-suite (from project root): -``` -rm -rf streaming_data_types # Rename the local source directory -pytest # The tests will be run against the pip installed module -git reset --hard origin/main # Put everything back to before -``` - -After testing installing from test.pypi.org works, push to PyPI: -``` -twine upload dist/* -``` -Finally, create a tag on the GitHub repository with the appropriate name, e.g. `v0.7.0`. diff --git a/python/pyproject.toml b/python/pyproject.toml index 4cb4fb0..96f5a56 100644 --- a/python/pyproject.toml +++ b/python/pyproject.toml @@ -2,7 +2,6 @@ requires = ["setuptools", "setuptools_scm>=8"] build-backend = "setuptools.build_meta" - [project] name = "isis_streaming_data_types" dynamic = ["version"] @@ -95,5 +94,7 @@ reportUntypedFunctionDecorator = true [tool.setuptools_scm] version_file = "src/streaming_data_types/_version.py" +relative_to = "pyproject.toml" +root = ".." 
diff --git a/python/src/streaming_data_types/__init__.py b/python/src/streaming_data_types/__init__.py index b163503..cbcb897 100644 --- a/python/src/streaming_data_types/__init__.py +++ b/python/src/streaming_data_types/__init__.py @@ -2,109 +2,58 @@ from streaming_data_types.action_response_answ import deserialise_answ, serialise_answ from streaming_data_types.alarm_al00 import deserialise_al00, serialise_al00 from streaming_data_types.area_detector_ad00 import deserialise_ad00, serialise_ad00 -from streaming_data_types.area_detector_ADAr import deserialise_ADAr, serialise_ADAr -from streaming_data_types.area_detector_NDAr import deserialise_ndar, serialise_ndar -from streaming_data_types.array_1d_se00 import deserialise_se00, serialise_se00 from streaming_data_types.dataarray_da00 import deserialise_da00, serialise_da00 from streaming_data_types.epics_connection_ep01 import deserialise_ep01, serialise_ep01 -from streaming_data_types.epics_connection_info_ep00 import ( - deserialise_ep00, - serialise_ep00, -) -from streaming_data_types.eventdata_an44 import deserialise_an44, serialise_an44 -from streaming_data_types.eventdata_ev42 import deserialise_ev42, serialise_ev42 -from streaming_data_types.eventdata_ev43 import deserialise_ev43, serialise_ev43 from streaming_data_types.eventdata_ev44 import deserialise_ev44, serialise_ev44 from streaming_data_types.finished_writing_wrdn import deserialise_wrdn, serialise_wrdn from streaming_data_types.forwarder_config_update_fc00 import ( deserialise_fc00, serialise_fc00, ) -from streaming_data_types.forwarder_config_update_rf5k import ( - deserialise_rf5k, - serialise_rf5k, -) -from streaming_data_types.histogram_hs00 import deserialise_hs00, serialise_hs00 from streaming_data_types.histogram_hs01 import deserialise_hs01, serialise_hs01 from streaming_data_types.json_json import deserialise_json, serialise_json -from streaming_data_types.logdata_f142 import deserialise_f142, serialise_f142 from 
streaming_data_types.logdata_f144 import deserialise_f144, serialise_f144 -from streaming_data_types.nicos_cache_ns10 import deserialise_ns10, serialise_ns10 -from streaming_data_types.readout_data_ar51 import deserialise_ar51, serialise_ar51 from streaming_data_types.run_start_pl72 import deserialise_pl72, serialise_pl72 from streaming_data_types.run_stop_6s4t import deserialise_6s4t, serialise_6s4t -from streaming_data_types.sample_environment_senv import ( - deserialise_senv, - serialise_senv, -) from streaming_data_types.status_x5f2 import deserialise_x5f2, serialise_x5f2 -from streaming_data_types.timestamps_tdct import deserialise_tdct, serialise_tdct from streaming_data_types.units_un00 import serialise_un00, deserialise_un00 __version__ = version SERIALISERS = { - "an44": serialise_an44, - "ev42": serialise_ev42, - "ev43": serialise_ev43, "ev44": serialise_ev44, - "hs00": serialise_hs00, "hs01": serialise_hs01, - "f142": serialise_f142, "f144": serialise_f144, - "ns10": serialise_ns10, "pl72": serialise_pl72, "6s4t": serialise_6s4t, "x5f2": serialise_x5f2, - "ep00": serialise_ep00, "ep01": serialise_ep01, - "tdct": serialise_tdct, - "rf5k": serialise_rf5k, "fc00": serialise_fc00, "answ": serialise_answ, "wrdn": serialise_wrdn, - "NDAr": serialise_ndar, - "senv": serialise_senv, - "se00": serialise_se00, - "ADAr": serialise_ADAr, "al00": serialise_al00, "json": serialise_json, "ad00": serialise_ad00, "da00": serialise_da00, - "ar51": serialise_ar51, "un00": serialise_un00, } DESERIALISERS = { - "an44": deserialise_an44, - "ev42": deserialise_ev42, - "ev43": deserialise_ev43, "ev44": deserialise_ev44, - "hs00": deserialise_hs00, "hs01": deserialise_hs01, - "f142": deserialise_f142, "f144": deserialise_f144, - "ns10": deserialise_ns10, "pl72": deserialise_pl72, "6s4t": deserialise_6s4t, "x5f2": deserialise_x5f2, - "ep00": deserialise_ep00, "ep01": deserialise_ep01, - "tdct": deserialise_tdct, - "rf5k": deserialise_rf5k, "fc00": deserialise_fc00, "answ": 
deserialise_answ, "wrdn": deserialise_wrdn, - "NDAr": deserialise_ndar, - "senv": deserialise_senv, - "se00": deserialise_se00, - "ADAr": deserialise_ADAr, "al00": deserialise_al00, "json": deserialise_json, "ad00": deserialise_ad00, "da00": deserialise_da00, - "ar51": deserialise_ar51, "un00": deserialise_un00, } diff --git a/python/src/streaming_data_types/area_detector_ADAr.py b/python/src/streaming_data_types/area_detector_ADAr.py deleted file mode 100644 index 0819e98..0000000 --- a/python/src/streaming_data_types/area_detector_ADAr.py +++ /dev/null @@ -1,207 +0,0 @@ -from datetime import datetime, timezone -from struct import pack -from typing import List, NamedTuple, Union - -import flatbuffers -import numpy as np - -import streaming_data_types.fbschemas.ADAr_ADArray_schema.Attribute as ADArAttribute -from streaming_data_types.fbschemas.ADAr_ADArray_schema import ADArray -from streaming_data_types.fbschemas.ADAr_ADArray_schema.DType import DType -from streaming_data_types.utils import check_schema_identifier - -FILE_IDENTIFIER = b"ADAr" - - -class Attribute: - def __init__( - self, - name: str, - description: str, - source: str, - data: Union[np.ndarray, str, int, float], - ): - self.name = name - self.description = description - self.source = source - self.data = data - - def __eq__(self, other): - data_is_equal = type(self.data) == type(other.data) # noqa: E721 - if type(self.data) is np.ndarray: - data_is_equal = data_is_equal and np.array_equal(self.data, other.data) - else: - data_is_equal = data_is_equal and self.data == other.data - return ( - self.name == other.name - and self.description == other.description - and self.source == other.source - and data_is_equal - ) - - -def serialise_ADAr( - source_name: str, - unique_id: int, - timestamp: datetime, - data: Union[np.ndarray, str], - attributes: List[Attribute] = [], -) -> bytes: - builder = flatbuffers.Builder(1024) - builder.ForceDefaults(True) - - type_map = { - np.dtype("uint8"): DType.uint8, - 
np.dtype("int8"): DType.int8, - np.dtype("uint16"): DType.uint16, - np.dtype("int16"): DType.int16, - np.dtype("uint32"): DType.uint32, - np.dtype("int32"): DType.int32, - np.dtype("uint64"): DType.uint64, - np.dtype("int64"): DType.int64, - np.dtype("float32"): DType.float32, - np.dtype("float64"): DType.float64, - } - - if type(data) is str: - data = np.frombuffer(data.encode(), np.uint8) - data_type = DType.c_string - else: - data_type = type_map[data.dtype] - - # Build dims - dims_offset = builder.CreateNumpyVector(np.asarray(data.shape)) - - # Build data - data_offset = builder.CreateNumpyVector(data.flatten().view(np.uint8)) - - source_name_offset = builder.CreateString(source_name) - - temp_attributes = [] - for item in attributes: - if type(item.data) is np.ndarray: - attr_data_type = type_map[item.data.dtype] - attr_data = item.data - elif type(item.data) is str: - attr_data_type = DType.c_string - attr_data = np.frombuffer(item.data.encode(), np.uint8) - elif type(item.data) is int: - attr_data_type = DType.int64 - attr_data = np.frombuffer(pack("q", item.data), np.uint8) - elif type(item.data) is float: - attr_data_type = DType.float64 - attr_data = np.frombuffer(pack("d", item.data), np.uint8) - attr_name_offset = builder.CreateString(item.name) - attr_desc_offset = builder.CreateString(item.description) - attr_src_offset = builder.CreateString(item.source) - attr_data_offset = builder.CreateNumpyVector(attr_data.flatten().view(np.uint8)) - ADArAttribute.AttributeStart(builder) - ADArAttribute.AttributeAddName(builder, attr_name_offset) - ADArAttribute.AttributeAddDescription(builder, attr_desc_offset) - ADArAttribute.AttributeAddSource(builder, attr_src_offset) - ADArAttribute.AttributeAddDataType(builder, attr_data_type) - ADArAttribute.AttributeAddData(builder, attr_data_offset) - attr_offset = ADArAttribute.AttributeEnd(builder) - temp_attributes.append(attr_offset) - - ADArray.ADArrayStartAttributesVector(builder, len(attributes)) - for item in 
reversed(temp_attributes): - builder.PrependUOffsetTRelative(item) - attributes_offset = builder.EndVector() - - # Build the actual buffer - ADArray.ADArrayStart(builder) - ADArray.ADArrayAddSourceName(builder, source_name_offset) - ADArray.ADArrayAddDataType(builder, data_type) - ADArray.ADArrayAddDimensions(builder, dims_offset) - ADArray.ADArrayAddId(builder, unique_id) - ADArray.ADArrayAddData(builder, data_offset) - ADArray.ADArrayAddTimestamp(builder, int(timestamp.timestamp() * 1e9)) - ADArray.ADArrayAddAttributes(builder, attributes_offset) - array_message = ADArray.ADArrayEnd(builder) - - builder.Finish(array_message, file_identifier=FILE_IDENTIFIER) - return bytes(builder.Output()) - - -ADArray_t = NamedTuple( - "ADArray", - ( - ("source_name", str), - ("unique_id", int), - ("timestamp", datetime), - ("dimensions", np.ndarray), - ("data", np.ndarray), - ("attributes", List[Attribute]), - ), -) - - -def get_payload_data(fb_arr) -> np.ndarray: - return get_data(fb_arr).reshape(fb_arr.DimensionsAsNumpy()) - - -def get_data(fb_arr) -> np.ndarray: - """ - Converts the data array into the correct type. 
- """ - raw_data = fb_arr.DataAsNumpy() - type_map = { - DType.uint8: np.uint8, - DType.int8: np.int8, - DType.uint16: np.uint16, - DType.int16: np.int16, - DType.uint32: np.uint32, - DType.int32: np.int32, - DType.uint64: np.uint64, - DType.int64: np.int64, - DType.float32: np.float32, - DType.float64: np.float64, - } - return raw_data.view(type_map[fb_arr.DataType()]) - - -def deserialise_ADAr(buffer: Union[bytearray, bytes]) -> ADArray: - check_schema_identifier(buffer, FILE_IDENTIFIER) - - ad_array = ADArray.ADArray.GetRootAsADArray(buffer, 0) - unique_id = ad_array.Id() - max_time = datetime( - year=3001, month=1, day=1, hour=0, minute=0, second=0 - ).timestamp() - used_timestamp = ad_array.Timestamp() / 1e9 - if used_timestamp > max_time: - used_timestamp = max_time - if ad_array.DataType() == DType.c_string: - data = ad_array.DataAsNumpy().tobytes().decode() - else: - data = get_payload_data(ad_array) - - attributes_list = [] - for i in range(ad_array.AttributesLength()): - attribute_ptr = ad_array.Attributes(i) - if attribute_ptr.DataType() == DType.c_string: - attr_data = attribute_ptr.DataAsNumpy().tobytes().decode() - else: - attr_data = get_data(attribute_ptr) - temp_attribute = Attribute( - name=attribute_ptr.Name().decode(), - description=attribute_ptr.Description().decode(), - source=attribute_ptr.Source().decode(), - data=attr_data, - ) - if type(temp_attribute.data) is np.ndarray and len(temp_attribute.data) == 1: - if np.issubdtype(temp_attribute.data.dtype, np.floating): - temp_attribute.data = float(temp_attribute.data[0]) - elif np.issubdtype(temp_attribute.data.dtype, np.integer): - temp_attribute.data = int(temp_attribute.data[0]) - attributes_list.append(temp_attribute) - - return ADArray_t( - source_name=ad_array.SourceName().decode(), - unique_id=unique_id, - timestamp=datetime.fromtimestamp(used_timestamp, tz=timezone.utc), - dimensions=tuple(ad_array.DimensionsAsNumpy()), - data=data, - attributes=attributes_list, - ) diff --git 
a/python/src/streaming_data_types/area_detector_NDAr.py b/python/src/streaming_data_types/area_detector_NDAr.py deleted file mode 100644 index 12ac809..0000000 --- a/python/src/streaming_data_types/area_detector_NDAr.py +++ /dev/null @@ -1,94 +0,0 @@ -import time -from collections import namedtuple -from typing import Union - -import flatbuffers -import numpy as np - -from streaming_data_types.fbschemas.NDAr_NDArray_schema import NDArray -from streaming_data_types.utils import check_schema_identifier - -FILE_IDENTIFIER = b"NDAr" - - -def serialise_ndar( - id: str, - dims: list, - data_type: int, - data: list, -) -> bytes: - builder = flatbuffers.Builder(1024) - builder.ForceDefaults(True) - - # Build dims - NDArray.NDArrayStartDimsVector(builder, len(dims)) - # FlatBuffers builds arrays backwards - for s in reversed(dims): - builder.PrependUint64(s) - dims_offset = builder.EndVector() - - # Build data - NDArray.NDArrayStartPDataVector(builder, len(data)) - # FlatBuffers builds arrays backwards - for s in reversed(data): - builder.PrependUint8(s) - data_offset = builder.EndVector() - - # Build the actual buffer - NDArray.NDArrayStart(builder) - NDArray.NDArrayAddDataType(builder, data_type) - NDArray.NDArrayAddDims(builder, dims_offset) - NDArray.NDArrayAddId(builder, id) - NDArray.NDArrayAddPData(builder, data_offset) - NDArray.NDArrayAddTimeStamp(builder, int(time.time() * 1000)) - nd_array_message = NDArray.NDArrayEnd(builder) - - builder.Finish(nd_array_message, file_identifier=FILE_IDENTIFIER) - return bytes(builder.Output()) - - -nd_Array = namedtuple( - "NDArray", - ( - "id", - "timestamp", - "data", - ), -) - - -def get_data(fb_arr): - """ - Converts the data array into the correct type. 
- """ - raw_data = fb_arr.PDataAsNumpy() - numpy_arr_type = [ - np.int8, - np.uint8, - np.int16, - np.uint16, - np.int32, - np.uint32, - np.int64, - np.uint64, - np.float32, - np.float64, - ] - return raw_data.view(numpy_arr_type[fb_arr.DataType()]).reshape( - fb_arr.DimsAsNumpy() - ) - - -def deserialise_ndar(buffer: Union[bytearray, bytes]) -> NDArray: - check_schema_identifier(buffer, FILE_IDENTIFIER) - - nd_array = NDArray.NDArray.GetRootAsNDArray(buffer, 0) - id = nd_array.Id() - timestamp = nd_array.TimeStamp() - data = get_data(nd_array) - - return nd_Array( - id=id, - timestamp=timestamp, - data=data, - ) diff --git a/python/src/streaming_data_types/array_1d_se00.py b/python/src/streaming_data_types/array_1d_se00.py deleted file mode 100644 index bec7760..0000000 --- a/python/src/streaming_data_types/array_1d_se00.py +++ /dev/null @@ -1,149 +0,0 @@ -from typing import List, NamedTuple, Optional, Union - -import flatbuffers -import numpy as np -from flatbuffers.number_types import ( - Float32Flags, - Float64Flags, - Int8Flags, - Int16Flags, - Int32Flags, - Int64Flags, - Uint8Flags, - Uint16Flags, - Uint32Flags, - Uint64Flags, -) - -from streaming_data_types.fbschemas.array_1d_se00.Location import Location -from streaming_data_types.fbschemas.array_1d_se00.se00_SampleEnvironmentData import ( - se00_SampleEnvironmentData, - se00_SampleEnvironmentDataAddChannel, - se00_SampleEnvironmentDataAddMessageCounter, - se00_SampleEnvironmentDataAddName, - se00_SampleEnvironmentDataAddPacketTimestamp, - se00_SampleEnvironmentDataAddTimeDelta, - se00_SampleEnvironmentDataAddTimestampLocation, - se00_SampleEnvironmentDataAddTimestamps, - se00_SampleEnvironmentDataAddValues, - se00_SampleEnvironmentDataAddValuesType, - se00_SampleEnvironmentDataEnd, - se00_SampleEnvironmentDataStart, -) -from streaming_data_types.fbschemas.array_1d_se00.ValueUnion import ValueUnion -from streaming_data_types.utils import check_schema_identifier - -FILE_IDENTIFIER = b"se00" - -flag_map = { - 
ValueUnion.Int8Array: Int8Flags, - ValueUnion.UInt8Array: Uint8Flags, - ValueUnion.Int16Array: Int16Flags, - ValueUnion.UInt16Array: Uint16Flags, - ValueUnion.Int32Array: Int32Flags, - ValueUnion.UInt32Array: Uint32Flags, - ValueUnion.Int64Array: Int64Flags, - ValueUnion.UInt64Array: Uint64Flags, - ValueUnion.DoubleArray: Float64Flags, - ValueUnion.FloatArray: Float32Flags, -} - -numpy_type_map = { - np.dtype("int8"): ValueUnion.Int8Array, - np.dtype("uint8"): ValueUnion.UInt8Array, - np.dtype("int16"): ValueUnion.Int16Array, - np.dtype("uint16"): ValueUnion.UInt16Array, - np.dtype("int32"): ValueUnion.Int32Array, - np.dtype("uint32"): ValueUnion.UInt32Array, - np.dtype("int64"): ValueUnion.Int64Array, - np.dtype("uint64"): ValueUnion.UInt64Array, - np.dtype("float64"): ValueUnion.DoubleArray, - np.dtype("float32"): ValueUnion.FloatArray, -} - -Response = NamedTuple( - "SampleEnvironmentData", - ( - ("name", str), - ("channel", int), - ("timestamp_unix_ns", int), - ("sample_ts_delta", int), - ("ts_location", Location), - ("message_counter", int), - ("values", np.ndarray), - ("value_ts", Optional[np.ndarray]), - ), -) - - -def serialise_se00( - name: str, - channel: int, - timestamp_unix_ns: int, - sample_ts_delta: int, - message_counter: int, - values: Union[np.ndarray, List], - ts_location: Location = Location.Middle, - value_timestamps: Union[np.ndarray, List, None] = None, -) -> bytes: - builder = flatbuffers.Builder(1024) - - if value_timestamps is not None: - used_timestamps = np.atleast_1d(np.asarray(value_timestamps)).astype(np.uint64) - timestamps_offset = builder.CreateNumpyVector(used_timestamps) - - temp_values = np.atleast_1d(np.asarray(values)) - - value_array_offset = builder.CreateNumpyVector(temp_values) - - # Some flatbuffer fu in order to avoid >200 lines of code - builder.StartObject(1) - builder.PrependUOffsetTRelativeSlot( - 0, flatbuffers.number_types.UOffsetTFlags.py_type(value_array_offset), 0 - ) - value_offset = builder.EndObject() - - 
name_offset = builder.CreateString(name) - - se00_SampleEnvironmentDataStart(builder) - se00_SampleEnvironmentDataAddName(builder, name_offset) - se00_SampleEnvironmentDataAddTimeDelta(builder, sample_ts_delta) - se00_SampleEnvironmentDataAddTimestampLocation(builder, ts_location) - se00_SampleEnvironmentDataAddMessageCounter(builder, message_counter) - se00_SampleEnvironmentDataAddChannel(builder, channel) - se00_SampleEnvironmentDataAddPacketTimestamp(builder, timestamp_unix_ns) - se00_SampleEnvironmentDataAddValues(builder, value_offset) - se00_SampleEnvironmentDataAddValuesType(builder, numpy_type_map[temp_values.dtype]) - if value_timestamps is not None: - se00_SampleEnvironmentDataAddTimestamps(builder, timestamps_offset) - - SE_Message = se00_SampleEnvironmentDataEnd(builder) - - builder.Finish(SE_Message, file_identifier=FILE_IDENTIFIER) - return bytes(builder.Output()) - - -def deserialise_se00(buffer: Union[bytearray, bytes]) -> Response: - check_schema_identifier(buffer, FILE_IDENTIFIER) - - SE_data = se00_SampleEnvironmentData.GetRootAsse00_SampleEnvironmentData(buffer, 0) - - value_timestamps = None - if not SE_data.TimestampsIsNone(): - value_timestamps = SE_data.TimestampsAsNumpy() - - # Some flatbuffers fu in order to avoid >200 lines of code - value_offset = SE_data.Values() - value_type = SE_data.ValuesType() - values = value_offset.GetVectorAsNumpy(flag_map[value_type], 4) - - return Response( - name=SE_data.Name().decode(), - channel=SE_data.Channel(), - timestamp_unix_ns=SE_data.PacketTimestamp(), - sample_ts_delta=SE_data.TimeDelta(), - ts_location=SE_data.TimestampLocation(), - message_counter=SE_data.MessageCounter(), - values=values, - value_ts=value_timestamps, - ) diff --git a/python/src/streaming_data_types/epics_connection_info_ep00.py b/python/src/streaming_data_types/epics_connection_info_ep00.py deleted file mode 100644 index 99718b5..0000000 --- a/python/src/streaming_data_types/epics_connection_info_ep00.py +++ /dev/null @@ -1,62 
+0,0 @@ -from collections import namedtuple -from typing import Optional, Union - -import flatbuffers - -from streaming_data_types.fbschemas.epics_connection_info_ep00 import ( - EpicsConnectionInfo, - EventType, -) -from streaming_data_types.utils import check_schema_identifier - -FILE_IDENTIFIER = b"ep00" - - -def serialise_ep00( - timestamp_ns: int, - event_type: EventType, - source_name: str, - service_id: Optional[str] = None, -) -> bytes: - builder = flatbuffers.Builder(136) - builder.ForceDefaults(True) - - if service_id is not None: - service_id_offset = builder.CreateString(service_id) - source_name_offset = builder.CreateString(source_name) - - EpicsConnectionInfo.EpicsConnectionInfoStart(builder) - if service_id is not None: - EpicsConnectionInfo.EpicsConnectionInfoAddServiceId(builder, service_id_offset) - EpicsConnectionInfo.EpicsConnectionInfoAddSourceName(builder, source_name_offset) - EpicsConnectionInfo.EpicsConnectionInfoAddType(builder, event_type) - EpicsConnectionInfo.EpicsConnectionInfoAddTimestamp(builder, timestamp_ns) - - end = EpicsConnectionInfo.EpicsConnectionInfoEnd(builder) - builder.Finish(end, file_identifier=FILE_IDENTIFIER) - return bytes(builder.Output()) - - -EpicsConnection = namedtuple( - "EpicsConnection", ("timestamp", "type", "source_name", "service_id") -) - - -def deserialise_ep00(buffer: Union[bytearray, bytes]) -> EpicsConnection: - check_schema_identifier(buffer, FILE_IDENTIFIER) - - epics_connection = ( - EpicsConnectionInfo.EpicsConnectionInfo.GetRootAsEpicsConnectionInfo(buffer, 0) - ) - - source_name = ( - epics_connection.SourceName() if epics_connection.SourceName() else b"" - ) - service_id = epics_connection.ServiceId() if epics_connection.ServiceId() else b"" - - return EpicsConnection( - epics_connection.Timestamp(), - epics_connection.Type(), - source_name.decode(), - service_id.decode(), - ) diff --git a/python/src/streaming_data_types/eventdata_an44.py b/python/src/streaming_data_types/eventdata_an44.py 
deleted file mode 100644 index 0e41e8d..0000000 --- a/python/src/streaming_data_types/eventdata_an44.py +++ /dev/null @@ -1,95 +0,0 @@ -from collections import namedtuple - -import flatbuffers -import numpy as np - -import streaming_data_types.fbschemas.eventdata_an44.AN44EventMessage as AN44EventMessage -from streaming_data_types.utils import check_schema_identifier - -FILE_IDENTIFIER = b"an44" - - -EventData = namedtuple( - "EventData", - ( - "source_name", - "message_id", - "reference_time", - "reference_time_index", - "time_of_flight", - "pixel_id", - "weight", - ), -) - - -def deserialise_an44(buffer): - """ - Deserialise FlatBuffer an44. - - :param buffer: The FlatBuffers buffer. - :return: The deserialised data. - """ - check_schema_identifier(buffer, FILE_IDENTIFIER) - - event = AN44EventMessage.AN44EventMessage.GetRootAs(buffer, 0) - - return EventData( - event.SourceName().decode("utf-8"), - event.MessageId(), - event.ReferenceTimeAsNumpy(), - event.ReferenceTimeIndexAsNumpy(), - event.TimeOfFlightAsNumpy(), - event.PixelIdAsNumpy(), - event.WeightAsNumpy(), - ) - - -def serialise_an44( - source_name, - message_id, - reference_time, - reference_time_index, - time_of_flight, - pixel_id, - weight, -): - """ - Serialise event data as an an44 FlatBuffers message. 
- - :param source_name: - :param message_id: - :param reference_time: - :param reference_time_index: - :param time_of_flight: - :param pixel_id: - :param weight: - :return: - """ - builder = flatbuffers.Builder(1024) - builder.ForceDefaults(True) - - source = builder.CreateString(source_name) - ref_time_data = builder.CreateNumpyVector( - np.asarray(reference_time).astype(np.int64) - ) - ref_time_index_data = builder.CreateNumpyVector( - np.asarray(reference_time_index).astype(np.int32) - ) - tof_data = builder.CreateNumpyVector(np.asarray(time_of_flight).astype(np.int32)) - pixel_id_data = builder.CreateNumpyVector(np.asarray(pixel_id).astype(np.int32)) - weight_data = builder.CreateNumpyVector(np.asarray(weight).astype(np.int16)) - - AN44EventMessage.AN44EventMessageStart(builder) - AN44EventMessage.AN44EventMessageAddReferenceTime(builder, ref_time_data) - AN44EventMessage.AN44EventMessageAddReferenceTimeIndex(builder, ref_time_index_data) - AN44EventMessage.AN44EventMessageAddTimeOfFlight(builder, tof_data) - AN44EventMessage.AN44EventMessageAddPixelId(builder, pixel_id_data) - AN44EventMessage.AN44EventMessageAddWeight(builder, weight_data) - AN44EventMessage.AN44EventMessageAddMessageId(builder, message_id) - AN44EventMessage.AN44EventMessageAddSourceName(builder, source) - - data = AN44EventMessage.AN44EventMessageEnd(builder) - builder.Finish(data, file_identifier=FILE_IDENTIFIER) - - return bytes(builder.Output()) diff --git a/python/src/streaming_data_types/eventdata_ev42.py b/python/src/streaming_data_types/eventdata_ev42.py deleted file mode 100644 index 5315841..0000000 --- a/python/src/streaming_data_types/eventdata_ev42.py +++ /dev/null @@ -1,107 +0,0 @@ -from collections import namedtuple - -import flatbuffers -import numpy as np - -import streaming_data_types.fbschemas.eventdata_ev42.EventMessage as EventMessage -import streaming_data_types.fbschemas.eventdata_ev42.FacilityData as FacilityData -import 
streaming_data_types.fbschemas.isis_event_info_is84.ISISData as ISISData -from streaming_data_types.utils import check_schema_identifier - -FILE_IDENTIFIER = b"ev42" - - -EventData = namedtuple( - "EventData", - ( - "source_name", - "message_id", - "pulse_time", - "time_of_flight", - "detector_id", - "specific_data", - ), -) - - -def deserialise_ev42(buffer): - """ - Deserialise FlatBuffer ev42. - - :param buffer: The FlatBuffers buffer. - :return: The deserialised data. - """ - check_schema_identifier(buffer, FILE_IDENTIFIER) - - event = EventMessage.EventMessage.GetRootAsEventMessage(buffer, 0) - - specific_data = None - if event.FacilitySpecificDataType() == FacilityData.FacilityData.ISISData: - specific = event.FacilitySpecificData() - isis_buf = ISISData.ISISData() - isis_buf.Init(specific.Bytes, specific.Pos) - specific_data = { - "period_number": isis_buf.PeriodNumber(), - "run_state": isis_buf.RunState(), - "proton_charge": isis_buf.ProtonCharge(), - } - - return EventData( - event.SourceName().decode("utf-8"), - event.MessageId(), - event.PulseTime(), - event.TimeOfFlightAsNumpy(), - event.DetectorIdAsNumpy(), - specific_data, - ) - - -def serialise_ev42( - source_name, message_id, pulse_time, time_of_flight, detector_id, isis_specific=None -): - """ - Serialise event data as an ev42 FlatBuffers message. 
- - :param source_name: - :param message_id: - :param pulse_time: - :param time_of_flight: - :param detector_id: - :param isis_specific: - :return: - """ - builder = flatbuffers.Builder(1024) - builder.ForceDefaults(True) - - source = builder.CreateString(source_name) - - tof_data = builder.CreateNumpyVector(np.asarray(time_of_flight).astype(np.uint32)) - det_data = builder.CreateNumpyVector(np.asarray(detector_id).astype(np.uint32)) - - isis_data = None - if isis_specific: - # isis_builder = flatbuffers.Builder(96) - ISISData.ISISDataStart(builder) - ISISData.ISISDataAddPeriodNumber(builder, isis_specific["period_number"]) - ISISData.ISISDataAddRunState(builder, isis_specific["run_state"]) - ISISData.ISISDataAddProtonCharge(builder, isis_specific["proton_charge"]) - isis_data = ISISData.ISISDataEnd(builder) - - # Build the actual buffer - EventMessage.EventMessageStart(builder) - EventMessage.EventMessageAddDetectorId(builder, det_data) - EventMessage.EventMessageAddTimeOfFlight(builder, tof_data) - EventMessage.EventMessageAddPulseTime(builder, pulse_time) - EventMessage.EventMessageAddMessageId(builder, message_id) - EventMessage.EventMessageAddSourceName(builder, source) - - if isis_specific: - EventMessage.EventMessageAddFacilitySpecificDataType( - builder, FacilityData.FacilityData.ISISData - ) - EventMessage.EventMessageAddFacilitySpecificData(builder, isis_data) - - data = EventMessage.EventMessageEnd(builder) - - builder.Finish(data, file_identifier=FILE_IDENTIFIER) - return bytes(builder.Output()) diff --git a/python/src/streaming_data_types/eventdata_ev43.py b/python/src/streaming_data_types/eventdata_ev43.py deleted file mode 100644 index 3bf5274..0000000 --- a/python/src/streaming_data_types/eventdata_ev43.py +++ /dev/null @@ -1,82 +0,0 @@ -from collections import namedtuple - -import flatbuffers -import numpy as np - -import streaming_data_types.fbschemas.eventdata_ev43.Event43Message as Event43Message -from streaming_data_types.utils import 
check_schema_identifier - -FILE_IDENTIFIER = b"ev43" - - -EventData = namedtuple( - "EventData", - ( - "source_name", - "message_id", - "pulse_time", - "pulse_index", - "time_of_flight", - "detector_id", - ), -) - - -def deserialise_ev43(buffer): - """ - Deserialise FlatBuffer ev43. - - :param buffer: The FlatBuffers buffer. - :return: The deserialised data. - """ - check_schema_identifier(buffer, FILE_IDENTIFIER) - - event = Event43Message.Event43Message.GetRootAsEvent43Message(buffer, 0) - - return EventData( - event.SourceName().decode("utf-8"), - event.MessageId(), - event.PulseTimeAsNumpy(), - event.PulseIndexAsNumpy(), - event.TimeOfFlightAsNumpy(), - event.DetectorIdAsNumpy(), - ) - - -def serialise_ev43( - source_name, message_id, pulse_time, pulse_index, time_of_flight, detector_id -): - """ - Serialise event data as an ev43 FlatBuffers message. - - :param source_name: - :param message_id: - :param pulse_time: - :param pulse_index: - :param time_of_flight: - :param detector_id: - :return: - """ - builder = flatbuffers.Builder(1024) - builder.ForceDefaults(True) - - source = builder.CreateString(source_name) - - pulse_ts_data = builder.CreateNumpyVector(np.asarray(pulse_time).astype(np.uint64)) - pulse_ix_data = builder.CreateNumpyVector(np.asarray(pulse_index).astype(np.uint32)) - tof_data = builder.CreateNumpyVector(np.asarray(time_of_flight).astype(np.uint32)) - det_data = builder.CreateNumpyVector(np.asarray(detector_id).astype(np.uint32)) - - # Build the actual buffer - Event43Message.Event43MessageStart(builder) - Event43Message.Event43MessageAddPulseTime(builder, pulse_ts_data) - Event43Message.Event43MessageAddPulseIndex(builder, pulse_ix_data) - Event43Message.Event43MessageAddDetectorId(builder, det_data) - Event43Message.Event43MessageAddTimeOfFlight(builder, tof_data) - Event43Message.Event43MessageAddMessageId(builder, message_id) - Event43Message.Event43MessageAddSourceName(builder, source) - - data = Event43Message.Event43MessageEnd(builder) 
- - builder.Finish(data, file_identifier=FILE_IDENTIFIER) - return bytes(builder.Output()) diff --git a/python/src/streaming_data_types/fbschemas/ADAr_ADArray_schema/ADArray.py b/python/src/streaming_data_types/fbschemas/ADAr_ADArray_schema/ADArray.py deleted file mode 100644 index a234658..0000000 --- a/python/src/streaming_data_types/fbschemas/ADAr_ADArray_schema/ADArray.py +++ /dev/null @@ -1,201 +0,0 @@ -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: - -import flatbuffers -from flatbuffers.compat import import_numpy - -np = import_numpy() - - -class ADArray(object): - __slots__ = ["_tab"] - - @classmethod - def GetRootAsADArray(cls, buf, offset): - n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = ADArray() - x.Init(buf, n + offset) - return x - - @classmethod - def ADArrayBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier( - buf, offset, b"\x41\x44\x41\x72", size_prefixed=size_prefixed - ) - - # ADArray - def Init(self, buf, pos): - self._tab = flatbuffers.table.Table(buf, pos) - - # ADArray - def SourceName(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.String(o + self._tab.Pos) - return None - - # ADArray - def Id(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) - if o != 0: - return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) - return 0 - - # ADArray - def Timestamp(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) - if o != 0: - return self._tab.Get( - flatbuffers.number_types.Uint64Flags, o + self._tab.Pos - ) - return 0 - - # ADArray - def Dimensions(self, j): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) - if o != 0: - a = self._tab.Vector(o) - return self._tab.Get( - flatbuffers.number_types.Uint64Flags, - a + 
flatbuffers.number_types.UOffsetTFlags.py_type(j * 8), - ) - return 0 - - # ADArray - def DimensionsAsNumpy(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) - if o != 0: - return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint64Flags, o) - return 0 - - # ADArray - def DimensionsLength(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) - if o != 0: - return self._tab.VectorLen(o) - return 0 - - # ADArray - def DimensionsIsNone(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) - return o == 0 - - # ADArray - def DataType(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) - if o != 0: - return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) - return 0 - - # ADArray - def Data(self, j): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) - if o != 0: - a = self._tab.Vector(o) - return self._tab.Get( - flatbuffers.number_types.Uint8Flags, - a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1), - ) - return 0 - - # ADArray - def DataAsNumpy(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) - if o != 0: - return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint8Flags, o) - return 0 - - # ADArray - def DataLength(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) - if o != 0: - return self._tab.VectorLen(o) - return 0 - - # ADArray - def DataIsNone(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) - return o == 0 - - # ADArray - def Attributes(self, j): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16)) - if o != 0: - x = self._tab.Vector(o) - x += flatbuffers.number_types.UOffsetTFlags.py_type(j) * 4 - x = self._tab.Indirect(x) - from .Attribute import Attribute - - obj = Attribute() - obj.Init(self._tab.Bytes, x) - return obj - return None - - # ADArray - def 
AttributesLength(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16)) - if o != 0: - return self._tab.VectorLen(o) - return 0 - - # ADArray - def AttributesIsNone(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16)) - return o == 0 - - -def ADArrayStart(builder): - builder.StartObject(7) - - -def ADArrayAddSourceName(builder, sourceName): - builder.PrependUOffsetTRelativeSlot( - 0, flatbuffers.number_types.UOffsetTFlags.py_type(sourceName), 0 - ) - - -def ADArrayAddId(builder, id): - builder.PrependInt32Slot(1, id, 0) - - -def ADArrayAddTimestamp(builder, timestamp): - builder.PrependUint64Slot(2, timestamp, 0) - - -def ADArrayAddDimensions(builder, dimensions): - builder.PrependUOffsetTRelativeSlot( - 3, flatbuffers.number_types.UOffsetTFlags.py_type(dimensions), 0 - ) - - -def ADArrayStartDimensionsVector(builder, numElems): - return builder.StartVector(8, numElems, 8) - - -def ADArrayAddDataType(builder, dataType): - builder.PrependInt8Slot(4, dataType, 0) - - -def ADArrayAddData(builder, data): - builder.PrependUOffsetTRelativeSlot( - 5, flatbuffers.number_types.UOffsetTFlags.py_type(data), 0 - ) - - -def ADArrayStartDataVector(builder, numElems): - return builder.StartVector(1, numElems, 1) - - -def ADArrayAddAttributes(builder, attributes): - builder.PrependUOffsetTRelativeSlot( - 6, flatbuffers.number_types.UOffsetTFlags.py_type(attributes), 0 - ) - - -def ADArrayStartAttributesVector(builder, numElems): - return builder.StartVector(4, numElems, 4) - - -def ADArrayEnd(builder): - return builder.EndObject() diff --git a/python/src/streaming_data_types/fbschemas/ADAr_ADArray_schema/Attribute.py b/python/src/streaming_data_types/fbschemas/ADAr_ADArray_schema/Attribute.py deleted file mode 100644 index 434f3f1..0000000 --- a/python/src/streaming_data_types/fbschemas/ADAr_ADArray_schema/Attribute.py +++ /dev/null @@ -1,127 +0,0 @@ -# automatically generated by the FlatBuffers compiler, do not modify - -# 
namespace: - -import flatbuffers -from flatbuffers.compat import import_numpy - -np = import_numpy() - - -class Attribute(object): - __slots__ = ["_tab"] - - @classmethod - def GetRootAsAttribute(cls, buf, offset): - n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = Attribute() - x.Init(buf, n + offset) - return x - - @classmethod - def AttributeBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier( - buf, offset, b"\x41\x44\x41\x72", size_prefixed=size_prefixed - ) - - # Attribute - def Init(self, buf, pos): - self._tab = flatbuffers.table.Table(buf, pos) - - # Attribute - def Name(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.String(o + self._tab.Pos) - return None - - # Attribute - def Description(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) - if o != 0: - return self._tab.String(o + self._tab.Pos) - return None - - # Attribute - def Source(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) - if o != 0: - return self._tab.String(o + self._tab.Pos) - return None - - # Attribute - def DataType(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) - if o != 0: - return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) - return 0 - - # Attribute - def Data(self, j): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) - if o != 0: - a = self._tab.Vector(o) - return self._tab.Get( - flatbuffers.number_types.Uint8Flags, - a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1), - ) - return 0 - - # Attribute - def DataAsNumpy(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) - if o != 0: - return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint8Flags, o) - return 0 - - # Attribute - def DataLength(self): - o = 
flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) - if o != 0: - return self._tab.VectorLen(o) - return 0 - - # Attribute - def DataIsNone(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) - return o == 0 - - -def AttributeStart(builder): - builder.StartObject(5) - - -def AttributeAddName(builder, name): - builder.PrependUOffsetTRelativeSlot( - 0, flatbuffers.number_types.UOffsetTFlags.py_type(name), 0 - ) - - -def AttributeAddDescription(builder, description): - builder.PrependUOffsetTRelativeSlot( - 1, flatbuffers.number_types.UOffsetTFlags.py_type(description), 0 - ) - - -def AttributeAddSource(builder, source): - builder.PrependUOffsetTRelativeSlot( - 2, flatbuffers.number_types.UOffsetTFlags.py_type(source), 0 - ) - - -def AttributeAddDataType(builder, dataType): - builder.PrependInt8Slot(3, dataType, 0) - - -def AttributeAddData(builder, data): - builder.PrependUOffsetTRelativeSlot( - 4, flatbuffers.number_types.UOffsetTFlags.py_type(data), 0 - ) - - -def AttributeStartDataVector(builder, numElems): - return builder.StartVector(1, numElems, 1) - - -def AttributeEnd(builder): - return builder.EndObject() diff --git a/python/src/streaming_data_types/fbschemas/ADAr_ADArray_schema/DType.py b/python/src/streaming_data_types/fbschemas/ADAr_ADArray_schema/DType.py deleted file mode 100644 index 22098af..0000000 --- a/python/src/streaming_data_types/fbschemas/ADAr_ADArray_schema/DType.py +++ /dev/null @@ -1,17 +0,0 @@ -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: - - -class DType(object): - int8 = 0 - uint8 = 1 - int16 = 2 - uint16 = 3 - int32 = 4 - uint32 = 5 - int64 = 6 - uint64 = 7 - float32 = 8 - float64 = 9 - c_string = 10 diff --git a/python/src/streaming_data_types/fbschemas/ADAr_ADArray_schema/__init__.py b/python/src/streaming_data_types/fbschemas/ADAr_ADArray_schema/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git 
a/python/src/streaming_data_types/fbschemas/NDAr_NDArray_schema/DType.py b/python/src/streaming_data_types/fbschemas/NDAr_NDArray_schema/DType.py deleted file mode 100644 index 48dac73..0000000 --- a/python/src/streaming_data_types/fbschemas/NDAr_NDArray_schema/DType.py +++ /dev/null @@ -1,17 +0,0 @@ -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: FB_Tables - - -class DType(object): - Int8 = 0 - Uint8 = 1 - Int16 = 2 - Uint16 = 3 - Int32 = 4 - Uint32 = 5 - Int64 = 6 - Uint64 = 7 - Float32 = 8 - Float64 = 9 - c_string = 10 diff --git a/python/src/streaming_data_types/fbschemas/NDAr_NDArray_schema/NDArray.py b/python/src/streaming_data_types/fbschemas/NDAr_NDArray_schema/NDArray.py deleted file mode 100644 index 9264718..0000000 --- a/python/src/streaming_data_types/fbschemas/NDAr_NDArray_schema/NDArray.py +++ /dev/null @@ -1,182 +0,0 @@ -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: FB_Tables - -import flatbuffers - - -class NDArray(object): - __slots__ = ["_tab"] - - @classmethod - def GetRootAsNDArray(cls, buf, offset): - n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = NDArray() - x.Init(buf, n + offset) - return x - - # NDArray - def Init(self, buf, pos): - self._tab = flatbuffers.table.Table(buf, pos) - - # NDArray - def Id(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) - return 0 - - # NDArray - def TimeStamp(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) - if o != 0: - return self._tab.Get( - flatbuffers.number_types.Float64Flags, o + self._tab.Pos - ) - return 0.0 - - # NDArray - def EpicsTS(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) - if o != 0: - x = o + self._tab.Pos - from .epicsTimeStamp import epicsTimeStamp - - obj = epicsTimeStamp() - 
obj.Init(self._tab.Bytes, x) - return obj - return None - - # NDArray - def Dims(self, j): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) - if o != 0: - a = self._tab.Vector(o) - return self._tab.Get( - flatbuffers.number_types.Uint64Flags, - a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8), - ) - return 0 - - # NDArray - def DimsAsNumpy(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) - if o != 0: - return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint64Flags, o) - return 0 - - # NDArray - def DimsLength(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) - if o != 0: - return self._tab.VectorLen(o) - return 0 - - # NDArray - def DataType(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) - if o != 0: - return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) - return 0 - - # NDArray - def PData(self, j): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) - if o != 0: - a = self._tab.Vector(o) - return self._tab.Get( - flatbuffers.number_types.Uint8Flags, - a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1), - ) - return 0 - - # NDArray - def PDataAsNumpy(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) - if o != 0: - return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint8Flags, o) - return 0 - - # NDArray - def PDataLength(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) - if o != 0: - return self._tab.VectorLen(o) - return 0 - - # NDArray - def PAttributeList(self, j): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16)) - if o != 0: - x = self._tab.Vector(o) - x += flatbuffers.number_types.UOffsetTFlags.py_type(j) * 4 - x = self._tab.Indirect(x) - from .NDAttribute import NDAttribute - - obj = NDAttribute() - obj.Init(self._tab.Bytes, x) - return obj - return None - - # NDArray 
- def PAttributeListLength(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16)) - if o != 0: - return self._tab.VectorLen(o) - return 0 - - -def NDArrayStart(builder): - builder.StartObject(7) - - -def NDArrayAddId(builder, id): - builder.PrependInt32Slot(0, id, 0) - - -def NDArrayAddTimeStamp(builder, timeStamp): - builder.PrependFloat64Slot(1, timeStamp, 0.0) - - -def NDArrayAddEpicsTS(builder, epicsTS): - builder.PrependStructSlot( - 2, flatbuffers.number_types.UOffsetTFlags.py_type(epicsTS), 0 - ) - - -def NDArrayAddDims(builder, dims): - builder.PrependUOffsetTRelativeSlot( - 3, flatbuffers.number_types.UOffsetTFlags.py_type(dims), 0 - ) - - -def NDArrayStartDimsVector(builder, numElems): - return builder.StartVector(8, numElems, 8) - - -def NDArrayAddDataType(builder, dataType): - builder.PrependInt8Slot(4, dataType, 0) - - -def NDArrayAddPData(builder, pData): - builder.PrependUOffsetTRelativeSlot( - 5, flatbuffers.number_types.UOffsetTFlags.py_type(pData), 0 - ) - - -def NDArrayStartPDataVector(builder, numElems): - return builder.StartVector(1, numElems, 1) - - -def NDArrayAddPAttributeList(builder, pAttributeList): - builder.PrependUOffsetTRelativeSlot( - 6, flatbuffers.number_types.UOffsetTFlags.py_type(pAttributeList), 0 - ) - - -def NDArrayStartPAttributeListVector(builder, numElems): - return builder.StartVector(4, numElems, 4) - - -def NDArrayEnd(builder): - return builder.EndObject() diff --git a/python/src/streaming_data_types/fbschemas/NDAr_NDArray_schema/NDAttribute.py b/python/src/streaming_data_types/fbschemas/NDAr_NDArray_schema/NDAttribute.py deleted file mode 100644 index 8448343..0000000 --- a/python/src/streaming_data_types/fbschemas/NDAr_NDArray_schema/NDAttribute.py +++ /dev/null @@ -1,113 +0,0 @@ -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: FB_Tables - -import flatbuffers - - -class NDAttribute(object): - __slots__ = ["_tab"] - - @classmethod - def 
GetRootAsNDAttribute(cls, buf, offset): - n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = NDAttribute() - x.Init(buf, n + offset) - return x - - # NDAttribute - def Init(self, buf, pos): - self._tab = flatbuffers.table.Table(buf, pos) - - # NDAttribute - def PName(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.String(o + self._tab.Pos) - return None - - # NDAttribute - def PDescription(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) - if o != 0: - return self._tab.String(o + self._tab.Pos) - return None - - # NDAttribute - def PSource(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) - if o != 0: - return self._tab.String(o + self._tab.Pos) - return None - - # NDAttribute - def DataType(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) - if o != 0: - return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) - return 0 - - # NDAttribute - def PData(self, j): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) - if o != 0: - a = self._tab.Vector(o) - return self._tab.Get( - flatbuffers.number_types.Uint8Flags, - a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1), - ) - return 0 - - # NDAttribute - def PDataAsNumpy(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) - if o != 0: - return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint8Flags, o) - return 0 - - # NDAttribute - def PDataLength(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) - if o != 0: - return self._tab.VectorLen(o) - return 0 - - -def NDAttributeStart(builder): - builder.StartObject(5) - - -def NDAttributeAddPName(builder, pName): - builder.PrependUOffsetTRelativeSlot( - 0, flatbuffers.number_types.UOffsetTFlags.py_type(pName), 0 - ) - - -def NDAttributeAddPDescription(builder, pDescription): - 
builder.PrependUOffsetTRelativeSlot( - 1, flatbuffers.number_types.UOffsetTFlags.py_type(pDescription), 0 - ) - - -def NDAttributeAddPSource(builder, pSource): - builder.PrependUOffsetTRelativeSlot( - 2, flatbuffers.number_types.UOffsetTFlags.py_type(pSource), 0 - ) - - -def NDAttributeAddDataType(builder, dataType): - builder.PrependInt8Slot(3, dataType, 0) - - -def NDAttributeAddPData(builder, pData): - builder.PrependUOffsetTRelativeSlot( - 4, flatbuffers.number_types.UOffsetTFlags.py_type(pData), 0 - ) - - -def NDAttributeStartPDataVector(builder, numElems): - return builder.StartVector(1, numElems, 1) - - -def NDAttributeEnd(builder): - return builder.EndObject() diff --git a/python/src/streaming_data_types/fbschemas/NDAr_NDArray_schema/__init__.py b/python/src/streaming_data_types/fbschemas/NDAr_NDArray_schema/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/python/src/streaming_data_types/fbschemas/NDAr_NDArray_schema/epicsTimeStamp.py b/python/src/streaming_data_types/fbschemas/NDAr_NDArray_schema/epicsTimeStamp.py deleted file mode 100644 index 4afea11..0000000 --- a/python/src/streaming_data_types/fbschemas/NDAr_NDArray_schema/epicsTimeStamp.py +++ /dev/null @@ -1,34 +0,0 @@ -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: FB_Tables - -import flatbuffers - - -class epicsTimeStamp(object): - __slots__ = ["_tab"] - - # epicsTimeStamp - def Init(self, buf, pos): - self._tab = flatbuffers.table.Table(buf, pos) - - # epicsTimeStamp - def SecPastEpoch(self): - return self._tab.Get( - flatbuffers.number_types.Int32Flags, - self._tab.Pos + flatbuffers.number_types.UOffsetTFlags.py_type(0), - ) - - # epicsTimeStamp - def Nsec(self): - return self._tab.Get( - flatbuffers.number_types.Int32Flags, - self._tab.Pos + flatbuffers.number_types.UOffsetTFlags.py_type(4), - ) - - -def CreateepicsTimeStamp(builder, secPastEpoch, nsec): - builder.Prep(4, 8) - builder.PrependInt32(nsec) - 
builder.PrependInt32(secPastEpoch) - return builder.Offset() diff --git a/python/src/streaming_data_types/fbschemas/array_1d_se00/DoubleArray.py b/python/src/streaming_data_types/fbschemas/array_1d_se00/DoubleArray.py deleted file mode 100644 index 8ffcb60..0000000 --- a/python/src/streaming_data_types/fbschemas/array_1d_se00/DoubleArray.py +++ /dev/null @@ -1,69 +0,0 @@ -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: - -import flatbuffers -from flatbuffers.compat import import_numpy -np = import_numpy() - -class DoubleArray(object): - __slots__ = ['_tab'] - - @classmethod - def GetRootAs(cls, buf, offset=0): - n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = DoubleArray() - x.Init(buf, n + offset) - return x - - @classmethod - def GetRootAsDoubleArray(cls, buf, offset=0): - """This method is deprecated. Please switch to GetRootAs.""" - return cls.GetRootAs(buf, offset) - @classmethod - def DoubleArrayBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x73\x65\x30\x30", size_prefixed=size_prefixed) - - # DoubleArray - def Init(self, buf, pos): - self._tab = flatbuffers.table.Table(buf, pos) - - # DoubleArray - def Value(self, j): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - a = self._tab.Vector(o) - return self._tab.Get(flatbuffers.number_types.Float64Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8)) - return 0 - - # DoubleArray - def ValueAsNumpy(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Float64Flags, o) - return 0 - - # DoubleArray - def ValueLength(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.VectorLen(o) - return 0 - - # DoubleArray - def ValueIsNone(self): - o = 
flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - return o == 0 - -def DoubleArrayStart(builder): builder.StartObject(1) -def Start(builder): - return DoubleArrayStart(builder) -def DoubleArrayAddValue(builder, value): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0) -def AddValue(builder, value): - return DoubleArrayAddValue(builder, value) -def DoubleArrayStartValueVector(builder, numElems): return builder.StartVector(8, numElems, 8) -def StartValueVector(builder, numElems): - return DoubleArrayStartValueVector(builder, numElems) -def DoubleArrayEnd(builder): return builder.EndObject() -def End(builder): - return DoubleArrayEnd(builder) \ No newline at end of file diff --git a/python/src/streaming_data_types/fbschemas/array_1d_se00/FloatArray.py b/python/src/streaming_data_types/fbschemas/array_1d_se00/FloatArray.py deleted file mode 100644 index 2298eb1..0000000 --- a/python/src/streaming_data_types/fbschemas/array_1d_se00/FloatArray.py +++ /dev/null @@ -1,69 +0,0 @@ -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: - -import flatbuffers -from flatbuffers.compat import import_numpy -np = import_numpy() - -class FloatArray(object): - __slots__ = ['_tab'] - - @classmethod - def GetRootAs(cls, buf, offset=0): - n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = FloatArray() - x.Init(buf, n + offset) - return x - - @classmethod - def GetRootAsFloatArray(cls, buf, offset=0): - """This method is deprecated. 
Please switch to GetRootAs.""" - return cls.GetRootAs(buf, offset) - @classmethod - def FloatArrayBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x73\x65\x30\x30", size_prefixed=size_prefixed) - - # FloatArray - def Init(self, buf, pos): - self._tab = flatbuffers.table.Table(buf, pos) - - # FloatArray - def Value(self, j): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - a = self._tab.Vector(o) - return self._tab.Get(flatbuffers.number_types.Float32Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4)) - return 0 - - # FloatArray - def ValueAsNumpy(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Float32Flags, o) - return 0 - - # FloatArray - def ValueLength(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.VectorLen(o) - return 0 - - # FloatArray - def ValueIsNone(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - return o == 0 - -def FloatArrayStart(builder): builder.StartObject(1) -def Start(builder): - return FloatArrayStart(builder) -def FloatArrayAddValue(builder, value): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0) -def AddValue(builder, value): - return FloatArrayAddValue(builder, value) -def FloatArrayStartValueVector(builder, numElems): return builder.StartVector(4, numElems, 4) -def StartValueVector(builder, numElems): - return FloatArrayStartValueVector(builder, numElems) -def FloatArrayEnd(builder): return builder.EndObject() -def End(builder): - return FloatArrayEnd(builder) \ No newline at end of file diff --git a/python/src/streaming_data_types/fbschemas/array_1d_se00/Int16Array.py b/python/src/streaming_data_types/fbschemas/array_1d_se00/Int16Array.py deleted file mode 100644 
index 03df4ba..0000000 --- a/python/src/streaming_data_types/fbschemas/array_1d_se00/Int16Array.py +++ /dev/null @@ -1,69 +0,0 @@ -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: - -import flatbuffers -from flatbuffers.compat import import_numpy -np = import_numpy() - -class Int16Array(object): - __slots__ = ['_tab'] - - @classmethod - def GetRootAs(cls, buf, offset=0): - n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = Int16Array() - x.Init(buf, n + offset) - return x - - @classmethod - def GetRootAsInt16Array(cls, buf, offset=0): - """This method is deprecated. Please switch to GetRootAs.""" - return cls.GetRootAs(buf, offset) - @classmethod - def Int16ArrayBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x73\x65\x30\x30", size_prefixed=size_prefixed) - - # Int16Array - def Init(self, buf, pos): - self._tab = flatbuffers.table.Table(buf, pos) - - # Int16Array - def Value(self, j): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - a = self._tab.Vector(o) - return self._tab.Get(flatbuffers.number_types.Int16Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 2)) - return 0 - - # Int16Array - def ValueAsNumpy(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int16Flags, o) - return 0 - - # Int16Array - def ValueLength(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.VectorLen(o) - return 0 - - # Int16Array - def ValueIsNone(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - return o == 0 - -def Int16ArrayStart(builder): builder.StartObject(1) -def Start(builder): - return Int16ArrayStart(builder) -def Int16ArrayAddValue(builder, value): builder.PrependUOffsetTRelativeSlot(0, 
flatbuffers.number_types.UOffsetTFlags.py_type(value), 0) -def AddValue(builder, value): - return Int16ArrayAddValue(builder, value) -def Int16ArrayStartValueVector(builder, numElems): return builder.StartVector(2, numElems, 2) -def StartValueVector(builder, numElems): - return Int16ArrayStartValueVector(builder, numElems) -def Int16ArrayEnd(builder): return builder.EndObject() -def End(builder): - return Int16ArrayEnd(builder) \ No newline at end of file diff --git a/python/src/streaming_data_types/fbschemas/array_1d_se00/Int32Array.py b/python/src/streaming_data_types/fbschemas/array_1d_se00/Int32Array.py deleted file mode 100644 index 27a8627..0000000 --- a/python/src/streaming_data_types/fbschemas/array_1d_se00/Int32Array.py +++ /dev/null @@ -1,69 +0,0 @@ -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: - -import flatbuffers -from flatbuffers.compat import import_numpy -np = import_numpy() - -class Int32Array(object): - __slots__ = ['_tab'] - - @classmethod - def GetRootAs(cls, buf, offset=0): - n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = Int32Array() - x.Init(buf, n + offset) - return x - - @classmethod - def GetRootAsInt32Array(cls, buf, offset=0): - """This method is deprecated. 
Please switch to GetRootAs.""" - return cls.GetRootAs(buf, offset) - @classmethod - def Int32ArrayBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x73\x65\x30\x30", size_prefixed=size_prefixed) - - # Int32Array - def Init(self, buf, pos): - self._tab = flatbuffers.table.Table(buf, pos) - - # Int32Array - def Value(self, j): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - a = self._tab.Vector(o) - return self._tab.Get(flatbuffers.number_types.Int32Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4)) - return 0 - - # Int32Array - def ValueAsNumpy(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int32Flags, o) - return 0 - - # Int32Array - def ValueLength(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.VectorLen(o) - return 0 - - # Int32Array - def ValueIsNone(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - return o == 0 - -def Int32ArrayStart(builder): builder.StartObject(1) -def Start(builder): - return Int32ArrayStart(builder) -def Int32ArrayAddValue(builder, value): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0) -def AddValue(builder, value): - return Int32ArrayAddValue(builder, value) -def Int32ArrayStartValueVector(builder, numElems): return builder.StartVector(4, numElems, 4) -def StartValueVector(builder, numElems): - return Int32ArrayStartValueVector(builder, numElems) -def Int32ArrayEnd(builder): return builder.EndObject() -def End(builder): - return Int32ArrayEnd(builder) \ No newline at end of file diff --git a/python/src/streaming_data_types/fbschemas/array_1d_se00/Int64Array.py b/python/src/streaming_data_types/fbschemas/array_1d_se00/Int64Array.py deleted file mode 100644 index 
ece7e54..0000000 --- a/python/src/streaming_data_types/fbschemas/array_1d_se00/Int64Array.py +++ /dev/null @@ -1,69 +0,0 @@ -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: - -import flatbuffers -from flatbuffers.compat import import_numpy -np = import_numpy() - -class Int64Array(object): - __slots__ = ['_tab'] - - @classmethod - def GetRootAs(cls, buf, offset=0): - n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = Int64Array() - x.Init(buf, n + offset) - return x - - @classmethod - def GetRootAsInt64Array(cls, buf, offset=0): - """This method is deprecated. Please switch to GetRootAs.""" - return cls.GetRootAs(buf, offset) - @classmethod - def Int64ArrayBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x73\x65\x30\x30", size_prefixed=size_prefixed) - - # Int64Array - def Init(self, buf, pos): - self._tab = flatbuffers.table.Table(buf, pos) - - # Int64Array - def Value(self, j): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - a = self._tab.Vector(o) - return self._tab.Get(flatbuffers.number_types.Int64Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8)) - return 0 - - # Int64Array - def ValueAsNumpy(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int64Flags, o) - return 0 - - # Int64Array - def ValueLength(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.VectorLen(o) - return 0 - - # Int64Array - def ValueIsNone(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - return o == 0 - -def Int64ArrayStart(builder): builder.StartObject(1) -def Start(builder): - return Int64ArrayStart(builder) -def Int64ArrayAddValue(builder, value): builder.PrependUOffsetTRelativeSlot(0, 
flatbuffers.number_types.UOffsetTFlags.py_type(value), 0) -def AddValue(builder, value): - return Int64ArrayAddValue(builder, value) -def Int64ArrayStartValueVector(builder, numElems): return builder.StartVector(8, numElems, 8) -def StartValueVector(builder, numElems): - return Int64ArrayStartValueVector(builder, numElems) -def Int64ArrayEnd(builder): return builder.EndObject() -def End(builder): - return Int64ArrayEnd(builder) \ No newline at end of file diff --git a/python/src/streaming_data_types/fbschemas/array_1d_se00/Int8Array.py b/python/src/streaming_data_types/fbschemas/array_1d_se00/Int8Array.py deleted file mode 100644 index f30a9b1..0000000 --- a/python/src/streaming_data_types/fbschemas/array_1d_se00/Int8Array.py +++ /dev/null @@ -1,69 +0,0 @@ -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: - -import flatbuffers -from flatbuffers.compat import import_numpy -np = import_numpy() - -class Int8Array(object): - __slots__ = ['_tab'] - - @classmethod - def GetRootAs(cls, buf, offset=0): - n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = Int8Array() - x.Init(buf, n + offset) - return x - - @classmethod - def GetRootAsInt8Array(cls, buf, offset=0): - """This method is deprecated. 
Please switch to GetRootAs.""" - return cls.GetRootAs(buf, offset) - @classmethod - def Int8ArrayBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x73\x65\x30\x30", size_prefixed=size_prefixed) - - # Int8Array - def Init(self, buf, pos): - self._tab = flatbuffers.table.Table(buf, pos) - - # Int8Array - def Value(self, j): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - a = self._tab.Vector(o) - return self._tab.Get(flatbuffers.number_types.Int8Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1)) - return 0 - - # Int8Array - def ValueAsNumpy(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int8Flags, o) - return 0 - - # Int8Array - def ValueLength(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.VectorLen(o) - return 0 - - # Int8Array - def ValueIsNone(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - return o == 0 - -def Int8ArrayStart(builder): builder.StartObject(1) -def Start(builder): - return Int8ArrayStart(builder) -def Int8ArrayAddValue(builder, value): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0) -def AddValue(builder, value): - return Int8ArrayAddValue(builder, value) -def Int8ArrayStartValueVector(builder, numElems): return builder.StartVector(1, numElems, 1) -def StartValueVector(builder, numElems): - return Int8ArrayStartValueVector(builder, numElems) -def Int8ArrayEnd(builder): return builder.EndObject() -def End(builder): - return Int8ArrayEnd(builder) \ No newline at end of file diff --git a/python/src/streaming_data_types/fbschemas/array_1d_se00/Location.py b/python/src/streaming_data_types/fbschemas/array_1d_se00/Location.py deleted file mode 100644 index 99cf994..0000000 
--- a/python/src/streaming_data_types/fbschemas/array_1d_se00/Location.py +++ /dev/null @@ -1,9 +0,0 @@ -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: - -class Location(object): - Unknown = 0 - Start = 1 - Middle = 2 - End = 3 diff --git a/python/src/streaming_data_types/fbschemas/array_1d_se00/UInt16Array.py b/python/src/streaming_data_types/fbschemas/array_1d_se00/UInt16Array.py deleted file mode 100644 index 8fe1f77..0000000 --- a/python/src/streaming_data_types/fbschemas/array_1d_se00/UInt16Array.py +++ /dev/null @@ -1,69 +0,0 @@ -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: - -import flatbuffers -from flatbuffers.compat import import_numpy -np = import_numpy() - -class UInt16Array(object): - __slots__ = ['_tab'] - - @classmethod - def GetRootAs(cls, buf, offset=0): - n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = UInt16Array() - x.Init(buf, n + offset) - return x - - @classmethod - def GetRootAsUInt16Array(cls, buf, offset=0): - """This method is deprecated. 
Please switch to GetRootAs.""" - return cls.GetRootAs(buf, offset) - @classmethod - def UInt16ArrayBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x73\x65\x30\x30", size_prefixed=size_prefixed) - - # UInt16Array - def Init(self, buf, pos): - self._tab = flatbuffers.table.Table(buf, pos) - - # UInt16Array - def Value(self, j): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - a = self._tab.Vector(o) - return self._tab.Get(flatbuffers.number_types.Uint16Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 2)) - return 0 - - # UInt16Array - def ValueAsNumpy(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint16Flags, o) - return 0 - - # UInt16Array - def ValueLength(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.VectorLen(o) - return 0 - - # UInt16Array - def ValueIsNone(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - return o == 0 - -def UInt16ArrayStart(builder): builder.StartObject(1) -def Start(builder): - return UInt16ArrayStart(builder) -def UInt16ArrayAddValue(builder, value): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0) -def AddValue(builder, value): - return UInt16ArrayAddValue(builder, value) -def UInt16ArrayStartValueVector(builder, numElems): return builder.StartVector(2, numElems, 2) -def StartValueVector(builder, numElems): - return UInt16ArrayStartValueVector(builder, numElems) -def UInt16ArrayEnd(builder): return builder.EndObject() -def End(builder): - return UInt16ArrayEnd(builder) \ No newline at end of file diff --git a/python/src/streaming_data_types/fbschemas/array_1d_se00/UInt32Array.py b/python/src/streaming_data_types/fbschemas/array_1d_se00/UInt32Array.py deleted file 
mode 100644 index 904ed86..0000000 --- a/python/src/streaming_data_types/fbschemas/array_1d_se00/UInt32Array.py +++ /dev/null @@ -1,69 +0,0 @@ -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: - -import flatbuffers -from flatbuffers.compat import import_numpy -np = import_numpy() - -class UInt32Array(object): - __slots__ = ['_tab'] - - @classmethod - def GetRootAs(cls, buf, offset=0): - n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = UInt32Array() - x.Init(buf, n + offset) - return x - - @classmethod - def GetRootAsUInt32Array(cls, buf, offset=0): - """This method is deprecated. Please switch to GetRootAs.""" - return cls.GetRootAs(buf, offset) - @classmethod - def UInt32ArrayBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x73\x65\x30\x30", size_prefixed=size_prefixed) - - # UInt32Array - def Init(self, buf, pos): - self._tab = flatbuffers.table.Table(buf, pos) - - # UInt32Array - def Value(self, j): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - a = self._tab.Vector(o) - return self._tab.Get(flatbuffers.number_types.Uint32Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4)) - return 0 - - # UInt32Array - def ValueAsNumpy(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint32Flags, o) - return 0 - - # UInt32Array - def ValueLength(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.VectorLen(o) - return 0 - - # UInt32Array - def ValueIsNone(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - return o == 0 - -def UInt32ArrayStart(builder): builder.StartObject(1) -def Start(builder): - return UInt32ArrayStart(builder) -def UInt32ArrayAddValue(builder, value): 
builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0) -def AddValue(builder, value): - return UInt32ArrayAddValue(builder, value) -def UInt32ArrayStartValueVector(builder, numElems): return builder.StartVector(4, numElems, 4) -def StartValueVector(builder, numElems): - return UInt32ArrayStartValueVector(builder, numElems) -def UInt32ArrayEnd(builder): return builder.EndObject() -def End(builder): - return UInt32ArrayEnd(builder) \ No newline at end of file diff --git a/python/src/streaming_data_types/fbschemas/array_1d_se00/UInt64Array.py b/python/src/streaming_data_types/fbschemas/array_1d_se00/UInt64Array.py deleted file mode 100644 index cad65f6..0000000 --- a/python/src/streaming_data_types/fbschemas/array_1d_se00/UInt64Array.py +++ /dev/null @@ -1,69 +0,0 @@ -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: - -import flatbuffers -from flatbuffers.compat import import_numpy -np = import_numpy() - -class UInt64Array(object): - __slots__ = ['_tab'] - - @classmethod - def GetRootAs(cls, buf, offset=0): - n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = UInt64Array() - x.Init(buf, n + offset) - return x - - @classmethod - def GetRootAsUInt64Array(cls, buf, offset=0): - """This method is deprecated. 
Please switch to GetRootAs.""" - return cls.GetRootAs(buf, offset) - @classmethod - def UInt64ArrayBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x73\x65\x30\x30", size_prefixed=size_prefixed) - - # UInt64Array - def Init(self, buf, pos): - self._tab = flatbuffers.table.Table(buf, pos) - - # UInt64Array - def Value(self, j): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - a = self._tab.Vector(o) - return self._tab.Get(flatbuffers.number_types.Uint64Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8)) - return 0 - - # UInt64Array - def ValueAsNumpy(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint64Flags, o) - return 0 - - # UInt64Array - def ValueLength(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.VectorLen(o) - return 0 - - # UInt64Array - def ValueIsNone(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - return o == 0 - -def UInt64ArrayStart(builder): builder.StartObject(1) -def Start(builder): - return UInt64ArrayStart(builder) -def UInt64ArrayAddValue(builder, value): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0) -def AddValue(builder, value): - return UInt64ArrayAddValue(builder, value) -def UInt64ArrayStartValueVector(builder, numElems): return builder.StartVector(8, numElems, 8) -def StartValueVector(builder, numElems): - return UInt64ArrayStartValueVector(builder, numElems) -def UInt64ArrayEnd(builder): return builder.EndObject() -def End(builder): - return UInt64ArrayEnd(builder) \ No newline at end of file diff --git a/python/src/streaming_data_types/fbschemas/array_1d_se00/UInt8Array.py b/python/src/streaming_data_types/fbschemas/array_1d_se00/UInt8Array.py deleted file 
mode 100644 index ba3d05e..0000000 --- a/python/src/streaming_data_types/fbschemas/array_1d_se00/UInt8Array.py +++ /dev/null @@ -1,69 +0,0 @@ -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: - -import flatbuffers -from flatbuffers.compat import import_numpy -np = import_numpy() - -class UInt8Array(object): - __slots__ = ['_tab'] - - @classmethod - def GetRootAs(cls, buf, offset=0): - n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = UInt8Array() - x.Init(buf, n + offset) - return x - - @classmethod - def GetRootAsUInt8Array(cls, buf, offset=0): - """This method is deprecated. Please switch to GetRootAs.""" - return cls.GetRootAs(buf, offset) - @classmethod - def UInt8ArrayBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x73\x65\x30\x30", size_prefixed=size_prefixed) - - # UInt8Array - def Init(self, buf, pos): - self._tab = flatbuffers.table.Table(buf, pos) - - # UInt8Array - def Value(self, j): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - a = self._tab.Vector(o) - return self._tab.Get(flatbuffers.number_types.Uint8Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1)) - return 0 - - # UInt8Array - def ValueAsNumpy(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint8Flags, o) - return 0 - - # UInt8Array - def ValueLength(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.VectorLen(o) - return 0 - - # UInt8Array - def ValueIsNone(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - return o == 0 - -def UInt8ArrayStart(builder): builder.StartObject(1) -def Start(builder): - return UInt8ArrayStart(builder) -def UInt8ArrayAddValue(builder, value): builder.PrependUOffsetTRelativeSlot(0, 
flatbuffers.number_types.UOffsetTFlags.py_type(value), 0) -def AddValue(builder, value): - return UInt8ArrayAddValue(builder, value) -def UInt8ArrayStartValueVector(builder, numElems): return builder.StartVector(1, numElems, 1) -def StartValueVector(builder, numElems): - return UInt8ArrayStartValueVector(builder, numElems) -def UInt8ArrayEnd(builder): return builder.EndObject() -def End(builder): - return UInt8ArrayEnd(builder) \ No newline at end of file diff --git a/python/src/streaming_data_types/fbschemas/array_1d_se00/ValueUnion.py b/python/src/streaming_data_types/fbschemas/array_1d_se00/ValueUnion.py deleted file mode 100644 index 514ab29..0000000 --- a/python/src/streaming_data_types/fbschemas/array_1d_se00/ValueUnion.py +++ /dev/null @@ -1,16 +0,0 @@ -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: - -class ValueUnion(object): - NONE = 0 - Int8Array = 1 - UInt8Array = 2 - Int16Array = 3 - UInt16Array = 4 - Int32Array = 5 - UInt32Array = 6 - Int64Array = 7 - UInt64Array = 8 - DoubleArray = 9 - FloatArray = 10 diff --git a/python/src/streaming_data_types/fbschemas/array_1d_se00/__init__.py b/python/src/streaming_data_types/fbschemas/array_1d_se00/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/python/src/streaming_data_types/fbschemas/array_1d_se00/se00_SampleEnvironmentData.py b/python/src/streaming_data_types/fbschemas/array_1d_se00/se00_SampleEnvironmentData.py deleted file mode 100644 index 34bc859..0000000 --- a/python/src/streaming_data_types/fbschemas/array_1d_se00/se00_SampleEnvironmentData.py +++ /dev/null @@ -1,152 +0,0 @@ -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: - -import flatbuffers -from flatbuffers.compat import import_numpy -np = import_numpy() - -class se00_SampleEnvironmentData(object): - __slots__ = ['_tab'] - - @classmethod - def GetRootAs(cls, buf, offset=0): - n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = 
se00_SampleEnvironmentData() - x.Init(buf, n + offset) - return x - - @classmethod - def GetRootAsse00_SampleEnvironmentData(cls, buf, offset=0): - """This method is deprecated. Please switch to GetRootAs.""" - return cls.GetRootAs(buf, offset) - @classmethod - def se00_SampleEnvironmentDataBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x73\x65\x30\x30", size_prefixed=size_prefixed) - - # se00_SampleEnvironmentData - def Init(self, buf, pos): - self._tab = flatbuffers.table.Table(buf, pos) - - # se00_SampleEnvironmentData - def Name(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.String(o + self._tab.Pos) - return None - - # se00_SampleEnvironmentData - def Channel(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) - if o != 0: - return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) - return 0 - - # se00_SampleEnvironmentData - def PacketTimestamp(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) - if o != 0: - return self._tab.Get(flatbuffers.number_types.Int64Flags, o + self._tab.Pos) - return 0 - - # se00_SampleEnvironmentData - def TimeDelta(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) - if o != 0: - return self._tab.Get(flatbuffers.number_types.Float64Flags, o + self._tab.Pos) - return 0.0 - - # se00_SampleEnvironmentData - def TimestampLocation(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) - if o != 0: - return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) - return 0 - - # se00_SampleEnvironmentData - def ValuesType(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) - if o != 0: - return self._tab.Get(flatbuffers.number_types.Uint8Flags, o + self._tab.Pos) - return 0 - - # se00_SampleEnvironmentData - def 
Values(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16)) - if o != 0: - from flatbuffers.table import Table - obj = Table(bytearray(), 0) - self._tab.Union(obj, o) - return obj - return None - - # se00_SampleEnvironmentData - def Timestamps(self, j): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(18)) - if o != 0: - a = self._tab.Vector(o) - return self._tab.Get(flatbuffers.number_types.Int64Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8)) - return 0 - - # se00_SampleEnvironmentData - def TimestampsAsNumpy(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(18)) - if o != 0: - return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int64Flags, o) - return 0 - - # se00_SampleEnvironmentData - def TimestampsLength(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(18)) - if o != 0: - return self._tab.VectorLen(o) - return 0 - - # se00_SampleEnvironmentData - def TimestampsIsNone(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(18)) - return o == 0 - - # se00_SampleEnvironmentData - def MessageCounter(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(20)) - if o != 0: - return self._tab.Get(flatbuffers.number_types.Int64Flags, o + self._tab.Pos) - return 0 - -def se00_SampleEnvironmentDataStart(builder): builder.StartObject(9) -def Start(builder): - return se00_SampleEnvironmentDataStart(builder) -def se00_SampleEnvironmentDataAddName(builder, name): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(name), 0) -def AddName(builder, name): - return se00_SampleEnvironmentDataAddName(builder, name) -def se00_SampleEnvironmentDataAddChannel(builder, channel): builder.PrependInt32Slot(1, channel, 0) -def AddChannel(builder, channel): - return se00_SampleEnvironmentDataAddChannel(builder, channel) -def se00_SampleEnvironmentDataAddPacketTimestamp(builder, 
packetTimestamp): builder.PrependInt64Slot(2, packetTimestamp, 0) -def AddPacketTimestamp(builder, packetTimestamp): - return se00_SampleEnvironmentDataAddPacketTimestamp(builder, packetTimestamp) -def se00_SampleEnvironmentDataAddTimeDelta(builder, timeDelta): builder.PrependFloat64Slot(3, timeDelta, 0.0) -def AddTimeDelta(builder, timeDelta): - return se00_SampleEnvironmentDataAddTimeDelta(builder, timeDelta) -def se00_SampleEnvironmentDataAddTimestampLocation(builder, timestampLocation): builder.PrependInt8Slot(4, timestampLocation, 0) -def AddTimestampLocation(builder, timestampLocation): - return se00_SampleEnvironmentDataAddTimestampLocation(builder, timestampLocation) -def se00_SampleEnvironmentDataAddValuesType(builder, valuesType): builder.PrependUint8Slot(5, valuesType, 0) -def AddValuesType(builder, valuesType): - return se00_SampleEnvironmentDataAddValuesType(builder, valuesType) -def se00_SampleEnvironmentDataAddValues(builder, values): builder.PrependUOffsetTRelativeSlot(6, flatbuffers.number_types.UOffsetTFlags.py_type(values), 0) -def AddValues(builder, values): - return se00_SampleEnvironmentDataAddValues(builder, values) -def se00_SampleEnvironmentDataAddTimestamps(builder, timestamps): builder.PrependUOffsetTRelativeSlot(7, flatbuffers.number_types.UOffsetTFlags.py_type(timestamps), 0) -def AddTimestamps(builder, timestamps): - return se00_SampleEnvironmentDataAddTimestamps(builder, timestamps) -def se00_SampleEnvironmentDataStartTimestampsVector(builder, numElems): return builder.StartVector(8, numElems, 8) -def StartTimestampsVector(builder, numElems): - return se00_SampleEnvironmentDataStartTimestampsVector(builder, numElems) -def se00_SampleEnvironmentDataAddMessageCounter(builder, messageCounter): builder.PrependInt64Slot(8, messageCounter, 0) -def AddMessageCounter(builder, messageCounter): - return se00_SampleEnvironmentDataAddMessageCounter(builder, messageCounter) -def se00_SampleEnvironmentDataEnd(builder): return builder.EndObject() 
-def End(builder): - return se00_SampleEnvironmentDataEnd(builder) \ No newline at end of file diff --git a/python/src/streaming_data_types/fbschemas/epics_connection_info_ep00/EpicsConnectionInfo.py b/python/src/streaming_data_types/fbschemas/epics_connection_info_ep00/EpicsConnectionInfo.py deleted file mode 100644 index f2f1daa..0000000 --- a/python/src/streaming_data_types/fbschemas/epics_connection_info_ep00/EpicsConnectionInfo.py +++ /dev/null @@ -1,80 +0,0 @@ -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: - -import flatbuffers - - -class EpicsConnectionInfo(object): - __slots__ = ["_tab"] - - @classmethod - def GetRootAsEpicsConnectionInfo(cls, buf, offset): - n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = EpicsConnectionInfo() - x.Init(buf, n + offset) - return x - - # EpicsConnectionInfo - def Init(self, buf, pos): - self._tab = flatbuffers.table.Table(buf, pos) - - # EpicsConnectionInfo - def Timestamp(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.Get( - flatbuffers.number_types.Uint64Flags, o + self._tab.Pos - ) - return 0 - - # EpicsConnectionInfo - def Type(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) - if o != 0: - return self._tab.Get( - flatbuffers.number_types.Uint16Flags, o + self._tab.Pos - ) - return 0 - - # EpicsConnectionInfo - def SourceName(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) - if o != 0: - return self._tab.String(o + self._tab.Pos) - return None - - # EpicsConnectionInfo - def ServiceId(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) - if o != 0: - return self._tab.String(o + self._tab.Pos) - return None - - -def EpicsConnectionInfoStart(builder): - builder.StartObject(4) - - -def EpicsConnectionInfoAddTimestamp(builder, timestamp): - builder.PrependUint64Slot(0, timestamp, 0) - - -def 
EpicsConnectionInfoAddType(builder, type): - builder.PrependUint16Slot(1, type, 0) - - -def EpicsConnectionInfoAddSourceName(builder, sourceName): - builder.PrependUOffsetTRelativeSlot( - 2, flatbuffers.number_types.UOffsetTFlags.py_type(sourceName), 0 - ) - - -def EpicsConnectionInfoAddServiceId(builder, serviceId): - builder.PrependUOffsetTRelativeSlot( - 3, flatbuffers.number_types.UOffsetTFlags.py_type(serviceId), 0 - ) - - -def EpicsConnectionInfoEnd(builder): - return builder.EndObject() diff --git a/python/src/streaming_data_types/fbschemas/epics_connection_info_ep00/EventType.py b/python/src/streaming_data_types/fbschemas/epics_connection_info_ep00/EventType.py deleted file mode 100644 index 7a25f0f..0000000 --- a/python/src/streaming_data_types/fbschemas/epics_connection_info_ep00/EventType.py +++ /dev/null @@ -1,11 +0,0 @@ -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: - - -class EventType(object): - UNKNOWN = 0 - NEVER_CONNECTED = 1 - CONNECTED = 2 - DISCONNECTED = 3 - DESTROYED = 4 diff --git a/python/src/streaming_data_types/fbschemas/epics_connection_info_ep00/__init__.py b/python/src/streaming_data_types/fbschemas/epics_connection_info_ep00/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/python/src/streaming_data_types/fbschemas/eventdata_an44/AN44EventMessage.py b/python/src/streaming_data_types/fbschemas/eventdata_an44/AN44EventMessage.py deleted file mode 100644 index a107bca..0000000 --- a/python/src/streaming_data_types/fbschemas/eventdata_an44/AN44EventMessage.py +++ /dev/null @@ -1,322 +0,0 @@ -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: - -import flatbuffers -from flatbuffers.compat import import_numpy - -np = import_numpy() - - -class AN44EventMessage(object): - __slots__ = ["_tab"] - - @classmethod - def GetRootAs(cls, buf, offset=0): - n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = AN44EventMessage() - 
x.Init(buf, n + offset) - return x - - @classmethod - def GetRootAsAN44EventMessage(cls, buf, offset=0): - """This method is deprecated. Please switch to GetRootAs.""" - return cls.GetRootAs(buf, offset) - - @classmethod - def AN44EventMessageBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier( - buf, offset, b"\x61\x6E\x34\x34", size_prefixed=size_prefixed - ) - - # AN44EventMessage - def Init(self, buf, pos): - self._tab = flatbuffers.table.Table(buf, pos) - - # AN44EventMessage - def SourceName(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.String(o + self._tab.Pos) - return None - - # AN44EventMessage - def MessageId(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) - if o != 0: - return self._tab.Get(flatbuffers.number_types.Int64Flags, o + self._tab.Pos) - return 0 - - # AN44EventMessage - def ReferenceTime(self, j): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) - if o != 0: - a = self._tab.Vector(o) - return self._tab.Get( - flatbuffers.number_types.Int64Flags, - a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8), - ) - return 0 - - # AN44EventMessage - def ReferenceTimeAsNumpy(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) - if o != 0: - return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int64Flags, o) - return 0 - - # AN44EventMessage - def ReferenceTimeLength(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) - if o != 0: - return self._tab.VectorLen(o) - return 0 - - # AN44EventMessage - def ReferenceTimeIsNone(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) - return o == 0 - - # AN44EventMessage - def ReferenceTimeIndex(self, j): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) - if o != 0: - a = self._tab.Vector(o) - return self._tab.Get( - 
flatbuffers.number_types.Int32Flags, - a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4), - ) - return 0 - - # AN44EventMessage - def ReferenceTimeIndexAsNumpy(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) - if o != 0: - return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int32Flags, o) - return 0 - - # AN44EventMessage - def ReferenceTimeIndexLength(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) - if o != 0: - return self._tab.VectorLen(o) - return 0 - - # AN44EventMessage - def ReferenceTimeIndexIsNone(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) - return o == 0 - - # AN44EventMessage - def TimeOfFlight(self, j): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) - if o != 0: - a = self._tab.Vector(o) - return self._tab.Get( - flatbuffers.number_types.Int32Flags, - a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4), - ) - return 0 - - # AN44EventMessage - def TimeOfFlightAsNumpy(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) - if o != 0: - return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int32Flags, o) - return 0 - - # AN44EventMessage - def TimeOfFlightLength(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) - if o != 0: - return self._tab.VectorLen(o) - return 0 - - # AN44EventMessage - def TimeOfFlightIsNone(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) - return o == 0 - - # AN44EventMessage - def PixelId(self, j): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) - if o != 0: - a = self._tab.Vector(o) - return self._tab.Get( - flatbuffers.number_types.Int32Flags, - a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4), - ) - return 0 - - # AN44EventMessage - def PixelIdAsNumpy(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) - if o != 
0: - return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int32Flags, o) - return 0 - - # AN44EventMessage - def PixelIdLength(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) - if o != 0: - return self._tab.VectorLen(o) - return 0 - - # AN44EventMessage - def PixelIdIsNone(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) - return o == 0 - - # AN44EventMessage - def Weight(self, j): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16)) - if o != 0: - a = self._tab.Vector(o) - return self._tab.Get( - flatbuffers.number_types.Int16Flags, - a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 2), - ) - return 0 - - # AN44EventMessage - def WeightAsNumpy(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16)) - if o != 0: - return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int16Flags, o) - return 0 - - # AN44EventMessage - def WeightLength(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16)) - if o != 0: - return self._tab.VectorLen(o) - return 0 - - # AN44EventMessage - def WeightIsNone(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16)) - return o == 0 - - -def AN44EventMessageStart(builder): - builder.StartObject(7) - - -def Start(builder): - AN44EventMessageStart(builder) - - -def AN44EventMessageAddSourceName(builder, sourceName): - builder.PrependUOffsetTRelativeSlot( - 0, flatbuffers.number_types.UOffsetTFlags.py_type(sourceName), 0 - ) - - -def AddSourceName(builder, sourceName): - AN44EventMessageAddSourceName(builder, sourceName) - - -def AN44EventMessageAddMessageId(builder, messageId): - builder.PrependInt64Slot(1, messageId, 0) - - -def AddMessageId(builder, messageId): - AN44EventMessageAddMessageId(builder, messageId) - - -def AN44EventMessageAddReferenceTime(builder, referenceTime): - builder.PrependUOffsetTRelativeSlot( - 2, 
flatbuffers.number_types.UOffsetTFlags.py_type(referenceTime), 0 - ) - - -def AddReferenceTime(builder, referenceTime): - AN44EventMessageAddReferenceTime(builder, referenceTime) - - -def AN44EventMessageStartReferenceTimeVector(builder, numElems): - return builder.StartVector(8, numElems, 8) - - -def StartReferenceTimeVector(builder, numElems): - return AN44EventMessageStartReferenceTimeVector(builder, numElems) - - -def AN44EventMessageAddReferenceTimeIndex(builder, referenceTimeIndex): - builder.PrependUOffsetTRelativeSlot( - 3, flatbuffers.number_types.UOffsetTFlags.py_type(referenceTimeIndex), 0 - ) - - -def AddReferenceTimeIndex(builder, referenceTimeIndex): - AN44EventMessageAddReferenceTimeIndex(builder, referenceTimeIndex) - - -def AN44EventMessageStartReferenceTimeIndexVector(builder, numElems): - return builder.StartVector(4, numElems, 4) - - -def StartReferenceTimeIndexVector(builder, numElems): - return AN44EventMessageStartReferenceTimeIndexVector(builder, numElems) - - -def AN44EventMessageAddTimeOfFlight(builder, timeOfFlight): - builder.PrependUOffsetTRelativeSlot( - 4, flatbuffers.number_types.UOffsetTFlags.py_type(timeOfFlight), 0 - ) - - -def AddTimeOfFlight(builder, timeOfFlight): - AN44EventMessageAddTimeOfFlight(builder, timeOfFlight) - - -def AN44EventMessageStartTimeOfFlightVector(builder, numElems): - return builder.StartVector(4, numElems, 4) - - -def StartTimeOfFlightVector(builder, numElems): - return AN44EventMessageStartTimeOfFlightVector(builder, numElems) - - -def AN44EventMessageAddPixelId(builder, pixelId): - builder.PrependUOffsetTRelativeSlot( - 5, flatbuffers.number_types.UOffsetTFlags.py_type(pixelId), 0 - ) - - -def AddPixelId(builder, pixelId): - AN44EventMessageAddPixelId(builder, pixelId) - - -def AN44EventMessageStartPixelIdVector(builder, numElems): - return builder.StartVector(4, numElems, 4) - - -def StartPixelIdVector(builder, numElems): - return AN44EventMessageStartPixelIdVector(builder, numElems) - - -def 
AN44EventMessageAddWeight(builder, weight): - builder.PrependUOffsetTRelativeSlot( - 6, flatbuffers.number_types.UOffsetTFlags.py_type(weight), 0 - ) - - -def AddWeight(builder, weight): - AN44EventMessageAddWeight(builder, weight) - - -def AN44EventMessageStartWeightVector(builder, numElems): - return builder.StartVector(2, numElems, 2) - - -def StartWeightVector(builder, numElems): - return AN44EventMessageStartWeightVector(builder, numElems) - - -def AN44EventMessageEnd(builder): - return builder.EndObject() - - -def End(builder): - return AN44EventMessageEnd(builder) diff --git a/python/src/streaming_data_types/fbschemas/eventdata_an44/__init__.py b/python/src/streaming_data_types/fbschemas/eventdata_an44/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/python/src/streaming_data_types/fbschemas/eventdata_ev42/EventMessage.py b/python/src/streaming_data_types/fbschemas/eventdata_ev42/EventMessage.py deleted file mode 100644 index 3e4290a..0000000 --- a/python/src/streaming_data_types/fbschemas/eventdata_ev42/EventMessage.py +++ /dev/null @@ -1,165 +0,0 @@ -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: - -import flatbuffers - - -class EventMessage(object): - __slots__ = ["_tab"] - - @classmethod - def GetRootAsEventMessage(cls, buf, offset): - n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = EventMessage() - x.Init(buf, n + offset) - return x - - # EventMessage - def Init(self, buf, pos): - self._tab = flatbuffers.table.Table(buf, pos) - - # EventMessage - def SourceName(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.String(o + self._tab.Pos) - return None - - # EventMessage - def MessageId(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) - if o != 0: - return self._tab.Get( - flatbuffers.number_types.Uint64Flags, o + self._tab.Pos - ) - return 0 - - # EventMessage - def 
PulseTime(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) - if o != 0: - return self._tab.Get( - flatbuffers.number_types.Uint64Flags, o + self._tab.Pos - ) - return 0 - - # EventMessage - def TimeOfFlight(self, j): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) - if o != 0: - a = self._tab.Vector(o) - return self._tab.Get( - flatbuffers.number_types.Uint32Flags, - a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4), - ) - return 0 - - # EventMessage - def TimeOfFlightAsNumpy(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) - if o != 0: - return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint32Flags, o) - return 0 - - # EventMessage - def TimeOfFlightLength(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) - if o != 0: - return self._tab.VectorLen(o) - return 0 - - # EventMessage - def DetectorId(self, j): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) - if o != 0: - a = self._tab.Vector(o) - return self._tab.Get( - flatbuffers.number_types.Uint32Flags, - a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4), - ) - return 0 - - # EventMessage - def DetectorIdAsNumpy(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) - if o != 0: - return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint32Flags, o) - return 0 - - # EventMessage - def DetectorIdLength(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) - if o != 0: - return self._tab.VectorLen(o) - return 0 - - # EventMessage - def FacilitySpecificDataType(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) - if o != 0: - return self._tab.Get(flatbuffers.number_types.Uint8Flags, o + self._tab.Pos) - return 0 - - # EventMessage - def FacilitySpecificData(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16)) - if o != 0: - 
from flatbuffers.table import Table - - obj = Table(bytearray(), 0) - self._tab.Union(obj, o) - return obj - return None - - -def EventMessageStart(builder): - builder.StartObject(7) - - -def EventMessageAddSourceName(builder, sourceName): - builder.PrependUOffsetTRelativeSlot( - 0, flatbuffers.number_types.UOffsetTFlags.py_type(sourceName), 0 - ) - - -def EventMessageAddMessageId(builder, messageId): - builder.PrependUint64Slot(1, messageId, 0) - - -def EventMessageAddPulseTime(builder, pulseTime): - builder.PrependUint64Slot(2, pulseTime, 0) - - -def EventMessageAddTimeOfFlight(builder, timeOfFlight): - builder.PrependUOffsetTRelativeSlot( - 3, flatbuffers.number_types.UOffsetTFlags.py_type(timeOfFlight), 0 - ) - - -def EventMessageStartTimeOfFlightVector(builder, numElems): - return builder.StartVector(4, numElems, 4) - - -def EventMessageAddDetectorId(builder, detectorId): - builder.PrependUOffsetTRelativeSlot( - 4, flatbuffers.number_types.UOffsetTFlags.py_type(detectorId), 0 - ) - - -def EventMessageStartDetectorIdVector(builder, numElems): - return builder.StartVector(4, numElems, 4) - - -def EventMessageAddFacilitySpecificDataType(builder, facilitySpecificDataType): - builder.PrependUint8Slot(5, facilitySpecificDataType, 0) - - -def EventMessageAddFacilitySpecificData(builder, facilitySpecificData): - builder.PrependUOffsetTRelativeSlot( - 6, flatbuffers.number_types.UOffsetTFlags.py_type(facilitySpecificData), 0 - ) - - -def EventMessageEnd(builder): - return builder.EndObject() diff --git a/python/src/streaming_data_types/fbschemas/eventdata_ev42/FacilityData.py b/python/src/streaming_data_types/fbschemas/eventdata_ev42/FacilityData.py deleted file mode 100644 index 2be4a10..0000000 --- a/python/src/streaming_data_types/fbschemas/eventdata_ev42/FacilityData.py +++ /dev/null @@ -1,9 +0,0 @@ -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: - - -class FacilityData(object): - NONE = 0 - ISISData = 1 - AdcPulseDebug = 2 
diff --git a/python/src/streaming_data_types/fbschemas/eventdata_ev42/__init__.py b/python/src/streaming_data_types/fbschemas/eventdata_ev42/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/python/src/streaming_data_types/fbschemas/eventdata_ev43/Event43Message.py b/python/src/streaming_data_types/fbschemas/eventdata_ev43/Event43Message.py deleted file mode 100644 index ad0b58a..0000000 --- a/python/src/streaming_data_types/fbschemas/eventdata_ev43/Event43Message.py +++ /dev/null @@ -1,223 +0,0 @@ -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: - -import flatbuffers -from flatbuffers.compat import import_numpy - -np = import_numpy() - - -class Event43Message(object): - __slots__ = ["_tab"] - - @classmethod - def GetRootAsEvent43Message(cls, buf, offset): - n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = Event43Message() - x.Init(buf, n + offset) - return x - - @classmethod - def Event43MessageBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier( - buf, offset, b"\x65\x76\x34\x33", size_prefixed=size_prefixed - ) - - # Event43Message - def Init(self, buf, pos): - self._tab = flatbuffers.table.Table(buf, pos) - - # Event43Message - def SourceName(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.String(o + self._tab.Pos) - return None - - # Event43Message - def MessageId(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) - if o != 0: - return self._tab.Get( - flatbuffers.number_types.Uint64Flags, o + self._tab.Pos - ) - return 0 - - # Event43Message - def PulseTime(self, j): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) - if o != 0: - a = self._tab.Vector(o) - return self._tab.Get( - flatbuffers.number_types.Uint64Flags, - a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8), - ) - return 0 - - # 
Event43Message - def PulseTimeAsNumpy(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) - if o != 0: - return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint64Flags, o) - return 0 - - # Event43Message - def PulseTimeLength(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) - if o != 0: - return self._tab.VectorLen(o) - return 0 - - # Event43Message - def PulseTimeIsNone(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) - return o == 0 - - # Event43Message - def PulseIndex(self, j): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) - if o != 0: - a = self._tab.Vector(o) - return self._tab.Get( - flatbuffers.number_types.Uint32Flags, - a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4), - ) - return 0 - - # Event43Message - def PulseIndexAsNumpy(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) - if o != 0: - return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint32Flags, o) - return 0 - - # Event43Message - def PulseIndexLength(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) - if o != 0: - return self._tab.VectorLen(o) - return 0 - - # Event43Message - def PulseIndexIsNone(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) - return o == 0 - - # Event43Message - def TimeOfFlight(self, j): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) - if o != 0: - a = self._tab.Vector(o) - return self._tab.Get( - flatbuffers.number_types.Uint32Flags, - a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4), - ) - return 0 - - # Event43Message - def TimeOfFlightAsNumpy(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) - if o != 0: - return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint32Flags, o) - return 0 - - # Event43Message - def TimeOfFlightLength(self): - o = 
flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) - if o != 0: - return self._tab.VectorLen(o) - return 0 - - # Event43Message - def TimeOfFlightIsNone(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) - return o == 0 - - # Event43Message - def DetectorId(self, j): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) - if o != 0: - a = self._tab.Vector(o) - return self._tab.Get( - flatbuffers.number_types.Uint32Flags, - a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4), - ) - return 0 - - # Event43Message - def DetectorIdAsNumpy(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) - if o != 0: - return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint32Flags, o) - return 0 - - # Event43Message - def DetectorIdLength(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) - if o != 0: - return self._tab.VectorLen(o) - return 0 - - # Event43Message - def DetectorIdIsNone(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) - return o == 0 - - -def Event43MessageStart(builder): - builder.StartObject(6) - - -def Event43MessageAddSourceName(builder, sourceName): - builder.PrependUOffsetTRelativeSlot( - 0, flatbuffers.number_types.UOffsetTFlags.py_type(sourceName), 0 - ) - - -def Event43MessageAddMessageId(builder, messageId): - builder.PrependUint64Slot(1, messageId, 0) - - -def Event43MessageAddPulseTime(builder, pulseTime): - builder.PrependUOffsetTRelativeSlot( - 2, flatbuffers.number_types.UOffsetTFlags.py_type(pulseTime), 0 - ) - - -def Event43MessageStartPulseTimeVector(builder, numElems): - return builder.StartVector(8, numElems, 8) - - -def Event43MessageAddPulseIndex(builder, pulseIndex): - builder.PrependUOffsetTRelativeSlot( - 3, flatbuffers.number_types.UOffsetTFlags.py_type(pulseIndex), 0 - ) - - -def Event43MessageStartPulseIndexVector(builder, numElems): - return builder.StartVector(4, 
numElems, 4) - - -def Event43MessageAddTimeOfFlight(builder, timeOfFlight): - builder.PrependUOffsetTRelativeSlot( - 4, flatbuffers.number_types.UOffsetTFlags.py_type(timeOfFlight), 0 - ) - - -def Event43MessageStartTimeOfFlightVector(builder, numElems): - return builder.StartVector(4, numElems, 4) - - -def Event43MessageAddDetectorId(builder, detectorId): - builder.PrependUOffsetTRelativeSlot( - 5, flatbuffers.number_types.UOffsetTFlags.py_type(detectorId), 0 - ) - - -def Event43MessageStartDetectorIdVector(builder, numElems): - return builder.StartVector(4, numElems, 4) - - -def Event43MessageEnd(builder): - return builder.EndObject() diff --git a/python/src/streaming_data_types/fbschemas/eventdata_ev43/__init__.py b/python/src/streaming_data_types/fbschemas/eventdata_ev43/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/python/src/streaming_data_types/fbschemas/forwarder_config_update_rf5k/ConfigUpdate.py b/python/src/streaming_data_types/fbschemas/forwarder_config_update_rf5k/ConfigUpdate.py deleted file mode 100644 index f9522c3..0000000 --- a/python/src/streaming_data_types/fbschemas/forwarder_config_update_rf5k/ConfigUpdate.py +++ /dev/null @@ -1,72 +0,0 @@ -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: - -import flatbuffers - - -class ConfigUpdate(object): - __slots__ = ["_tab"] - - @classmethod - def GetRootAsConfigUpdate(cls, buf, offset): - n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = ConfigUpdate() - x.Init(buf, n + offset) - return x - - # ConfigUpdate - def Init(self, buf, pos): - self._tab = flatbuffers.table.Table(buf, pos) - - # ConfigUpdate - def ConfigChange(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.Get( - flatbuffers.number_types.Uint16Flags, o + self._tab.Pos - ) - return 0 - - # ConfigUpdate - def Streams(self, j): - o = 
flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) - if o != 0: - x = self._tab.Vector(o) - x += flatbuffers.number_types.UOffsetTFlags.py_type(j) * 4 - x = self._tab.Indirect(x) - from .Stream import Stream - - obj = Stream() - obj.Init(self._tab.Bytes, x) - return obj - return None - - # ConfigUpdate - def StreamsLength(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) - if o != 0: - return self._tab.VectorLen(o) - return 0 - - -def ConfigUpdateStart(builder): - builder.StartObject(2) - - -def ConfigUpdateAddConfigChange(builder, configChange): - builder.PrependUint16Slot(0, configChange, 0) - - -def ConfigUpdateAddStreams(builder, streams): - builder.PrependUOffsetTRelativeSlot( - 1, flatbuffers.number_types.UOffsetTFlags.py_type(streams), 0 - ) - - -def ConfigUpdateStartStreamsVector(builder, numElems): - return builder.StartVector(4, numElems, 4) - - -def ConfigUpdateEnd(builder): - return builder.EndObject() diff --git a/python/src/streaming_data_types/fbschemas/forwarder_config_update_rf5k/Protocol.py b/python/src/streaming_data_types/fbschemas/forwarder_config_update_rf5k/Protocol.py deleted file mode 100644 index f8231fa..0000000 --- a/python/src/streaming_data_types/fbschemas/forwarder_config_update_rf5k/Protocol.py +++ /dev/null @@ -1,9 +0,0 @@ -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: - - -class Protocol(object): - PVA = 0 - CA = 1 - FAKE = 2 diff --git a/python/src/streaming_data_types/fbschemas/forwarder_config_update_rf5k/Stream.py b/python/src/streaming_data_types/fbschemas/forwarder_config_update_rf5k/Stream.py deleted file mode 100644 index afcdb86..0000000 --- a/python/src/streaming_data_types/fbschemas/forwarder_config_update_rf5k/Stream.py +++ /dev/null @@ -1,80 +0,0 @@ -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: - -import flatbuffers - - -class Stream(object): - __slots__ = ["_tab"] - - @classmethod - def 
GetRootAsStream(cls, buf, offset): - n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = Stream() - x.Init(buf, n + offset) - return x - - # Stream - def Init(self, buf, pos): - self._tab = flatbuffers.table.Table(buf, pos) - - # Stream - def Channel(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.String(o + self._tab.Pos) - return None - - # Stream - def Schema(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) - if o != 0: - return self._tab.String(o + self._tab.Pos) - return None - - # Stream - def Topic(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) - if o != 0: - return self._tab.String(o + self._tab.Pos) - return None - - # Stream - def Protocol(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) - if o != 0: - return self._tab.Get( - flatbuffers.number_types.Uint16Flags, o + self._tab.Pos - ) - return 0 - - -def StreamStart(builder): - builder.StartObject(4) - - -def StreamAddChannel(builder, channel): - builder.PrependUOffsetTRelativeSlot( - 0, flatbuffers.number_types.UOffsetTFlags.py_type(channel), 0 - ) - - -def StreamAddSchema(builder, schema): - builder.PrependUOffsetTRelativeSlot( - 1, flatbuffers.number_types.UOffsetTFlags.py_type(schema), 0 - ) - - -def StreamAddTopic(builder, topic): - builder.PrependUOffsetTRelativeSlot( - 2, flatbuffers.number_types.UOffsetTFlags.py_type(topic), 0 - ) - - -def StreamAddProtocol(builder, protocol): - builder.PrependUint16Slot(3, protocol, 0) - - -def StreamEnd(builder): - return builder.EndObject() diff --git a/python/src/streaming_data_types/fbschemas/forwarder_config_update_rf5k/UpdateType.py b/python/src/streaming_data_types/fbschemas/forwarder_config_update_rf5k/UpdateType.py deleted file mode 100644 index e69b8e4..0000000 --- a/python/src/streaming_data_types/fbschemas/forwarder_config_update_rf5k/UpdateType.py +++ /dev/null @@ 
-1,9 +0,0 @@ -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: - - -class UpdateType(object): - ADD = 0 - REMOVE = 1 - REMOVEALL = 2 diff --git a/python/src/streaming_data_types/fbschemas/forwarder_config_update_rf5k/__init__.py b/python/src/streaming_data_types/fbschemas/forwarder_config_update_rf5k/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/python/src/streaming_data_types/fbschemas/histogram_hs00/Array.py b/python/src/streaming_data_types/fbschemas/histogram_hs00/Array.py deleted file mode 100644 index ed3c3e9..0000000 --- a/python/src/streaming_data_types/fbschemas/histogram_hs00/Array.py +++ /dev/null @@ -1,11 +0,0 @@ -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: - - -class Array(object): - NONE = 0 - ArrayUInt = 1 - ArrayULong = 2 - ArrayDouble = 3 - ArrayFloat = 4 diff --git a/python/src/streaming_data_types/fbschemas/histogram_hs00/ArrayDouble.py b/python/src/streaming_data_types/fbschemas/histogram_hs00/ArrayDouble.py deleted file mode 100644 index 7b57488..0000000 --- a/python/src/streaming_data_types/fbschemas/histogram_hs00/ArrayDouble.py +++ /dev/null @@ -1,63 +0,0 @@ -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: - -import flatbuffers - - -class ArrayDouble(object): - __slots__ = ["_tab"] - - @classmethod - def GetRootAsArrayDouble(cls, buf, offset): - n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = ArrayDouble() - x.Init(buf, n + offset) - return x - - # ArrayDouble - def Init(self, buf, pos): - self._tab = flatbuffers.table.Table(buf, pos) - - # ArrayDouble - def Value(self, j): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - a = self._tab.Vector(o) - return self._tab.Get( - flatbuffers.number_types.Float64Flags, - a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8), - ) - return 0 - - # ArrayDouble - def ValueAsNumpy(self): - o = 
flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Float64Flags, o) - return 0 - - # ArrayDouble - def ValueLength(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.VectorLen(o) - return 0 - - -def ArrayDoubleStart(builder): - builder.StartObject(1) - - -def ArrayDoubleAddValue(builder, value): - builder.PrependUOffsetTRelativeSlot( - 0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0 - ) - - -def ArrayDoubleStartValueVector(builder, numElems): - return builder.StartVector(8, numElems, 8) - - -def ArrayDoubleEnd(builder): - return builder.EndObject() diff --git a/python/src/streaming_data_types/fbschemas/histogram_hs00/ArrayFloat.py b/python/src/streaming_data_types/fbschemas/histogram_hs00/ArrayFloat.py deleted file mode 100644 index 9b1a84c..0000000 --- a/python/src/streaming_data_types/fbschemas/histogram_hs00/ArrayFloat.py +++ /dev/null @@ -1,63 +0,0 @@ -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: - -import flatbuffers - - -class ArrayFloat(object): - __slots__ = ["_tab"] - - @classmethod - def GetRootAsArrayFloat(cls, buf, offset): - n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = ArrayFloat() - x.Init(buf, n + offset) - return x - - # ArrayFloat - def Init(self, buf, pos): - self._tab = flatbuffers.table.Table(buf, pos) - - # ArrayFloat - def Value(self, j): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - a = self._tab.Vector(o) - return self._tab.Get( - flatbuffers.number_types.Float32Flags, - a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4), - ) - return 0 - - # ArrayFloat - def ValueAsNumpy(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Float32Flags, o) - return 0 - - # 
ArrayFloat - def ValueLength(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.VectorLen(o) - return 0 - - -def ArrayFloatStart(builder): - builder.StartObject(1) - - -def ArrayFloatAddValue(builder, value): - builder.PrependUOffsetTRelativeSlot( - 0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0 - ) - - -def ArrayFloatStartValueVector(builder, numElems): - return builder.StartVector(4, numElems, 4) - - -def ArrayFloatEnd(builder): - return builder.EndObject() diff --git a/python/src/streaming_data_types/fbschemas/histogram_hs00/ArrayUInt.py b/python/src/streaming_data_types/fbschemas/histogram_hs00/ArrayUInt.py deleted file mode 100644 index 615967d..0000000 --- a/python/src/streaming_data_types/fbschemas/histogram_hs00/ArrayUInt.py +++ /dev/null @@ -1,63 +0,0 @@ -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: - -import flatbuffers - - -class ArrayUInt(object): - __slots__ = ["_tab"] - - @classmethod - def GetRootAsArrayUInt(cls, buf, offset): - n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = ArrayUInt() - x.Init(buf, n + offset) - return x - - # ArrayUInt - def Init(self, buf, pos): - self._tab = flatbuffers.table.Table(buf, pos) - - # ArrayUInt - def Value(self, j): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - a = self._tab.Vector(o) - return self._tab.Get( - flatbuffers.number_types.Uint32Flags, - a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4), - ) - return 0 - - # ArrayUInt - def ValueAsNumpy(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint32Flags, o) - return 0 - - # ArrayUInt - def ValueLength(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.VectorLen(o) - return 0 - - -def 
ArrayUIntStart(builder): - builder.StartObject(1) - - -def ArrayUIntAddValue(builder, value): - builder.PrependUOffsetTRelativeSlot( - 0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0 - ) - - -def ArrayUIntStartValueVector(builder, numElems): - return builder.StartVector(4, numElems, 4) - - -def ArrayUIntEnd(builder): - return builder.EndObject() diff --git a/python/src/streaming_data_types/fbschemas/histogram_hs00/ArrayULong.py b/python/src/streaming_data_types/fbschemas/histogram_hs00/ArrayULong.py deleted file mode 100644 index 6e29d22..0000000 --- a/python/src/streaming_data_types/fbschemas/histogram_hs00/ArrayULong.py +++ /dev/null @@ -1,63 +0,0 @@ -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: - -import flatbuffers - - -class ArrayULong(object): - __slots__ = ["_tab"] - - @classmethod - def GetRootAsArrayULong(cls, buf, offset): - n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = ArrayULong() - x.Init(buf, n + offset) - return x - - # ArrayULong - def Init(self, buf, pos): - self._tab = flatbuffers.table.Table(buf, pos) - - # ArrayULong - def Value(self, j): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - a = self._tab.Vector(o) - return self._tab.Get( - flatbuffers.number_types.Uint64Flags, - a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8), - ) - return 0 - - # ArrayULong - def ValueAsNumpy(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint64Flags, o) - return 0 - - # ArrayULong - def ValueLength(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.VectorLen(o) - return 0 - - -def ArrayULongStart(builder): - builder.StartObject(1) - - -def ArrayULongAddValue(builder, value): - builder.PrependUOffsetTRelativeSlot( - 0, 
flatbuffers.number_types.UOffsetTFlags.py_type(value), 0 - ) - - -def ArrayULongStartValueVector(builder, numElems): - return builder.StartVector(8, numElems, 8) - - -def ArrayULongEnd(builder): - return builder.EndObject() diff --git a/python/src/streaming_data_types/fbschemas/histogram_hs00/DimensionMetaData.py b/python/src/streaming_data_types/fbschemas/histogram_hs00/DimensionMetaData.py deleted file mode 100644 index a498793..0000000 --- a/python/src/streaming_data_types/fbschemas/histogram_hs00/DimensionMetaData.py +++ /dev/null @@ -1,95 +0,0 @@ -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: - -import flatbuffers - - -class DimensionMetaData(object): - __slots__ = ["_tab"] - - @classmethod - def GetRootAsDimensionMetaData(cls, buf, offset): - n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = DimensionMetaData() - x.Init(buf, n + offset) - return x - - # DimensionMetaData - def Init(self, buf, pos): - self._tab = flatbuffers.table.Table(buf, pos) - - # DimensionMetaData - def Length(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.Get( - flatbuffers.number_types.Uint32Flags, o + self._tab.Pos - ) - return 0 - - # DimensionMetaData - def Unit(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) - if o != 0: - return self._tab.String(o + self._tab.Pos) - return None - - # DimensionMetaData - def Label(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) - if o != 0: - return self._tab.String(o + self._tab.Pos) - return None - - # DimensionMetaData - def BinBoundariesType(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) - if o != 0: - return self._tab.Get(flatbuffers.number_types.Uint8Flags, o + self._tab.Pos) - return 0 - - # DimensionMetaData - def BinBoundaries(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) - if o 
!= 0: - from flatbuffers.table import Table - - obj = Table(bytearray(), 0) - self._tab.Union(obj, o) - return obj - return None - - -def DimensionMetaDataStart(builder): - builder.StartObject(5) - - -def DimensionMetaDataAddLength(builder, length): - builder.PrependUint32Slot(0, length, 0) - - -def DimensionMetaDataAddUnit(builder, unit): - builder.PrependUOffsetTRelativeSlot( - 1, flatbuffers.number_types.UOffsetTFlags.py_type(unit), 0 - ) - - -def DimensionMetaDataAddLabel(builder, label): - builder.PrependUOffsetTRelativeSlot( - 2, flatbuffers.number_types.UOffsetTFlags.py_type(label), 0 - ) - - -def DimensionMetaDataAddBinBoundariesType(builder, binBoundariesType): - builder.PrependUint8Slot(3, binBoundariesType, 0) - - -def DimensionMetaDataAddBinBoundaries(builder, binBoundaries): - builder.PrependUOffsetTRelativeSlot( - 4, flatbuffers.number_types.UOffsetTFlags.py_type(binBoundaries), 0 - ) - - -def DimensionMetaDataEnd(builder): - return builder.EndObject() diff --git a/python/src/streaming_data_types/fbschemas/histogram_hs00/EventHistogram.py b/python/src/streaming_data_types/fbschemas/histogram_hs00/EventHistogram.py deleted file mode 100644 index 593a31d..0000000 --- a/python/src/streaming_data_types/fbschemas/histogram_hs00/EventHistogram.py +++ /dev/null @@ -1,237 +0,0 @@ -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: - -import flatbuffers - - -class EventHistogram(object): - __slots__ = ["_tab"] - - @classmethod - def GetRootAsEventHistogram(cls, buf, offset): - n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = EventHistogram() - x.Init(buf, n + offset) - return x - - # EventHistogram - def Init(self, buf, pos): - self._tab = flatbuffers.table.Table(buf, pos) - - # EventHistogram - def Source(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.String(o + self._tab.Pos) - return None - - # EventHistogram - def Timestamp(self): - 
o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) - if o != 0: - return self._tab.Get( - flatbuffers.number_types.Uint64Flags, o + self._tab.Pos - ) - return 0 - - # EventHistogram - def DimMetadata(self, j): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) - if o != 0: - x = self._tab.Vector(o) - x += flatbuffers.number_types.UOffsetTFlags.py_type(j) * 4 - x = self._tab.Indirect(x) - from .DimensionMetaData import DimensionMetaData - - obj = DimensionMetaData() - obj.Init(self._tab.Bytes, x) - return obj - return None - - # EventHistogram - def DimMetadataLength(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) - if o != 0: - return self._tab.VectorLen(o) - return 0 - - # EventHistogram - def LastMetadataTimestamp(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) - if o != 0: - return self._tab.Get( - flatbuffers.number_types.Uint64Flags, o + self._tab.Pos - ) - return 0 - - # EventHistogram - def CurrentShape(self, j): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) - if o != 0: - a = self._tab.Vector(o) - return self._tab.Get( - flatbuffers.number_types.Uint32Flags, - a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4), - ) - return 0 - - # EventHistogram - def CurrentShapeAsNumpy(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) - if o != 0: - return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint32Flags, o) - return 0 - - # EventHistogram - def CurrentShapeLength(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) - if o != 0: - return self._tab.VectorLen(o) - return 0 - - # EventHistogram - def Offset(self, j): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) - if o != 0: - a = self._tab.Vector(o) - return self._tab.Get( - flatbuffers.number_types.Uint32Flags, - a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4), - ) 
- return 0 - - # EventHistogram - def OffsetAsNumpy(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) - if o != 0: - return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint32Flags, o) - return 0 - - # EventHistogram - def OffsetLength(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) - if o != 0: - return self._tab.VectorLen(o) - return 0 - - # EventHistogram - def DataType(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16)) - if o != 0: - return self._tab.Get(flatbuffers.number_types.Uint8Flags, o + self._tab.Pos) - return 0 - - # EventHistogram - def Data(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(18)) - if o != 0: - from flatbuffers.table import Table - - obj = Table(bytearray(), 0) - self._tab.Union(obj, o) - return obj - return None - - # EventHistogram - def ErrorsType(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(20)) - if o != 0: - return self._tab.Get(flatbuffers.number_types.Uint8Flags, o + self._tab.Pos) - return 0 - - # EventHistogram - def Errors(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(22)) - if o != 0: - from flatbuffers.table import Table - - obj = Table(bytearray(), 0) - self._tab.Union(obj, o) - return obj - return None - - # EventHistogram - def Info(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(24)) - if o != 0: - return self._tab.String(o + self._tab.Pos) - return None - - -def EventHistogramStart(builder): - builder.StartObject(11) - - -def EventHistogramAddSource(builder, source): - builder.PrependUOffsetTRelativeSlot( - 0, flatbuffers.number_types.UOffsetTFlags.py_type(source), 0 - ) - - -def EventHistogramAddTimestamp(builder, timestamp): - builder.PrependUint64Slot(1, timestamp, 0) - - -def EventHistogramAddDimMetadata(builder, dimMetadata): - builder.PrependUOffsetTRelativeSlot( - 2, 
flatbuffers.number_types.UOffsetTFlags.py_type(dimMetadata), 0 - ) - - -def EventHistogramStartDimMetadataVector(builder, numElems): - return builder.StartVector(4, numElems, 4) - - -def EventHistogramAddLastMetadataTimestamp(builder, lastMetadataTimestamp): - builder.PrependUint64Slot(3, lastMetadataTimestamp, 0) - - -def EventHistogramAddCurrentShape(builder, currentShape): - builder.PrependUOffsetTRelativeSlot( - 4, flatbuffers.number_types.UOffsetTFlags.py_type(currentShape), 0 - ) - - -def EventHistogramStartCurrentShapeVector(builder, numElems): - return builder.StartVector(4, numElems, 4) - - -def EventHistogramAddOffset(builder, offset): - builder.PrependUOffsetTRelativeSlot( - 5, flatbuffers.number_types.UOffsetTFlags.py_type(offset), 0 - ) - - -def EventHistogramStartOffsetVector(builder, numElems): - return builder.StartVector(4, numElems, 4) - - -def EventHistogramAddDataType(builder, dataType): - builder.PrependUint8Slot(6, dataType, 0) - - -def EventHistogramAddData(builder, data): - builder.PrependUOffsetTRelativeSlot( - 7, flatbuffers.number_types.UOffsetTFlags.py_type(data), 0 - ) - - -def EventHistogramAddErrorsType(builder, errorsType): - builder.PrependUint8Slot(8, errorsType, 0) - - -def EventHistogramAddErrors(builder, errors): - builder.PrependUOffsetTRelativeSlot( - 9, flatbuffers.number_types.UOffsetTFlags.py_type(errors), 0 - ) - - -def EventHistogramAddInfo(builder, info): - builder.PrependUOffsetTRelativeSlot( - 10, flatbuffers.number_types.UOffsetTFlags.py_type(info), 0 - ) - - -def EventHistogramEnd(builder): - return builder.EndObject() diff --git a/python/src/streaming_data_types/fbschemas/histogram_hs00/__init__.py b/python/src/streaming_data_types/fbschemas/histogram_hs00/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/python/src/streaming_data_types/fbschemas/isis_event_info_is84/ISISData.py b/python/src/streaming_data_types/fbschemas/isis_event_info_is84/ISISData.py deleted file mode 100644 index 
79e3c11..0000000 --- a/python/src/streaming_data_types/fbschemas/isis_event_info_is84/ISISData.py +++ /dev/null @@ -1,65 +0,0 @@ -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: - -import flatbuffers - - -class ISISData(object): - __slots__ = ["_tab"] - - @classmethod - def GetRootAsISISData(cls, buf, offset): - n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = ISISData() - x.Init(buf, n + offset) - return x - - # ISISData - def Init(self, buf, pos): - self._tab = flatbuffers.table.Table(buf, pos) - - # ISISData - def PeriodNumber(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.Get( - flatbuffers.number_types.Uint32Flags, o + self._tab.Pos - ) - return 0 - - # ISISData - def RunState(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) - if o != 0: - return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) - return 0 - - # ISISData - def ProtonCharge(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) - if o != 0: - return self._tab.Get( - flatbuffers.number_types.Float32Flags, o + self._tab.Pos - ) - return 0.0 - - -def ISISDataStart(builder): - builder.StartObject(3) - - -def ISISDataAddPeriodNumber(builder, periodNumber): - builder.PrependUint32Slot(0, periodNumber, 0) - - -def ISISDataAddRunState(builder, runState): - builder.PrependInt8Slot(1, runState, 0) - - -def ISISDataAddProtonCharge(builder, protonCharge): - builder.PrependFloat32Slot(2, protonCharge, 0.0) - - -def ISISDataEnd(builder): - return builder.EndObject() diff --git a/python/src/streaming_data_types/fbschemas/isis_event_info_is84/RunState.py b/python/src/streaming_data_types/fbschemas/isis_event_info_is84/RunState.py deleted file mode 100644 index 9d34cd9..0000000 --- a/python/src/streaming_data_types/fbschemas/isis_event_info_is84/RunState.py +++ /dev/null @@ -1,8 +0,0 @@ -# automatically 
generated by the FlatBuffers compiler, do not modify - -# namespace: - - -class RunState(object): - SETUP = 0 - RUNNING = 1 diff --git a/python/src/streaming_data_types/fbschemas/isis_event_info_is84/__init__.py b/python/src/streaming_data_types/fbschemas/isis_event_info_is84/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/python/src/streaming_data_types/fbschemas/logdata_f142/AlarmSeverity.py b/python/src/streaming_data_types/fbschemas/logdata_f142/AlarmSeverity.py deleted file mode 100644 index 8ab40d7..0000000 --- a/python/src/streaming_data_types/fbschemas/logdata_f142/AlarmSeverity.py +++ /dev/null @@ -1,11 +0,0 @@ -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: - - -class AlarmSeverity(object): - MINOR = 0 - MAJOR = 1 - NO_ALARM = 2 - INVALID = 3 - NO_CHANGE = 4 diff --git a/python/src/streaming_data_types/fbschemas/logdata_f142/AlarmStatus.py b/python/src/streaming_data_types/fbschemas/logdata_f142/AlarmStatus.py deleted file mode 100644 index 959a28a..0000000 --- a/python/src/streaming_data_types/fbschemas/logdata_f142/AlarmStatus.py +++ /dev/null @@ -1,29 +0,0 @@ -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: - - -class AlarmStatus(object): - NO_ALARM = 0 - READ = 1 - WRITE = 2 - HIHI = 3 - HIGH = 4 - LOLO = 5 - LOW = 6 - STATE = 7 - COS = 8 - COMM = 9 - TIMED = 10 - HWLIMIT = 11 - CALC = 12 - SCAN = 13 - LINK = 14 - SOFT = 15 - BAD_SUB = 16 - UDF = 17 - DISABLE = 18 - SIMM = 19 - READ_ACCESS = 20 - WRITE_ACCESS = 21 - NO_CHANGE = 22 diff --git a/python/src/streaming_data_types/fbschemas/logdata_f142/ArrayByte.py b/python/src/streaming_data_types/fbschemas/logdata_f142/ArrayByte.py deleted file mode 100644 index 8307e0d..0000000 --- a/python/src/streaming_data_types/fbschemas/logdata_f142/ArrayByte.py +++ /dev/null @@ -1,63 +0,0 @@ -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: - -import flatbuffers - - -class 
ArrayByte(object): - __slots__ = ["_tab"] - - @classmethod - def GetRootAsArrayByte(cls, buf, offset): - n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = ArrayByte() - x.Init(buf, n + offset) - return x - - # ArrayByte - def Init(self, buf, pos): - self._tab = flatbuffers.table.Table(buf, pos) - - # ArrayByte - def Value(self, j): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - a = self._tab.Vector(o) - return self._tab.Get( - flatbuffers.number_types.Int8Flags, - a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1), - ) - return 0 - - # ArrayByte - def ValueAsNumpy(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int8Flags, o) - return 0 - - # ArrayByte - def ValueLength(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.VectorLen(o) - return 0 - - -def ArrayByteStart(builder): - builder.StartObject(1) - - -def ArrayByteAddValue(builder, value): - builder.PrependUOffsetTRelativeSlot( - 0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0 - ) - - -def ArrayByteStartValueVector(builder, numElems): - return builder.StartVector(1, numElems, 1) - - -def ArrayByteEnd(builder): - return builder.EndObject() diff --git a/python/src/streaming_data_types/fbschemas/logdata_f142/ArrayDouble.py b/python/src/streaming_data_types/fbschemas/logdata_f142/ArrayDouble.py deleted file mode 100644 index 7b57488..0000000 --- a/python/src/streaming_data_types/fbschemas/logdata_f142/ArrayDouble.py +++ /dev/null @@ -1,63 +0,0 @@ -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: - -import flatbuffers - - -class ArrayDouble(object): - __slots__ = ["_tab"] - - @classmethod - def GetRootAsArrayDouble(cls, buf, offset): - n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = ArrayDouble() - 
x.Init(buf, n + offset) - return x - - # ArrayDouble - def Init(self, buf, pos): - self._tab = flatbuffers.table.Table(buf, pos) - - # ArrayDouble - def Value(self, j): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - a = self._tab.Vector(o) - return self._tab.Get( - flatbuffers.number_types.Float64Flags, - a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8), - ) - return 0 - - # ArrayDouble - def ValueAsNumpy(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Float64Flags, o) - return 0 - - # ArrayDouble - def ValueLength(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.VectorLen(o) - return 0 - - -def ArrayDoubleStart(builder): - builder.StartObject(1) - - -def ArrayDoubleAddValue(builder, value): - builder.PrependUOffsetTRelativeSlot( - 0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0 - ) - - -def ArrayDoubleStartValueVector(builder, numElems): - return builder.StartVector(8, numElems, 8) - - -def ArrayDoubleEnd(builder): - return builder.EndObject() diff --git a/python/src/streaming_data_types/fbschemas/logdata_f142/ArrayFloat.py b/python/src/streaming_data_types/fbschemas/logdata_f142/ArrayFloat.py deleted file mode 100644 index 9b1a84c..0000000 --- a/python/src/streaming_data_types/fbschemas/logdata_f142/ArrayFloat.py +++ /dev/null @@ -1,63 +0,0 @@ -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: - -import flatbuffers - - -class ArrayFloat(object): - __slots__ = ["_tab"] - - @classmethod - def GetRootAsArrayFloat(cls, buf, offset): - n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = ArrayFloat() - x.Init(buf, n + offset) - return x - - # ArrayFloat - def Init(self, buf, pos): - self._tab = flatbuffers.table.Table(buf, pos) - - # ArrayFloat - def Value(self, j): - o = 
flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - a = self._tab.Vector(o) - return self._tab.Get( - flatbuffers.number_types.Float32Flags, - a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4), - ) - return 0 - - # ArrayFloat - def ValueAsNumpy(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Float32Flags, o) - return 0 - - # ArrayFloat - def ValueLength(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.VectorLen(o) - return 0 - - -def ArrayFloatStart(builder): - builder.StartObject(1) - - -def ArrayFloatAddValue(builder, value): - builder.PrependUOffsetTRelativeSlot( - 0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0 - ) - - -def ArrayFloatStartValueVector(builder, numElems): - return builder.StartVector(4, numElems, 4) - - -def ArrayFloatEnd(builder): - return builder.EndObject() diff --git a/python/src/streaming_data_types/fbschemas/logdata_f142/ArrayInt.py b/python/src/streaming_data_types/fbschemas/logdata_f142/ArrayInt.py deleted file mode 100644 index 841c0e2..0000000 --- a/python/src/streaming_data_types/fbschemas/logdata_f142/ArrayInt.py +++ /dev/null @@ -1,63 +0,0 @@ -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: - -import flatbuffers - - -class ArrayInt(object): - __slots__ = ["_tab"] - - @classmethod - def GetRootAsArrayInt(cls, buf, offset): - n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = ArrayInt() - x.Init(buf, n + offset) - return x - - # ArrayInt - def Init(self, buf, pos): - self._tab = flatbuffers.table.Table(buf, pos) - - # ArrayInt - def Value(self, j): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - a = self._tab.Vector(o) - return self._tab.Get( - flatbuffers.number_types.Int32Flags, - a + 
flatbuffers.number_types.UOffsetTFlags.py_type(j * 4), - ) - return 0 - - # ArrayInt - def ValueAsNumpy(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int32Flags, o) - return 0 - - # ArrayInt - def ValueLength(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.VectorLen(o) - return 0 - - -def ArrayIntStart(builder): - builder.StartObject(1) - - -def ArrayIntAddValue(builder, value): - builder.PrependUOffsetTRelativeSlot( - 0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0 - ) - - -def ArrayIntStartValueVector(builder, numElems): - return builder.StartVector(4, numElems, 4) - - -def ArrayIntEnd(builder): - return builder.EndObject() diff --git a/python/src/streaming_data_types/fbschemas/logdata_f142/ArrayLong.py b/python/src/streaming_data_types/fbschemas/logdata_f142/ArrayLong.py deleted file mode 100644 index 21277cc..0000000 --- a/python/src/streaming_data_types/fbschemas/logdata_f142/ArrayLong.py +++ /dev/null @@ -1,63 +0,0 @@ -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: - -import flatbuffers - - -class ArrayLong(object): - __slots__ = ["_tab"] - - @classmethod - def GetRootAsArrayLong(cls, buf, offset): - n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = ArrayLong() - x.Init(buf, n + offset) - return x - - # ArrayLong - def Init(self, buf, pos): - self._tab = flatbuffers.table.Table(buf, pos) - - # ArrayLong - def Value(self, j): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - a = self._tab.Vector(o) - return self._tab.Get( - flatbuffers.number_types.Int64Flags, - a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8), - ) - return 0 - - # ArrayLong - def ValueAsNumpy(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return 
self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int64Flags, o) - return 0 - - # ArrayLong - def ValueLength(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.VectorLen(o) - return 0 - - -def ArrayLongStart(builder): - builder.StartObject(1) - - -def ArrayLongAddValue(builder, value): - builder.PrependUOffsetTRelativeSlot( - 0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0 - ) - - -def ArrayLongStartValueVector(builder, numElems): - return builder.StartVector(8, numElems, 8) - - -def ArrayLongEnd(builder): - return builder.EndObject() diff --git a/python/src/streaming_data_types/fbschemas/logdata_f142/ArrayShort.py b/python/src/streaming_data_types/fbschemas/logdata_f142/ArrayShort.py deleted file mode 100644 index edea083..0000000 --- a/python/src/streaming_data_types/fbschemas/logdata_f142/ArrayShort.py +++ /dev/null @@ -1,63 +0,0 @@ -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: - -import flatbuffers - - -class ArrayShort(object): - __slots__ = ["_tab"] - - @classmethod - def GetRootAsArrayShort(cls, buf, offset): - n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = ArrayShort() - x.Init(buf, n + offset) - return x - - # ArrayShort - def Init(self, buf, pos): - self._tab = flatbuffers.table.Table(buf, pos) - - # ArrayShort - def Value(self, j): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - a = self._tab.Vector(o) - return self._tab.Get( - flatbuffers.number_types.Int16Flags, - a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 2), - ) - return 0 - - # ArrayShort - def ValueAsNumpy(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int16Flags, o) - return 0 - - # ArrayShort - def ValueLength(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o 
!= 0: - return self._tab.VectorLen(o) - return 0 - - -def ArrayShortStart(builder): - builder.StartObject(1) - - -def ArrayShortAddValue(builder, value): - builder.PrependUOffsetTRelativeSlot( - 0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0 - ) - - -def ArrayShortStartValueVector(builder, numElems): - return builder.StartVector(2, numElems, 2) - - -def ArrayShortEnd(builder): - return builder.EndObject() diff --git a/python/src/streaming_data_types/fbschemas/logdata_f142/ArrayString.py b/python/src/streaming_data_types/fbschemas/logdata_f142/ArrayString.py deleted file mode 100644 index 0594d66..0000000 --- a/python/src/streaming_data_types/fbschemas/logdata_f142/ArrayString.py +++ /dev/null @@ -1,55 +0,0 @@ -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: - -import flatbuffers - - -class ArrayString(object): - __slots__ = ["_tab"] - - @classmethod - def GetRootAsArrayString(cls, buf, offset): - n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = ArrayString() - x.Init(buf, n + offset) - return x - - # ArrayString - def Init(self, buf, pos): - self._tab = flatbuffers.table.Table(buf, pos) - - # ArrayString - def Value(self, j): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - a = self._tab.Vector(o) - return self._tab.String( - a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4) - ) - return "" - - # ArrayString - def ValueLength(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.VectorLen(o) - return 0 - - -def ArrayStringStart(builder): - builder.StartObject(1) - - -def ArrayStringAddValue(builder, value): - builder.PrependUOffsetTRelativeSlot( - 0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0 - ) - - -def ArrayStringStartValueVector(builder, numElems): - return builder.StartVector(4, numElems, 4) - - -def ArrayStringEnd(builder): - return builder.EndObject() diff --git 
a/python/src/streaming_data_types/fbschemas/logdata_f142/ArrayUByte.py b/python/src/streaming_data_types/fbschemas/logdata_f142/ArrayUByte.py deleted file mode 100644 index 82c7772..0000000 --- a/python/src/streaming_data_types/fbschemas/logdata_f142/ArrayUByte.py +++ /dev/null @@ -1,63 +0,0 @@ -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: - -import flatbuffers - - -class ArrayUByte(object): - __slots__ = ["_tab"] - - @classmethod - def GetRootAsArrayUByte(cls, buf, offset): - n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = ArrayUByte() - x.Init(buf, n + offset) - return x - - # ArrayUByte - def Init(self, buf, pos): - self._tab = flatbuffers.table.Table(buf, pos) - - # ArrayUByte - def Value(self, j): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - a = self._tab.Vector(o) - return self._tab.Get( - flatbuffers.number_types.Uint8Flags, - a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1), - ) - return 0 - - # ArrayUByte - def ValueAsNumpy(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint8Flags, o) - return 0 - - # ArrayUByte - def ValueLength(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.VectorLen(o) - return 0 - - -def ArrayUByteStart(builder): - builder.StartObject(1) - - -def ArrayUByteAddValue(builder, value): - builder.PrependUOffsetTRelativeSlot( - 0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0 - ) - - -def ArrayUByteStartValueVector(builder, numElems): - return builder.StartVector(1, numElems, 1) - - -def ArrayUByteEnd(builder): - return builder.EndObject() diff --git a/python/src/streaming_data_types/fbschemas/logdata_f142/ArrayUInt.py b/python/src/streaming_data_types/fbschemas/logdata_f142/ArrayUInt.py deleted file mode 100644 index 615967d..0000000 --- 
a/python/src/streaming_data_types/fbschemas/logdata_f142/ArrayUInt.py +++ /dev/null @@ -1,63 +0,0 @@ -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: - -import flatbuffers - - -class ArrayUInt(object): - __slots__ = ["_tab"] - - @classmethod - def GetRootAsArrayUInt(cls, buf, offset): - n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = ArrayUInt() - x.Init(buf, n + offset) - return x - - # ArrayUInt - def Init(self, buf, pos): - self._tab = flatbuffers.table.Table(buf, pos) - - # ArrayUInt - def Value(self, j): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - a = self._tab.Vector(o) - return self._tab.Get( - flatbuffers.number_types.Uint32Flags, - a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4), - ) - return 0 - - # ArrayUInt - def ValueAsNumpy(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint32Flags, o) - return 0 - - # ArrayUInt - def ValueLength(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.VectorLen(o) - return 0 - - -def ArrayUIntStart(builder): - builder.StartObject(1) - - -def ArrayUIntAddValue(builder, value): - builder.PrependUOffsetTRelativeSlot( - 0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0 - ) - - -def ArrayUIntStartValueVector(builder, numElems): - return builder.StartVector(4, numElems, 4) - - -def ArrayUIntEnd(builder): - return builder.EndObject() diff --git a/python/src/streaming_data_types/fbschemas/logdata_f142/ArrayULong.py b/python/src/streaming_data_types/fbschemas/logdata_f142/ArrayULong.py deleted file mode 100644 index 6e29d22..0000000 --- a/python/src/streaming_data_types/fbschemas/logdata_f142/ArrayULong.py +++ /dev/null @@ -1,63 +0,0 @@ -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: - -import 
flatbuffers - - -class ArrayULong(object): - __slots__ = ["_tab"] - - @classmethod - def GetRootAsArrayULong(cls, buf, offset): - n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = ArrayULong() - x.Init(buf, n + offset) - return x - - # ArrayULong - def Init(self, buf, pos): - self._tab = flatbuffers.table.Table(buf, pos) - - # ArrayULong - def Value(self, j): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - a = self._tab.Vector(o) - return self._tab.Get( - flatbuffers.number_types.Uint64Flags, - a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8), - ) - return 0 - - # ArrayULong - def ValueAsNumpy(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint64Flags, o) - return 0 - - # ArrayULong - def ValueLength(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.VectorLen(o) - return 0 - - -def ArrayULongStart(builder): - builder.StartObject(1) - - -def ArrayULongAddValue(builder, value): - builder.PrependUOffsetTRelativeSlot( - 0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0 - ) - - -def ArrayULongStartValueVector(builder, numElems): - return builder.StartVector(8, numElems, 8) - - -def ArrayULongEnd(builder): - return builder.EndObject() diff --git a/python/src/streaming_data_types/fbschemas/logdata_f142/ArrayUShort.py b/python/src/streaming_data_types/fbschemas/logdata_f142/ArrayUShort.py deleted file mode 100644 index 7e182c0..0000000 --- a/python/src/streaming_data_types/fbschemas/logdata_f142/ArrayUShort.py +++ /dev/null @@ -1,63 +0,0 @@ -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: - -import flatbuffers - - -class ArrayUShort(object): - __slots__ = ["_tab"] - - @classmethod - def GetRootAsArrayUShort(cls, buf, offset): - n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, 
offset) - x = ArrayUShort() - x.Init(buf, n + offset) - return x - - # ArrayUShort - def Init(self, buf, pos): - self._tab = flatbuffers.table.Table(buf, pos) - - # ArrayUShort - def Value(self, j): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - a = self._tab.Vector(o) - return self._tab.Get( - flatbuffers.number_types.Uint16Flags, - a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 2), - ) - return 0 - - # ArrayUShort - def ValueAsNumpy(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint16Flags, o) - return 0 - - # ArrayUShort - def ValueLength(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.VectorLen(o) - return 0 - - -def ArrayUShortStart(builder): - builder.StartObject(1) - - -def ArrayUShortAddValue(builder, value): - builder.PrependUOffsetTRelativeSlot( - 0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0 - ) - - -def ArrayUShortStartValueVector(builder, numElems): - return builder.StartVector(2, numElems, 2) - - -def ArrayUShortEnd(builder): - return builder.EndObject() diff --git a/python/src/streaming_data_types/fbschemas/logdata_f142/Byte.py b/python/src/streaming_data_types/fbschemas/logdata_f142/Byte.py deleted file mode 100644 index 85d5095..0000000 --- a/python/src/streaming_data_types/fbschemas/logdata_f142/Byte.py +++ /dev/null @@ -1,39 +0,0 @@ -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: - -import flatbuffers - - -class Byte(object): - __slots__ = ["_tab"] - - @classmethod - def GetRootAsByte(cls, buf, offset): - n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = Byte() - x.Init(buf, n + offset) - return x - - # Byte - def Init(self, buf, pos): - self._tab = flatbuffers.table.Table(buf, pos) - - # Byte - def Value(self): - o = 
flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) - return 0 - - -def ByteStart(builder): - builder.StartObject(1) - - -def ByteAddValue(builder, value): - builder.PrependInt8Slot(0, value, 0) - - -def ByteEnd(builder): - return builder.EndObject() diff --git a/python/src/streaming_data_types/fbschemas/logdata_f142/Double.py b/python/src/streaming_data_types/fbschemas/logdata_f142/Double.py deleted file mode 100644 index 3c99568..0000000 --- a/python/src/streaming_data_types/fbschemas/logdata_f142/Double.py +++ /dev/null @@ -1,41 +0,0 @@ -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: - -import flatbuffers - - -class Double(object): - __slots__ = ["_tab"] - - @classmethod - def GetRootAsDouble(cls, buf, offset): - n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = Double() - x.Init(buf, n + offset) - return x - - # Double - def Init(self, buf, pos): - self._tab = flatbuffers.table.Table(buf, pos) - - # Double - def Value(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.Get( - flatbuffers.number_types.Float64Flags, o + self._tab.Pos - ) - return 0.0 - - -def DoubleStart(builder): - builder.StartObject(1) - - -def DoubleAddValue(builder, value): - builder.PrependFloat64Slot(0, value, 0.0) - - -def DoubleEnd(builder): - return builder.EndObject() diff --git a/python/src/streaming_data_types/fbschemas/logdata_f142/Float.py b/python/src/streaming_data_types/fbschemas/logdata_f142/Float.py deleted file mode 100644 index a83926f..0000000 --- a/python/src/streaming_data_types/fbschemas/logdata_f142/Float.py +++ /dev/null @@ -1,41 +0,0 @@ -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: - -import flatbuffers - - -class Float(object): - __slots__ = ["_tab"] - - @classmethod - def GetRootAsFloat(cls, buf, 
offset): - n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = Float() - x.Init(buf, n + offset) - return x - - # Float - def Init(self, buf, pos): - self._tab = flatbuffers.table.Table(buf, pos) - - # Float - def Value(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.Get( - flatbuffers.number_types.Float32Flags, o + self._tab.Pos - ) - return 0.0 - - -def FloatStart(builder): - builder.StartObject(1) - - -def FloatAddValue(builder, value): - builder.PrependFloat32Slot(0, value, 0.0) - - -def FloatEnd(builder): - return builder.EndObject() diff --git a/python/src/streaming_data_types/fbschemas/logdata_f142/Int.py b/python/src/streaming_data_types/fbschemas/logdata_f142/Int.py deleted file mode 100644 index b2ca0b3..0000000 --- a/python/src/streaming_data_types/fbschemas/logdata_f142/Int.py +++ /dev/null @@ -1,39 +0,0 @@ -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: - -import flatbuffers - - -class Int(object): - __slots__ = ["_tab"] - - @classmethod - def GetRootAsInt(cls, buf, offset): - n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = Int() - x.Init(buf, n + offset) - return x - - # Int - def Init(self, buf, pos): - self._tab = flatbuffers.table.Table(buf, pos) - - # Int - def Value(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) - return 0 - - -def IntStart(builder): - builder.StartObject(1) - - -def IntAddValue(builder, value): - builder.PrependInt32Slot(0, value, 0) - - -def IntEnd(builder): - return builder.EndObject() diff --git a/python/src/streaming_data_types/fbschemas/logdata_f142/LogData.py b/python/src/streaming_data_types/fbschemas/logdata_f142/LogData.py deleted file mode 100644 index 8c21ed4..0000000 --- a/python/src/streaming_data_types/fbschemas/logdata_f142/LogData.py +++ 
/dev/null @@ -1,108 +0,0 @@ -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: - -import flatbuffers - - -class LogData(object): - __slots__ = ["_tab"] - - @classmethod - def GetRootAsLogData(cls, buf, offset): - n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = LogData() - x.Init(buf, n + offset) - return x - - # LogData - def Init(self, buf, pos): - self._tab = flatbuffers.table.Table(buf, pos) - - # LogData - def SourceName(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.String(o + self._tab.Pos) - return None - - # LogData - def ValueType(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) - if o != 0: - return self._tab.Get(flatbuffers.number_types.Uint8Flags, o + self._tab.Pos) - return 0 - - # LogData - def Value(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) - if o != 0: - from flatbuffers.table import Table - - obj = Table(bytearray(), 0) - self._tab.Union(obj, o) - return obj - return None - - # LogData - def Timestamp(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) - if o != 0: - return self._tab.Get( - flatbuffers.number_types.Uint64Flags, o + self._tab.Pos - ) - return 0 - - # LogData - def Status(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) - if o != 0: - return self._tab.Get( - flatbuffers.number_types.Uint16Flags, o + self._tab.Pos - ) - return 22 - - # LogData - def Severity(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) - if o != 0: - return self._tab.Get( - flatbuffers.number_types.Uint16Flags, o + self._tab.Pos - ) - return 4 - - -def LogDataStart(builder): - builder.StartObject(6) - - -def LogDataAddSourceName(builder, sourceName): - builder.PrependUOffsetTRelativeSlot( - 0, flatbuffers.number_types.UOffsetTFlags.py_type(sourceName), 0 - ) - - -def 
LogDataAddValueType(builder, valueType): - builder.PrependUint8Slot(1, valueType, 0) - - -def LogDataAddValue(builder, value): - builder.PrependUOffsetTRelativeSlot( - 2, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0 - ) - - -def LogDataAddTimestamp(builder, timestamp): - builder.PrependUint64Slot(3, timestamp, 0) - - -def LogDataAddStatus(builder, status): - builder.PrependUint16Slot(4, status, 22) - - -def LogDataAddSeverity(builder, severity): - builder.PrependUint16Slot(5, severity, 4) - - -def LogDataEnd(builder): - return builder.EndObject() diff --git a/python/src/streaming_data_types/fbschemas/logdata_f142/Long.py b/python/src/streaming_data_types/fbschemas/logdata_f142/Long.py deleted file mode 100644 index 25d945d..0000000 --- a/python/src/streaming_data_types/fbschemas/logdata_f142/Long.py +++ /dev/null @@ -1,39 +0,0 @@ -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: - -import flatbuffers - - -class Long(object): - __slots__ = ["_tab"] - - @classmethod - def GetRootAsLong(cls, buf, offset): - n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = Long() - x.Init(buf, n + offset) - return x - - # Long - def Init(self, buf, pos): - self._tab = flatbuffers.table.Table(buf, pos) - - # Long - def Value(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.Get(flatbuffers.number_types.Int64Flags, o + self._tab.Pos) - return 0 - - -def LongStart(builder): - builder.StartObject(1) - - -def LongAddValue(builder, value): - builder.PrependInt64Slot(0, value, 0) - - -def LongEnd(builder): - return builder.EndObject() diff --git a/python/src/streaming_data_types/fbschemas/logdata_f142/Short.py b/python/src/streaming_data_types/fbschemas/logdata_f142/Short.py deleted file mode 100644 index abf883a..0000000 --- a/python/src/streaming_data_types/fbschemas/logdata_f142/Short.py +++ /dev/null @@ -1,39 +0,0 @@ -# automatically generated by the 
FlatBuffers compiler, do not modify - -# namespace: - -import flatbuffers - - -class Short(object): - __slots__ = ["_tab"] - - @classmethod - def GetRootAsShort(cls, buf, offset): - n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = Short() - x.Init(buf, n + offset) - return x - - # Short - def Init(self, buf, pos): - self._tab = flatbuffers.table.Table(buf, pos) - - # Short - def Value(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.Get(flatbuffers.number_types.Int16Flags, o + self._tab.Pos) - return 0 - - -def ShortStart(builder): - builder.StartObject(1) - - -def ShortAddValue(builder, value): - builder.PrependInt16Slot(0, value, 0) - - -def ShortEnd(builder): - return builder.EndObject() diff --git a/python/src/streaming_data_types/fbschemas/logdata_f142/String.py b/python/src/streaming_data_types/fbschemas/logdata_f142/String.py deleted file mode 100644 index 8e3751e..0000000 --- a/python/src/streaming_data_types/fbschemas/logdata_f142/String.py +++ /dev/null @@ -1,41 +0,0 @@ -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: - -import flatbuffers - - -class String(object): - __slots__ = ["_tab"] - - @classmethod - def GetRootAsString(cls, buf, offset): - n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = String() - x.Init(buf, n + offset) - return x - - # String - def Init(self, buf, pos): - self._tab = flatbuffers.table.Table(buf, pos) - - # String - def Value(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.String(o + self._tab.Pos) - return None - - -def StringStart(builder): - builder.StartObject(1) - - -def StringAddValue(builder, value): - builder.PrependUOffsetTRelativeSlot( - 0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0 - ) - - -def StringEnd(builder): - return builder.EndObject() diff --git 
a/python/src/streaming_data_types/fbschemas/logdata_f142/UByte.py b/python/src/streaming_data_types/fbschemas/logdata_f142/UByte.py deleted file mode 100644 index 09e92b2..0000000 --- a/python/src/streaming_data_types/fbschemas/logdata_f142/UByte.py +++ /dev/null @@ -1,39 +0,0 @@ -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: - -import flatbuffers - - -class UByte(object): - __slots__ = ["_tab"] - - @classmethod - def GetRootAsUByte(cls, buf, offset): - n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = UByte() - x.Init(buf, n + offset) - return x - - # UByte - def Init(self, buf, pos): - self._tab = flatbuffers.table.Table(buf, pos) - - # UByte - def Value(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.Get(flatbuffers.number_types.Uint8Flags, o + self._tab.Pos) - return 0 - - -def UByteStart(builder): - builder.StartObject(1) - - -def UByteAddValue(builder, value): - builder.PrependUint8Slot(0, value, 0) - - -def UByteEnd(builder): - return builder.EndObject() diff --git a/python/src/streaming_data_types/fbschemas/logdata_f142/UInt.py b/python/src/streaming_data_types/fbschemas/logdata_f142/UInt.py deleted file mode 100644 index e905742..0000000 --- a/python/src/streaming_data_types/fbschemas/logdata_f142/UInt.py +++ /dev/null @@ -1,41 +0,0 @@ -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: - -import flatbuffers - - -class UInt(object): - __slots__ = ["_tab"] - - @classmethod - def GetRootAsUInt(cls, buf, offset): - n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = UInt() - x.Init(buf, n + offset) - return x - - # UInt - def Init(self, buf, pos): - self._tab = flatbuffers.table.Table(buf, pos) - - # UInt - def Value(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.Get( - flatbuffers.number_types.Uint32Flags, o + 
self._tab.Pos - ) - return 0 - - -def UIntStart(builder): - builder.StartObject(1) - - -def UIntAddValue(builder, value): - builder.PrependUint32Slot(0, value, 0) - - -def UIntEnd(builder): - return builder.EndObject() diff --git a/python/src/streaming_data_types/fbschemas/logdata_f142/ULong.py b/python/src/streaming_data_types/fbschemas/logdata_f142/ULong.py deleted file mode 100644 index 295d1e2..0000000 --- a/python/src/streaming_data_types/fbschemas/logdata_f142/ULong.py +++ /dev/null @@ -1,41 +0,0 @@ -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: - -import flatbuffers - - -class ULong(object): - __slots__ = ["_tab"] - - @classmethod - def GetRootAsULong(cls, buf, offset): - n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = ULong() - x.Init(buf, n + offset) - return x - - # ULong - def Init(self, buf, pos): - self._tab = flatbuffers.table.Table(buf, pos) - - # ULong - def Value(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.Get( - flatbuffers.number_types.Uint64Flags, o + self._tab.Pos - ) - return 0 - - -def ULongStart(builder): - builder.StartObject(1) - - -def ULongAddValue(builder, value): - builder.PrependUint64Slot(0, value, 0) - - -def ULongEnd(builder): - return builder.EndObject() diff --git a/python/src/streaming_data_types/fbschemas/logdata_f142/UShort.py b/python/src/streaming_data_types/fbschemas/logdata_f142/UShort.py deleted file mode 100644 index a74e208..0000000 --- a/python/src/streaming_data_types/fbschemas/logdata_f142/UShort.py +++ /dev/null @@ -1,41 +0,0 @@ -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: - -import flatbuffers - - -class UShort(object): - __slots__ = ["_tab"] - - @classmethod - def GetRootAsUShort(cls, buf, offset): - n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = UShort() - x.Init(buf, n + offset) - return x - - # UShort - def 
Init(self, buf, pos): - self._tab = flatbuffers.table.Table(buf, pos) - - # UShort - def Value(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.Get( - flatbuffers.number_types.Uint16Flags, o + self._tab.Pos - ) - return 0 - - -def UShortStart(builder): - builder.StartObject(1) - - -def UShortAddValue(builder, value): - builder.PrependUint16Slot(0, value, 0) - - -def UShortEnd(builder): - return builder.EndObject() diff --git a/python/src/streaming_data_types/fbschemas/logdata_f142/Value.py b/python/src/streaming_data_types/fbschemas/logdata_f142/Value.py deleted file mode 100644 index 9cefe8e..0000000 --- a/python/src/streaming_data_types/fbschemas/logdata_f142/Value.py +++ /dev/null @@ -1,29 +0,0 @@ -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: - - -class Value(object): - NONE = 0 - Byte = 1 - UByte = 2 - Short = 3 - UShort = 4 - Int = 5 - UInt = 6 - Long = 7 - ULong = 8 - Float = 9 - Double = 10 - ArrayByte = 11 - ArrayUByte = 12 - ArrayShort = 13 - ArrayUShort = 14 - ArrayInt = 15 - ArrayUInt = 16 - ArrayLong = 17 - ArrayULong = 18 - ArrayFloat = 19 - ArrayDouble = 20 - String = 21 - ArrayString = 22 diff --git a/python/src/streaming_data_types/fbschemas/logdata_f142/__init__.py b/python/src/streaming_data_types/fbschemas/logdata_f142/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/python/src/streaming_data_types/fbschemas/nicos_cache_ns10/CacheEntry.py b/python/src/streaming_data_types/fbschemas/nicos_cache_ns10/CacheEntry.py deleted file mode 100644 index 36d2ba9..0000000 --- a/python/src/streaming_data_types/fbschemas/nicos_cache_ns10/CacheEntry.py +++ /dev/null @@ -1,94 +0,0 @@ -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: - -from __future__ import absolute_import, division, print_function - -import flatbuffers - - -# /// pylint: skip-file -class CacheEntry(object): - __slots__ = ["_tab"] - - 
@classmethod - def GetRootAsCacheEntry(cls, buf, offset): - n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = CacheEntry() - x.Init(buf, n + offset) - return x - - # CacheEntry - def Init(self, buf, pos): - self._tab = flatbuffers.table.Table(buf, pos) - - # CacheEntry - def Key(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.String(o + self._tab.Pos) - return "" - - # CacheEntry - def Time(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) - if o != 0: - return self._tab.Get( - flatbuffers.number_types.Float64Flags, o + self._tab.Pos - ) - return 0.0 - - # CacheEntry - def Ttl(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) - if o != 0: - return self._tab.Get( - flatbuffers.number_types.Float64Flags, o + self._tab.Pos - ) - return 0.0 - - # CacheEntry - def Expired(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) - if o != 0: - return self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos) - return 0 - - # CacheEntry - def Value(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) - if o != 0: - return self._tab.String(o + self._tab.Pos) - return "" - - -def CacheEntryStart(builder): - builder.StartObject(5) - - -def CacheEntryAddKey(builder, key): - builder.PrependUOffsetTRelativeSlot( - 0, flatbuffers.number_types.UOffsetTFlags.py_type(key), 0 - ) - - -def CacheEntryAddTime(builder, time): - builder.PrependFloat64Slot(1, time, 0.0) - - -def CacheEntryAddTtl(builder, ttl): - builder.PrependFloat64Slot(2, ttl, 0.0) - - -def CacheEntryAddExpired(builder, expired): - builder.PrependBoolSlot(3, expired, 0) - - -def CacheEntryAddValue(builder, value): - builder.PrependUOffsetTRelativeSlot( - 4, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0 - ) - - -def CacheEntryEnd(builder): - return builder.EndObject() diff --git 
a/python/src/streaming_data_types/fbschemas/nicos_cache_ns10/__init__.py b/python/src/streaming_data_types/fbschemas/nicos_cache_ns10/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/python/src/streaming_data_types/fbschemas/nmx_mo01/__init__.py b/python/src/streaming_data_types/fbschemas/nmx_mo01/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/python/src/streaming_data_types/fbschemas/readout_data_ar51/RawReadoutMessage.py b/python/src/streaming_data_types/fbschemas/readout_data_ar51/RawReadoutMessage.py deleted file mode 100644 index c91261c..0000000 --- a/python/src/streaming_data_types/fbschemas/readout_data_ar51/RawReadoutMessage.py +++ /dev/null @@ -1,130 +0,0 @@ -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: - -import flatbuffers -from flatbuffers.compat import import_numpy - -np = import_numpy() - - -class RawReadoutMessage(object): - __slots__ = ["_tab"] - - @classmethod - def GetRootAs(cls, buf, offset=0): - n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = RawReadoutMessage() - x.Init(buf, n + offset) - return x - - @classmethod - def GetRootAsRawReadoutMessage(cls, buf, offset=0): - """This method is deprecated. 
Please switch to GetRootAs.""" - return cls.GetRootAs(buf, offset) - - @classmethod - def RawReadoutMessageBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier( - buf, offset, b"\x61\x72\x35\x31", size_prefixed=size_prefixed - ) - - # RawReadoutMessage - def Init(self, buf, pos): - self._tab = flatbuffers.table.Table(buf, pos) - - # RawReadoutMessage - def SourceName(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.String(o + self._tab.Pos) - return None - - # RawReadoutMessage - def MessageId(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) - if o != 0: - return self._tab.Get(flatbuffers.number_types.Int64Flags, o + self._tab.Pos) - return 0 - - # RawReadoutMessage - def RawData(self, j): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) - if o != 0: - a = self._tab.Vector(o) - return self._tab.Get( - flatbuffers.number_types.Uint8Flags, - a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1), - ) - return 0 - - # RawReadoutMessage - def RawDataAsNumpy(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) - if o != 0: - return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint8Flags, o) - return 0 - - # RawReadoutMessage - def RawDataLength(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) - if o != 0: - return self._tab.VectorLen(o) - return 0 - - # RawReadoutMessage - def RawDataIsNone(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) - return o == 0 - - -def RawReadoutMessageStart(builder): - builder.StartObject(3) - - -def Start(builder): - RawReadoutMessageStart(builder) - - -def RawReadoutMessageAddSourceName(builder, sourceName): - builder.PrependUOffsetTRelativeSlot( - 0, flatbuffers.number_types.UOffsetTFlags.py_type(sourceName), 0 - ) - - -def AddSourceName(builder, sourceName): - 
RawReadoutMessageAddSourceName(builder, sourceName) - - -def RawReadoutMessageAddMessageId(builder, messageId): - builder.PrependInt64Slot(1, messageId, 0) - - -def AddMessageId(builder, messageId): - RawReadoutMessageAddMessageId(builder, messageId) - - -def RawReadoutMessageAddRawData(builder, rawData): - builder.PrependUOffsetTRelativeSlot( - 2, flatbuffers.number_types.UOffsetTFlags.py_type(rawData), 0 - ) - - -def AddRawData(builder, rawData): - RawReadoutMessageAddRawData(builder, rawData) - - -def RawReadoutMessageStartRawDataVector(builder, numElems): - return builder.StartVector(1, numElems, 1) - - -def StartRawDataVector(builder, numElems: int) -> int: - return RawReadoutMessageStartRawDataVector(builder, numElems) - - -def RawReadoutMessageEnd(builder): - return builder.EndObject() - - -def End(builder): - return RawReadoutMessageEnd(builder) diff --git a/python/src/streaming_data_types/fbschemas/readout_data_ar51/__init__.py b/python/src/streaming_data_types/fbschemas/readout_data_ar51/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/python/src/streaming_data_types/fbschemas/sample_environment_senv/Int16Array.py b/python/src/streaming_data_types/fbschemas/sample_environment_senv/Int16Array.py deleted file mode 100644 index 9edffdd..0000000 --- a/python/src/streaming_data_types/fbschemas/sample_environment_senv/Int16Array.py +++ /dev/null @@ -1,77 +0,0 @@ -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: - -import flatbuffers -from flatbuffers.compat import import_numpy - -np = import_numpy() - - -class Int16Array(object): - __slots__ = ["_tab"] - - @classmethod - def GetRootAsInt16Array(cls, buf, offset): - n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = Int16Array() - x.Init(buf, n + offset) - return x - - @classmethod - def Int16ArrayBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier( - buf, offset, 
b"\x73\x65\x6E\x76", size_prefixed=size_prefixed - ) - - # Int16Array - def Init(self, buf, pos): - self._tab = flatbuffers.table.Table(buf, pos) - - # Int16Array - def Value(self, j): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - a = self._tab.Vector(o) - return self._tab.Get( - flatbuffers.number_types.Int16Flags, - a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 2), - ) - return 0 - - # Int16Array - def ValueAsNumpy(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int16Flags, o) - return 0 - - # Int16Array - def ValueLength(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.VectorLen(o) - return 0 - - # Int16Array - def ValueIsNone(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - return o == 0 - - -def Int16ArrayStart(builder): - builder.StartObject(1) - - -def Int16ArrayAddValue(builder, value): - builder.PrependUOffsetTRelativeSlot( - 0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0 - ) - - -def Int16ArrayStartValueVector(builder, numElems): - return builder.StartVector(2, numElems, 2) - - -def Int16ArrayEnd(builder): - return builder.EndObject() diff --git a/python/src/streaming_data_types/fbschemas/sample_environment_senv/Int32Array.py b/python/src/streaming_data_types/fbschemas/sample_environment_senv/Int32Array.py deleted file mode 100644 index 1cfa5a9..0000000 --- a/python/src/streaming_data_types/fbschemas/sample_environment_senv/Int32Array.py +++ /dev/null @@ -1,77 +0,0 @@ -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: - -import flatbuffers -from flatbuffers.compat import import_numpy - -np = import_numpy() - - -class Int32Array(object): - __slots__ = ["_tab"] - - @classmethod - def GetRootAsInt32Array(cls, buf, offset): - n = 
flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = Int32Array() - x.Init(buf, n + offset) - return x - - @classmethod - def Int32ArrayBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier( - buf, offset, b"\x73\x65\x6E\x76", size_prefixed=size_prefixed - ) - - # Int32Array - def Init(self, buf, pos): - self._tab = flatbuffers.table.Table(buf, pos) - - # Int32Array - def Value(self, j): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - a = self._tab.Vector(o) - return self._tab.Get( - flatbuffers.number_types.Int32Flags, - a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4), - ) - return 0 - - # Int32Array - def ValueAsNumpy(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int32Flags, o) - return 0 - - # Int32Array - def ValueLength(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.VectorLen(o) - return 0 - - # Int32Array - def ValueIsNone(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - return o == 0 - - -def Int32ArrayStart(builder): - builder.StartObject(1) - - -def Int32ArrayAddValue(builder, value): - builder.PrependUOffsetTRelativeSlot( - 0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0 - ) - - -def Int32ArrayStartValueVector(builder, numElems): - return builder.StartVector(4, numElems, 4) - - -def Int32ArrayEnd(builder): - return builder.EndObject() diff --git a/python/src/streaming_data_types/fbschemas/sample_environment_senv/Int64Array.py b/python/src/streaming_data_types/fbschemas/sample_environment_senv/Int64Array.py deleted file mode 100644 index 9647205..0000000 --- a/python/src/streaming_data_types/fbschemas/sample_environment_senv/Int64Array.py +++ /dev/null @@ -1,77 +0,0 @@ -# automatically generated by the FlatBuffers 
compiler, do not modify - -# namespace: - -import flatbuffers -from flatbuffers.compat import import_numpy - -np = import_numpy() - - -class Int64Array(object): - __slots__ = ["_tab"] - - @classmethod - def GetRootAsInt64Array(cls, buf, offset): - n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = Int64Array() - x.Init(buf, n + offset) - return x - - @classmethod - def Int64ArrayBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier( - buf, offset, b"\x73\x65\x6E\x76", size_prefixed=size_prefixed - ) - - # Int64Array - def Init(self, buf, pos): - self._tab = flatbuffers.table.Table(buf, pos) - - # Int64Array - def Value(self, j): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - a = self._tab.Vector(o) - return self._tab.Get( - flatbuffers.number_types.Int64Flags, - a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8), - ) - return 0 - - # Int64Array - def ValueAsNumpy(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int64Flags, o) - return 0 - - # Int64Array - def ValueLength(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.VectorLen(o) - return 0 - - # Int64Array - def ValueIsNone(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - return o == 0 - - -def Int64ArrayStart(builder): - builder.StartObject(1) - - -def Int64ArrayAddValue(builder, value): - builder.PrependUOffsetTRelativeSlot( - 0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0 - ) - - -def Int64ArrayStartValueVector(builder, numElems): - return builder.StartVector(8, numElems, 8) - - -def Int64ArrayEnd(builder): - return builder.EndObject() diff --git a/python/src/streaming_data_types/fbschemas/sample_environment_senv/Int8Array.py 
b/python/src/streaming_data_types/fbschemas/sample_environment_senv/Int8Array.py deleted file mode 100644 index ec4a41f..0000000 --- a/python/src/streaming_data_types/fbschemas/sample_environment_senv/Int8Array.py +++ /dev/null @@ -1,77 +0,0 @@ -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: - -import flatbuffers -from flatbuffers.compat import import_numpy - -np = import_numpy() - - -class Int8Array(object): - __slots__ = ["_tab"] - - @classmethod - def GetRootAsInt8Array(cls, buf, offset): - n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = Int8Array() - x.Init(buf, n + offset) - return x - - @classmethod - def Int8ArrayBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier( - buf, offset, b"\x73\x65\x6E\x76", size_prefixed=size_prefixed - ) - - # Int8Array - def Init(self, buf, pos): - self._tab = flatbuffers.table.Table(buf, pos) - - # Int8Array - def Value(self, j): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - a = self._tab.Vector(o) - return self._tab.Get( - flatbuffers.number_types.Int8Flags, - a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1), - ) - return 0 - - # Int8Array - def ValueAsNumpy(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int8Flags, o) - return 0 - - # Int8Array - def ValueLength(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.VectorLen(o) - return 0 - - # Int8Array - def ValueIsNone(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - return o == 0 - - -def Int8ArrayStart(builder): - builder.StartObject(1) - - -def Int8ArrayAddValue(builder, value): - builder.PrependUOffsetTRelativeSlot( - 0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0 - ) - - -def 
Int8ArrayStartValueVector(builder, numElems): - return builder.StartVector(1, numElems, 1) - - -def Int8ArrayEnd(builder): - return builder.EndObject() diff --git a/python/src/streaming_data_types/fbschemas/sample_environment_senv/Location.py b/python/src/streaming_data_types/fbschemas/sample_environment_senv/Location.py deleted file mode 100644 index 7af8fcf..0000000 --- a/python/src/streaming_data_types/fbschemas/sample_environment_senv/Location.py +++ /dev/null @@ -1,10 +0,0 @@ -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: - - -class Location(object): - Unknown = 0 - Start = 1 - Middle = 2 - End = 3 diff --git a/python/src/streaming_data_types/fbschemas/sample_environment_senv/SampleEnvironmentData.py b/python/src/streaming_data_types/fbschemas/sample_environment_senv/SampleEnvironmentData.py deleted file mode 100644 index f67e817..0000000 --- a/python/src/streaming_data_types/fbschemas/sample_environment_senv/SampleEnvironmentData.py +++ /dev/null @@ -1,179 +0,0 @@ -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: - -import flatbuffers -from flatbuffers.compat import import_numpy - -np = import_numpy() - - -class SampleEnvironmentData(object): - __slots__ = ["_tab"] - - @classmethod - def GetRootAsSampleEnvironmentData(cls, buf, offset): - n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = SampleEnvironmentData() - x.Init(buf, n + offset) - return x - - @classmethod - def SampleEnvironmentDataBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier( - buf, offset, b"\x73\x65\x6E\x76", size_prefixed=size_prefixed - ) - - # SampleEnvironmentData - def Init(self, buf, pos): - self._tab = flatbuffers.table.Table(buf, pos) - - # SampleEnvironmentData - def Name(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.String(o + self._tab.Pos) - return None - - # 
SampleEnvironmentData - def Channel(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) - if o != 0: - return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) - return 0 - - # SampleEnvironmentData - def PacketTimestamp(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) - if o != 0: - return self._tab.Get( - flatbuffers.number_types.Uint64Flags, o + self._tab.Pos - ) - return 0 - - # SampleEnvironmentData - def TimeDelta(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) - if o != 0: - return self._tab.Get( - flatbuffers.number_types.Float64Flags, o + self._tab.Pos - ) - return 0.0 - - # SampleEnvironmentData - def TimestampLocation(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) - if o != 0: - return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) - return 0 - - # SampleEnvironmentData - def ValuesType(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) - if o != 0: - return self._tab.Get(flatbuffers.number_types.Uint8Flags, o + self._tab.Pos) - return 0 - - # SampleEnvironmentData - def Values(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16)) - if o != 0: - from flatbuffers.table import Table - - obj = Table(bytearray(), 0) - self._tab.Union(obj, o) - return obj - return None - - # SampleEnvironmentData - def Timestamps(self, j): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(18)) - if o != 0: - a = self._tab.Vector(o) - return self._tab.Get( - flatbuffers.number_types.Uint64Flags, - a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8), - ) - return 0 - - # SampleEnvironmentData - def TimestampsAsNumpy(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(18)) - if o != 0: - return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint64Flags, o) - return 0 - - # SampleEnvironmentData - 
def TimestampsLength(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(18)) - if o != 0: - return self._tab.VectorLen(o) - return 0 - - # SampleEnvironmentData - def TimestampsIsNone(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(18)) - return o == 0 - - # SampleEnvironmentData - def MessageCounter(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(20)) - if o != 0: - return self._tab.Get( - flatbuffers.number_types.Uint64Flags, o + self._tab.Pos - ) - return 0 - - -def SampleEnvironmentDataStart(builder): - builder.StartObject(9) - - -def SampleEnvironmentDataAddName(builder, Name): - builder.PrependUOffsetTRelativeSlot( - 0, flatbuffers.number_types.UOffsetTFlags.py_type(Name), 0 - ) - - -def SampleEnvironmentDataAddChannel(builder, Channel): - builder.PrependInt32Slot(1, Channel, 0) - - -def SampleEnvironmentDataAddPacketTimestamp(builder, PacketTimestamp): - builder.PrependUint64Slot(2, PacketTimestamp, 0) - - -def SampleEnvironmentDataAddTimeDelta(builder, TimeDelta): - builder.PrependFloat64Slot(3, TimeDelta, 0.0) - - -def SampleEnvironmentDataAddTimestampLocation(builder, TimestampLocation): - builder.PrependInt8Slot(4, TimestampLocation, 0) - - -def SampleEnvironmentDataAddValuesType(builder, ValuesType): - builder.PrependUint8Slot(5, ValuesType, 0) - - -def SampleEnvironmentDataAddValues(builder, Values): - builder.PrependUOffsetTRelativeSlot( - 6, flatbuffers.number_types.UOffsetTFlags.py_type(Values), 0 - ) - - -def SampleEnvironmentDataAddTimestamps(builder, Timestamps): - builder.PrependUOffsetTRelativeSlot( - 7, flatbuffers.number_types.UOffsetTFlags.py_type(Timestamps), 0 - ) - - -def SampleEnvironmentDataStartTimestampsVector(builder, numElems): - return builder.StartVector(8, numElems, 8) - - -def SampleEnvironmentDataAddMessageCounter(builder, MessageCounter): - builder.PrependUint64Slot(8, MessageCounter, 0) - - -def SampleEnvironmentDataEnd(builder): - return 
builder.EndObject() diff --git a/python/src/streaming_data_types/fbschemas/sample_environment_senv/UInt16Array.py b/python/src/streaming_data_types/fbschemas/sample_environment_senv/UInt16Array.py deleted file mode 100644 index 633ed7c..0000000 --- a/python/src/streaming_data_types/fbschemas/sample_environment_senv/UInt16Array.py +++ /dev/null @@ -1,77 +0,0 @@ -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: - -import flatbuffers -from flatbuffers.compat import import_numpy - -np = import_numpy() - - -class UInt16Array(object): - __slots__ = ["_tab"] - - @classmethod - def GetRootAsUInt16Array(cls, buf, offset): - n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = UInt16Array() - x.Init(buf, n + offset) - return x - - @classmethod - def UInt16ArrayBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier( - buf, offset, b"\x73\x65\x6E\x76", size_prefixed=size_prefixed - ) - - # UInt16Array - def Init(self, buf, pos): - self._tab = flatbuffers.table.Table(buf, pos) - - # UInt16Array - def Value(self, j): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - a = self._tab.Vector(o) - return self._tab.Get( - flatbuffers.number_types.Uint16Flags, - a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 2), - ) - return 0 - - # UInt16Array - def ValueAsNumpy(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint16Flags, o) - return 0 - - # UInt16Array - def ValueLength(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.VectorLen(o) - return 0 - - # UInt16Array - def ValueIsNone(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - return o == 0 - - -def UInt16ArrayStart(builder): - builder.StartObject(1) - - -def 
UInt16ArrayAddValue(builder, value): - builder.PrependUOffsetTRelativeSlot( - 0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0 - ) - - -def UInt16ArrayStartValueVector(builder, numElems): - return builder.StartVector(2, numElems, 2) - - -def UInt16ArrayEnd(builder): - return builder.EndObject() diff --git a/python/src/streaming_data_types/fbschemas/sample_environment_senv/UInt32Array.py b/python/src/streaming_data_types/fbschemas/sample_environment_senv/UInt32Array.py deleted file mode 100644 index d62876f..0000000 --- a/python/src/streaming_data_types/fbschemas/sample_environment_senv/UInt32Array.py +++ /dev/null @@ -1,77 +0,0 @@ -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: - -import flatbuffers -from flatbuffers.compat import import_numpy - -np = import_numpy() - - -class UInt32Array(object): - __slots__ = ["_tab"] - - @classmethod - def GetRootAsUInt32Array(cls, buf, offset): - n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = UInt32Array() - x.Init(buf, n + offset) - return x - - @classmethod - def UInt32ArrayBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier( - buf, offset, b"\x73\x65\x6E\x76", size_prefixed=size_prefixed - ) - - # UInt32Array - def Init(self, buf, pos): - self._tab = flatbuffers.table.Table(buf, pos) - - # UInt32Array - def Value(self, j): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - a = self._tab.Vector(o) - return self._tab.Get( - flatbuffers.number_types.Uint32Flags, - a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4), - ) - return 0 - - # UInt32Array - def ValueAsNumpy(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint32Flags, o) - return 0 - - # UInt32Array - def ValueLength(self): - o = 
flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.VectorLen(o) - return 0 - - # UInt32Array - def ValueIsNone(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - return o == 0 - - -def UInt32ArrayStart(builder): - builder.StartObject(1) - - -def UInt32ArrayAddValue(builder, value): - builder.PrependUOffsetTRelativeSlot( - 0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0 - ) - - -def UInt32ArrayStartValueVector(builder, numElems): - return builder.StartVector(4, numElems, 4) - - -def UInt32ArrayEnd(builder): - return builder.EndObject() diff --git a/python/src/streaming_data_types/fbschemas/sample_environment_senv/UInt64Array.py b/python/src/streaming_data_types/fbschemas/sample_environment_senv/UInt64Array.py deleted file mode 100644 index 7fc6498..0000000 --- a/python/src/streaming_data_types/fbschemas/sample_environment_senv/UInt64Array.py +++ /dev/null @@ -1,77 +0,0 @@ -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: - -import flatbuffers -from flatbuffers.compat import import_numpy - -np = import_numpy() - - -class UInt64Array(object): - __slots__ = ["_tab"] - - @classmethod - def GetRootAsUInt64Array(cls, buf, offset): - n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = UInt64Array() - x.Init(buf, n + offset) - return x - - @classmethod - def UInt64ArrayBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier( - buf, offset, b"\x73\x65\x6E\x76", size_prefixed=size_prefixed - ) - - # UInt64Array - def Init(self, buf, pos): - self._tab = flatbuffers.table.Table(buf, pos) - - # UInt64Array - def Value(self, j): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - a = self._tab.Vector(o) - return self._tab.Get( - flatbuffers.number_types.Uint64Flags, - a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8), - ) - return 0 - - # 
UInt64Array - def ValueAsNumpy(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint64Flags, o) - return 0 - - # UInt64Array - def ValueLength(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.VectorLen(o) - return 0 - - # UInt64Array - def ValueIsNone(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - return o == 0 - - -def UInt64ArrayStart(builder): - builder.StartObject(1) - - -def UInt64ArrayAddValue(builder, value): - builder.PrependUOffsetTRelativeSlot( - 0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0 - ) - - -def UInt64ArrayStartValueVector(builder, numElems): - return builder.StartVector(8, numElems, 8) - - -def UInt64ArrayEnd(builder): - return builder.EndObject() diff --git a/python/src/streaming_data_types/fbschemas/sample_environment_senv/UInt8Array.py b/python/src/streaming_data_types/fbschemas/sample_environment_senv/UInt8Array.py deleted file mode 100644 index 702a7d7..0000000 --- a/python/src/streaming_data_types/fbschemas/sample_environment_senv/UInt8Array.py +++ /dev/null @@ -1,77 +0,0 @@ -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: - -import flatbuffers -from flatbuffers.compat import import_numpy - -np = import_numpy() - - -class UInt8Array(object): - __slots__ = ["_tab"] - - @classmethod - def GetRootAsUInt8Array(cls, buf, offset): - n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = UInt8Array() - x.Init(buf, n + offset) - return x - - @classmethod - def UInt8ArrayBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier( - buf, offset, b"\x73\x65\x6E\x76", size_prefixed=size_prefixed - ) - - # UInt8Array - def Init(self, buf, pos): - self._tab = flatbuffers.table.Table(buf, pos) - - # UInt8Array - def Value(self, j): - o = 
flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - a = self._tab.Vector(o) - return self._tab.Get( - flatbuffers.number_types.Uint8Flags, - a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1), - ) - return 0 - - # UInt8Array - def ValueAsNumpy(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint8Flags, o) - return 0 - - # UInt8Array - def ValueLength(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.VectorLen(o) - return 0 - - # UInt8Array - def ValueIsNone(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - return o == 0 - - -def UInt8ArrayStart(builder): - builder.StartObject(1) - - -def UInt8ArrayAddValue(builder, value): - builder.PrependUOffsetTRelativeSlot( - 0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0 - ) - - -def UInt8ArrayStartValueVector(builder, numElems): - return builder.StartVector(1, numElems, 1) - - -def UInt8ArrayEnd(builder): - return builder.EndObject() diff --git a/python/src/streaming_data_types/fbschemas/sample_environment_senv/ValueUnion.py b/python/src/streaming_data_types/fbschemas/sample_environment_senv/ValueUnion.py deleted file mode 100644 index 50e6a2b..0000000 --- a/python/src/streaming_data_types/fbschemas/sample_environment_senv/ValueUnion.py +++ /dev/null @@ -1,15 +0,0 @@ -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: - - -class ValueUnion(object): - NONE = 0 - Int8Array = 1 - UInt8Array = 2 - Int16Array = 3 - UInt16Array = 4 - Int32Array = 5 - UInt32Array = 6 - Int64Array = 7 - UInt64Array = 8 diff --git a/python/src/streaming_data_types/fbschemas/sample_environment_senv/__init__.py b/python/src/streaming_data_types/fbschemas/sample_environment_senv/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git 
a/python/src/streaming_data_types/fbschemas/timestamps_tdct/__init__.py b/python/src/streaming_data_types/fbschemas/timestamps_tdct/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/python/src/streaming_data_types/fbschemas/timestamps_tdct/timestamp.py b/python/src/streaming_data_types/fbschemas/timestamps_tdct/timestamp.py deleted file mode 100644 index 908a5bb..0000000 --- a/python/src/streaming_data_types/fbschemas/timestamps_tdct/timestamp.py +++ /dev/null @@ -1,89 +0,0 @@ -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: - -import flatbuffers - - -class timestamp(object): - __slots__ = ["_tab"] - - @classmethod - def GetRootAstimestamp(cls, buf, offset): - n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = timestamp() - x.Init(buf, n + offset) - return x - - # timestamp - def Init(self, buf, pos): - self._tab = flatbuffers.table.Table(buf, pos) - - # timestamp - def Name(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.String(o + self._tab.Pos) - return None - - # timestamp - def Timestamps(self, j): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) - if o != 0: - a = self._tab.Vector(o) - return self._tab.Get( - flatbuffers.number_types.Uint64Flags, - a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8), - ) - return 0 - - # timestamp - def TimestampsAsNumpy(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) - if o != 0: - return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint64Flags, o) - return 0 - - # timestamp - def TimestampsLength(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) - if o != 0: - return self._tab.VectorLen(o) - return 0 - - # timestamp - def SequenceCounter(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) - if o != 0: - return self._tab.Get( - 
flatbuffers.number_types.Uint64Flags, o + self._tab.Pos - ) - return 0 - - -def timestampStart(builder): - builder.StartObject(3) - - -def timestampAddName(builder, name): - builder.PrependUOffsetTRelativeSlot( - 0, flatbuffers.number_types.UOffsetTFlags.py_type(name), 0 - ) - - -def timestampAddTimestamps(builder, timestamps): - builder.PrependUOffsetTRelativeSlot( - 1, flatbuffers.number_types.UOffsetTFlags.py_type(timestamps), 0 - ) - - -def timestampStartTimestampsVector(builder, numElems): - return builder.StartVector(8, numElems, 8) - - -def timestampAddSequenceCounter(builder, sequenceCounter): - builder.PrependUint64Slot(2, sequenceCounter, 0) - - -def timestampEnd(builder): - return builder.EndObject() diff --git a/python/src/streaming_data_types/forwarder_config_update_rf5k.py b/python/src/streaming_data_types/forwarder_config_update_rf5k.py deleted file mode 100644 index 9638102..0000000 --- a/python/src/streaming_data_types/forwarder_config_update_rf5k.py +++ /dev/null @@ -1,112 +0,0 @@ -from collections import namedtuple -from typing import List, Union - -import flatbuffers -from flatbuffers.packer import struct as flatbuffer_struct - -from streaming_data_types.fbschemas.forwarder_config_update_rf5k import ( - ConfigUpdate, - Protocol, - Stream, - UpdateType, -) -from streaming_data_types.utils import check_schema_identifier - -FILE_IDENTIFIER = b"rf5k" - -ConfigurationUpdate = namedtuple("ConfigurationUpdate", ("config_change", "streams")) - -StreamInfo = namedtuple("StreamInfo", ("channel", "schema", "topic", "protocol")) - - -def deserialise_rf5k(buffer: Union[bytearray, bytes]) -> ConfigurationUpdate: - """ - Deserialise FlatBuffer rf5k. - - :param buffer: The FlatBuffers buffer. - :return: The deserialised data. 
- """ - check_schema_identifier(buffer, FILE_IDENTIFIER) - - config_message = ConfigUpdate.ConfigUpdate.GetRootAsConfigUpdate(buffer, 0) - - streams = [] - try: - for i in range(config_message.StreamsLength()): - stream_message = config_message.Streams(i) - streams.append( - StreamInfo( - stream_message.Channel().decode("utf-8") - if stream_message.Channel() - else "", - stream_message.Schema().decode("utf-8") - if stream_message.Schema() - else "", - stream_message.Topic().decode("utf-8") - if stream_message.Topic() - else "", - stream_message.Protocol(), - ) - ) - except flatbuffer_struct.error: - pass # No streams in buffer - - return ConfigurationUpdate(config_message.ConfigChange(), streams) - - -def serialise_stream( - builder: flatbuffers.Builder, - protocol: Protocol, - channel_offset: int, - schema_offset: int, - topic_offset: int, -) -> int: - Stream.StreamStart(builder) - Stream.StreamAddProtocol(builder, protocol) - Stream.StreamAddTopic(builder, topic_offset) - Stream.StreamAddSchema(builder, schema_offset) - Stream.StreamAddChannel(builder, channel_offset) - return Stream.StreamEnd(builder) - - -def serialise_rf5k(config_change: UpdateType, streams: List[StreamInfo]) -> bytes: - """ - Serialise config update message as an rf5k FlatBuffers message. 
- - :param config_change: - :param streams: channel, schema and output topic configurations - :return: - """ - builder = flatbuffers.Builder(1024) - builder.ForceDefaults(True) - - if streams: - # We have to use multiple loops/list comprehensions here because we cannot create strings after we have - # called StreamStart and cannot create streams after we have called StartVector - stream_field_offsets = [ - ( - builder.CreateString(stream.channel), - builder.CreateString(stream.schema), - builder.CreateString(stream.topic), - ) - for stream in streams - ] - stream_offsets = [ - serialise_stream(builder, stream.protocol, *stream_fields) - for stream, stream_fields in zip(streams, stream_field_offsets) - ] - - ConfigUpdate.ConfigUpdateStartStreamsVector(builder, len(streams)) - for stream_offset in stream_offsets: - builder.PrependUOffsetTRelative(stream_offset) - streams_offset = builder.EndVector() - - # Build the actual buffer - ConfigUpdate.ConfigUpdateStart(builder) - if streams: - ConfigUpdate.ConfigUpdateAddStreams(builder, streams_offset) - ConfigUpdate.ConfigUpdateAddConfigChange(builder, config_change) - data = ConfigUpdate.ConfigUpdateEnd(builder) - - builder.Finish(data, file_identifier=FILE_IDENTIFIER) - return bytes(builder.Output()) diff --git a/python/src/streaming_data_types/histogram_hs00.py b/python/src/streaming_data_types/histogram_hs00.py deleted file mode 100644 index 0121bbd..0000000 --- a/python/src/streaming_data_types/histogram_hs00.py +++ /dev/null @@ -1,231 +0,0 @@ -import flatbuffers -import numpy - -import streaming_data_types.fbschemas.histogram_hs00.ArrayDouble as ArrayDouble -import streaming_data_types.fbschemas.histogram_hs00.ArrayFloat as ArrayFloat -import streaming_data_types.fbschemas.histogram_hs00.ArrayUInt as ArrayUInt -import streaming_data_types.fbschemas.histogram_hs00.ArrayULong as ArrayULong -import streaming_data_types.fbschemas.histogram_hs00.DimensionMetaData as DimensionMetaData -import 
streaming_data_types.fbschemas.histogram_hs00.EventHistogram as EventHistogram -from streaming_data_types.fbschemas.histogram_hs00.Array import Array -from streaming_data_types.utils import check_schema_identifier - -FILE_IDENTIFIER = b"hs00" - - -_array_for_type = { - Array.ArrayUInt: ArrayUInt.ArrayUInt(), - Array.ArrayULong: ArrayULong.ArrayULong(), - Array.ArrayFloat: ArrayFloat.ArrayFloat(), -} - - -def _create_array_object_for_type(array_type): - return _array_for_type.get(array_type, ArrayDouble.ArrayDouble()) - - -def deserialise_hs00(buffer): - """ - Deserialise flatbuffer hs10 into a histogram. - - :param buffer: - :return: dict of histogram information - """ - check_schema_identifier(buffer, FILE_IDENTIFIER) - event_hist = EventHistogram.EventHistogram.GetRootAsEventHistogram(buffer, 0) - - dims = [] - for i in range(event_hist.DimMetadataLength()): - bins_fb = _create_array_object_for_type( - event_hist.DimMetadata(i).BinBoundariesType() - ) - - # Get bins - bins_offset = event_hist.DimMetadata(i).BinBoundaries() - bins_fb.Init(bins_offset.Bytes, bins_offset.Pos) - bin_boundaries = bins_fb.ValueAsNumpy() - - hist_info = { - "length": event_hist.DimMetadata(i).Length(), - "bin_boundaries": bin_boundaries, - "unit": event_hist.DimMetadata(i).Unit().decode("utf-8") - if event_hist.DimMetadata(i).Unit() - else "", - "label": event_hist.DimMetadata(i).Label().decode("utf-8") - if event_hist.DimMetadata(i).Label() - else "", - } - dims.append(hist_info) - - metadata_timestamp = event_hist.LastMetadataTimestamp() - - data_fb = _create_array_object_for_type(event_hist.DataType()) - data_offset = event_hist.Data() - data_fb.Init(data_offset.Bytes, data_offset.Pos) - shape = event_hist.CurrentShapeAsNumpy().tolist() - data = data_fb.ValueAsNumpy().reshape(shape) - - # Get the errors - errors_offset = event_hist.Errors() - if errors_offset: - errors_fb = _create_array_object_for_type(event_hist.ErrorsType()) - errors_fb.Init(errors_offset.Bytes, errors_offset.Pos) 
- errors = errors_fb.ValueAsNumpy().reshape(shape) - else: - errors = [] - - hist = { - "source": event_hist.Source().decode("utf-8") if event_hist.Source() else "", - "timestamp": event_hist.Timestamp(), - "current_shape": shape, - "dim_metadata": dims, - "data": data, - "errors": errors, - "last_metadata_timestamp": metadata_timestamp, - "info": event_hist.Info().decode("utf-8") if event_hist.Info() else "", - } - return hist - - -def _serialise_metadata(builder, length, edges, unit, label): - unit_offset = builder.CreateString(unit) - label_offset = builder.CreateString(label) - - bins_offset, bin_type = _serialise_array(builder, edges) - - DimensionMetaData.DimensionMetaDataStart(builder) - DimensionMetaData.DimensionMetaDataAddLength(builder, length) - DimensionMetaData.DimensionMetaDataAddBinBoundaries(builder, bins_offset) - DimensionMetaData.DimensionMetaDataAddBinBoundariesType(builder, bin_type) - DimensionMetaData.DimensionMetaDataAddLabel(builder, label_offset) - DimensionMetaData.DimensionMetaDataAddUnit(builder, unit_offset) - return DimensionMetaData.DimensionMetaDataEnd(builder) - - -def serialise_hs00(histogram): - """ - Serialise a histogram as an hs00 FlatBuffers message. - - If arrays are provided as numpy arrays with type np.uint32, np.uint64, np.float32 - or np.float64 then type is preserved in output buffer. - - :param histogram: A dictionary containing the histogram to serialise. 
- """ - source_offset = None - info_offset = None - - builder = flatbuffers.Builder(1024) - builder.ForceDefaults(True) - if "source" in histogram: - source_offset = builder.CreateString(histogram["source"]) - if "info" in histogram: - info_offset = builder.CreateString(histogram["info"]) - - # Build shape array - shape_offset = builder.CreateNumpyVector( - numpy.array(histogram["current_shape"]).astype(numpy.uint32) - ) - - # Build dimensions metadata - metadata = [] - for meta in histogram["dim_metadata"]: - unit = "" if "unit" not in meta else meta["unit"] - label = "" if "label" not in meta else meta["label"] - metadata.append( - _serialise_metadata( - builder, meta["length"], meta["bin_boundaries"], unit, label - ) - ) - - rank = len(histogram["current_shape"]) - EventHistogram.EventHistogramStartDimMetadataVector(builder, rank) - # FlatBuffers builds arrays backwards - for m in reversed(metadata): - builder.PrependUOffsetTRelative(m) - metadata_vector = builder.EndVector() - - # Build the data - data_offset, data_type = _serialise_array(builder, histogram["data"]) - - errors_offset = None - if "errors" in histogram: - errors_offset, error_type = _serialise_array(builder, histogram["errors"]) - - # Build the actual buffer - EventHistogram.EventHistogramStart(builder) - if info_offset: - EventHistogram.EventHistogramAddInfo(builder, info_offset) - EventHistogram.EventHistogramAddData(builder, data_offset) - EventHistogram.EventHistogramAddCurrentShape(builder, shape_offset) - EventHistogram.EventHistogramAddDimMetadata(builder, metadata_vector) - EventHistogram.EventHistogramAddTimestamp(builder, histogram["timestamp"]) - if source_offset: - EventHistogram.EventHistogramAddSource(builder, source_offset) - EventHistogram.EventHistogramAddDataType(builder, data_type) - if errors_offset: - EventHistogram.EventHistogramAddErrors(builder, errors_offset) - EventHistogram.EventHistogramAddErrorsType(builder, error_type) - if "last_metadata_timestamp" in histogram: - 
EventHistogram.EventHistogramAddLastMetadataTimestamp( - builder, histogram["last_metadata_timestamp"] - ) - hist_message = EventHistogram.EventHistogramEnd(builder) - - builder.Finish(hist_message, file_identifier=FILE_IDENTIFIER) - return bytes(builder.Output()) - - -def _serialise_array(builder, data): - flattened_data = numpy.asarray(data).flatten() - - # Carefully preserve explicitly supported types - if numpy.issubdtype(flattened_data.dtype, numpy.uint32): - return _serialise_uint32(builder, flattened_data) - if numpy.issubdtype(flattened_data.dtype, numpy.uint64): - return _serialise_uint64(builder, flattened_data) - if numpy.issubdtype(flattened_data.dtype, numpy.float32): - return _serialise_float(builder, flattened_data) - if numpy.issubdtype(flattened_data.dtype, numpy.float64): - return _serialise_double(builder, flattened_data) - - # Otherwise if it looks like an int then use uint64, or use double as last resort - if numpy.issubdtype(flattened_data.dtype, numpy.int64): - return _serialise_uint64(builder, flattened_data) - - return _serialise_double(builder, flattened_data) - - -def _serialise_float(builder, flattened_data): - data_type = Array.ArrayFloat - data_vector = builder.CreateNumpyVector(flattened_data) - ArrayFloat.ArrayFloatStart(builder) - ArrayFloat.ArrayFloatAddValue(builder, data_vector) - data_offset = ArrayFloat.ArrayFloatEnd(builder) - return data_offset, data_type - - -def _serialise_double(builder, flattened_data): - data_type = Array.ArrayDouble - data_vector = builder.CreateNumpyVector(flattened_data) - ArrayDouble.ArrayDoubleStart(builder) - ArrayDouble.ArrayDoubleAddValue(builder, data_vector) - data_offset = ArrayDouble.ArrayDoubleEnd(builder) - return data_offset, data_type - - -def _serialise_uint32(builder, flattened_data): - data_type = Array.ArrayUInt - data_vector = builder.CreateNumpyVector(flattened_data) - ArrayUInt.ArrayUIntStart(builder) - ArrayUInt.ArrayUIntAddValue(builder, data_vector) - data_offset = 
ArrayUInt.ArrayUIntEnd(builder) - return data_offset, data_type - - -def _serialise_uint64(builder, flattened_data): - data_type = Array.ArrayULong - data_vector = builder.CreateNumpyVector(flattened_data) - ArrayULong.ArrayULongStart(builder) - ArrayULong.ArrayULongAddValue(builder, data_vector) - data_offset = ArrayULong.ArrayULongEnd(builder) - return data_offset, data_type diff --git a/python/src/streaming_data_types/logdata_f142.py b/python/src/streaming_data_types/logdata_f142.py deleted file mode 100644 index ce35e88..0000000 --- a/python/src/streaming_data_types/logdata_f142.py +++ /dev/null @@ -1,577 +0,0 @@ -from collections import namedtuple -from typing import Any, Callable, Dict, Tuple, Union - -import flatbuffers -import numpy as np - -from streaming_data_types.fbschemas.logdata_f142 import LogData -from streaming_data_types.fbschemas.logdata_f142.ArrayByte import ( - ArrayByte, - ArrayByteAddValue, - ArrayByteEnd, - ArrayByteStart, -) -from streaming_data_types.fbschemas.logdata_f142.ArrayDouble import ( - ArrayDouble, - ArrayDoubleAddValue, - ArrayDoubleEnd, - ArrayDoubleStart, -) -from streaming_data_types.fbschemas.logdata_f142.ArrayFloat import ( - ArrayFloat, - ArrayFloatAddValue, - ArrayFloatEnd, - ArrayFloatStart, -) -from streaming_data_types.fbschemas.logdata_f142.ArrayInt import ( - ArrayInt, - ArrayIntAddValue, - ArrayIntEnd, - ArrayIntStart, -) -from streaming_data_types.fbschemas.logdata_f142.ArrayLong import ( - ArrayLong, - ArrayLongAddValue, - ArrayLongEnd, - ArrayLongStart, -) -from streaming_data_types.fbschemas.logdata_f142.ArrayShort import ( - ArrayShort, - ArrayShortAddValue, - ArrayShortEnd, - ArrayShortStart, -) -from streaming_data_types.fbschemas.logdata_f142.ArrayString import ( - ArrayString, - ArrayStringAddValue, - ArrayStringEnd, - ArrayStringStart, - ArrayStringStartValueVector, -) -from streaming_data_types.fbschemas.logdata_f142.ArrayUByte import ( - ArrayUByte, - ArrayUByteAddValue, - ArrayUByteEnd, - 
ArrayUByteStart, -) -from streaming_data_types.fbschemas.logdata_f142.ArrayUInt import ( - ArrayUInt, - ArrayUIntAddValue, - ArrayUIntEnd, - ArrayUIntStart, -) -from streaming_data_types.fbschemas.logdata_f142.ArrayULong import ( - ArrayULong, - ArrayULongAddValue, - ArrayULongEnd, - ArrayULongStart, -) -from streaming_data_types.fbschemas.logdata_f142.ArrayUShort import ( - ArrayUShort, - ArrayUShortAddValue, - ArrayUShortEnd, - ArrayUShortStart, -) -from streaming_data_types.fbschemas.logdata_f142.Byte import ( - Byte, - ByteAddValue, - ByteEnd, - ByteStart, -) -from streaming_data_types.fbschemas.logdata_f142.Double import ( - Double, - DoubleAddValue, - DoubleEnd, - DoubleStart, -) -from streaming_data_types.fbschemas.logdata_f142.Float import ( - Float, - FloatAddValue, - FloatEnd, - FloatStart, -) -from streaming_data_types.fbschemas.logdata_f142.Int import ( - Int, - IntAddValue, - IntEnd, - IntStart, -) -from streaming_data_types.fbschemas.logdata_f142.Long import ( - Long, - LongAddValue, - LongEnd, - LongStart, -) -from streaming_data_types.fbschemas.logdata_f142.Short import ( - Short, - ShortAddValue, - ShortEnd, - ShortStart, -) -from streaming_data_types.fbschemas.logdata_f142.String import ( - String, - StringAddValue, - StringEnd, - StringStart, -) -from streaming_data_types.fbschemas.logdata_f142.UByte import ( - UByte, - UByteAddValue, - UByteEnd, - UByteStart, -) -from streaming_data_types.fbschemas.logdata_f142.UInt import ( - UInt, - UIntAddValue, - UIntEnd, - UIntStart, -) -from streaming_data_types.fbschemas.logdata_f142.ULong import ( - ULong, - ULongAddValue, - ULongEnd, - ULongStart, -) -from streaming_data_types.fbschemas.logdata_f142.UShort import ( - UShort, - UShortAddValue, - UShortEnd, - UShortStart, -) -from streaming_data_types.fbschemas.logdata_f142.Value import Value -from streaming_data_types.utils import check_schema_identifier - -FILE_IDENTIFIER = b"f142" - - -def _complete_buffer( - builder, - timestamp_unix_ns: int, - 
alarm_status: Union[int, None] = None, - alarm_severity: Union[int, None] = None, -) -> bytearray: - LogData.LogDataAddTimestamp(builder, timestamp_unix_ns) - - if alarm_status is not None: - LogData.LogDataAddStatus(builder, alarm_status) - # Only include severity if status was provided, it would be meaningless by itself - if alarm_severity is not None: - LogData.LogDataAddSeverity(builder, alarm_severity) - - log_msg = LogData.LogDataEnd(builder) - - builder.Finish(log_msg, file_identifier=FILE_IDENTIFIER) - return bytes(builder.Output()) - - -def _setup_builder(source_name: str) -> Tuple[flatbuffers.Builder, int]: - builder = flatbuffers.Builder(1024) - builder.ForceDefaults(True) - source = builder.CreateString(source_name) - return builder, source - - -def _serialise_byte(builder: flatbuffers.Builder, data: np.ndarray, source: int): - ByteStart(builder) - ByteAddValue(builder, data.item()) - value_position = ByteEnd(builder) - LogData.LogDataStart(builder) - LogData.LogDataAddSourceName(builder, source) - LogData.LogDataAddValue(builder, value_position) - LogData.LogDataAddValueType(builder, Value.Byte) - - -def _serialise_bytearray(builder: flatbuffers.Builder, data: np.ndarray, source: int): - array_offset = builder.CreateNumpyVector(data) - ArrayByteStart(builder) - ArrayByteAddValue(builder, array_offset) - value_position = ArrayByteEnd(builder) - LogData.LogDataStart(builder) - LogData.LogDataAddSourceName(builder, source) - LogData.LogDataAddValue(builder, value_position) - LogData.LogDataAddValueType(builder, Value.ArrayByte) - - -def _serialise_ubyte(builder: flatbuffers.Builder, data: np.ndarray, source: int): - UByteStart(builder) - UByteAddValue(builder, data.item()) - value_position = UByteEnd(builder) - LogData.LogDataStart(builder) - LogData.LogDataAddSourceName(builder, source) - LogData.LogDataAddValue(builder, value_position) - LogData.LogDataAddValueType(builder, Value.UByte) - - -def _serialise_ubytearray(builder: flatbuffers.Builder, data: 
np.ndarray, source: int): - array_offset = builder.CreateNumpyVector(data) - ArrayUByteStart(builder) - ArrayUByteAddValue(builder, array_offset) - value_position = ArrayUByteEnd(builder) - LogData.LogDataStart(builder) - LogData.LogDataAddSourceName(builder, source) - LogData.LogDataAddValue(builder, value_position) - LogData.LogDataAddValueType(builder, Value.ArrayUByte) - - -def _serialise_short(builder: flatbuffers.Builder, data: np.ndarray, source: int): - ShortStart(builder) - ShortAddValue(builder, data.item()) - value_position = ShortEnd(builder) - LogData.LogDataStart(builder) - LogData.LogDataAddSourceName(builder, source) - LogData.LogDataAddValue(builder, value_position) - LogData.LogDataAddValueType(builder, Value.Short) - - -def _serialise_shortarray(builder: flatbuffers.Builder, data: np.ndarray, source: int): - array_offset = builder.CreateNumpyVector(data) - ArrayShortStart(builder) - ArrayShortAddValue(builder, array_offset) - value_position = ArrayShortEnd(builder) - LogData.LogDataStart(builder) - LogData.LogDataAddSourceName(builder, source) - LogData.LogDataAddValue(builder, value_position) - LogData.LogDataAddValueType(builder, Value.ArrayShort) - - -def _serialise_ushort(builder: flatbuffers.Builder, data: np.ndarray, source: int): - UShortStart(builder) - UShortAddValue(builder, data.item()) - value_position = UShortEnd(builder) - LogData.LogDataStart(builder) - LogData.LogDataAddSourceName(builder, source) - LogData.LogDataAddValue(builder, value_position) - LogData.LogDataAddValueType(builder, Value.UShort) - - -def _serialise_ushortarray(builder: flatbuffers.Builder, data: np.ndarray, source: int): - array_offset = builder.CreateNumpyVector(data) - ArrayUShortStart(builder) - ArrayUShortAddValue(builder, array_offset) - value_position = ArrayUShortEnd(builder) - LogData.LogDataStart(builder) - LogData.LogDataAddSourceName(builder, source) - LogData.LogDataAddValue(builder, value_position) - LogData.LogDataAddValueType(builder, 
Value.ArrayUShort) - - -def _serialise_int(builder: flatbuffers.Builder, data: np.ndarray, source: int): - IntStart(builder) - IntAddValue(builder, data.item()) - value_position = IntEnd(builder) - LogData.LogDataStart(builder) - LogData.LogDataAddSourceName(builder, source) - LogData.LogDataAddValue(builder, value_position) - LogData.LogDataAddValueType(builder, Value.Int) - - -def _serialise_intarray(builder: flatbuffers.Builder, data: np.ndarray, source: int): - array_offset = builder.CreateNumpyVector(data) - ArrayIntStart(builder) - ArrayIntAddValue(builder, array_offset) - value_position = ArrayIntEnd(builder) - LogData.LogDataStart(builder) - LogData.LogDataAddSourceName(builder, source) - LogData.LogDataAddValue(builder, value_position) - LogData.LogDataAddValueType(builder, Value.ArrayInt) - - -def _serialise_uint(builder: flatbuffers.Builder, data: np.ndarray, source: int): - UIntStart(builder) - UIntAddValue(builder, data.item()) - value_position = UIntEnd(builder) - LogData.LogDataStart(builder) - LogData.LogDataAddSourceName(builder, source) - LogData.LogDataAddValue(builder, value_position) - LogData.LogDataAddValueType(builder, Value.UInt) - - -def _serialise_uintarray(builder: flatbuffers.Builder, data: np.ndarray, source: int): - array_offset = builder.CreateNumpyVector(data) - ArrayUIntStart(builder) - ArrayUIntAddValue(builder, array_offset) - value_position = ArrayUIntEnd(builder) - LogData.LogDataStart(builder) - LogData.LogDataAddSourceName(builder, source) - LogData.LogDataAddValue(builder, value_position) - LogData.LogDataAddValueType(builder, Value.ArrayUInt) - - -def _serialise_long(builder: flatbuffers.Builder, data: np.ndarray, source: int): - LongStart(builder) - LongAddValue(builder, data.item()) - value_position = LongEnd(builder) - LogData.LogDataStart(builder) - LogData.LogDataAddSourceName(builder, source) - LogData.LogDataAddValue(builder, value_position) - LogData.LogDataAddValueType(builder, Value.Long) - - -def 
_serialise_longarray(builder: flatbuffers.Builder, data: np.ndarray, source: int): - array_offset = builder.CreateNumpyVector(data) - ArrayLongStart(builder) - ArrayLongAddValue(builder, array_offset) - value_position = ArrayLongEnd(builder) - LogData.LogDataStart(builder) - LogData.LogDataAddSourceName(builder, source) - LogData.LogDataAddValue(builder, value_position) - LogData.LogDataAddValueType(builder, Value.ArrayLong) - - -def _serialise_ulong(builder: flatbuffers.Builder, data: np.ndarray, source: int): - ULongStart(builder) - ULongAddValue(builder, data.item()) - value_position = ULongEnd(builder) - LogData.LogDataStart(builder) - LogData.LogDataAddSourceName(builder, source) - LogData.LogDataAddValue(builder, value_position) - LogData.LogDataAddValueType(builder, Value.ULong) - - -def _serialise_ulongarray(builder: flatbuffers.Builder, data: np.ndarray, source: int): - array_offset = builder.CreateNumpyVector(data) - ArrayULongStart(builder) - ArrayULongAddValue(builder, array_offset) - value_position = ArrayULongEnd(builder) - LogData.LogDataStart(builder) - LogData.LogDataAddSourceName(builder, source) - LogData.LogDataAddValue(builder, value_position) - LogData.LogDataAddValueType(builder, Value.ArrayULong) - - -def _serialise_float(builder: flatbuffers.Builder, data: np.ndarray, source: int): - FloatStart(builder) - FloatAddValue(builder, data.item()) - value_position = FloatEnd(builder) - LogData.LogDataStart(builder) - LogData.LogDataAddSourceName(builder, source) - LogData.LogDataAddValue(builder, value_position) - LogData.LogDataAddValueType(builder, Value.Float) - - -def _serialise_floatarray(builder: flatbuffers.Builder, data: np.ndarray, source: int): - array_offset = builder.CreateNumpyVector(data) - ArrayFloatStart(builder) - ArrayFloatAddValue(builder, array_offset) - value_position = ArrayFloatEnd(builder) - LogData.LogDataStart(builder) - LogData.LogDataAddSourceName(builder, source) - LogData.LogDataAddValue(builder, value_position) - 
LogData.LogDataAddValueType(builder, Value.ArrayFloat) - - -def _serialise_double(builder: flatbuffers.Builder, data: np.ndarray, source: int): - DoubleStart(builder) - DoubleAddValue(builder, data.item()) - value_position = DoubleEnd(builder) - LogData.LogDataStart(builder) - LogData.LogDataAddSourceName(builder, source) - LogData.LogDataAddValue(builder, value_position) - LogData.LogDataAddValueType(builder, Value.Double) - - -def _serialise_doublearray(builder: flatbuffers.Builder, data: np.ndarray, source: int): - array_offset = builder.CreateNumpyVector(data) - ArrayDoubleStart(builder) - ArrayDoubleAddValue(builder, array_offset) - value_position = ArrayDoubleEnd(builder) - LogData.LogDataStart(builder) - LogData.LogDataAddSourceName(builder, source) - LogData.LogDataAddValue(builder, value_position) - LogData.LogDataAddValueType(builder, Value.ArrayDouble) - - -def _serialise_string(builder: flatbuffers.Builder, data: np.ndarray, source: int): - string_offset = builder.CreateString(data.item()) - StringStart(builder) - StringAddValue(builder, string_offset) - value_position = StringEnd(builder) - LogData.LogDataStart(builder) - LogData.LogDataAddSourceName(builder, source) - LogData.LogDataAddValue(builder, value_position) - LogData.LogDataAddValueType(builder, Value.String) - - -def _serialise_stringarray(builder: flatbuffers.Builder, data: np.ndarray, source: int): - string_offsets = [ - builder.CreateString(string_item) for string_item in reversed(data) - ] - ArrayStringStartValueVector(builder, len(data)) - for string_offset in string_offsets: - builder.PrependSOffsetTRelative(string_offset) - string_array_offset = builder.EndVector() - ArrayStringStart(builder) - ArrayStringAddValue(builder, string_array_offset) - value_position = ArrayStringEnd(builder) - LogData.LogDataStart(builder) - LogData.LogDataAddSourceName(builder, source) - LogData.LogDataAddValue(builder, value_position) - LogData.LogDataAddValueType(builder, Value.ArrayString) - - 
-_map_scalar_type_to_serialiser = { - np.dtype("byte"): _serialise_byte, - np.dtype("ubyte"): _serialise_ubyte, - np.dtype("int8"): _serialise_byte, - np.dtype("int16"): _serialise_short, - np.dtype("int32"): _serialise_int, - np.dtype("int64"): _serialise_long, - np.dtype("uint8"): _serialise_ubyte, - np.dtype("uint16"): _serialise_ushort, - np.dtype("uint32"): _serialise_uint, - np.dtype("uint64"): _serialise_ulong, - np.dtype("float32"): _serialise_float, - np.dtype("float64"): _serialise_double, -} - -_map_array_type_to_serialiser = { - np.dtype("byte"): _serialise_bytearray, - np.dtype("ubyte"): _serialise_ubytearray, - np.dtype("int8"): _serialise_bytearray, - np.dtype("int16"): _serialise_shortarray, - np.dtype("int32"): _serialise_intarray, - np.dtype("int64"): _serialise_longarray, - np.dtype("uint8"): _serialise_ubytearray, - np.dtype("uint16"): _serialise_ushortarray, - np.dtype("uint32"): _serialise_uintarray, - np.dtype("uint64"): _serialise_ulongarray, - np.dtype("float32"): _serialise_floatarray, - np.dtype("float64"): _serialise_doublearray, -} - - -def serialise_f142( - value: Any, - source_name: str, - timestamp_unix_ns: int = 0, - alarm_status: Union[int, None] = None, - alarm_severity: Union[int, None] = None, -) -> bytes: - """ - Serialise value and corresponding timestamp as an f142 Flatbuffer message. - Should automagically use a sensible type for value in the message, but if - in doubt pass value in as a numpy ndarray of a carefully chosen dtype. - - :param value: only scalar value currently supported; if ndarray then ndim must be 0 - :param source_name: name of the data source - :param timestamp_unix_ns: timestamp corresponding to value, e.g. 
when value was measured, in nanoseconds - :param alarm_status: EPICS alarm status, best to provide using enum-like class defined in logdata_f142.AlarmStatus - :param alarm_severity: EPICS alarm severity, best to provide using enum-like class defined in logdata_f142.AlarmSeverity - """ - builder, source = _setup_builder(source_name) - value = np.array(value) - - if value.ndim == 0: - _serialise_value( - builder, source, value, _serialise_string, _map_scalar_type_to_serialiser - ) - elif value.ndim == 1: - _serialise_value( - builder, - source, - value, - _serialise_stringarray, - _map_array_type_to_serialiser, - ) - else: - raise NotImplementedError("f142 only supports scalars or 1D array values") - - return bytes( - _complete_buffer(builder, timestamp_unix_ns, alarm_status, alarm_severity) - ) - - -def _serialise_value( - builder: flatbuffers.Builder, - source: int, - value: Any, - string_serialiser: Callable, - serialisers_map: Dict, -): - # We can use a dictionary to map most numpy types to one of the types defined in the flatbuffer schema - # but we have to handle strings separately as there are many subtypes - if np.issubdtype(value.dtype, np.str_) or np.issubdtype( - value.dtype, np.bytes_ - ): - string_serialiser(builder, value, source) - else: - try: - serialisers_map[value.dtype](builder, value, source) - except KeyError: - # There are a few numpy types we don't try to handle, for example complex numbers - raise NotImplementedError( - f"Cannot serialise data of type {value.dtype}, must use one of " - f"{list(_map_scalar_type_to_serialiser.keys()) + [np.str_]}" - ) - - -_map_fb_enum_to_type = { - Value.Byte: Byte, - Value.UByte: UByte, - Value.Short: Short, - Value.UShort: UShort, - Value.Int: Int, - Value.UInt: UInt, - Value.Long: Long, - Value.ULong: ULong, - Value.Float: Float, - Value.Double: Double, - Value.String: String, - Value.ArrayByte: ArrayByte, - Value.ArrayUByte: ArrayUByte, - Value.ArrayShort: ArrayShort, - Value.ArrayUShort: ArrayUShort, - 
Value.ArrayInt: ArrayInt, - Value.ArrayUInt: ArrayUInt, - Value.ArrayLong: ArrayLong, - Value.ArrayULong: ArrayULong, - Value.ArrayFloat: ArrayFloat, - Value.ArrayDouble: ArrayDouble, - Value.ArrayString: ArrayString, -} - - -LogDataInfo = namedtuple( - "LogDataInfo", - ("value", "source_name", "timestamp_unix_ns", "alarm_status", "alarm_severity"), -) - - -def _decode_if_scalar_string(value: np.ndarray) -> Union[str, np.ndarray]: - if value.ndim == 0 and ( - np.issubdtype(value.dtype, np.str_) - or np.issubdtype(value.dtype, np.bytes_) - ): - return value.item().decode() - return value - - -def deserialise_f142(buffer: Union[bytearray, bytes]) -> LogDataInfo: - check_schema_identifier(buffer, FILE_IDENTIFIER) - - log_data = LogData.LogData.GetRootAsLogData(buffer, 0) - source_name = log_data.SourceName() if log_data.SourceName() else b"" - - value_offset = log_data.Value() - value_fb = _map_fb_enum_to_type[log_data.ValueType()]() - value_fb.Init(value_offset.Bytes, value_offset.Pos) - try: - value = value_fb.ValueAsNumpy() - except AttributeError: - try: - # Must be a scalar value then, so we'll get it like this - value = np.array(value_fb.Value()) - except TypeError: - # In that case it is an array of strings, which for some reason doesn't get a generated ValueAsNumpy method - # So we'll have to extract each element from the buffer manually and construct our own numpy array - value = np.array( - [str(value_fb.Value(n), "utf-8") for n in range(value_fb.ValueLength())] - ) - - value = _decode_if_scalar_string(value) - - timestamp = log_data.Timestamp() - - return LogDataInfo( - value, source_name.decode(), timestamp, log_data.Status(), log_data.Severity() - ) diff --git a/python/src/streaming_data_types/nicos_cache_ns10.py b/python/src/streaming_data_types/nicos_cache_ns10.py deleted file mode 100644 index c40ef19..0000000 --- a/python/src/streaming_data_types/nicos_cache_ns10.py +++ /dev/null @@ -1,45 +0,0 @@ -from collections import namedtuple - -import 
flatbuffers - -from streaming_data_types.fbschemas.nicos_cache_ns10 import CacheEntry -from streaming_data_types.utils import check_schema_identifier - -FILE_IDENTIFIER = b"ns10" - - -def serialise_ns10( - key: str, value: str, time_stamp: float = 0, ttl: float = 0, expired: bool = False -): - builder = flatbuffers.Builder(128) - builder.ForceDefaults(True) - - value_offset = builder.CreateString(value) - key_offset = builder.CreateString(key) - - CacheEntry.CacheEntryStart(builder) - CacheEntry.CacheEntryAddValue(builder, value_offset) - CacheEntry.CacheEntryAddExpired(builder, expired) - CacheEntry.CacheEntryAddTtl(builder, ttl) - CacheEntry.CacheEntryAddTime(builder, time_stamp) - CacheEntry.CacheEntryAddKey(builder, key_offset) - cache_entry_message = CacheEntry.CacheEntryEnd(builder) - - builder.Finish(cache_entry_message, file_identifier=FILE_IDENTIFIER) - return bytes(builder.Output()) - - -def deserialise_ns10(buffer): - check_schema_identifier(buffer, FILE_IDENTIFIER) - - entry = CacheEntry.CacheEntry.GetRootAsCacheEntry(buffer, 0) - - key = entry.Key() if entry.Key() else b"" - time_stamp = entry.Time() - ttl = entry.Ttl() if entry.Ttl() else 0 - expired = entry.Expired() if entry.Expired() else False - value = entry.Value() if entry.Value() else b"" - - Entry = namedtuple("Entry", ("key", "time_stamp", "ttl", "expired", "value")) - - return Entry(key.decode().strip(), time_stamp, ttl, expired, value.decode()) diff --git a/python/src/streaming_data_types/readout_data_ar51.py b/python/src/streaming_data_types/readout_data_ar51.py deleted file mode 100644 index bdb09f0..0000000 --- a/python/src/streaming_data_types/readout_data_ar51.py +++ /dev/null @@ -1,66 +0,0 @@ -from collections import namedtuple - -import flatbuffers -import numpy as np - -import streaming_data_types.fbschemas.readout_data_ar51.RawReadoutMessage as RawReadoutMessage -from streaming_data_types.utils import check_schema_identifier - -FILE_IDENTIFIER = b"ar51" - - -RawReadoutData = 
namedtuple( - "RawReadoutData", - ( - "source_name", - "message_id", - "raw_data", - ), -) - - -def deserialise_ar51(buffer): - """ - Deserialize FlatBuffer ar51. - - :param buffer: The FlatBuffers buffer. - :return: The deserialized data. - """ - check_schema_identifier(buffer, FILE_IDENTIFIER) - - event = RawReadoutMessage.RawReadoutMessage.GetRootAs(buffer, 0) - - return RawReadoutData( - event.SourceName().decode("utf-8"), - event.MessageId(), - event.RawDataAsNumpy(), - ) - - -def serialise_ar51( - source_name, - message_id, - raw_data, -): - """ - Serialize data as an ar51 FlatBuffers message. - - :param source_name: - :param message_id: - :param raw_data: - :return: - """ - builder = flatbuffers.Builder(1024) - builder.ForceDefaults(True) - - source = builder.CreateString(source_name) - raw_data_data = builder.CreateNumpyVector(np.asarray(raw_data).astype(np.ubyte)) - RawReadoutMessage.RawReadoutMessageStart(builder) - RawReadoutMessage.RawReadoutMessageAddRawData(builder, raw_data_data) - RawReadoutMessage.RawReadoutMessageAddMessageId(builder, message_id) - RawReadoutMessage.RawReadoutMessageAddSourceName(builder, source) - - data = RawReadoutMessage.RawReadoutMessageEnd(builder) - builder.Finish(data, file_identifier=FILE_IDENTIFIER) - - return bytes(builder.Output()) diff --git a/python/src/streaming_data_types/sample_environment_senv.py b/python/src/streaming_data_types/sample_environment_senv.py deleted file mode 100644 index d39da76..0000000 --- a/python/src/streaming_data_types/sample_environment_senv.py +++ /dev/null @@ -1,153 +0,0 @@ -from datetime import datetime, timezone -from typing import List, NamedTuple, Optional, Union - -import flatbuffers -import numpy as np - -from streaming_data_types.fbschemas.sample_environment_senv.Location import Location -from streaming_data_types.fbschemas.sample_environment_senv.SampleEnvironmentData import ( - SampleEnvironmentData, - SampleEnvironmentDataAddChannel, - SampleEnvironmentDataAddMessageCounter, - 
SampleEnvironmentDataAddName, - SampleEnvironmentDataAddPacketTimestamp, - SampleEnvironmentDataAddTimeDelta, - SampleEnvironmentDataAddTimestampLocation, - SampleEnvironmentDataAddTimestamps, - SampleEnvironmentDataAddValues, - SampleEnvironmentDataAddValuesType, - SampleEnvironmentDataEnd, - SampleEnvironmentDataStart, -) -from streaming_data_types.fbschemas.sample_environment_senv.ValueUnion import ValueUnion -from streaming_data_types.utils import check_schema_identifier - -FILE_IDENTIFIER = b"senv" - - -def serialise_senv( - name: str, - channel: int, - timestamp: datetime, - sample_ts_delta: int, - message_counter: int, - values: Union[np.ndarray, List], - ts_location: Location = Location.Middle, - value_timestamps: Union[np.ndarray, List, None] = None, -) -> bytes: - builder = flatbuffers.Builder(1024) - - if value_timestamps is not None: - used_timestamps = np.atleast_1d(np.asarray(value_timestamps)).astype(np.uint64) - timestamps_offset = builder.CreateNumpyVector(used_timestamps) - - numpy_type_map = { - np.dtype("int8"): ValueUnion.Int8Array, - np.dtype("uint8"): ValueUnion.UInt8Array, - np.dtype("int16"): ValueUnion.Int16Array, - np.dtype("uint16"): ValueUnion.UInt16Array, - np.dtype("int32"): ValueUnion.Int32Array, - np.dtype("uint32"): ValueUnion.UInt32Array, - np.dtype("int64"): ValueUnion.Int64Array, - np.dtype("uint64"): ValueUnion.UInt64Array, - } - - temp_values = np.atleast_1d(np.asarray(values)) - - value_array_offset = builder.CreateNumpyVector(temp_values) - - # Some flatbuffer fu in order to avoid >200 lines of code - builder.StartObject(1) - builder.PrependUOffsetTRelativeSlot( - 0, flatbuffers.number_types.UOffsetTFlags.py_type(value_array_offset), 0 - ) - value_offset = builder.EndObject() - - name_offset = builder.CreateString(name) - - SampleEnvironmentDataStart(builder) - SampleEnvironmentDataAddName(builder, name_offset) - SampleEnvironmentDataAddTimeDelta(builder, sample_ts_delta) - SampleEnvironmentDataAddTimestampLocation(builder, 
ts_location) - SampleEnvironmentDataAddMessageCounter(builder, message_counter) - SampleEnvironmentDataAddChannel(builder, channel) - SampleEnvironmentDataAddPacketTimestamp(builder, int(timestamp.timestamp() * 1e9)) - SampleEnvironmentDataAddValues(builder, value_offset) - SampleEnvironmentDataAddValuesType(builder, numpy_type_map[temp_values.dtype]) - if value_timestamps is not None: - SampleEnvironmentDataAddTimestamps(builder, timestamps_offset) - - SE_Message = SampleEnvironmentDataEnd(builder) - - builder.Finish(SE_Message, file_identifier=FILE_IDENTIFIER) - return bytes(builder.Output()) - - -Response = NamedTuple( - "SampleEnvironmentData", - ( - ("name", str), - ("channel", int), - ("timestamp", datetime), - ("sample_ts_delta", int), - ("ts_location", Location), - ("message_counter", int), - ("values", np.ndarray), - ("value_ts", Optional[np.ndarray]), - ), -) - - -def deserialise_senv(buffer: Union[bytearray, bytes]) -> Response: - check_schema_identifier(buffer, FILE_IDENTIFIER) - - SE_data = SampleEnvironmentData.GetRootAsSampleEnvironmentData(buffer, 0) - - max_time = datetime( - year=3001, month=1, day=1, hour=0, minute=0, second=0 - ).timestamp() - used_timestamp = SE_data.PacketTimestamp() / 1e9 - if used_timestamp > max_time: - used_timestamp = max_time - - value_timestamps = None - if not SE_data.TimestampsIsNone(): - value_timestamps = SE_data.TimestampsAsNumpy() - - from flatbuffers.number_types import ( - Int8Flags, - Int16Flags, - Int32Flags, - Int64Flags, - Uint8Flags, - Uint16Flags, - Uint32Flags, - Uint64Flags, - ) - - flag_map = { - ValueUnion.Int8Array: Int8Flags, - ValueUnion.UInt8Array: Uint8Flags, - ValueUnion.Int16Array: Int16Flags, - ValueUnion.UInt16Array: Uint16Flags, - ValueUnion.Int32Array: Int32Flags, - ValueUnion.UInt32Array: Uint32Flags, - ValueUnion.Int64Array: Int64Flags, - ValueUnion.UInt64Array: Uint64Flags, - } - - # Some flatbuffers fu in order to avoid >200 lines of code - value_offset = SE_data.Values() - value_type = 
SE_data.ValuesType() - values = value_offset.GetVectorAsNumpy(flag_map[value_type], 4) - - return Response( - name=SE_data.Name().decode(), - channel=SE_data.Channel(), - timestamp=datetime.fromtimestamp(used_timestamp, tz=timezone.utc), - sample_ts_delta=SE_data.TimeDelta(), - ts_location=SE_data.TimestampLocation(), - message_counter=SE_data.MessageCounter(), - values=values, - value_ts=value_timestamps, - ) diff --git a/python/src/streaming_data_types/timestamps_tdct.py b/python/src/streaming_data_types/timestamps_tdct.py deleted file mode 100644 index 7820b85..0000000 --- a/python/src/streaming_data_types/timestamps_tdct.py +++ /dev/null @@ -1,58 +0,0 @@ -from typing import List, NamedTuple, Optional, Union - -import flatbuffers -import numpy as np - -from streaming_data_types.fbschemas.timestamps_tdct.timestamp import ( - timestamp, - timestampAddName, - timestampAddSequenceCounter, - timestampAddTimestamps, - timestampEnd, - timestampStart, -) -from streaming_data_types.utils import check_schema_identifier - -FILE_IDENTIFIER = b"tdct" - - -def serialise_tdct( - name: str, - timestamps: Union[np.ndarray, List], - sequence_counter: Optional[int] = None, -) -> bytes: - builder = flatbuffers.Builder(1024) - builder.ForceDefaults(True) - - timestamps = np.atleast_1d(np.asarray(timestamps)).astype(np.uint64) - - name_offset = builder.CreateString(name) - - array_offset = builder.CreateNumpyVector(timestamps) - - timestampStart(builder) - timestampAddName(builder, name_offset) - timestampAddTimestamps(builder, array_offset) - if sequence_counter is not None: - timestampAddSequenceCounter(builder, sequence_counter) - timestamps_message = timestampEnd(builder) - - builder.Finish(timestamps_message, file_identifier=FILE_IDENTIFIER) - return bytes(builder.Output()) - - -Timestamps = NamedTuple( - "Timestamps", - (("name", str), ("timestamps", np.ndarray), ("sequence_counter", int)), -) - - -def deserialise_tdct(buffer: Union[bytearray, bytes]) -> Timestamps: - 
check_schema_identifier(buffer, FILE_IDENTIFIER) - - timestamps = timestamp.GetRootAstimestamp(buffer, 0) - name = timestamps.Name() if timestamps.Name() else b"" - - timestamps_array = timestamps.TimestampsAsNumpy() - - return Timestamps(name.decode(), timestamps_array, timestamps.SequenceCounter()) diff --git a/python/tests/test_ADAr.py b/python/tests/test_ADAr.py deleted file mode 100644 index 7ffc7ca..0000000 --- a/python/tests/test_ADAr.py +++ /dev/null @@ -1,113 +0,0 @@ -from datetime import datetime, timezone - -import numpy as np -import pytest - -from streaming_data_types import DESERIALISERS, SERIALISERS -from streaming_data_types.area_detector_ADAr import ( - Attribute, - deserialise_ADAr, - serialise_ADAr, -) -from streaming_data_types.exceptions import WrongSchemaException - - -class TestSerialisationNDAr: - def test_serialises_and_deserialises_ADAr_int_array(self): - """ - Round-trip to check what we serialise is what we get back. - """ - original_entry = { - "source_name": "some source name", - "unique_id": 754, - "data": np.array([[1, 2, 3], [3, 4, 5]], dtype=np.uint64), - "timestamp": datetime.now(tz=timezone.utc), - "attributes": [ - Attribute("name1", "desc1", "src1", "value"), - Attribute("name2", "desc2", "src2", 11), - Attribute("name3", "desc3", "src3", 3.14), - Attribute("name4", "desc4", "src4", np.linspace(0, 10)), - ], - } - - buf = serialise_ADAr(**original_entry) - entry = deserialise_ADAr(buf) - - assert entry.unique_id == original_entry["unique_id"] - assert entry.source_name == original_entry["source_name"] - assert entry.timestamp == original_entry["timestamp"] - assert np.array_equal(entry.dimensions, original_entry["data"].shape) - assert np.array_equal(entry.data.shape, entry.dimensions) # Sanity check - assert np.array_equal(entry.data, original_entry["data"]) - assert entry.data.dtype == original_entry["data"].dtype - assert len(entry.attributes) == len(original_entry["attributes"]) - for i in range(len(entry.attributes)): - 
assert entry.attributes[i] == original_entry["attributes"][i] - - def test_serialises_and_deserialises_ADAr_float_array(self): - """ - Round-trip to check what we serialise is what we get back. - """ - original_entry = { - "source_name": "some other source name", - "unique_id": 789679, - "data": np.array([[1.1, 2.2, 3.3], [4.4, 5.5, 6.6]], dtype=np.float32), - "timestamp": datetime( - year=1992, - month=8, - day=11, - hour=3, - minute=34, - second=57, - tzinfo=timezone.utc, - ), - } - - buf = serialise_ADAr(**original_entry) - entry = deserialise_ADAr(buf) - - assert entry.unique_id == original_entry["unique_id"] - assert entry.source_name == original_entry["source_name"] - assert entry.timestamp == original_entry["timestamp"] - assert np.array_equal(entry.data, original_entry["data"]) - assert entry.data.dtype == original_entry["data"].dtype - - def test_serialises_and_deserialises_ADAr_string(self): - """ - Round-trip to check what we serialise is what we get back. - """ - original_entry = { - "source_name": "some source name", - "unique_id": 754, - "data": "hi, this is a string", - "timestamp": datetime.now(tz=timezone.utc), - } - - buf = serialise_ADAr(**original_entry) - entry = deserialise_ADAr(buf) - - assert entry.unique_id == original_entry["unique_id"] - assert entry.source_name == original_entry["source_name"] - assert entry.timestamp == original_entry["timestamp"] - assert entry.data == original_entry["data"] - - def test_if_buffer_has_wrong_id_then_throws(self): - original_entry = { - "source_name": "some source name", - "unique_id": 754, - "data": np.array([[1, 2, 3], [3, 4, 5]], dtype=np.uint64), - "timestamp": datetime.now(), - } - - buf = serialise_ADAr(**original_entry) - - # Manually hack the id - buf = bytearray(buf) - buf[4:8] = b"1234" - - with pytest.raises(WrongSchemaException): - deserialise_ADAr(buf) - - def test_schema_type_is_in_global_serialisers_list(self): - assert "ADAr" in SERIALISERS - assert "ADAr" in DESERIALISERS diff --git 
a/python/tests/test_NDAr.py b/python/tests/test_NDAr.py deleted file mode 100644 index 369997c..0000000 --- a/python/tests/test_NDAr.py +++ /dev/null @@ -1,64 +0,0 @@ -import numpy as np -import pytest - -from streaming_data_types import DESERIALISERS, SERIALISERS -from streaming_data_types.area_detector_NDAr import deserialise_ndar, serialise_ndar -from streaming_data_types.exceptions import WrongSchemaException -from streaming_data_types.fbschemas.NDAr_NDArray_schema.DType import DType - - -class TestSerialisationNDAr: - def test_serialises_and_deserialises_NDAr_message_correctly_float64_1_pixel(self): - """ - Round-trip to check what we serialise is what we get back. - """ - original_entry = { - "id": 754, - "dims": [1, 1], - "data_type": DType.Float64, - "data": [54, 78, 100, 156, 43, 1, 23, 0], - } - - buf = serialise_ndar(**original_entry) - entry = deserialise_ndar(buf) - - assert entry.id == original_entry["id"] - assert np.array_equal(entry.data, [[3.1991794446845865e-308]]) - - def test_serialises_and_deserialises_NDAr_message_correctly_int32_3_pixel(self): - """ - Round-trip to check what we serialise is what we get back. 
- """ - original_entry = { - "id": 754, - "dims": [1, 3], - "data_type": DType.Int32, - "data": [54, 78, 100, 200, 32, 19, 2, 156, 43, 1, 23, 0], - } - - buf = serialise_ndar(**original_entry) - entry = deserialise_ndar(buf) - - assert entry.id == original_entry["id"] - assert np.array_equal(entry.data, [[-932950474, -1677585632, 1507627]]) - - def test_if_buffer_has_wrong_id_then_throws(self): - original_entry = { - "id": 754, - "dims": [10, 10], - "data_type": 0, - "data": [0, 0, 100, 200, 250], - } - - buf = serialise_ndar(**original_entry) - - # Manually hack the id - buf = bytearray(buf) - buf[4:8] = b"1234" - - with pytest.raises(WrongSchemaException): - deserialise_ndar(buf) - - def test_schema_type_is_in_global_serialisers_list(self): - assert "NDAr" in SERIALISERS - assert "NDAr" in DESERIALISERS diff --git a/python/tests/test_an44.py b/python/tests/test_an44.py deleted file mode 100644 index cdd8cf8..0000000 --- a/python/tests/test_an44.py +++ /dev/null @@ -1,118 +0,0 @@ -import numpy as np -import pytest - -from streaming_data_types import DESERIALISERS, SERIALISERS -from streaming_data_types.eventdata_an44 import deserialise_an44, serialise_an44 -from streaming_data_types.exceptions import WrongSchemaException - - -class TestSerialisationEv44: - def test_serialises_and_deserialises_an44_message_correctly(self): - """ - Round-trip to check what we serialise is what we get back. 
- """ - original_entry = { - "source_name": "some_source", - "message_id": 123456, - "reference_time": [ - 1618573589123781958, - 1618573590133830371, - 1618573593677164112, - 1618573594185190549, - 1618573596217316066, - 1618573596725363109, - 1618573601295720976, - 1618573601799761445, - 1618573607354064836, - ], - "reference_time_index": [2, 4, 5, 7], - "time_of_flight": [100, 200, 300, 400, 500, 600, 700, 800, 900], - "pixel_id": [10, 20, 30, 40, 50, 60, 70, 80, 90], - "weight": [51, 52, 53, 54, 55, 56, 57, 58, 59], - } - - buf = serialise_an44(**original_entry) - entry = deserialise_an44(buf) - - assert entry.source_name == original_entry["source_name"] - assert entry.message_id == original_entry["message_id"] - assert np.array_equal(entry.reference_time, original_entry["reference_time"]) - assert np.array_equal( - entry.reference_time_index, original_entry["reference_time_index"] - ) - assert np.array_equal(entry.time_of_flight, original_entry["time_of_flight"]) - assert np.array_equal(entry.pixel_id, original_entry["pixel_id"]) - assert np.array_equal(entry.weight, original_entry["weight"]) - - def test_serialises_and_deserialises_an44_message_correctly_for_numpy_arrays(self): - """ - Round-trip to check what we serialise is what we get back. 
- """ - original_entry = { - "source_name": "some_source", - "message_id": 123456, - "reference_time": np.array( - [ - 1618573589123781958, - 1618573590133830371, - 1618573593677164112, - 1618573594185190549, - 1618573596217316066, - 1618573596725363109, - 1618573601295720976, - 1618573601799761445, - 1618573607354064836, - ] - ), - "reference_time_index": np.array([2, 4, 5, 7]), - "time_of_flight": np.array([100, 200, 300, 400, 500, 600, 700, 800, 900]), - "pixel_id": np.array([10, 20, 30, 40, 50, 60, 70, 80, 90]), - "weight": np.array([51, 52, 53, 54, 55, 56, 57, 58, 59]), - } - - buf = serialise_an44(**original_entry) - entry = deserialise_an44(buf) - - assert entry.source_name == original_entry["source_name"] - assert entry.message_id == original_entry["message_id"] - assert np.array_equal(entry.reference_time, original_entry["reference_time"]) - assert np.array_equal( - entry.reference_time_index, original_entry["reference_time_index"] - ) - assert np.array_equal(entry.time_of_flight, original_entry["time_of_flight"]) - assert np.array_equal(entry.pixel_id, original_entry["pixel_id"]) - - def test_if_buffer_has_wrong_id_then_throws(self): - original_entry = { - "source_name": "some_source", - "message_id": 123456, - "reference_time": np.array( - [ - 1618573589123781958, - 1618573590133830371, - 1618573593677164112, - 1618573594185190549, - 1618573596217316066, - 1618573596725363109, - 1618573601295720976, - 1618573601799761445, - 1618573607354064836, - ] - ), - "reference_time_index": np.array([2, 4, 5, 7]), - "time_of_flight": np.array([100, 200, 300, 400, 500, 600, 700, 800, 900]), - "pixel_id": np.array([10, 20, 30, 40, 50, 60, 70, 80, 90]), - "weight": np.array([51, 52, 53, 54, 55, 56, 57, 58, 59]), - } - buf = serialise_an44(**original_entry) - - # Manually introduce error in id. 
- buf = bytearray(buf) - buf[4:8] = b"1234" - - with pytest.raises(WrongSchemaException): - deserialise_an44(buf) - - def test_schema_type_is_in_global_serialisers_list(self): - assert "an44" in SERIALISERS - assert "an44" in DESERIALISERS diff --git a/python/tests/test_ar51.py b/python/tests/test_ar51.py deleted file mode 100644 index 532fbcc..0000000 --- a/python/tests/test_ar51.py +++ /dev/null @@ -1,80 +0,0 @@ -import numpy as np -import pytest - -from streaming_data_types import DESERIALISERS, SERIALISERS -from streaming_data_types.exceptions import WrongSchemaException -from streaming_data_types.readout_data_ar51 import deserialise_ar51, serialise_ar51 - - -class TestSerialisationAR51: - def test_serialises_and_deserialises_ar51_message_correctly(self): - """ - Round-trip to check what we serialise is what we get back. - """ - original_entry = { - "source_name": "some_source", - "message_id": 123456, - "raw_data": bytearray( - [ - 0, - 1, - 2, - 3, - 4, - 5, - 6, - 7, - 8, - ] - ), - } - - buf = serialise_ar51(**original_entry) - entry = deserialise_ar51(buf) - - assert entry.source_name == original_entry["source_name"] - assert entry.message_id == original_entry["message_id"] - assert np.array_equal(entry.raw_data, original_entry["raw_data"]) - - def test_serialises_and_deserialises_ar51_message_correctly_for_numpy_arrays(self): - """ - Round-trip to check what we serialise is what we get back. 
- """ - original_entry = { - "source_name": "some_source", - "message_id": 123456, - "raw_data": np.array([100, 200, 30, 40, 50, 60, 70, 80, 90]), - } - - buf = serialise_ar51(**original_entry) - entry = deserialise_ar51(buf) - - assert entry.source_name == original_entry["source_name"] - assert entry.message_id == original_entry["message_id"] - assert np.array_equal(entry.raw_data, original_entry["raw_data"]) - - def test_if_buffer_has_wrong_id_then_throws(self): - original_entry = { - "source_name": "some_source", - "message_id": 123456, - "raw_data": np.array([100, 200, 300, 400, 500, 600, 700, 800, 900]), - } - - buf = serialise_ar51(**original_entry) - - # Manually introduce error in id. - buf = bytearray(buf) - buf[4:8] = b"1234" - - with pytest.raises(WrongSchemaException): - deserialise_ar51(buf) - - def test_schema_type_is_in_global_serialisers_list(self): - assert "ar51" in SERIALISERS - assert "ar51" in DESERIALISERS - - -if __name__ == "__main__": - import unittest - - unittest.main() diff --git a/python/tests/test_ep00.py b/python/tests/test_ep00.py deleted file mode 100644 index 1c716db..0000000 --- a/python/tests/test_ep00.py +++ /dev/null @@ -1,41 +0,0 @@ -import pytest - -from streaming_data_types import DESERIALISERS, SERIALISERS -from streaming_data_types.epics_connection_info_ep00 import ( - deserialise_ep00, - serialise_ep00, -) -from streaming_data_types.exceptions import WrongSchemaException -from streaming_data_types.fbschemas.epics_connection_info_ep00 import EventType - - -class TestSerialisationEp00: - original_entry = { - "timestamp_ns": 1593620746000000000, - "event_type": EventType.EventType.DISCONNECTED, - "source_name": "test_source", - "service_id": "test_service", - } - - def test_serialises_and_deserialises_ep00_message_correctly(self): - buf = serialise_ep00(**self.original_entry) - deserialised_tuple = deserialise_ep00(buf) - - assert deserialised_tuple.timestamp == self.original_entry["timestamp_ns"] - assert 
deserialised_tuple.type == self.original_entry["event_type"] - assert deserialised_tuple.source_name == self.original_entry["source_name"] - assert deserialised_tuple.service_id == self.original_entry["service_id"] - - def test_if_buffer_has_wrong_id_then_throws(self): - buf = serialise_ep00(**self.original_entry) - - # Manually hack the id - buf = bytearray(buf) - buf[4:8] = b"1234" - - with pytest.raises(WrongSchemaException): - deserialise_ep00(buf) - - def test_schema_type_is_in_global_serialisers_list(self): - assert "ep00" in SERIALISERS - assert "ep00" in DESERIALISERS diff --git a/python/tests/test_ev42.py b/python/tests/test_ev42.py deleted file mode 100644 index 410f373..0000000 --- a/python/tests/test_ev42.py +++ /dev/null @@ -1,118 +0,0 @@ -import pathlib -import numpy as np -import pytest - -from streaming_data_types import DESERIALISERS, SERIALISERS -from streaming_data_types.eventdata_ev42 import deserialise_ev42, serialise_ev42 -from streaming_data_types.exceptions import WrongSchemaException - - -class TestSerialisationEv42: - def test_serialises_and_deserialises_ev42_message_correctly(self): - """ - Round-trip to check what we serialise is what we get back. - """ - original_entry = { - "source_name": "some_source", - "message_id": 123456, - "pulse_time": 567890, - "time_of_flight": [1, 2, 3, 4, 5, 6, 7, 8, 9], - "detector_id": [10, 20, 30, 40, 50, 60, 70, 80, 90], - } - - buf = serialise_ev42(**original_entry) - entry = deserialise_ev42(buf) - - assert entry.source_name == original_entry["source_name"] - assert entry.message_id == original_entry["message_id"] - assert entry.pulse_time == original_entry["pulse_time"] - assert np.array_equal(entry.time_of_flight, original_entry["time_of_flight"]) - assert np.array_equal(entry.detector_id, original_entry["detector_id"]) - - def test_serialises_and_deserialises_ev42_message_correctly_for_numpy_arrays(self): - """ - Round-trip to check what we serialise is what we get back. 
- """ - original_entry = { - "source_name": "some_source", - "message_id": 123456, - "pulse_time": 567890, - "time_of_flight": np.array([1, 2, 3, 4, 5, 6, 7, 8, 9]), - "detector_id": np.array([10, 20, 30, 40, 50, 60, 70, 80, 90]), - } - - buf = serialise_ev42(**original_entry) - entry = deserialise_ev42(buf) - - assert entry.source_name == original_entry["source_name"] - assert entry.message_id == original_entry["message_id"] - assert entry.pulse_time == original_entry["pulse_time"] - assert np.array_equal(entry.time_of_flight, original_entry["time_of_flight"]) - assert np.array_equal(entry.detector_id, original_entry["detector_id"]) - - def test_serialises_and_deserialises_ev42_message_correctly_with_isis_info(self): - """ - Round-trip to check what we serialise is what we get back. - """ - isis_data = {"period_number": 5, "run_state": 1, "proton_charge": 1.234} - - original_entry = { - "source_name": "some_source", - "message_id": 123456, - "pulse_time": 567890, - "time_of_flight": [1, 2, 3, 4, 5, 6, 7, 8, 9], - "detector_id": [10, 20, 30, 40, 50, 60, 70, 80, 90], - "isis_specific": isis_data, - } - - buf = serialise_ev42(**original_entry) - entry = deserialise_ev42(buf) - - assert entry.source_name == original_entry["source_name"] - assert entry.message_id == original_entry["message_id"] - assert entry.pulse_time == original_entry["pulse_time"] - assert np.array_equal(entry.time_of_flight, original_entry["time_of_flight"]) - assert np.array_equal(entry.detector_id, original_entry["detector_id"]) - assert entry.specific_data["period_number"] == isis_data["period_number"] - assert entry.specific_data["run_state"] == isis_data["run_state"] - assert entry.specific_data["proton_charge"] == pytest.approx( - isis_data["proton_charge"] - ) - - def test_if_buffer_has_wrong_id_then_throws(self): - original_entry = { - "source_name": "some_source", - "message_id": 123456, - "pulse_time": 567890, - "time_of_flight": [1, 2, 3, 4, 5, 6, 7, 8, 9], - "detector_id": [10, 20, 30, 
40, 50, 60, 70, 80, 90], - } - buf = serialise_ev42(**original_entry) - - # Manually hack the id - buf = bytearray(buf) - buf[4:8] = b"1234" - - with pytest.raises(WrongSchemaException): - deserialise_ev42(buf) - - def test_schema_type_is_in_global_serialisers_list(self): - assert "ev42" in SERIALISERS - assert "ev42" in DESERIALISERS - - def test_converts_real_buffer(self): - file_path = pathlib.Path(__file__).parent / "example_buffers" / "ev42.bin" - with open(file_path, "rb") as file: - buffer = file.read() - - result = deserialise_ev42(buffer) - - assert result.source_name == "grace" - assert result.message_id == 1669290683232688000 - assert result.pulse_time == 1669290683232688000 - assert len(result.time_of_flight) == 1629 - assert result.time_of_flight[0] == 160436 - assert result.time_of_flight[~0] == 147296 - assert len(result.detector_id) == 1629 - assert result.detector_id[0] == 160436 - assert result.detector_id[~0] == 147296 diff --git a/python/tests/test_ev43.py b/python/tests/test_ev43.py deleted file mode 100644 index 2a46332..0000000 --- a/python/tests/test_ev43.py +++ /dev/null @@ -1,76 +0,0 @@ -import numpy as np -import pytest - -from streaming_data_types import DESERIALISERS, SERIALISERS -from streaming_data_types.eventdata_ev43 import deserialise_ev43, serialise_ev43 -from streaming_data_types.exceptions import WrongSchemaException - - -class TestSerialisationEv42: - def test_serialises_and_deserialises_ev43_message_correctly(self): - """ - Round-trip to check what we serialise is what we get back. 
- """ - original_entry = { - "source_name": "some_source", - "message_id": 123456, - "pulse_time": [567890, 568890], - "pulse_index": [0, 4], - "time_of_flight": [1, 2, 3, 4, 5, 6, 7, 8, 9], - "detector_id": [10, 20, 30, 40, 50, 60, 70, 80, 90], - } - - buf = serialise_ev43(**original_entry) - entry = deserialise_ev43(buf) - - assert entry.source_name == original_entry["source_name"] - assert entry.message_id == original_entry["message_id"] - assert np.array_equal(entry.pulse_time, original_entry["pulse_time"]) - assert np.array_equal(entry.pulse_index, original_entry["pulse_index"]) - assert np.array_equal(entry.time_of_flight, original_entry["time_of_flight"]) - assert np.array_equal(entry.detector_id, original_entry["detector_id"]) - - def test_serialises_and_deserialises_ev43_message_correctly_for_numpy_arrays(self): - """ - Round-trip to check what we serialise is what we get back. - """ - original_entry = { - "source_name": "some_source", - "message_id": 123456, - "pulse_time": np.array([567890, 568890]), - "pulse_index": np.array([0, 4]), - "time_of_flight": np.array([1, 2, 3, 4, 5, 6, 7, 8, 9]), - "detector_id": np.array([10, 20, 30, 40, 50, 60, 70, 80, 90]), - } - - buf = serialise_ev43(**original_entry) - entry = deserialise_ev43(buf) - - assert entry.source_name == original_entry["source_name"] - assert entry.message_id == original_entry["message_id"] - assert np.array_equal(entry.pulse_time, original_entry["pulse_time"]) - assert np.array_equal(entry.pulse_index, original_entry["pulse_index"]) - assert np.array_equal(entry.time_of_flight, original_entry["time_of_flight"]) - assert np.array_equal(entry.detector_id, original_entry["detector_id"]) - - def test_if_buffer_has_wrong_id_then_throws(self): - original_entry = { - "source_name": "some_source", - "message_id": 123456, - "pulse_time": [567890, 568890], - "pulse_index": [0, 4], - "time_of_flight": [1, 2, 3, 4, 5, 6, 7, 8, 9], - "detector_id": [10, 20, 30, 40, 50, 60, 70, 80, 90], - } - buf = 
serialise_ev43(**original_entry) - - # Manually hack the id - buf = bytearray(buf) - buf[4:8] = b"1234" - - with pytest.raises(WrongSchemaException): - deserialise_ev43(buf) - - def test_schema_type_is_in_global_serialisers_list(self): - assert "ev43" in SERIALISERS - assert "ev43" in DESERIALISERS diff --git a/python/tests/test_f142.py b/python/tests/test_f142.py deleted file mode 100644 index 983279c..0000000 --- a/python/tests/test_f142.py +++ /dev/null @@ -1,211 +0,0 @@ -import numpy as np -import pytest - -from streaming_data_types import DESERIALISERS, SERIALISERS -from streaming_data_types.exceptions import WrongSchemaException -from streaming_data_types.fbschemas.logdata_f142.AlarmSeverity import AlarmSeverity -from streaming_data_types.fbschemas.logdata_f142.AlarmStatus import AlarmStatus -from streaming_data_types.logdata_f142 import deserialise_f142, serialise_f142 - - -class TestSerialisationF142: - original_entry = { - "source_name": "some_source", - "value": 578214, - "timestamp_unix_ns": 1585332414000000000, - } - - def test_serialises_and_deserialises_integer_f142_message_correctly(self): - buf = serialise_f142(**self.original_entry) - deserialised_tuple = deserialise_f142(buf) - - assert deserialised_tuple.source_name == self.original_entry["source_name"] - assert deserialised_tuple.value == self.original_entry["value"] - assert ( - deserialised_tuple.timestamp_unix_ns - == self.original_entry["timestamp_unix_ns"] - ) - - def test_serialises_and_deserialises_byte_f142_message_correctly(self): - byte_log = { - "source_name": "some_source", - "value": 0x7F, - "timestamp_unix_ns": 1585332414000000000, - } - buf = serialise_f142(**byte_log) - deserialised_tuple = deserialise_f142(buf) - - assert deserialised_tuple.source_name == byte_log["source_name"] - assert deserialised_tuple.value == byte_log["value"] - assert deserialised_tuple.timestamp_unix_ns == byte_log["timestamp_unix_ns"] - - def 
test_serialises_and_deserialises_float_f142_message_correctly(self): - float_log = { - "source_name": "some_source", - "value": 1.234, - "timestamp_unix_ns": 1585332414000000000, - } - buf = serialise_f142(**float_log) - deserialised_tuple = deserialise_f142(buf) - - assert deserialised_tuple.source_name == float_log["source_name"] - assert deserialised_tuple.value == float_log["value"] - assert deserialised_tuple.timestamp_unix_ns == float_log["timestamp_unix_ns"] - - def test_serialises_and_deserialises_scalar_ndarray_f142_message_correctly(self): - numpy_log = { - "source_name": "some_source", - "value": np.array(42), - "timestamp_unix_ns": 1585332414000000000, - } - buf = serialise_f142(**numpy_log) - deserialised_tuple = deserialise_f142(buf) - - assert deserialised_tuple.source_name == numpy_log["source_name"] - assert deserialised_tuple.value == np.array(numpy_log["value"]) - assert deserialised_tuple.timestamp_unix_ns == numpy_log["timestamp_unix_ns"] - - def test_serialises_and_deserialises_string_f142_message_correctly(self): - string_log = { - "source_name": "some_source", - "value": "some_string", - "timestamp_unix_ns": 1585332414000000000, - } - buf = serialise_f142(**string_log) - deserialised_tuple = deserialise_f142(buf) - - assert deserialised_tuple.source_name == string_log["source_name"] - assert deserialised_tuple.value == string_log["value"] - assert deserialised_tuple.timestamp_unix_ns == string_log["timestamp_unix_ns"] - - def test_serialises_and_deserialises_native_list_correctly(self): - list_log = { - "source_name": "some_source", - "value": [1, 2, 3], - "timestamp_unix_ns": 1585332414000000000, - } - buf = serialise_f142(**list_log) - deserialised_tuple = deserialise_f142(buf) - - assert deserialised_tuple.source_name == list_log["source_name"] - # Array values are output as numpy array - assert np.array_equal(deserialised_tuple.value, np.array(list_log["value"])) - assert deserialised_tuple.timestamp_unix_ns == 
list_log["timestamp_unix_ns"] - - def test_serialises_and_deserialises_numpy_array_integers_correctly(self): - array_log = { - "source_name": "some_source", - "value": np.array([1, 2, 3]), - "timestamp_unix_ns": 1585332414000000000, - } - buf = serialise_f142(**array_log) - deserialised_tuple = deserialise_f142(buf) - - assert deserialised_tuple.source_name == array_log["source_name"] - assert np.array_equal(deserialised_tuple.value, array_log["value"]) - assert deserialised_tuple.timestamp_unix_ns == array_log["timestamp_unix_ns"] - - def test_serialises_and_deserialises_numpy_array_preserves_byte_type_correctly( - self, - ): - array_log = { - "source_name": "some_source", - "value": np.array([1, 2, 3], dtype=np.uint8), - "timestamp_unix_ns": 1585332414000000000, - } - buf = serialise_f142(**array_log) - deserialised_tuple = deserialise_f142(buf) - - assert np.array_equal(deserialised_tuple.value, array_log["value"]) - assert deserialised_tuple.value.dtype == array_log["value"].dtype - - def test_serialises_and_deserialises_numpy_array_preserves_integer_type_correctly( - self, - ): - array_log = { - "source_name": "some_source", - "value": np.array([1, 2, 3], dtype=np.uint16), - "timestamp_unix_ns": 1585332414000000000, - } - buf = serialise_f142(**array_log) - deserialised_tuple = deserialise_f142(buf) - - assert np.array_equal(deserialised_tuple.value, array_log["value"]) - assert deserialised_tuple.value.dtype == array_log["value"].dtype - - def test_serialises_and_deserialises_numpy_array_floats_correctly(self): - array_log = { - "source_name": "some_source", - "value": np.array([1.1, 2.2, 3.3]), - "timestamp_unix_ns": 1585332414000000000, - } - buf = serialise_f142(**array_log) - deserialised_tuple = deserialise_f142(buf) - - assert deserialised_tuple.source_name == array_log["source_name"] - assert np.allclose(deserialised_tuple.value, array_log["value"]) - assert deserialised_tuple.timestamp_unix_ns == array_log["timestamp_unix_ns"] - - def 
test_serialises_and_deserialises_numpy_array_strings_correctly(self): - array_log = { - "source_name": "some_source", - "value": np.array(["1", "2", "3"]), - "timestamp_unix_ns": 1585332414000000000, - } - buf = serialise_f142(**array_log) - deserialised_tuple = deserialise_f142(buf) - - assert deserialised_tuple.source_name == array_log["source_name"] - assert np.array_equal(deserialised_tuple.value, array_log["value"]) - assert deserialised_tuple.timestamp_unix_ns == array_log["timestamp_unix_ns"] - - def test_serialises_and_deserialises_epics_alarms_correctly(self): - float_log = { - "source_name": "some_source", - "value": 1.234, - "timestamp_unix_ns": 1585332414000000000, - "alarm_status": AlarmStatus.HIHI, - "alarm_severity": AlarmSeverity.MAJOR, - } - buf = serialise_f142(**float_log) - deserialised_tuple = deserialise_f142(buf) - - assert deserialised_tuple.alarm_status == float_log["alarm_status"] - assert deserialised_tuple.alarm_severity == float_log["alarm_severity"] - - def test_epics_alarms_default_to_no_change_when_not_provided_to_serialiser(self): - float_log = { - "source_name": "some_source", - "value": 1.234, - "timestamp_unix_ns": 1585332414000000000, - } - buf = serialise_f142(**float_log) - deserialised_tuple = deserialise_f142(buf) - - assert deserialised_tuple.alarm_status == AlarmStatus.NO_CHANGE - assert deserialised_tuple.alarm_severity == AlarmSeverity.NO_CHANGE - - def test_raises_not_implemented_error_when_trying_to_serialise_numpy_complex_number_type( - self, - ): - complex_log = { - "source_name": "some_source", - "value": complex(3, 4), - "timestamp_unix_ns": 1585332414000000000, - } - with pytest.raises(NotImplementedError): - serialise_f142(**complex_log) - - def test_if_buffer_has_wrong_id_then_throws(self): - buf = serialise_f142(**self.original_entry) - - # Manually hack the id - buf = bytearray(buf) - buf[4:8] = b"1234" - - with pytest.raises(WrongSchemaException): - deserialise_f142(buf) - - def 
test_schema_type_is_in_global_serialisers_list(self): - assert "f142" in SERIALISERS - assert "f142" in DESERIALISERS diff --git a/python/tests/test_hs00.py b/python/tests/test_hs00.py deleted file mode 100644 index cfca5a0..0000000 --- a/python/tests/test_hs00.py +++ /dev/null @@ -1,365 +0,0 @@ -import numpy as np -import pytest - -from streaming_data_types import DESERIALISERS, SERIALISERS -from streaming_data_types.exceptions import WrongSchemaException -from streaming_data_types.histogram_hs00 import deserialise_hs00, serialise_hs00 - - -def create_test_data_with_type(numpy_type): - return { - "source": "some_source", - "timestamp": 123456, - "current_shape": [5], - "dim_metadata": [ - { - "length": 5, - "unit": "m", - "label": "some_label", - "bin_boundaries": np.array([0, 1, 2, 3, 4, 5]).astype(numpy_type), - } - ], - "last_metadata_timestamp": 123456, - "data": np.array([1, 2, 3, 4, 5]).astype(numpy_type), - "errors": np.array([5, 4, 3, 2, 1]).astype(numpy_type), - "info": "info_string", - } - - -class TestSerialisationHs00: - def _check_metadata_for_one_dimension(self, data, original_data): - assert np.array_equal(data["bin_boundaries"], original_data["bin_boundaries"]) - assert data["length"] == original_data["length"] - assert data["unit"] == original_data["unit"] - assert data["label"] == original_data["label"] - - def test_serialises_and_deserialises_hs00_message_correctly_for_full_1d_data(self): - """ - Round-trip to check what we serialise is what we get back. 
- """ - original_hist = { - "source": "some_source", - "timestamp": 123456, - "current_shape": [5], - "dim_metadata": [ - { - "length": 5, - "unit": "m", - "label": "some_label", - "bin_boundaries": np.array([0.0, 1.0, 2.0, 3.0, 4.0, 5.0]), - } - ], - "last_metadata_timestamp": 123456, - "data": np.array([1.0, 2.0, 3.0, 4.0, 5.0]), - "errors": np.array([5.0, 4.0, 3.0, 2.0, 1.0]), - "info": "info_string", - } - - buf = serialise_hs00(original_hist) - hist = deserialise_hs00(buf) - - assert hist["source"] == original_hist["source"] - assert hist["timestamp"] == original_hist["timestamp"] - assert hist["current_shape"] == original_hist["current_shape"] - self._check_metadata_for_one_dimension( - hist["dim_metadata"][0], original_hist["dim_metadata"][0] - ) - assert np.array_equal(hist["data"], original_hist["data"]) - assert np.array_equal(hist["errors"], original_hist["errors"]) - assert hist["info"] == original_hist["info"] - assert ( - hist["last_metadata_timestamp"] == original_hist["last_metadata_timestamp"] - ) - - def test_serialises_and_deserialises_hs00_message_correctly_for_minimal_1d_data( - self, - ): - """ - Round-trip to check what we serialise is what we get back. 
- """ - original_hist = { - "timestamp": 123456, - "current_shape": [5], - "dim_metadata": [ - { - "length": 5, - "unit": "m", - "label": "some_label", - "bin_boundaries": np.array([0.0, 1.0, 2.0, 3.0, 4.0, 5.0]), - } - ], - "data": np.array([1.0, 2.0, 3.0, 4.0, 5.0]), - } - buf = serialise_hs00(original_hist) - - hist = deserialise_hs00(buf) - assert hist["source"] == "" - assert hist["timestamp"] == original_hist["timestamp"] - assert hist["current_shape"] == original_hist["current_shape"] - self._check_metadata_for_one_dimension( - hist["dim_metadata"][0], original_hist["dim_metadata"][0] - ) - assert np.array_equal(hist["data"], original_hist["data"]) - assert len(hist["errors"]) == 0 - assert hist["info"] == "" - - def test_serialises_and_deserialises_hs00_message_correctly_for_full_2d_data(self): - """ - Round-trip to check what we serialise is what we get back. - """ - original_hist = { - "source": "some_source", - "timestamp": 123456, - "current_shape": [2, 5], - "dim_metadata": [ - { - "length": 2, - "unit": "b", - "label": "y", - "bin_boundaries": np.array([10.0, 11.0, 12.0]), - }, - { - "length": 5, - "unit": "m", - "label": "x", - "bin_boundaries": np.array([0.0, 1.0, 2.0, 3.0, 4.0, 5.0]), - }, - ], - "last_metadata_timestamp": 123456, - "data": np.array([[1.0, 2.0, 3.0, 4.0, 5.0], [6.0, 7.0, 8.0, 9.0, 10.0]]), - "errors": np.array([[5.0, 4.0, 3.0, 2.0, 1.0], [10.0, 9.0, 8.0, 7.0, 6.0]]), - "info": "info_string", - } - buf = serialise_hs00(original_hist) - - hist = deserialise_hs00(buf) - assert hist["source"] == original_hist["source"] - assert hist["timestamp"] == original_hist["timestamp"] - assert hist["current_shape"] == original_hist["current_shape"] - self._check_metadata_for_one_dimension( - hist["dim_metadata"][0], original_hist["dim_metadata"][0] - ) - self._check_metadata_for_one_dimension( - hist["dim_metadata"][1], original_hist["dim_metadata"][1] - ) - assert np.array_equal(hist["data"], original_hist["data"]) - assert 
np.array_equal(hist["errors"], original_hist["errors"]) - assert hist["info"] == original_hist["info"] - assert ( - hist["last_metadata_timestamp"] == original_hist["last_metadata_timestamp"] - ) - - def test_if_buffer_has_wrong_id_then_throws(self): - original_hist = { - "timestamp": 123456, - "current_shape": [5], - "dim_metadata": [ - { - "length": 5, - "unit": "m", - "label": "some_label", - "bin_boundaries": np.array([0.0, 1.0, 2.0, 3.0, 4.0, 5.0]), - } - ], - "data": np.array([1.0, 2.0, 3.0, 4.0, 5.0]), - } - buf = serialise_hs00(original_hist) - - # Manually hack the id - buf = bytearray(buf) - buf[4:8] = b"1234" - - with pytest.raises(WrongSchemaException): - deserialise_hs00(buf) - - def test_serialises_and_deserialises_hs00_message_correctly_for_int_array_data( - self, - ): - """ - Round-trip to check what we serialise is what we get back. - """ - original_hist = { - "source": "some_source", - "timestamp": 123456, - "current_shape": [5], - "dim_metadata": [ - { - "length": 5, - "unit": "m", - "label": "some_label", - "bin_boundaries": np.array([0, 1, 2, 3, 4, 5]), - } - ], - "last_metadata_timestamp": 123456, - "data": np.array([1, 2, 3, 4, 5]), - "errors": np.array([5, 4, 3, 2, 1]), - "info": "info_string", - } - - buf = serialise_hs00(original_hist) - hist = deserialise_hs00(buf) - - assert hist["source"] == original_hist["source"] - assert hist["timestamp"] == original_hist["timestamp"] - assert hist["current_shape"] == original_hist["current_shape"] - self._check_metadata_for_one_dimension( - hist["dim_metadata"][0], original_hist["dim_metadata"][0] - ) - assert np.array_equal(hist["data"], original_hist["data"]) - assert np.array_equal(hist["errors"], original_hist["errors"]) - assert hist["info"] == original_hist["info"] - assert ( - hist["last_metadata_timestamp"] == original_hist["last_metadata_timestamp"] - ) - - def test_serialise_and_deserialise_hs00_message_returns_uint32_type(self): - original_hist = create_test_data_with_type(np.uint32) - - 
buf = serialise_hs00(original_hist) - hist = deserialise_hs00(buf) - - assert np.issubdtype( - hist["dim_metadata"][0]["bin_boundaries"].dtype, - original_hist["dim_metadata"][0]["bin_boundaries"].dtype, - ) - assert np.issubdtype(hist["data"].dtype, original_hist["data"].dtype) - assert np.issubdtype(hist["errors"].dtype, original_hist["errors"].dtype) - - def test_serialise_and_deserialise_hs00_message_returns_uint64_type(self): - original_hist = create_test_data_with_type(np.uint64) - - buf = serialise_hs00(original_hist) - hist = deserialise_hs00(buf) - - assert np.issubdtype( - hist["dim_metadata"][0]["bin_boundaries"].dtype, - original_hist["dim_metadata"][0]["bin_boundaries"].dtype, - ) - assert np.issubdtype(hist["data"].dtype, original_hist["data"].dtype) - assert np.issubdtype(hist["errors"].dtype, original_hist["errors"].dtype) - - def test_serialise_and_deserialise_hs00_message_returns_float32_type(self): - original_hist = create_test_data_with_type(np.float32) - - buf = serialise_hs00(original_hist) - hist = deserialise_hs00(buf) - - assert np.issubdtype( - hist["dim_metadata"][0]["bin_boundaries"].dtype, - original_hist["dim_metadata"][0]["bin_boundaries"].dtype, - ) - assert np.issubdtype(hist["data"].dtype, original_hist["data"].dtype) - assert np.issubdtype(hist["errors"].dtype, original_hist["errors"].dtype) - - def test_serialise_and_deserialise_hs00_message_returns_float64_type(self): - original_hist = create_test_data_with_type(np.float64) - - buf = serialise_hs00(original_hist) - hist = deserialise_hs00(buf) - - assert np.issubdtype( - hist["dim_metadata"][0]["bin_boundaries"].dtype, - original_hist["dim_metadata"][0]["bin_boundaries"].dtype, - ) - assert np.issubdtype(hist["data"].dtype, original_hist["data"].dtype) - assert np.issubdtype(hist["errors"].dtype, original_hist["errors"].dtype) - - def test_serialises_and_deserialises_hs00_message_correctly_when_float_input_is_not_ndarray( - self, - ): - """ - Round-trip to check what we 
serialise is what we get back. - """ - original_hist = { - "source": "some_source", - "timestamp": 123456, - "current_shape": [2, 5], - "dim_metadata": [ - { - "length": 2, - "unit": "b", - "label": "y", - "bin_boundaries": [10.0, 11.0, 12.0], - }, - { - "length": 5, - "unit": "m", - "label": "x", - "bin_boundaries": [0.0, 1.0, 2.0, 3.0, 4.0, 5.0], - }, - ], - "last_metadata_timestamp": 123456, - "data": [[1.0, 2.0, 3.0, 4.0, 5.0], [6.0, 7.0, 8.0, 9.0, 10.0]], - "errors": [[5.0, 4.0, 3.0, 2.0, 1.0], [10.0, 9.0, 8.0, 7.0, 6.0]], - "info": "info_string", - } - buf = serialise_hs00(original_hist) - - hist = deserialise_hs00(buf) - assert hist["source"] == original_hist["source"] - assert hist["timestamp"] == original_hist["timestamp"] - assert hist["current_shape"] == original_hist["current_shape"] - self._check_metadata_for_one_dimension( - hist["dim_metadata"][0], original_hist["dim_metadata"][0] - ) - self._check_metadata_for_one_dimension( - hist["dim_metadata"][1], original_hist["dim_metadata"][1] - ) - assert np.array_equal(hist["data"], original_hist["data"]) - assert np.array_equal(hist["errors"], original_hist["errors"]) - assert hist["info"] == original_hist["info"] - assert ( - hist["last_metadata_timestamp"] == original_hist["last_metadata_timestamp"] - ) - - def test_serialises_and_deserialises_hs00_message_correctly_when_int_input_is_not_ndarray( - self, - ): - """ - Round-trip to check what we serialise is what we get back. 
- """ - original_hist = { - "source": "some_source", - "timestamp": 123456, - "current_shape": [2, 5], - "dim_metadata": [ - { - "length": 2, - "unit": "b", - "label": "y", - "bin_boundaries": [10, 11, 12], - }, - { - "length": 5, - "unit": "m", - "label": "x", - "bin_boundaries": [0, 1, 2, 3, 4, 5], - }, - ], - "last_metadata_timestamp": 123456, - "data": [[1, 2, 3, 4, 5], [6, 7, 8, 9, 10]], - "errors": [[5, 4, 3, 2, 1], [10, 9, 8, 7, 6]], - "info": "info_string", - } - buf = serialise_hs00(original_hist) - - hist = deserialise_hs00(buf) - assert hist["source"] == original_hist["source"] - assert hist["timestamp"] == original_hist["timestamp"] - assert hist["current_shape"] == original_hist["current_shape"] - self._check_metadata_for_one_dimension( - hist["dim_metadata"][0], original_hist["dim_metadata"][0] - ) - self._check_metadata_for_one_dimension( - hist["dim_metadata"][1], original_hist["dim_metadata"][1] - ) - assert np.array_equal(hist["data"], original_hist["data"]) - assert np.array_equal(hist["errors"], original_hist["errors"]) - assert hist["info"] == original_hist["info"] - assert ( - hist["last_metadata_timestamp"] == original_hist["last_metadata_timestamp"] - ) - - def test_schema_type_is_in_global_serialisers_list(self): - assert "hs00" in SERIALISERS - assert "hs00" in DESERIALISERS diff --git a/python/tests/test_ns10.py b/python/tests/test_ns10.py deleted file mode 100644 index 346c66e..0000000 --- a/python/tests/test_ns10.py +++ /dev/null @@ -1,49 +0,0 @@ -import pytest - -from streaming_data_types import DESERIALISERS, SERIALISERS -from streaming_data_types.exceptions import WrongSchemaException -from streaming_data_types.nicos_cache_ns10 import deserialise_ns10, serialise_ns10 - - -class TestSerialisationNs10: - def test_serialises_and_deserialises_ns10_message_correctly(self): - """ - Round-trip to check what we serialise is what we get back. 
- """ - original_entry = { - "key": "some_key", - "time_stamp": 123456, - "ttl": 567890, - "expired": True, - "value": "some_value", - } - - buf = serialise_ns10(**original_entry) - entry = deserialise_ns10(buf) - - assert entry.key == original_entry["key"] - assert entry.time_stamp == original_entry["time_stamp"] - assert entry.ttl == original_entry["ttl"] - assert entry.expired == original_entry["expired"] - assert entry.value == original_entry["value"] - - def test_if_buffer_has_wrong_id_then_throws(self): - original_entry = { - "key": "some_key", - "time_stamp": 123456, - "ttl": 567890, - "expired": True, - "value": "some_value", - } - buf = serialise_ns10(**original_entry) - - # Manually hack the id - buf = bytearray(buf) - buf[4:8] = b"1234" - - with pytest.raises(WrongSchemaException): - deserialise_ns10(buf) - - def test_schema_type_is_in_global_serialisers_list(self): - assert "ns10" in SERIALISERS - assert "ns10" in DESERIALISERS diff --git a/python/tests/test_rf5k.py b/python/tests/test_rf5k.py deleted file mode 100644 index 079b4c9..0000000 --- a/python/tests/test_rf5k.py +++ /dev/null @@ -1,60 +0,0 @@ -import pytest - -from streaming_data_types import DESERIALISERS, SERIALISERS -from streaming_data_types.exceptions import WrongSchemaException -from streaming_data_types.fbschemas.forwarder_config_update_rf5k.UpdateType import ( - UpdateType, -) -from streaming_data_types.forwarder_config_update_rf5k import ( - Protocol, - StreamInfo, - deserialise_rf5k, - serialise_rf5k, -) - - -class TestSerialisationRf5k: - def test_serialises_and_deserialises_rf5k_message_with_streams_correctly(self): - """ - Round-trip to check what we serialise is what we get back. 
- """ - stream_1 = StreamInfo("channel1", "f142", "topic1", Protocol.Protocol.PVA) - stream_2 = StreamInfo("channel2", "TdcTime", "topic2", Protocol.Protocol.CA) - original_entry = { - "config_change": UpdateType.ADD, - "streams": [stream_1, stream_2], - } - - buf = serialise_rf5k(**original_entry) - entry = deserialise_rf5k(buf) - - assert entry.config_change == original_entry["config_change"] - assert stream_1 in entry.streams - assert stream_2 in entry.streams - - def test_serialises_and_deserialises_rf5k_message_without_streams_correctly(self): - """ - Round-trip to check what we serialise is what we get back. - """ - original_entry = {"config_change": UpdateType.REMOVEALL, "streams": []} - - buf = serialise_rf5k(**original_entry) - entry = deserialise_rf5k(buf) - - assert entry.config_change == original_entry["config_change"] - - def test_if_buffer_has_wrong_id_then_throws(self): - original_entry = {"config_change": UpdateType.REMOVEALL, "streams": []} - - buf = serialise_rf5k(**original_entry) - - # Manually hack the id - buf = bytearray(buf) - buf[4:8] = b"1234" - - with pytest.raises(WrongSchemaException): - deserialise_rf5k(buf) - - def test_schema_type_is_in_global_serialisers_list(self): - assert "rf5k" in SERIALISERS - assert "rf5k" in DESERIALISERS diff --git a/python/tests/test_se00.py b/python/tests/test_se00.py deleted file mode 100644 index 49121f3..0000000 --- a/python/tests/test_se00.py +++ /dev/null @@ -1,73 +0,0 @@ -import numpy as np -import pytest - -from streaming_data_types import DESERIALISERS, SERIALISERS -from streaming_data_types.array_1d_se00 import deserialise_se00, serialise_se00 -from streaming_data_types.fbschemas.array_1d_se00.Location import Location - -entry_1 = { - "name": "some_name", - "timestamp_unix_ns": 1668593863397138093, - "channel": 42, - "message_counter": 123456, - "sample_ts_delta": 0.005, - "values": np.arange(100, dtype=np.uint16), - "value_timestamps": np.arange(50) + 1111, - "ts_location": Location.End, -} - 
-entry_2 = { - "name": "some_name_other_name", - "timestamp_unix_ns": 1668593863397138094, - "channel": 11, - "message_counter": 654321, - "sample_ts_delta": 1.666, - "values": np.arange(1000, dtype=np.int64), - "value_timestamps": None, - "ts_location": Location.Middle, -} - -entry_3 = { - "name": "some_float_name", - "timestamp_unix_ns": 1668593863397138095, - "channel": 11, - "message_counter": 231465, - "sample_ts_delta": 1.666, - "values": np.arange(1000, dtype=np.float32), - "value_timestamps": None, - "ts_location": Location.Middle, -} - -entry_4 = { - "name": "some_double_name", - "timestamp_unix_ns": 1668593863397138096, - "channel": 11, - "message_counter": 324156, - "sample_ts_delta": 1.666, - "values": np.arange(1000, dtype=np.float64), - "value_timestamps": None, - "ts_location": Location.Middle, -} - - -class TestSerialisationSenv: - @pytest.mark.parametrize("input_entry", [entry_1, entry_2, entry_3, entry_4]) - def test_serialises_and_deserialises_se00(self, input_entry): - buf = serialise_se00(**input_entry) - deserialised_tuple = deserialise_se00(buf) - - assert input_entry["name"] == deserialised_tuple.name - assert input_entry["timestamp_unix_ns"] == deserialised_tuple.timestamp_unix_ns - assert input_entry["channel"] == deserialised_tuple.channel - assert input_entry["message_counter"] == deserialised_tuple.message_counter - assert input_entry["sample_ts_delta"] == deserialised_tuple.sample_ts_delta - assert np.array_equal(input_entry["values"], deserialised_tuple.values) - assert np.array_equal( - input_entry["value_timestamps"], deserialised_tuple.value_ts - ) - assert input_entry["values"].dtype == deserialised_tuple.values.dtype - assert input_entry["ts_location"] == deserialised_tuple.ts_location - - def test_schema_type_is_in_global_serialisers_list(self): - assert "se00" in SERIALISERS - assert "se00" in DESERIALISERS diff --git a/python/tests/test_senv.py b/python/tests/test_senv.py deleted file mode 100644 index e7d5ff1..0000000 --- 
a/python/tests/test_senv.py +++ /dev/null @@ -1,57 +0,0 @@ -from datetime import datetime, timezone - -import numpy as np -import pytest - -from streaming_data_types import DESERIALISERS, SERIALISERS -from streaming_data_types.fbschemas.sample_environment_senv.Location import Location -from streaming_data_types.sample_environment_senv import ( - deserialise_senv, - serialise_senv, -) - -entry_1 = { - "name": "some_name", - "timestamp": datetime.now(tz=timezone.utc), - "channel": 42, - "message_counter": 123456, - "sample_ts_delta": 0.005, - "values": np.arange(100, dtype=np.uint16), - "value_timestamps": np.arange(50) + 1111, - "ts_location": Location.End, -} - -entry_2 = { - "name": "some_name_other_name", - "timestamp": datetime.now(tz=timezone.utc), - "channel": 11, - "message_counter": 654321, - "sample_ts_delta": 1.666, - "values": np.arange(1000, dtype=np.int64), - "value_timestamps": None, - "ts_location": Location.Middle, -} - - -class TestSerialisationSenv: - @pytest.mark.parametrize("input_entry", [entry_1, entry_2]) - def test_serialises_and_deserialises_senv(self, input_entry): - original_entry = input_entry - buf = serialise_senv(**original_entry) - deserialised_tuple = deserialise_senv(buf) - - assert original_entry["name"] == deserialised_tuple.name - assert original_entry["timestamp"] == deserialised_tuple.timestamp - assert original_entry["channel"] == deserialised_tuple.channel - assert original_entry["message_counter"] == deserialised_tuple.message_counter - assert original_entry["sample_ts_delta"] == deserialised_tuple.sample_ts_delta - assert np.array_equal(original_entry["values"], deserialised_tuple.values) - assert np.array_equal( - original_entry["value_timestamps"], deserialised_tuple.value_ts - ) - assert original_entry["values"].dtype == deserialised_tuple.values.dtype - assert original_entry["ts_location"] == deserialised_tuple.ts_location - - def test_schema_type_is_in_global_serialisers_list(self): - assert "senv" in SERIALISERS - 
assert "senv" in DESERIALISERS diff --git a/python/tests/test_tdct.py b/python/tests/test_tdct.py deleted file mode 100644 index 10092b7..0000000 --- a/python/tests/test_tdct.py +++ /dev/null @@ -1,59 +0,0 @@ -import numpy as np -import pytest - -from streaming_data_types import DESERIALISERS, SERIALISERS -from streaming_data_types.exceptions import WrongSchemaException -from streaming_data_types.timestamps_tdct import deserialise_tdct, serialise_tdct - - -class TestSerialisationTdct: - original_entry = { - "name": "some_name", - "timestamps": [0, 1, 2, 3, 4], - "sequence_counter": 42, - } - - def test_serialises_and_deserialises_tdct_message_with_list_of_timestamps(self): - buf = serialise_tdct(**self.original_entry) - deserialised_tuple = deserialise_tdct(buf) - - assert deserialised_tuple.name == self.original_entry["name"] - assert np.allclose( - deserialised_tuple.timestamps, np.array(self.original_entry["timestamps"]) - ) - assert ( - deserialised_tuple.sequence_counter - == self.original_entry["sequence_counter"] - ) - - def test_serialises_and_deserialises_tdct_message_with_array_of_timestamps(self): - original_entry = {"name": "some_name", "timestamps": np.array([0, 1, 2, 3, 4])} - - buf = serialise_tdct(**original_entry) - deserialised_tuple = deserialise_tdct(buf) - - assert deserialised_tuple.name == original_entry["name"] - assert np.allclose(deserialised_tuple.timestamps, original_entry["timestamps"]) - - def test_serialises_and_deserialises_tdct_message_with_single_timestamp(self): - original_entry = {"name": "some_name", "timestamps": np.array(0)} - - buf = serialise_tdct(**original_entry) - deserialised_tuple = deserialise_tdct(buf) - - assert deserialised_tuple.name == original_entry["name"] - assert np.allclose(deserialised_tuple.timestamps, original_entry["timestamps"]) - - def test_if_buffer_has_wrong_id_then_throws(self): - buf = serialise_tdct(**self.original_entry) - - # Manually hack the id - buf = bytearray(buf) - buf[4:8] = b"1234" - - 
with pytest.raises(WrongSchemaException): - deserialise_tdct(buf) - - def test_schema_type_is_in_global_serialisers_list(self): - assert "tdct" in SERIALISERS - assert "tdct" in DESERIALISERS From 0eb2a16c1b3c664280c59490d9042f6b9f43368b Mon Sep 17 00:00:00 2001 From: Tom Willemsen Date: Wed, 18 Mar 2026 16:05:18 +0000 Subject: [PATCH 354/363] adjust readme/GH config --- .github/dependabot.yml | 10 +++++++--- .github/workflows/release.yml | 11 +++-------- README.md | 8 +++++--- 3 files changed, 15 insertions(+), 14 deletions(-) diff --git a/.github/dependabot.yml b/.github/dependabot.yml index b728efb..a53e410 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -1,10 +1,14 @@ version: 2 updates: + - package-ecosystem: "cargo" + directory: "rust/" + schedule: + interval: "monthly" - package-ecosystem: "pip" - directory: "/" + directory: "python/" schedule: - interval: "daily" + interval: "monthly" - package-ecosystem: "github-actions" directory: "/" schedule: - interval: "daily" + interval: "monthly" diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 2d8134e..da0fa7b 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -1,12 +1,7 @@ name: Publish Python distribution to PyPI on: push jobs: - lint-and-test: - if: github.ref_type == 'tag' - name: Run linter and tests - uses: ./.github/workflows/Lint-and-test.yml - build: - needs: lint-and-test + py-build: if: github.ref_type == 'tag' name: build distribution runs-on: ubuntu-latest @@ -34,7 +29,7 @@ jobs: name: >- Publish Python distribution to PyPI if: github.ref_type == 'tag' - needs: [lint-and-test, build] + needs: [py-build] runs-on: ubuntu-latest environment: name: release @@ -53,7 +48,7 @@ jobs: name: >- Sign the Python distribution with Sigstore and upload them to GitHub Release - needs: [lint-and-test, build, publish-to-pypi] + needs: [py-build, publish-to-pypi] runs-on: ubuntu-latest permissions: diff --git a/README.md b/README.md index 
eaa490b..7658fa1 100644 --- a/README.md +++ b/README.md @@ -29,15 +29,17 @@ FlatBuffers is the format chosen for the ISIS data streaming system, derived fro ### Add `.fbs` file to `schemas directory -Check `ess-streaming-data-types` first; attempt not to diverge without reason. +Check `ess-streaming-data-types` first; attempt not to diverge without reason. If a new schema is really needed, then +add a new `.fbs` schema in the `schemas` directory. ### Python bindings Python bindings have low-level code (autogenerated by `flatc`) in the `fbschemas` directory, but **also** manually-written convenience serializers and deserializers in the top-level of the python module. -When adding or modifying a schema, these manually-written serializers & deserializers will need to be updated, -and added to the lists in `__init__.py`. +When adding or modifying a schema: +- The low-level code must be manually generated using `flatc` and added to the `fbschemas` directory +- Manually-written serializers & deserializers will need to be updated, and added to the lists in `__init__.py`. 
### Rust bindings From 9876dc1e800ad76d93669029fc66c03d038e2467 Mon Sep 17 00:00:00 2001 From: Tom Willemsen Date: Wed, 18 Mar 2026 17:08:21 +0000 Subject: [PATCH 355/363] Add generated bindings --- .gitignore | 1 - README.md | 10 +- generate_bindings.py | 57 + .../flatbuffers_generated/6s4t_run_stop.rs | 252 ++ .../ad00_area_detector_array.rs | 575 ++++ rust/src/flatbuffers_generated/al00_alarm.rs | 327 ++ .../answ_action_response.rs | 473 +++ .../flatbuffers_generated/da00_dataarray.rs | 562 ++++ .../df12_det_spec_map.rs | 218 ++ .../ep01_epics_connection.rs | 344 ++ rust/src/flatbuffers_generated/ev44_events.rs | 272 ++ .../src/flatbuffers_generated/f144_logdata.rs | 2767 +++++++++++++++++ .../fc00_forwarder_config.rs | 545 ++++ .../hs01_event_histogram.rs | 1301 ++++++++ rust/src/flatbuffers_generated/json_json.rs | 184 ++ rust/src/flatbuffers_generated/mod.rs | 36 + .../flatbuffers_generated/pl72_run_start.rs | 518 +++ .../pu00_pulse_metadata.rs | 270 ++ rust/src/flatbuffers_generated/se00_data.rs | 1734 +++++++++++ rust/src/flatbuffers_generated/un00_units.rs | 219 ++ .../wrdn_finished_writing.rs | 272 ++ rust/src/flatbuffers_generated/x5f2_status.rs | 286 ++ 22 files changed, 11218 insertions(+), 5 deletions(-) create mode 100644 generate_bindings.py create mode 100644 rust/src/flatbuffers_generated/6s4t_run_stop.rs create mode 100644 rust/src/flatbuffers_generated/ad00_area_detector_array.rs create mode 100644 rust/src/flatbuffers_generated/al00_alarm.rs create mode 100644 rust/src/flatbuffers_generated/answ_action_response.rs create mode 100644 rust/src/flatbuffers_generated/da00_dataarray.rs create mode 100644 rust/src/flatbuffers_generated/df12_det_spec_map.rs create mode 100644 rust/src/flatbuffers_generated/ep01_epics_connection.rs create mode 100644 rust/src/flatbuffers_generated/ev44_events.rs create mode 100644 rust/src/flatbuffers_generated/f144_logdata.rs create mode 100644 rust/src/flatbuffers_generated/fc00_forwarder_config.rs create mode 100644 
rust/src/flatbuffers_generated/hs01_event_histogram.rs create mode 100644 rust/src/flatbuffers_generated/json_json.rs create mode 100644 rust/src/flatbuffers_generated/mod.rs create mode 100644 rust/src/flatbuffers_generated/pl72_run_start.rs create mode 100644 rust/src/flatbuffers_generated/pu00_pulse_metadata.rs create mode 100644 rust/src/flatbuffers_generated/se00_data.rs create mode 100644 rust/src/flatbuffers_generated/un00_units.rs create mode 100644 rust/src/flatbuffers_generated/wrdn_finished_writing.rs create mode 100644 rust/src/flatbuffers_generated/x5f2_status.rs diff --git a/.gitignore b/.gitignore index 80feb90..d836711 100644 --- a/.gitignore +++ b/.gitignore @@ -137,4 +137,3 @@ dmypy.json .vscode python/src/streaming_data_types/_version.py -rust/src/flatbuffers_generated \ No newline at end of file diff --git a/README.md b/README.md index 7658fa1..1f420e4 100644 --- a/README.md +++ b/README.md @@ -32,18 +32,20 @@ FlatBuffers is the format chosen for the ISIS data streaming system, derived fro Check `ess-streaming-data-types` first; attempt not to diverge without reason. If a new schema is really needed, then add a new `.fbs` schema in the `schemas` directory. -### Python bindings +### Adding Python bindings Python bindings have low-level code (autogenerated by `flatc`) in the `fbschemas` directory, but **also** manually-written convenience serializers and deserializers in the top-level of the python module. When adding or modifying a schema: -- The low-level code must be manually generated using `flatc` and added to the `fbschemas` directory +- The low-level code must be generated by calling `generate_code.py` - Manually-written serializers & deserializers will need to be updated, and added to the lists in `__init__.py`. 
### Rust bindings -Rust bindings have low-level code (autogenerated by `flatc` in a `build.rs` script), and a small high-level wrapper +Rust bindings have low-level code in `flatbuffers_generated`, and a small high-level wrapper to deserialize any message. -When adding a new schema, the wrapper (defined in `lib.rs`) will need to be updated with the new schema. +When adding or modifying a schema: +- The low-level code must be generated by calling `generate_code.py` +- The wrapper (defined in `lib.rs`) will need to be updated with the new schema. diff --git a/generate_bindings.py b/generate_bindings.py new file mode 100644 index 0000000..35b98fc --- /dev/null +++ b/generate_bindings.py @@ -0,0 +1,57 @@ +import os +import sys +import shutil +import subprocess + + +def generate_python_bindings(): + pass + + +def to_rust_file_name(schema: str): + name, ext = schema.split(".") + return f"{name}.rs" + + +def to_rust_mod_name(schema: str): + name, ext = schema.split(".") + return f"{name[5:]}_{name[0:4]}" + + +def generate_rust_bindings(): + shutil.rmtree("rust/src/flatbuffers_generated/") + os.makedirs("rust/src/flatbuffers_generated/") + + for schema in os.listdir("schemas"): + if not schema.endswith(".fbs"): + continue + subprocess.run( + [ + "flatc", + "--rust", + "-o", + os.path.join("rust", "src", "flatbuffers_generated"), + "--filename-suffix", + "", + "--gen-all", + os.path.join("schemas", schema), + ], + check=True, + ) + + with open("rust/src/flatbuffers_generated/mod.rs", "a") as f: + f.writelines( + [ + f'#[path = "{to_rust_file_name(schema)}"]\n', + f"pub mod {to_rust_mod_name(schema)};\n", + ] + ) + + +def main(): + generate_rust_bindings() + generate_python_bindings() + + +if __name__ == "__main__": + main() diff --git a/rust/src/flatbuffers_generated/6s4t_run_stop.rs b/rust/src/flatbuffers_generated/6s4t_run_stop.rs new file mode 100644 index 0000000..506c531 --- /dev/null +++ b/rust/src/flatbuffers_generated/6s4t_run_stop.rs @@ -0,0 +1,252 @@ +// 
automatically generated by the FlatBuffers compiler, do not modify +// @generated + +extern crate alloc; + +pub enum RunStopOffset {} +#[derive(Copy, Clone, PartialEq)] + +pub struct RunStop<'a> { + pub _tab: ::flatbuffers::Table<'a>, +} + +impl<'a> ::flatbuffers::Follow<'a> for RunStop<'a> { + type Inner = RunStop<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { _tab: unsafe { ::flatbuffers::Table::new(buf, loc) } } + } +} + +impl<'a> RunStop<'a> { + pub const VT_STOP_TIME: ::flatbuffers::VOffsetT = 4; + pub const VT_RUN_NAME: ::flatbuffers::VOffsetT = 6; + pub const VT_JOB_ID: ::flatbuffers::VOffsetT = 8; + pub const VT_SERVICE_ID: ::flatbuffers::VOffsetT = 10; + pub const VT_COMMAND_ID: ::flatbuffers::VOffsetT = 12; + + #[inline] + pub unsafe fn init_from_table(table: ::flatbuffers::Table<'a>) -> Self { + RunStop { _tab: table } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: ::flatbuffers::Allocator + 'bldr>( + _fbb: &'mut_bldr mut ::flatbuffers::FlatBufferBuilder<'bldr, A>, + args: &'args RunStopArgs<'args> + ) -> ::flatbuffers::WIPOffset> { + let mut builder = RunStopBuilder::new(_fbb); + builder.add_stop_time(args.stop_time); + if let Some(x) = args.command_id { builder.add_command_id(x); } + if let Some(x) = args.service_id { builder.add_service_id(x); } + if let Some(x) = args.job_id { builder.add_job_id(x); } + if let Some(x) = args.run_name { builder.add_run_name(x); } + builder.finish() + } + + + #[inline] + pub fn stop_time(&self) -> u64 { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::(RunStop::VT_STOP_TIME, Some(0)).unwrap()} + } + #[inline] + pub fn run_name(&self) -> Option<&'a str> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<&str>>(RunStop::VT_RUN_NAME, None)} + } 
+ #[inline] + pub fn job_id(&self) -> Option<&'a str> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<&str>>(RunStop::VT_JOB_ID, None)} + } + #[inline] + pub fn service_id(&self) -> Option<&'a str> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<&str>>(RunStop::VT_SERVICE_ID, None)} + } + #[inline] + pub fn command_id(&self) -> Option<&'a str> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<&str>>(RunStop::VT_COMMAND_ID, None)} + } +} + +impl ::flatbuffers::Verifiable for RunStop<'_> { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + v.visit_table(pos)? + .visit_field::("stop_time", Self::VT_STOP_TIME, false)? + .visit_field::<::flatbuffers::ForwardsUOffset<&str>>("run_name", Self::VT_RUN_NAME, false)? + .visit_field::<::flatbuffers::ForwardsUOffset<&str>>("job_id", Self::VT_JOB_ID, false)? + .visit_field::<::flatbuffers::ForwardsUOffset<&str>>("service_id", Self::VT_SERVICE_ID, false)? + .visit_field::<::flatbuffers::ForwardsUOffset<&str>>("command_id", Self::VT_COMMAND_ID, false)? 
+ .finish(); + Ok(()) + } +} +pub struct RunStopArgs<'a> { + pub stop_time: u64, + pub run_name: Option<::flatbuffers::WIPOffset<&'a str>>, + pub job_id: Option<::flatbuffers::WIPOffset<&'a str>>, + pub service_id: Option<::flatbuffers::WIPOffset<&'a str>>, + pub command_id: Option<::flatbuffers::WIPOffset<&'a str>>, +} +impl<'a> Default for RunStopArgs<'a> { + #[inline] + fn default() -> Self { + RunStopArgs { + stop_time: 0, + run_name: None, + job_id: None, + service_id: None, + command_id: None, + } + } +} + +pub struct RunStopBuilder<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> { + fbb_: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + start_: ::flatbuffers::WIPOffset<::flatbuffers::TableUnfinishedWIPOffset>, +} +impl<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> RunStopBuilder<'a, 'b, A> { + #[inline] + pub fn add_stop_time(&mut self, stop_time: u64) { + self.fbb_.push_slot::(RunStop::VT_STOP_TIME, stop_time, 0); + } + #[inline] + pub fn add_run_name(&mut self, run_name: ::flatbuffers::WIPOffset<&'b str>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(RunStop::VT_RUN_NAME, run_name); + } + #[inline] + pub fn add_job_id(&mut self, job_id: ::flatbuffers::WIPOffset<&'b str>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(RunStop::VT_JOB_ID, job_id); + } + #[inline] + pub fn add_service_id(&mut self, service_id: ::flatbuffers::WIPOffset<&'b str>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(RunStop::VT_SERVICE_ID, service_id); + } + #[inline] + pub fn add_command_id(&mut self, command_id: ::flatbuffers::WIPOffset<&'b str>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(RunStop::VT_COMMAND_ID, command_id); + } + #[inline] + pub fn new(_fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>) -> RunStopBuilder<'a, 'b, A> { + let start = _fbb.start_table(); + RunStopBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub fn finish(self) -> ::flatbuffers::WIPOffset> { + let o = 
self.fbb_.end_table(self.start_); + ::flatbuffers::WIPOffset::new(o.value()) + } +} + +impl ::core::fmt::Debug for RunStop<'_> { + fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + let mut ds = f.debug_struct("RunStop"); + ds.field("stop_time", &self.stop_time()); + ds.field("run_name", &self.run_name()); + ds.field("job_id", &self.job_id()); + ds.field("service_id", &self.service_id()); + ds.field("command_id", &self.command_id()); + ds.finish() + } +} +#[inline] +/// Verifies that a buffer of bytes contains a `RunStop` +/// and returns it. +/// Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `root_as_run_stop_unchecked`. +pub fn root_as_run_stop(buf: &[u8]) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::root::(buf) +} +#[inline] +/// Verifies that a buffer of bytes contains a size prefixed +/// `RunStop` and returns it. +/// Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `size_prefixed_root_as_run_stop_unchecked`. +pub fn size_prefixed_root_as_run_stop(buf: &[u8]) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::size_prefixed_root::(buf) +} +#[inline] +/// Verifies, with the given options, that a buffer of bytes +/// contains a `RunStop` and returns it. +/// Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `root_as_run_stop_unchecked`. 
+pub fn root_as_run_stop_with_opts<'b, 'o>( + opts: &'o ::flatbuffers::VerifierOptions, + buf: &'b [u8], +) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::root_with_opts::>(opts, buf) +} +#[inline] +/// Verifies, with the given verifier options, that a buffer of +/// bytes contains a size prefixed `RunStop` and returns +/// it. Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `root_as_run_stop_unchecked`. +pub fn size_prefixed_root_as_run_stop_with_opts<'b, 'o>( + opts: &'o ::flatbuffers::VerifierOptions, + buf: &'b [u8], +) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::size_prefixed_root_with_opts::>(opts, buf) +} +#[inline] +/// Assumes, without verification, that a buffer of bytes contains a RunStop and returns it. +/// # Safety +/// Callers must trust the given bytes do indeed contain a valid `RunStop`. +pub unsafe fn root_as_run_stop_unchecked(buf: &[u8]) -> RunStop<'_> { + unsafe { ::flatbuffers::root_unchecked::(buf) } +} +#[inline] +/// Assumes, without verification, that a buffer of bytes contains a size prefixed RunStop and returns it. +/// # Safety +/// Callers must trust the given bytes do indeed contain a valid size prefixed `RunStop`. 
+pub unsafe fn size_prefixed_root_as_run_stop_unchecked(buf: &[u8]) -> RunStop<'_> { + unsafe { ::flatbuffers::size_prefixed_root_unchecked::(buf) } +} +pub const RUN_STOP_IDENTIFIER: &str = "6s4t"; + +#[inline] +pub fn run_stop_buffer_has_identifier(buf: &[u8]) -> bool { + ::flatbuffers::buffer_has_identifier(buf, RUN_STOP_IDENTIFIER, false) +} + +#[inline] +pub fn run_stop_size_prefixed_buffer_has_identifier(buf: &[u8]) -> bool { + ::flatbuffers::buffer_has_identifier(buf, RUN_STOP_IDENTIFIER, true) +} + +#[inline] +pub fn finish_run_stop_buffer<'a, 'b, A: ::flatbuffers::Allocator + 'a>( + fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + root: ::flatbuffers::WIPOffset>) { + fbb.finish(root, Some(RUN_STOP_IDENTIFIER)); +} + +#[inline] +pub fn finish_size_prefixed_run_stop_buffer<'a, 'b, A: ::flatbuffers::Allocator + 'a>(fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, root: ::flatbuffers::WIPOffset>) { + fbb.finish_size_prefixed(root, Some(RUN_STOP_IDENTIFIER)); +} diff --git a/rust/src/flatbuffers_generated/ad00_area_detector_array.rs b/rust/src/flatbuffers_generated/ad00_area_detector_array.rs new file mode 100644 index 0000000..6b87d1b --- /dev/null +++ b/rust/src/flatbuffers_generated/ad00_area_detector_array.rs @@ -0,0 +1,575 @@ +// automatically generated by the FlatBuffers compiler, do not modify +// @generated + +extern crate alloc; + +#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")] +pub const ENUM_MIN_DTYPE: i8 = 0; +#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")] +pub const ENUM_MAX_DTYPE: i8 = 10; +#[deprecated(since = "2.0.0", note = "Use associated constants instead. 
This will no longer be generated in 2021.")] +#[allow(non_camel_case_types)] +pub const ENUM_VALUES_DTYPE: [DType; 11] = [ + DType::int8, + DType::uint8, + DType::int16, + DType::uint16, + DType::int32, + DType::uint32, + DType::int64, + DType::uint64, + DType::float32, + DType::float64, + DType::c_string, +]; + +#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] +#[repr(transparent)] +pub struct DType(pub i8); +#[allow(non_upper_case_globals)] +impl DType { + pub const int8: Self = Self(0); + pub const uint8: Self = Self(1); + pub const int16: Self = Self(2); + pub const uint16: Self = Self(3); + pub const int32: Self = Self(4); + pub const uint32: Self = Self(5); + pub const int64: Self = Self(6); + pub const uint64: Self = Self(7); + pub const float32: Self = Self(8); + pub const float64: Self = Self(9); + pub const c_string: Self = Self(10); + + pub const ENUM_MIN: i8 = 0; + pub const ENUM_MAX: i8 = 10; + pub const ENUM_VALUES: &'static [Self] = &[ + Self::int8, + Self::uint8, + Self::int16, + Self::uint16, + Self::int32, + Self::uint32, + Self::int64, + Self::uint64, + Self::float32, + Self::float64, + Self::c_string, + ]; + /// Returns the variant's name or "" if unknown. 
+ pub fn variant_name(self) -> Option<&'static str> { + match self { + Self::int8 => Some("int8"), + Self::uint8 => Some("uint8"), + Self::int16 => Some("int16"), + Self::uint16 => Some("uint16"), + Self::int32 => Some("int32"), + Self::uint32 => Some("uint32"), + Self::int64 => Some("int64"), + Self::uint64 => Some("uint64"), + Self::float32 => Some("float32"), + Self::float64 => Some("float64"), + Self::c_string => Some("c_string"), + _ => None, + } + } +} +impl ::core::fmt::Debug for DType { + fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result { + if let Some(name) = self.variant_name() { + f.write_str(name) + } else { + f.write_fmt(format_args!("", self.0)) + } + } +} +impl<'a> ::flatbuffers::Follow<'a> for DType { + type Inner = Self; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + let b = unsafe { ::flatbuffers::read_scalar_at::(buf, loc) }; + Self(b) + } +} + +impl ::flatbuffers::Push for DType { + type Output = DType; + #[inline] + unsafe fn push(&self, dst: &mut [u8], _written_len: usize) { + unsafe { ::flatbuffers::emplace_scalar::(dst, self.0) }; + } +} + +impl ::flatbuffers::EndianScalar for DType { + type Scalar = i8; + #[inline] + fn to_little_endian(self) -> i8 { + self.0.to_le() + } + #[inline] + #[allow(clippy::wrong_self_convention)] + fn from_little_endian(v: i8) -> Self { + let b = i8::from_le(v); + Self(b) + } +} + +impl<'a> ::flatbuffers::Verifiable for DType { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + i8::run_verifier(v, pos) + } +} + +impl ::flatbuffers::SimpleToVerifyInSlice for DType {} +pub enum AttributeOffset {} +#[derive(Copy, Clone, PartialEq)] + +pub struct Attribute<'a> { + pub _tab: ::flatbuffers::Table<'a>, +} + +impl<'a> ::flatbuffers::Follow<'a> for Attribute<'a> { + type Inner = Attribute<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { _tab: unsafe { 
::flatbuffers::Table::new(buf, loc) } } + } +} + +impl<'a> Attribute<'a> { + pub const VT_NAME: ::flatbuffers::VOffsetT = 4; + pub const VT_DESCRIPTION: ::flatbuffers::VOffsetT = 6; + pub const VT_SOURCE: ::flatbuffers::VOffsetT = 8; + pub const VT_DATA_TYPE: ::flatbuffers::VOffsetT = 10; + pub const VT_DATA: ::flatbuffers::VOffsetT = 12; + + #[inline] + pub unsafe fn init_from_table(table: ::flatbuffers::Table<'a>) -> Self { + Attribute { _tab: table } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: ::flatbuffers::Allocator + 'bldr>( + _fbb: &'mut_bldr mut ::flatbuffers::FlatBufferBuilder<'bldr, A>, + args: &'args AttributeArgs<'args> + ) -> ::flatbuffers::WIPOffset> { + let mut builder = AttributeBuilder::new(_fbb); + if let Some(x) = args.data { builder.add_data(x); } + if let Some(x) = args.source { builder.add_source(x); } + if let Some(x) = args.description { builder.add_description(x); } + if let Some(x) = args.name { builder.add_name(x); } + builder.add_data_type(args.data_type); + builder.finish() + } + + + #[inline] + pub fn name(&self) -> &'a str { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<&str>>(Attribute::VT_NAME, None).unwrap()} + } + #[inline] + pub fn description(&self) -> Option<&'a str> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<&str>>(Attribute::VT_DESCRIPTION, None)} + } + #[inline] + pub fn source(&self) -> Option<&'a str> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<&str>>(Attribute::VT_SOURCE, None)} + } + #[inline] + pub fn data_type(&self) -> DType { + // Safety: + // Created from valid Table for this object + // which contains a valid value 
in this slot + unsafe { self._tab.get::(Attribute::VT_DATA_TYPE, Some(DType::int8)).unwrap()} + } + #[inline] + pub fn data(&self) -> ::flatbuffers::Vector<'a, u8> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'a, u8>>>(Attribute::VT_DATA, None).unwrap()} + } +} + +impl ::flatbuffers::Verifiable for Attribute<'_> { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + v.visit_table(pos)? + .visit_field::<::flatbuffers::ForwardsUOffset<&str>>("name", Self::VT_NAME, true)? + .visit_field::<::flatbuffers::ForwardsUOffset<&str>>("description", Self::VT_DESCRIPTION, false)? + .visit_field::<::flatbuffers::ForwardsUOffset<&str>>("source", Self::VT_SOURCE, false)? + .visit_field::("data_type", Self::VT_DATA_TYPE, false)? + .visit_field::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'_, u8>>>("data", Self::VT_DATA, true)? 
+ .finish(); + Ok(()) + } +} +pub struct AttributeArgs<'a> { + pub name: Option<::flatbuffers::WIPOffset<&'a str>>, + pub description: Option<::flatbuffers::WIPOffset<&'a str>>, + pub source: Option<::flatbuffers::WIPOffset<&'a str>>, + pub data_type: DType, + pub data: Option<::flatbuffers::WIPOffset<::flatbuffers::Vector<'a, u8>>>, +} +impl<'a> Default for AttributeArgs<'a> { + #[inline] + fn default() -> Self { + AttributeArgs { + name: None, // required field + description: None, + source: None, + data_type: DType::int8, + data: None, // required field + } + } +} + +pub struct AttributeBuilder<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> { + fbb_: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + start_: ::flatbuffers::WIPOffset<::flatbuffers::TableUnfinishedWIPOffset>, +} +impl<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> AttributeBuilder<'a, 'b, A> { + #[inline] + pub fn add_name(&mut self, name: ::flatbuffers::WIPOffset<&'b str>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(Attribute::VT_NAME, name); + } + #[inline] + pub fn add_description(&mut self, description: ::flatbuffers::WIPOffset<&'b str>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(Attribute::VT_DESCRIPTION, description); + } + #[inline] + pub fn add_source(&mut self, source: ::flatbuffers::WIPOffset<&'b str>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(Attribute::VT_SOURCE, source); + } + #[inline] + pub fn add_data_type(&mut self, data_type: DType) { + self.fbb_.push_slot::(Attribute::VT_DATA_TYPE, data_type, DType::int8); + } + #[inline] + pub fn add_data(&mut self, data: ::flatbuffers::WIPOffset<::flatbuffers::Vector<'b , u8>>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(Attribute::VT_DATA, data); + } + #[inline] + pub fn new(_fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>) -> AttributeBuilder<'a, 'b, A> { + let start = _fbb.start_table(); + AttributeBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub 
fn finish(self) -> ::flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + self.fbb_.required(o, Attribute::VT_NAME,"name"); + self.fbb_.required(o, Attribute::VT_DATA,"data"); + ::flatbuffers::WIPOffset::new(o.value()) + } +} + +impl ::core::fmt::Debug for Attribute<'_> { + fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + let mut ds = f.debug_struct("Attribute"); + ds.field("name", &self.name()); + ds.field("description", &self.description()); + ds.field("source", &self.source()); + ds.field("data_type", &self.data_type()); + ds.field("data", &self.data()); + ds.finish() + } +} +pub enum ad00_ADArrayOffset {} +#[derive(Copy, Clone, PartialEq)] + +pub struct ad00_ADArray<'a> { + pub _tab: ::flatbuffers::Table<'a>, +} + +impl<'a> ::flatbuffers::Follow<'a> for ad00_ADArray<'a> { + type Inner = ad00_ADArray<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { _tab: unsafe { ::flatbuffers::Table::new(buf, loc) } } + } +} + +impl<'a> ad00_ADArray<'a> { + pub const VT_SOURCE_NAME: ::flatbuffers::VOffsetT = 4; + pub const VT_ID: ::flatbuffers::VOffsetT = 6; + pub const VT_TIMESTAMP: ::flatbuffers::VOffsetT = 8; + pub const VT_DIMENSIONS: ::flatbuffers::VOffsetT = 10; + pub const VT_DATA_TYPE: ::flatbuffers::VOffsetT = 12; + pub const VT_DATA: ::flatbuffers::VOffsetT = 14; + pub const VT_ATTRIBUTES: ::flatbuffers::VOffsetT = 16; + + #[inline] + pub unsafe fn init_from_table(table: ::flatbuffers::Table<'a>) -> Self { + ad00_ADArray { _tab: table } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: ::flatbuffers::Allocator + 'bldr>( + _fbb: &'mut_bldr mut ::flatbuffers::FlatBufferBuilder<'bldr, A>, + args: &'args ad00_ADArrayArgs<'args> + ) -> ::flatbuffers::WIPOffset> { + let mut builder = ad00_ADArrayBuilder::new(_fbb); + builder.add_timestamp(args.timestamp); + if let Some(x) = args.attributes { builder.add_attributes(x); } + if let Some(x) = args.data { 
builder.add_data(x); } + if let Some(x) = args.dimensions { builder.add_dimensions(x); } + builder.add_id(args.id); + if let Some(x) = args.source_name { builder.add_source_name(x); } + builder.add_data_type(args.data_type); + builder.finish() + } + + + #[inline] + pub fn source_name(&self) -> &'a str { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<&str>>(ad00_ADArray::VT_SOURCE_NAME, None).unwrap()} + } + #[inline] + pub fn id(&self) -> i32 { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::(ad00_ADArray::VT_ID, Some(0)).unwrap()} + } + #[inline] + pub fn timestamp(&self) -> i64 { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::(ad00_ADArray::VT_TIMESTAMP, Some(0)).unwrap()} + } + #[inline] + pub fn dimensions(&self) -> ::flatbuffers::Vector<'a, i64> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'a, i64>>>(ad00_ADArray::VT_DIMENSIONS, None).unwrap()} + } + #[inline] + pub fn data_type(&self) -> DType { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::(ad00_ADArray::VT_DATA_TYPE, Some(DType::int8)).unwrap()} + } + #[inline] + pub fn data(&self) -> ::flatbuffers::Vector<'a, u8> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'a, u8>>>(ad00_ADArray::VT_DATA, None).unwrap()} + } + #[inline] + pub fn attributes(&self) -> Option<::flatbuffers::Vector<'a, ::flatbuffers::ForwardsUOffset>>> { + // Safety: + // Created from valid Table 
for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'a, ::flatbuffers::ForwardsUOffset>>>(ad00_ADArray::VT_ATTRIBUTES, None)} + } +} + +impl ::flatbuffers::Verifiable for ad00_ADArray<'_> { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + v.visit_table(pos)? + .visit_field::<::flatbuffers::ForwardsUOffset<&str>>("source_name", Self::VT_SOURCE_NAME, true)? + .visit_field::("id", Self::VT_ID, false)? + .visit_field::("timestamp", Self::VT_TIMESTAMP, false)? + .visit_field::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'_, i64>>>("dimensions", Self::VT_DIMENSIONS, true)? + .visit_field::("data_type", Self::VT_DATA_TYPE, false)? + .visit_field::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'_, u8>>>("data", Self::VT_DATA, true)? + .visit_field::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'_, ::flatbuffers::ForwardsUOffset>>>("attributes", Self::VT_ATTRIBUTES, false)? 
+ .finish(); + Ok(()) + } +} +pub struct ad00_ADArrayArgs<'a> { + pub source_name: Option<::flatbuffers::WIPOffset<&'a str>>, + pub id: i32, + pub timestamp: i64, + pub dimensions: Option<::flatbuffers::WIPOffset<::flatbuffers::Vector<'a, i64>>>, + pub data_type: DType, + pub data: Option<::flatbuffers::WIPOffset<::flatbuffers::Vector<'a, u8>>>, + pub attributes: Option<::flatbuffers::WIPOffset<::flatbuffers::Vector<'a, ::flatbuffers::ForwardsUOffset>>>>, +} +impl<'a> Default for ad00_ADArrayArgs<'a> { + #[inline] + fn default() -> Self { + ad00_ADArrayArgs { + source_name: None, // required field + id: 0, + timestamp: 0, + dimensions: None, // required field + data_type: DType::int8, + data: None, // required field + attributes: None, + } + } +} + +pub struct ad00_ADArrayBuilder<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> { + fbb_: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + start_: ::flatbuffers::WIPOffset<::flatbuffers::TableUnfinishedWIPOffset>, +} +impl<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> ad00_ADArrayBuilder<'a, 'b, A> { + #[inline] + pub fn add_source_name(&mut self, source_name: ::flatbuffers::WIPOffset<&'b str>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(ad00_ADArray::VT_SOURCE_NAME, source_name); + } + #[inline] + pub fn add_id(&mut self, id: i32) { + self.fbb_.push_slot::(ad00_ADArray::VT_ID, id, 0); + } + #[inline] + pub fn add_timestamp(&mut self, timestamp: i64) { + self.fbb_.push_slot::(ad00_ADArray::VT_TIMESTAMP, timestamp, 0); + } + #[inline] + pub fn add_dimensions(&mut self, dimensions: ::flatbuffers::WIPOffset<::flatbuffers::Vector<'b , i64>>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(ad00_ADArray::VT_DIMENSIONS, dimensions); + } + #[inline] + pub fn add_data_type(&mut self, data_type: DType) { + self.fbb_.push_slot::(ad00_ADArray::VT_DATA_TYPE, data_type, DType::int8); + } + #[inline] + pub fn add_data(&mut self, data: ::flatbuffers::WIPOffset<::flatbuffers::Vector<'b , u8>>) { + 
self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(ad00_ADArray::VT_DATA, data); + } + #[inline] + pub fn add_attributes(&mut self, attributes: ::flatbuffers::WIPOffset<::flatbuffers::Vector<'b , ::flatbuffers::ForwardsUOffset>>>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(ad00_ADArray::VT_ATTRIBUTES, attributes); + } + #[inline] + pub fn new(_fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>) -> ad00_ADArrayBuilder<'a, 'b, A> { + let start = _fbb.start_table(); + ad00_ADArrayBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub fn finish(self) -> ::flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + self.fbb_.required(o, ad00_ADArray::VT_SOURCE_NAME,"source_name"); + self.fbb_.required(o, ad00_ADArray::VT_DIMENSIONS,"dimensions"); + self.fbb_.required(o, ad00_ADArray::VT_DATA,"data"); + ::flatbuffers::WIPOffset::new(o.value()) + } +} + +impl ::core::fmt::Debug for ad00_ADArray<'_> { + fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + let mut ds = f.debug_struct("ad00_ADArray"); + ds.field("source_name", &self.source_name()); + ds.field("id", &self.id()); + ds.field("timestamp", &self.timestamp()); + ds.field("dimensions", &self.dimensions()); + ds.field("data_type", &self.data_type()); + ds.field("data", &self.data()); + ds.field("attributes", &self.attributes()); + ds.finish() + } +} +#[inline] +/// Verifies that a buffer of bytes contains a `ad00_ADArray` +/// and returns it. +/// Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `root_as_ad_00_adarray_unchecked`. +pub fn root_as_ad_00_adarray(buf: &[u8]) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::root::(buf) +} +#[inline] +/// Verifies that a buffer of bytes contains a size prefixed +/// `ad00_ADArray` and returns it. 
+/// Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `size_prefixed_root_as_ad_00_adarray_unchecked`. +pub fn size_prefixed_root_as_ad_00_adarray(buf: &[u8]) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::size_prefixed_root::(buf) +} +#[inline] +/// Verifies, with the given options, that a buffer of bytes +/// contains a `ad00_ADArray` and returns it. +/// Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `root_as_ad_00_adarray_unchecked`. +pub fn root_as_ad_00_adarray_with_opts<'b, 'o>( + opts: &'o ::flatbuffers::VerifierOptions, + buf: &'b [u8], +) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::root_with_opts::>(opts, buf) +} +#[inline] +/// Verifies, with the given verifier options, that a buffer of +/// bytes contains a size prefixed `ad00_ADArray` and returns +/// it. Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `root_as_ad_00_adarray_unchecked`. +pub fn size_prefixed_root_as_ad_00_adarray_with_opts<'b, 'o>( + opts: &'o ::flatbuffers::VerifierOptions, + buf: &'b [u8], +) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::size_prefixed_root_with_opts::>(opts, buf) +} +#[inline] +/// Assumes, without verification, that a buffer of bytes contains a ad00_ADArray and returns it. +/// # Safety +/// Callers must trust the given bytes do indeed contain a valid `ad00_ADArray`. +pub unsafe fn root_as_ad_00_adarray_unchecked(buf: &[u8]) -> ad00_ADArray<'_> { + unsafe { ::flatbuffers::root_unchecked::(buf) } +} +#[inline] +/// Assumes, without verification, that a buffer of bytes contains a size prefixed ad00_ADArray and returns it. 
+/// # Safety +/// Callers must trust the given bytes do indeed contain a valid size prefixed `ad00_ADArray`. +pub unsafe fn size_prefixed_root_as_ad_00_adarray_unchecked(buf: &[u8]) -> ad00_ADArray<'_> { + unsafe { ::flatbuffers::size_prefixed_root_unchecked::(buf) } +} +pub const AD_00_ADARRAY_IDENTIFIER: &str = "ad00"; + +#[inline] +pub fn ad_00_adarray_buffer_has_identifier(buf: &[u8]) -> bool { + ::flatbuffers::buffer_has_identifier(buf, AD_00_ADARRAY_IDENTIFIER, false) +} + +#[inline] +pub fn ad_00_adarray_size_prefixed_buffer_has_identifier(buf: &[u8]) -> bool { + ::flatbuffers::buffer_has_identifier(buf, AD_00_ADARRAY_IDENTIFIER, true) +} + +#[inline] +pub fn finish_ad_00_adarray_buffer<'a, 'b, A: ::flatbuffers::Allocator + 'a>( + fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + root: ::flatbuffers::WIPOffset>) { + fbb.finish(root, Some(AD_00_ADARRAY_IDENTIFIER)); +} + +#[inline] +pub fn finish_size_prefixed_ad_00_adarray_buffer<'a, 'b, A: ::flatbuffers::Allocator + 'a>(fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, root: ::flatbuffers::WIPOffset>) { + fbb.finish_size_prefixed(root, Some(AD_00_ADARRAY_IDENTIFIER)); +} diff --git a/rust/src/flatbuffers_generated/al00_alarm.rs b/rust/src/flatbuffers_generated/al00_alarm.rs new file mode 100644 index 0000000..c81667e --- /dev/null +++ b/rust/src/flatbuffers_generated/al00_alarm.rs @@ -0,0 +1,327 @@ +// automatically generated by the FlatBuffers compiler, do not modify +// @generated + +extern crate alloc; + +#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")] +pub const ENUM_MIN_SEVERITY: i16 = 0; +#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")] +pub const ENUM_MAX_SEVERITY: i16 = 3; +#[deprecated(since = "2.0.0", note = "Use associated constants instead. 
This will no longer be generated in 2021.")] +#[allow(non_camel_case_types)] +pub const ENUM_VALUES_SEVERITY: [Severity; 4] = [ + Severity::OK, + Severity::MINOR, + Severity::MAJOR, + Severity::INVALID, +]; + +#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] +#[repr(transparent)] +pub struct Severity(pub i16); +#[allow(non_upper_case_globals)] +impl Severity { + pub const OK: Self = Self(0); + pub const MINOR: Self = Self(1); + pub const MAJOR: Self = Self(2); + pub const INVALID: Self = Self(3); + + pub const ENUM_MIN: i16 = 0; + pub const ENUM_MAX: i16 = 3; + pub const ENUM_VALUES: &'static [Self] = &[ + Self::OK, + Self::MINOR, + Self::MAJOR, + Self::INVALID, + ]; + /// Returns the variant's name or "" if unknown. + pub fn variant_name(self) -> Option<&'static str> { + match self { + Self::OK => Some("OK"), + Self::MINOR => Some("MINOR"), + Self::MAJOR => Some("MAJOR"), + Self::INVALID => Some("INVALID"), + _ => None, + } + } +} +impl ::core::fmt::Debug for Severity { + fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result { + if let Some(name) = self.variant_name() { + f.write_str(name) + } else { + f.write_fmt(format_args!("", self.0)) + } + } +} +impl<'a> ::flatbuffers::Follow<'a> for Severity { + type Inner = Self; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + let b = unsafe { ::flatbuffers::read_scalar_at::(buf, loc) }; + Self(b) + } +} + +impl ::flatbuffers::Push for Severity { + type Output = Severity; + #[inline] + unsafe fn push(&self, dst: &mut [u8], _written_len: usize) { + unsafe { ::flatbuffers::emplace_scalar::(dst, self.0) }; + } +} + +impl ::flatbuffers::EndianScalar for Severity { + type Scalar = i16; + #[inline] + fn to_little_endian(self) -> i16 { + self.0.to_le() + } + #[inline] + #[allow(clippy::wrong_self_convention)] + fn from_little_endian(v: i16) -> Self { + let b = i16::from_le(v); + Self(b) + } +} + +impl<'a> ::flatbuffers::Verifiable for Severity { + #[inline] + fn 
run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + i16::run_verifier(v, pos) + } +} + +impl ::flatbuffers::SimpleToVerifyInSlice for Severity {} +pub enum AlarmOffset {} +#[derive(Copy, Clone, PartialEq)] + +pub struct Alarm<'a> { + pub _tab: ::flatbuffers::Table<'a>, +} + +impl<'a> ::flatbuffers::Follow<'a> for Alarm<'a> { + type Inner = Alarm<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { _tab: unsafe { ::flatbuffers::Table::new(buf, loc) } } + } +} + +impl<'a> Alarm<'a> { + pub const VT_SOURCE_NAME: ::flatbuffers::VOffsetT = 4; + pub const VT_TIMESTAMP: ::flatbuffers::VOffsetT = 6; + pub const VT_SEVERITY: ::flatbuffers::VOffsetT = 8; + pub const VT_MESSAGE: ::flatbuffers::VOffsetT = 10; + + #[inline] + pub unsafe fn init_from_table(table: ::flatbuffers::Table<'a>) -> Self { + Alarm { _tab: table } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: ::flatbuffers::Allocator + 'bldr>( + _fbb: &'mut_bldr mut ::flatbuffers::FlatBufferBuilder<'bldr, A>, + args: &'args AlarmArgs<'args> + ) -> ::flatbuffers::WIPOffset> { + let mut builder = AlarmBuilder::new(_fbb); + builder.add_timestamp(args.timestamp); + if let Some(x) = args.message { builder.add_message(x); } + if let Some(x) = args.source_name { builder.add_source_name(x); } + builder.add_severity(args.severity); + builder.finish() + } + + + #[inline] + pub fn source_name(&self) -> Option<&'a str> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<&str>>(Alarm::VT_SOURCE_NAME, None)} + } + #[inline] + pub fn timestamp(&self) -> i64 { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::(Alarm::VT_TIMESTAMP, Some(0)).unwrap()} + } + #[inline] + pub fn severity(&self) -> Severity { 
+ // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::(Alarm::VT_SEVERITY, Some(Severity::OK)).unwrap()} + } + #[inline] + pub fn message(&self) -> Option<&'a str> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<&str>>(Alarm::VT_MESSAGE, None)} + } +} + +impl ::flatbuffers::Verifiable for Alarm<'_> { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + v.visit_table(pos)? + .visit_field::<::flatbuffers::ForwardsUOffset<&str>>("source_name", Self::VT_SOURCE_NAME, false)? + .visit_field::("timestamp", Self::VT_TIMESTAMP, false)? + .visit_field::("severity", Self::VT_SEVERITY, false)? + .visit_field::<::flatbuffers::ForwardsUOffset<&str>>("message", Self::VT_MESSAGE, false)? + .finish(); + Ok(()) + } +} +pub struct AlarmArgs<'a> { + pub source_name: Option<::flatbuffers::WIPOffset<&'a str>>, + pub timestamp: i64, + pub severity: Severity, + pub message: Option<::flatbuffers::WIPOffset<&'a str>>, +} +impl<'a> Default for AlarmArgs<'a> { + #[inline] + fn default() -> Self { + AlarmArgs { + source_name: None, + timestamp: 0, + severity: Severity::OK, + message: None, + } + } +} + +pub struct AlarmBuilder<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> { + fbb_: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + start_: ::flatbuffers::WIPOffset<::flatbuffers::TableUnfinishedWIPOffset>, +} +impl<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> AlarmBuilder<'a, 'b, A> { + #[inline] + pub fn add_source_name(&mut self, source_name: ::flatbuffers::WIPOffset<&'b str>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(Alarm::VT_SOURCE_NAME, source_name); + } + #[inline] + pub fn add_timestamp(&mut self, timestamp: i64) { + self.fbb_.push_slot::(Alarm::VT_TIMESTAMP, timestamp, 0); + } + #[inline] + pub 
fn add_severity(&mut self, severity: Severity) { + self.fbb_.push_slot::(Alarm::VT_SEVERITY, severity, Severity::OK); + } + #[inline] + pub fn add_message(&mut self, message: ::flatbuffers::WIPOffset<&'b str>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(Alarm::VT_MESSAGE, message); + } + #[inline] + pub fn new(_fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>) -> AlarmBuilder<'a, 'b, A> { + let start = _fbb.start_table(); + AlarmBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub fn finish(self) -> ::flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + ::flatbuffers::WIPOffset::new(o.value()) + } +} + +impl ::core::fmt::Debug for Alarm<'_> { + fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + let mut ds = f.debug_struct("Alarm"); + ds.field("source_name", &self.source_name()); + ds.field("timestamp", &self.timestamp()); + ds.field("severity", &self.severity()); + ds.field("message", &self.message()); + ds.finish() + } +} +#[inline] +/// Verifies that a buffer of bytes contains a `Alarm` +/// and returns it. +/// Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `root_as_alarm_unchecked`. +pub fn root_as_alarm(buf: &[u8]) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::root::(buf) +} +#[inline] +/// Verifies that a buffer of bytes contains a size prefixed +/// `Alarm` and returns it. +/// Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `size_prefixed_root_as_alarm_unchecked`. +pub fn size_prefixed_root_as_alarm(buf: &[u8]) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::size_prefixed_root::(buf) +} +#[inline] +/// Verifies, with the given options, that a buffer of bytes +/// contains a `Alarm` and returns it. 
+/// Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `root_as_alarm_unchecked`. +pub fn root_as_alarm_with_opts<'b, 'o>( + opts: &'o ::flatbuffers::VerifierOptions, + buf: &'b [u8], +) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::root_with_opts::>(opts, buf) +} +#[inline] +/// Verifies, with the given verifier options, that a buffer of +/// bytes contains a size prefixed `Alarm` and returns +/// it. Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `root_as_alarm_unchecked`. +pub fn size_prefixed_root_as_alarm_with_opts<'b, 'o>( + opts: &'o ::flatbuffers::VerifierOptions, + buf: &'b [u8], +) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::size_prefixed_root_with_opts::>(opts, buf) +} +#[inline] +/// Assumes, without verification, that a buffer of bytes contains a Alarm and returns it. +/// # Safety +/// Callers must trust the given bytes do indeed contain a valid `Alarm`. +pub unsafe fn root_as_alarm_unchecked(buf: &[u8]) -> Alarm<'_> { + unsafe { ::flatbuffers::root_unchecked::(buf) } +} +#[inline] +/// Assumes, without verification, that a buffer of bytes contains a size prefixed Alarm and returns it. +/// # Safety +/// Callers must trust the given bytes do indeed contain a valid size prefixed `Alarm`. 
+pub unsafe fn size_prefixed_root_as_alarm_unchecked(buf: &[u8]) -> Alarm<'_> { + unsafe { ::flatbuffers::size_prefixed_root_unchecked::(buf) } +} +pub const ALARM_IDENTIFIER: &str = "al00"; + +#[inline] +pub fn alarm_buffer_has_identifier(buf: &[u8]) -> bool { + ::flatbuffers::buffer_has_identifier(buf, ALARM_IDENTIFIER, false) +} + +#[inline] +pub fn alarm_size_prefixed_buffer_has_identifier(buf: &[u8]) -> bool { + ::flatbuffers::buffer_has_identifier(buf, ALARM_IDENTIFIER, true) +} + +#[inline] +pub fn finish_alarm_buffer<'a, 'b, A: ::flatbuffers::Allocator + 'a>( + fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + root: ::flatbuffers::WIPOffset>) { + fbb.finish(root, Some(ALARM_IDENTIFIER)); +} + +#[inline] +pub fn finish_size_prefixed_alarm_buffer<'a, 'b, A: ::flatbuffers::Allocator + 'a>(fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, root: ::flatbuffers::WIPOffset>) { + fbb.finish_size_prefixed(root, Some(ALARM_IDENTIFIER)); +} diff --git a/rust/src/flatbuffers_generated/answ_action_response.rs b/rust/src/flatbuffers_generated/answ_action_response.rs new file mode 100644 index 0000000..ea7c1c3 --- /dev/null +++ b/rust/src/flatbuffers_generated/answ_action_response.rs @@ -0,0 +1,473 @@ +// automatically generated by the FlatBuffers compiler, do not modify +// @generated + +extern crate alloc; + +#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")] +pub const ENUM_MIN_ACTION_TYPE: i8 = 0; +#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")] +pub const ENUM_MAX_ACTION_TYPE: i8 = 1; +#[deprecated(since = "2.0.0", note = "Use associated constants instead. 
This will no longer be generated in 2021.")] +#[allow(non_camel_case_types)] +pub const ENUM_VALUES_ACTION_TYPE: [ActionType; 2] = [ + ActionType::StartJob, + ActionType::SetStopTime, +]; + +#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] +#[repr(transparent)] +pub struct ActionType(pub i8); +#[allow(non_upper_case_globals)] +impl ActionType { + pub const StartJob: Self = Self(0); + pub const SetStopTime: Self = Self(1); + + pub const ENUM_MIN: i8 = 0; + pub const ENUM_MAX: i8 = 1; + pub const ENUM_VALUES: &'static [Self] = &[ + Self::StartJob, + Self::SetStopTime, + ]; + /// Returns the variant's name or "" if unknown. + pub fn variant_name(self) -> Option<&'static str> { + match self { + Self::StartJob => Some("StartJob"), + Self::SetStopTime => Some("SetStopTime"), + _ => None, + } + } +} +impl ::core::fmt::Debug for ActionType { + fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result { + if let Some(name) = self.variant_name() { + f.write_str(name) + } else { + f.write_fmt(format_args!("", self.0)) + } + } +} +impl<'a> ::flatbuffers::Follow<'a> for ActionType { + type Inner = Self; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + let b = unsafe { ::flatbuffers::read_scalar_at::(buf, loc) }; + Self(b) + } +} + +impl ::flatbuffers::Push for ActionType { + type Output = ActionType; + #[inline] + unsafe fn push(&self, dst: &mut [u8], _written_len: usize) { + unsafe { ::flatbuffers::emplace_scalar::(dst, self.0) }; + } +} + +impl ::flatbuffers::EndianScalar for ActionType { + type Scalar = i8; + #[inline] + fn to_little_endian(self) -> i8 { + self.0.to_le() + } + #[inline] + #[allow(clippy::wrong_self_convention)] + fn from_little_endian(v: i8) -> Self { + let b = i8::from_le(v); + Self(b) + } +} + +impl<'a> ::flatbuffers::Verifiable for ActionType { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + i8::run_verifier(v, 
pos) + } +} + +impl ::flatbuffers::SimpleToVerifyInSlice for ActionType {} +#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")] +pub const ENUM_MIN_ACTION_OUTCOME: i8 = 0; +#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")] +pub const ENUM_MAX_ACTION_OUTCOME: i8 = 1; +#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")] +#[allow(non_camel_case_types)] +pub const ENUM_VALUES_ACTION_OUTCOME: [ActionOutcome; 2] = [ + ActionOutcome::Success, + ActionOutcome::Failure, +]; + +#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] +#[repr(transparent)] +pub struct ActionOutcome(pub i8); +#[allow(non_upper_case_globals)] +impl ActionOutcome { + pub const Success: Self = Self(0); + pub const Failure: Self = Self(1); + + pub const ENUM_MIN: i8 = 0; + pub const ENUM_MAX: i8 = 1; + pub const ENUM_VALUES: &'static [Self] = &[ + Self::Success, + Self::Failure, + ]; + /// Returns the variant's name or "" if unknown. 
+ pub fn variant_name(self) -> Option<&'static str> { + match self { + Self::Success => Some("Success"), + Self::Failure => Some("Failure"), + _ => None, + } + } +} +impl ::core::fmt::Debug for ActionOutcome { + fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result { + if let Some(name) = self.variant_name() { + f.write_str(name) + } else { + f.write_fmt(format_args!("", self.0)) + } + } +} +impl<'a> ::flatbuffers::Follow<'a> for ActionOutcome { + type Inner = Self; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + let b = unsafe { ::flatbuffers::read_scalar_at::(buf, loc) }; + Self(b) + } +} + +impl ::flatbuffers::Push for ActionOutcome { + type Output = ActionOutcome; + #[inline] + unsafe fn push(&self, dst: &mut [u8], _written_len: usize) { + unsafe { ::flatbuffers::emplace_scalar::(dst, self.0) }; + } +} + +impl ::flatbuffers::EndianScalar for ActionOutcome { + type Scalar = i8; + #[inline] + fn to_little_endian(self) -> i8 { + self.0.to_le() + } + #[inline] + #[allow(clippy::wrong_self_convention)] + fn from_little_endian(v: i8) -> Self { + let b = i8::from_le(v); + Self(b) + } +} + +impl<'a> ::flatbuffers::Verifiable for ActionOutcome { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + i8::run_verifier(v, pos) + } +} + +impl ::flatbuffers::SimpleToVerifyInSlice for ActionOutcome {} +pub enum ActionResponseOffset {} +#[derive(Copy, Clone, PartialEq)] + +pub struct ActionResponse<'a> { + pub _tab: ::flatbuffers::Table<'a>, +} + +impl<'a> ::flatbuffers::Follow<'a> for ActionResponse<'a> { + type Inner = ActionResponse<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { _tab: unsafe { ::flatbuffers::Table::new(buf, loc) } } + } +} + +impl<'a> ActionResponse<'a> { + pub const VT_SERVICE_ID: ::flatbuffers::VOffsetT = 4; + pub const VT_JOB_ID: ::flatbuffers::VOffsetT = 6; + pub const VT_ACTION: 
::flatbuffers::VOffsetT = 8; + pub const VT_OUTCOME: ::flatbuffers::VOffsetT = 10; + pub const VT_STATUS_CODE: ::flatbuffers::VOffsetT = 12; + pub const VT_STOP_TIME: ::flatbuffers::VOffsetT = 14; + pub const VT_MESSAGE: ::flatbuffers::VOffsetT = 16; + pub const VT_COMMAND_ID: ::flatbuffers::VOffsetT = 18; + + #[inline] + pub unsafe fn init_from_table(table: ::flatbuffers::Table<'a>) -> Self { + ActionResponse { _tab: table } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: ::flatbuffers::Allocator + 'bldr>( + _fbb: &'mut_bldr mut ::flatbuffers::FlatBufferBuilder<'bldr, A>, + args: &'args ActionResponseArgs<'args> + ) -> ::flatbuffers::WIPOffset> { + let mut builder = ActionResponseBuilder::new(_fbb); + builder.add_stop_time(args.stop_time); + if let Some(x) = args.command_id { builder.add_command_id(x); } + if let Some(x) = args.message { builder.add_message(x); } + builder.add_status_code(args.status_code); + if let Some(x) = args.job_id { builder.add_job_id(x); } + if let Some(x) = args.service_id { builder.add_service_id(x); } + builder.add_outcome(args.outcome); + builder.add_action(args.action); + builder.finish() + } + + + #[inline] + pub fn service_id(&self) -> &'a str { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<&str>>(ActionResponse::VT_SERVICE_ID, None).unwrap()} + } + #[inline] + pub fn job_id(&self) -> &'a str { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<&str>>(ActionResponse::VT_JOB_ID, None).unwrap()} + } + #[inline] + pub fn action(&self) -> ActionType { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::(ActionResponse::VT_ACTION, Some(ActionType::StartJob)).unwrap()} + } + #[inline] + pub fn 
outcome(&self) -> ActionOutcome { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::(ActionResponse::VT_OUTCOME, Some(ActionOutcome::Success)).unwrap()} + } + #[inline] + pub fn status_code(&self) -> i32 { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::(ActionResponse::VT_STATUS_CODE, Some(0)).unwrap()} + } + #[inline] + pub fn stop_time(&self) -> u64 { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::(ActionResponse::VT_STOP_TIME, Some(0)).unwrap()} + } + #[inline] + pub fn message(&self) -> Option<&'a str> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<&str>>(ActionResponse::VT_MESSAGE, None)} + } + #[inline] + pub fn command_id(&self) -> Option<&'a str> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<&str>>(ActionResponse::VT_COMMAND_ID, None)} + } +} + +impl ::flatbuffers::Verifiable for ActionResponse<'_> { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + v.visit_table(pos)? + .visit_field::<::flatbuffers::ForwardsUOffset<&str>>("service_id", Self::VT_SERVICE_ID, true)? + .visit_field::<::flatbuffers::ForwardsUOffset<&str>>("job_id", Self::VT_JOB_ID, true)? + .visit_field::("action", Self::VT_ACTION, false)? + .visit_field::("outcome", Self::VT_OUTCOME, false)? + .visit_field::("status_code", Self::VT_STATUS_CODE, false)? + .visit_field::("stop_time", Self::VT_STOP_TIME, false)? + .visit_field::<::flatbuffers::ForwardsUOffset<&str>>("message", Self::VT_MESSAGE, false)? 
+ .visit_field::<::flatbuffers::ForwardsUOffset<&str>>("command_id", Self::VT_COMMAND_ID, false)? + .finish(); + Ok(()) + } +} +pub struct ActionResponseArgs<'a> { + pub service_id: Option<::flatbuffers::WIPOffset<&'a str>>, + pub job_id: Option<::flatbuffers::WIPOffset<&'a str>>, + pub action: ActionType, + pub outcome: ActionOutcome, + pub status_code: i32, + pub stop_time: u64, + pub message: Option<::flatbuffers::WIPOffset<&'a str>>, + pub command_id: Option<::flatbuffers::WIPOffset<&'a str>>, +} +impl<'a> Default for ActionResponseArgs<'a> { + #[inline] + fn default() -> Self { + ActionResponseArgs { + service_id: None, // required field + job_id: None, // required field + action: ActionType::StartJob, + outcome: ActionOutcome::Success, + status_code: 0, + stop_time: 0, + message: None, + command_id: None, + } + } +} + +pub struct ActionResponseBuilder<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> { + fbb_: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + start_: ::flatbuffers::WIPOffset<::flatbuffers::TableUnfinishedWIPOffset>, +} +impl<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> ActionResponseBuilder<'a, 'b, A> { + #[inline] + pub fn add_service_id(&mut self, service_id: ::flatbuffers::WIPOffset<&'b str>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(ActionResponse::VT_SERVICE_ID, service_id); + } + #[inline] + pub fn add_job_id(&mut self, job_id: ::flatbuffers::WIPOffset<&'b str>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(ActionResponse::VT_JOB_ID, job_id); + } + #[inline] + pub fn add_action(&mut self, action: ActionType) { + self.fbb_.push_slot::(ActionResponse::VT_ACTION, action, ActionType::StartJob); + } + #[inline] + pub fn add_outcome(&mut self, outcome: ActionOutcome) { + self.fbb_.push_slot::(ActionResponse::VT_OUTCOME, outcome, ActionOutcome::Success); + } + #[inline] + pub fn add_status_code(&mut self, status_code: i32) { + self.fbb_.push_slot::(ActionResponse::VT_STATUS_CODE, status_code, 0); + } + 
#[inline] + pub fn add_stop_time(&mut self, stop_time: u64) { + self.fbb_.push_slot::(ActionResponse::VT_STOP_TIME, stop_time, 0); + } + #[inline] + pub fn add_message(&mut self, message: ::flatbuffers::WIPOffset<&'b str>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(ActionResponse::VT_MESSAGE, message); + } + #[inline] + pub fn add_command_id(&mut self, command_id: ::flatbuffers::WIPOffset<&'b str>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(ActionResponse::VT_COMMAND_ID, command_id); + } + #[inline] + pub fn new(_fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>) -> ActionResponseBuilder<'a, 'b, A> { + let start = _fbb.start_table(); + ActionResponseBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub fn finish(self) -> ::flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + self.fbb_.required(o, ActionResponse::VT_SERVICE_ID,"service_id"); + self.fbb_.required(o, ActionResponse::VT_JOB_ID,"job_id"); + ::flatbuffers::WIPOffset::new(o.value()) + } +} + +impl ::core::fmt::Debug for ActionResponse<'_> { + fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + let mut ds = f.debug_struct("ActionResponse"); + ds.field("service_id", &self.service_id()); + ds.field("job_id", &self.job_id()); + ds.field("action", &self.action()); + ds.field("outcome", &self.outcome()); + ds.field("status_code", &self.status_code()); + ds.field("stop_time", &self.stop_time()); + ds.field("message", &self.message()); + ds.field("command_id", &self.command_id()); + ds.finish() + } +} +#[inline] +/// Verifies that a buffer of bytes contains a `ActionResponse` +/// and returns it. +/// Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `root_as_action_response_unchecked`. 
+pub fn root_as_action_response(buf: &[u8]) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::root::(buf) +} +#[inline] +/// Verifies that a buffer of bytes contains a size prefixed +/// `ActionResponse` and returns it. +/// Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `size_prefixed_root_as_action_response_unchecked`. +pub fn size_prefixed_root_as_action_response(buf: &[u8]) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::size_prefixed_root::(buf) +} +#[inline] +/// Verifies, with the given options, that a buffer of bytes +/// contains a `ActionResponse` and returns it. +/// Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `root_as_action_response_unchecked`. +pub fn root_as_action_response_with_opts<'b, 'o>( + opts: &'o ::flatbuffers::VerifierOptions, + buf: &'b [u8], +) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::root_with_opts::>(opts, buf) +} +#[inline] +/// Verifies, with the given verifier options, that a buffer of +/// bytes contains a size prefixed `ActionResponse` and returns +/// it. Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `root_as_action_response_unchecked`. +pub fn size_prefixed_root_as_action_response_with_opts<'b, 'o>( + opts: &'o ::flatbuffers::VerifierOptions, + buf: &'b [u8], +) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::size_prefixed_root_with_opts::>(opts, buf) +} +#[inline] +/// Assumes, without verification, that a buffer of bytes contains a ActionResponse and returns it. +/// # Safety +/// Callers must trust the given bytes do indeed contain a valid `ActionResponse`. 
+pub unsafe fn root_as_action_response_unchecked(buf: &[u8]) -> ActionResponse<'_> { + unsafe { ::flatbuffers::root_unchecked::(buf) } +} +#[inline] +/// Assumes, without verification, that a buffer of bytes contains a size prefixed ActionResponse and returns it. +/// # Safety +/// Callers must trust the given bytes do indeed contain a valid size prefixed `ActionResponse`. +pub unsafe fn size_prefixed_root_as_action_response_unchecked(buf: &[u8]) -> ActionResponse<'_> { + unsafe { ::flatbuffers::size_prefixed_root_unchecked::(buf) } +} +pub const ACTION_RESPONSE_IDENTIFIER: &str = "answ"; + +#[inline] +pub fn action_response_buffer_has_identifier(buf: &[u8]) -> bool { + ::flatbuffers::buffer_has_identifier(buf, ACTION_RESPONSE_IDENTIFIER, false) +} + +#[inline] +pub fn action_response_size_prefixed_buffer_has_identifier(buf: &[u8]) -> bool { + ::flatbuffers::buffer_has_identifier(buf, ACTION_RESPONSE_IDENTIFIER, true) +} + +#[inline] +pub fn finish_action_response_buffer<'a, 'b, A: ::flatbuffers::Allocator + 'a>( + fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + root: ::flatbuffers::WIPOffset>) { + fbb.finish(root, Some(ACTION_RESPONSE_IDENTIFIER)); +} + +#[inline] +pub fn finish_size_prefixed_action_response_buffer<'a, 'b, A: ::flatbuffers::Allocator + 'a>(fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, root: ::flatbuffers::WIPOffset>) { + fbb.finish_size_prefixed(root, Some(ACTION_RESPONSE_IDENTIFIER)); +} diff --git a/rust/src/flatbuffers_generated/da00_dataarray.rs b/rust/src/flatbuffers_generated/da00_dataarray.rs new file mode 100644 index 0000000..1e79f6c --- /dev/null +++ b/rust/src/flatbuffers_generated/da00_dataarray.rs @@ -0,0 +1,562 @@ +// automatically generated by the FlatBuffers compiler, do not modify +// @generated + +extern crate alloc; + +#[deprecated(since = "2.0.0", note = "Use associated constants instead. 
This will no longer be generated in 2021.")] +pub const ENUM_MIN_DA_00_DTYPE: i8 = 0; +#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")] +pub const ENUM_MAX_DA_00_DTYPE: i8 = 11; +#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")] +#[allow(non_camel_case_types)] +pub const ENUM_VALUES_DA_00_DTYPE: [da00_dtype; 12] = [ + da00_dtype::none, + da00_dtype::int8, + da00_dtype::uint8, + da00_dtype::int16, + da00_dtype::uint16, + da00_dtype::int32, + da00_dtype::uint32, + da00_dtype::int64, + da00_dtype::uint64, + da00_dtype::float32, + da00_dtype::float64, + da00_dtype::c_string, +]; + +#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] +#[repr(transparent)] +pub struct da00_dtype(pub i8); +#[allow(non_upper_case_globals)] +impl da00_dtype { + pub const none: Self = Self(0); + pub const int8: Self = Self(1); + pub const uint8: Self = Self(2); + pub const int16: Self = Self(3); + pub const uint16: Self = Self(4); + pub const int32: Self = Self(5); + pub const uint32: Self = Self(6); + pub const int64: Self = Self(7); + pub const uint64: Self = Self(8); + pub const float32: Self = Self(9); + pub const float64: Self = Self(10); + pub const c_string: Self = Self(11); + + pub const ENUM_MIN: i8 = 0; + pub const ENUM_MAX: i8 = 11; + pub const ENUM_VALUES: &'static [Self] = &[ + Self::none, + Self::int8, + Self::uint8, + Self::int16, + Self::uint16, + Self::int32, + Self::uint32, + Self::int64, + Self::uint64, + Self::float32, + Self::float64, + Self::c_string, + ]; + /// Returns the variant's name or "" if unknown. 
+ pub fn variant_name(self) -> Option<&'static str> { + match self { + Self::none => Some("none"), + Self::int8 => Some("int8"), + Self::uint8 => Some("uint8"), + Self::int16 => Some("int16"), + Self::uint16 => Some("uint16"), + Self::int32 => Some("int32"), + Self::uint32 => Some("uint32"), + Self::int64 => Some("int64"), + Self::uint64 => Some("uint64"), + Self::float32 => Some("float32"), + Self::float64 => Some("float64"), + Self::c_string => Some("c_string"), + _ => None, + } + } +} +impl ::core::fmt::Debug for da00_dtype { + fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result { + if let Some(name) = self.variant_name() { + f.write_str(name) + } else { + f.write_fmt(format_args!("", self.0)) + } + } +} +impl<'a> ::flatbuffers::Follow<'a> for da00_dtype { + type Inner = Self; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + let b = unsafe { ::flatbuffers::read_scalar_at::(buf, loc) }; + Self(b) + } +} + +impl ::flatbuffers::Push for da00_dtype { + type Output = da00_dtype; + #[inline] + unsafe fn push(&self, dst: &mut [u8], _written_len: usize) { + unsafe { ::flatbuffers::emplace_scalar::(dst, self.0) }; + } +} + +impl ::flatbuffers::EndianScalar for da00_dtype { + type Scalar = i8; + #[inline] + fn to_little_endian(self) -> i8 { + self.0.to_le() + } + #[inline] + #[allow(clippy::wrong_self_convention)] + fn from_little_endian(v: i8) -> Self { + let b = i8::from_le(v); + Self(b) + } +} + +impl<'a> ::flatbuffers::Verifiable for da00_dtype { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + i8::run_verifier(v, pos) + } +} + +impl ::flatbuffers::SimpleToVerifyInSlice for da00_dtype {} +pub enum da00_VariableOffset {} +#[derive(Copy, Clone, PartialEq)] + +pub struct da00_Variable<'a> { + pub _tab: ::flatbuffers::Table<'a>, +} + +impl<'a> ::flatbuffers::Follow<'a> for da00_Variable<'a> { + type Inner = da00_Variable<'a>; + #[inline] + 
unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { _tab: unsafe { ::flatbuffers::Table::new(buf, loc) } } + } +} + +impl<'a> da00_Variable<'a> { + pub const VT_NAME: ::flatbuffers::VOffsetT = 4; + pub const VT_UNIT: ::flatbuffers::VOffsetT = 6; + pub const VT_LABEL: ::flatbuffers::VOffsetT = 8; + pub const VT_SOURCE: ::flatbuffers::VOffsetT = 10; + pub const VT_DATA_TYPE: ::flatbuffers::VOffsetT = 12; + pub const VT_AXES: ::flatbuffers::VOffsetT = 14; + pub const VT_SHAPE: ::flatbuffers::VOffsetT = 16; + pub const VT_DATA: ::flatbuffers::VOffsetT = 18; + + #[inline] + pub unsafe fn init_from_table(table: ::flatbuffers::Table<'a>) -> Self { + da00_Variable { _tab: table } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: ::flatbuffers::Allocator + 'bldr>( + _fbb: &'mut_bldr mut ::flatbuffers::FlatBufferBuilder<'bldr, A>, + args: &'args da00_VariableArgs<'args> + ) -> ::flatbuffers::WIPOffset> { + let mut builder = da00_VariableBuilder::new(_fbb); + if let Some(x) = args.data { builder.add_data(x); } + if let Some(x) = args.shape { builder.add_shape(x); } + if let Some(x) = args.axes { builder.add_axes(x); } + if let Some(x) = args.source { builder.add_source(x); } + if let Some(x) = args.label { builder.add_label(x); } + if let Some(x) = args.unit { builder.add_unit(x); } + if let Some(x) = args.name { builder.add_name(x); } + builder.add_data_type(args.data_type); + builder.finish() + } + + + #[inline] + pub fn name(&self) -> &'a str { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<&str>>(da00_Variable::VT_NAME, None).unwrap()} + } + #[inline] + pub fn unit(&self) -> Option<&'a str> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<&str>>(da00_Variable::VT_UNIT, None)} + } + 
#[inline] + pub fn label(&self) -> Option<&'a str> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<&str>>(da00_Variable::VT_LABEL, None)} + } + #[inline] + pub fn source(&self) -> Option<&'a str> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<&str>>(da00_Variable::VT_SOURCE, None)} + } + #[inline] + pub fn data_type(&self) -> da00_dtype { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::(da00_Variable::VT_DATA_TYPE, Some(da00_dtype::none)).unwrap()} + } + #[inline] + pub fn axes(&self) -> Option<::flatbuffers::Vector<'a, ::flatbuffers::ForwardsUOffset<&'a str>>> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'a, ::flatbuffers::ForwardsUOffset<&'a str>>>>(da00_Variable::VT_AXES, None)} + } + #[inline] + pub fn shape(&self) -> ::flatbuffers::Vector<'a, i64> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'a, i64>>>(da00_Variable::VT_SHAPE, None).unwrap()} + } + #[inline] + pub fn data(&self) -> ::flatbuffers::Vector<'a, u8> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'a, u8>>>(da00_Variable::VT_DATA, None).unwrap()} + } +} + +impl ::flatbuffers::Verifiable for da00_Variable<'_> { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + v.visit_table(pos)? 
+ .visit_field::<::flatbuffers::ForwardsUOffset<&str>>("name", Self::VT_NAME, true)? + .visit_field::<::flatbuffers::ForwardsUOffset<&str>>("unit", Self::VT_UNIT, false)? + .visit_field::<::flatbuffers::ForwardsUOffset<&str>>("label", Self::VT_LABEL, false)? + .visit_field::<::flatbuffers::ForwardsUOffset<&str>>("source", Self::VT_SOURCE, false)? + .visit_field::("data_type", Self::VT_DATA_TYPE, false)? + .visit_field::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'_, ::flatbuffers::ForwardsUOffset<&'_ str>>>>("axes", Self::VT_AXES, false)? + .visit_field::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'_, i64>>>("shape", Self::VT_SHAPE, true)? + .visit_field::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'_, u8>>>("data", Self::VT_DATA, true)? + .finish(); + Ok(()) + } +} +pub struct da00_VariableArgs<'a> { + pub name: Option<::flatbuffers::WIPOffset<&'a str>>, + pub unit: Option<::flatbuffers::WIPOffset<&'a str>>, + pub label: Option<::flatbuffers::WIPOffset<&'a str>>, + pub source: Option<::flatbuffers::WIPOffset<&'a str>>, + pub data_type: da00_dtype, + pub axes: Option<::flatbuffers::WIPOffset<::flatbuffers::Vector<'a, ::flatbuffers::ForwardsUOffset<&'a str>>>>, + pub shape: Option<::flatbuffers::WIPOffset<::flatbuffers::Vector<'a, i64>>>, + pub data: Option<::flatbuffers::WIPOffset<::flatbuffers::Vector<'a, u8>>>, +} +impl<'a> Default for da00_VariableArgs<'a> { + #[inline] + fn default() -> Self { + da00_VariableArgs { + name: None, // required field + unit: None, + label: None, + source: None, + data_type: da00_dtype::none, + axes: None, + shape: None, // required field + data: None, // required field + } + } +} + +pub struct da00_VariableBuilder<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> { + fbb_: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + start_: ::flatbuffers::WIPOffset<::flatbuffers::TableUnfinishedWIPOffset>, +} +impl<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> da00_VariableBuilder<'a, 'b, A> { + #[inline] + pub fn 
add_name(&mut self, name: ::flatbuffers::WIPOffset<&'b str>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(da00_Variable::VT_NAME, name); + } + #[inline] + pub fn add_unit(&mut self, unit: ::flatbuffers::WIPOffset<&'b str>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(da00_Variable::VT_UNIT, unit); + } + #[inline] + pub fn add_label(&mut self, label: ::flatbuffers::WIPOffset<&'b str>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(da00_Variable::VT_LABEL, label); + } + #[inline] + pub fn add_source(&mut self, source: ::flatbuffers::WIPOffset<&'b str>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(da00_Variable::VT_SOURCE, source); + } + #[inline] + pub fn add_data_type(&mut self, data_type: da00_dtype) { + self.fbb_.push_slot::(da00_Variable::VT_DATA_TYPE, data_type, da00_dtype::none); + } + #[inline] + pub fn add_axes(&mut self, axes: ::flatbuffers::WIPOffset<::flatbuffers::Vector<'b , ::flatbuffers::ForwardsUOffset<&'b str>>>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(da00_Variable::VT_AXES, axes); + } + #[inline] + pub fn add_shape(&mut self, shape: ::flatbuffers::WIPOffset<::flatbuffers::Vector<'b , i64>>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(da00_Variable::VT_SHAPE, shape); + } + #[inline] + pub fn add_data(&mut self, data: ::flatbuffers::WIPOffset<::flatbuffers::Vector<'b , u8>>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(da00_Variable::VT_DATA, data); + } + #[inline] + pub fn new(_fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>) -> da00_VariableBuilder<'a, 'b, A> { + let start = _fbb.start_table(); + da00_VariableBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub fn finish(self) -> ::flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + self.fbb_.required(o, da00_Variable::VT_NAME,"name"); + self.fbb_.required(o, da00_Variable::VT_SHAPE,"shape"); + self.fbb_.required(o, 
da00_Variable::VT_DATA,"data"); + ::flatbuffers::WIPOffset::new(o.value()) + } +} + +impl ::core::fmt::Debug for da00_Variable<'_> { + fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + let mut ds = f.debug_struct("da00_Variable"); + ds.field("name", &self.name()); + ds.field("unit", &self.unit()); + ds.field("label", &self.label()); + ds.field("source", &self.source()); + ds.field("data_type", &self.data_type()); + ds.field("axes", &self.axes()); + ds.field("shape", &self.shape()); + ds.field("data", &self.data()); + ds.finish() + } +} +pub enum da00_DataArrayOffset {} +#[derive(Copy, Clone, PartialEq)] + +pub struct da00_DataArray<'a> { + pub _tab: ::flatbuffers::Table<'a>, +} + +impl<'a> ::flatbuffers::Follow<'a> for da00_DataArray<'a> { + type Inner = da00_DataArray<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { _tab: unsafe { ::flatbuffers::Table::new(buf, loc) } } + } +} + +impl<'a> da00_DataArray<'a> { + pub const VT_SOURCE_NAME: ::flatbuffers::VOffsetT = 4; + pub const VT_TIMESTAMP: ::flatbuffers::VOffsetT = 6; + pub const VT_DATA: ::flatbuffers::VOffsetT = 8; + + #[inline] + pub unsafe fn init_from_table(table: ::flatbuffers::Table<'a>) -> Self { + da00_DataArray { _tab: table } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: ::flatbuffers::Allocator + 'bldr>( + _fbb: &'mut_bldr mut ::flatbuffers::FlatBufferBuilder<'bldr, A>, + args: &'args da00_DataArrayArgs<'args> + ) -> ::flatbuffers::WIPOffset> { + let mut builder = da00_DataArrayBuilder::new(_fbb); + builder.add_timestamp(args.timestamp); + if let Some(x) = args.data { builder.add_data(x); } + if let Some(x) = args.source_name { builder.add_source_name(x); } + builder.finish() + } + + + #[inline] + pub fn source_name(&self) -> &'a str { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { 
self._tab.get::<::flatbuffers::ForwardsUOffset<&str>>(da00_DataArray::VT_SOURCE_NAME, None).unwrap()} + } + #[inline] + pub fn timestamp(&self) -> i64 { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::(da00_DataArray::VT_TIMESTAMP, Some(0)).unwrap()} + } + #[inline] + pub fn data(&self) -> ::flatbuffers::Vector<'a, ::flatbuffers::ForwardsUOffset>> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'a, ::flatbuffers::ForwardsUOffset>>>(da00_DataArray::VT_DATA, None).unwrap()} + } +} + +impl ::flatbuffers::Verifiable for da00_DataArray<'_> { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + v.visit_table(pos)? + .visit_field::<::flatbuffers::ForwardsUOffset<&str>>("source_name", Self::VT_SOURCE_NAME, true)? + .visit_field::("timestamp", Self::VT_TIMESTAMP, false)? + .visit_field::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'_, ::flatbuffers::ForwardsUOffset>>>("data", Self::VT_DATA, true)? 
+ .finish(); + Ok(()) + } +} +pub struct da00_DataArrayArgs<'a> { + pub source_name: Option<::flatbuffers::WIPOffset<&'a str>>, + pub timestamp: i64, + pub data: Option<::flatbuffers::WIPOffset<::flatbuffers::Vector<'a, ::flatbuffers::ForwardsUOffset>>>>, +} +impl<'a> Default for da00_DataArrayArgs<'a> { + #[inline] + fn default() -> Self { + da00_DataArrayArgs { + source_name: None, // required field + timestamp: 0, + data: None, // required field + } + } +} + +pub struct da00_DataArrayBuilder<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> { + fbb_: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + start_: ::flatbuffers::WIPOffset<::flatbuffers::TableUnfinishedWIPOffset>, +} +impl<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> da00_DataArrayBuilder<'a, 'b, A> { + #[inline] + pub fn add_source_name(&mut self, source_name: ::flatbuffers::WIPOffset<&'b str>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(da00_DataArray::VT_SOURCE_NAME, source_name); + } + #[inline] + pub fn add_timestamp(&mut self, timestamp: i64) { + self.fbb_.push_slot::(da00_DataArray::VT_TIMESTAMP, timestamp, 0); + } + #[inline] + pub fn add_data(&mut self, data: ::flatbuffers::WIPOffset<::flatbuffers::Vector<'b , ::flatbuffers::ForwardsUOffset>>>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(da00_DataArray::VT_DATA, data); + } + #[inline] + pub fn new(_fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>) -> da00_DataArrayBuilder<'a, 'b, A> { + let start = _fbb.start_table(); + da00_DataArrayBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub fn finish(self) -> ::flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + self.fbb_.required(o, da00_DataArray::VT_SOURCE_NAME,"source_name"); + self.fbb_.required(o, da00_DataArray::VT_DATA,"data"); + ::flatbuffers::WIPOffset::new(o.value()) + } +} + +impl ::core::fmt::Debug for da00_DataArray<'_> { + fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + let mut ds = 
f.debug_struct("da00_DataArray"); + ds.field("source_name", &self.source_name()); + ds.field("timestamp", &self.timestamp()); + ds.field("data", &self.data()); + ds.finish() + } +} +#[inline] +/// Verifies that a buffer of bytes contains a `da00_DataArray` +/// and returns it. +/// Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `root_as_da_00_data_array_unchecked`. +pub fn root_as_da_00_data_array(buf: &[u8]) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::root::(buf) +} +#[inline] +/// Verifies that a buffer of bytes contains a size prefixed +/// `da00_DataArray` and returns it. +/// Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `size_prefixed_root_as_da_00_data_array_unchecked`. +pub fn size_prefixed_root_as_da_00_data_array(buf: &[u8]) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::size_prefixed_root::(buf) +} +#[inline] +/// Verifies, with the given options, that a buffer of bytes +/// contains a `da00_DataArray` and returns it. +/// Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `root_as_da_00_data_array_unchecked`. +pub fn root_as_da_00_data_array_with_opts<'b, 'o>( + opts: &'o ::flatbuffers::VerifierOptions, + buf: &'b [u8], +) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::root_with_opts::>(opts, buf) +} +#[inline] +/// Verifies, with the given verifier options, that a buffer of +/// bytes contains a size prefixed `da00_DataArray` and returns +/// it. Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `root_as_da_00_data_array_unchecked`. 
+pub fn size_prefixed_root_as_da_00_data_array_with_opts<'b, 'o>( + opts: &'o ::flatbuffers::VerifierOptions, + buf: &'b [u8], +) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::size_prefixed_root_with_opts::>(opts, buf) +} +#[inline] +/// Assumes, without verification, that a buffer of bytes contains a da00_DataArray and returns it. +/// # Safety +/// Callers must trust the given bytes do indeed contain a valid `da00_DataArray`. +pub unsafe fn root_as_da_00_data_array_unchecked(buf: &[u8]) -> da00_DataArray<'_> { + unsafe { ::flatbuffers::root_unchecked::(buf) } +} +#[inline] +/// Assumes, without verification, that a buffer of bytes contains a size prefixed da00_DataArray and returns it. +/// # Safety +/// Callers must trust the given bytes do indeed contain a valid size prefixed `da00_DataArray`. +pub unsafe fn size_prefixed_root_as_da_00_data_array_unchecked(buf: &[u8]) -> da00_DataArray<'_> { + unsafe { ::flatbuffers::size_prefixed_root_unchecked::(buf) } +} +pub const DA_00_DATA_ARRAY_IDENTIFIER: &str = "da00"; + +#[inline] +pub fn da_00_data_array_buffer_has_identifier(buf: &[u8]) -> bool { + ::flatbuffers::buffer_has_identifier(buf, DA_00_DATA_ARRAY_IDENTIFIER, false) +} + +#[inline] +pub fn da_00_data_array_size_prefixed_buffer_has_identifier(buf: &[u8]) -> bool { + ::flatbuffers::buffer_has_identifier(buf, DA_00_DATA_ARRAY_IDENTIFIER, true) +} + +#[inline] +pub fn finish_da_00_data_array_buffer<'a, 'b, A: ::flatbuffers::Allocator + 'a>( + fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + root: ::flatbuffers::WIPOffset>) { + fbb.finish(root, Some(DA_00_DATA_ARRAY_IDENTIFIER)); +} + +#[inline] +pub fn finish_size_prefixed_da_00_data_array_buffer<'a, 'b, A: ::flatbuffers::Allocator + 'a>(fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, root: ::flatbuffers::WIPOffset>) { + fbb.finish_size_prefixed(root, Some(DA_00_DATA_ARRAY_IDENTIFIER)); +} diff --git a/rust/src/flatbuffers_generated/df12_det_spec_map.rs 
b/rust/src/flatbuffers_generated/df12_det_spec_map.rs new file mode 100644 index 0000000..06399f8 --- /dev/null +++ b/rust/src/flatbuffers_generated/df12_det_spec_map.rs @@ -0,0 +1,218 @@ +// automatically generated by the FlatBuffers compiler, do not modify +// @generated + +extern crate alloc; + +pub enum SpectraDetectorMappingOffset {} +#[derive(Copy, Clone, PartialEq)] + +pub struct SpectraDetectorMapping<'a> { + pub _tab: ::flatbuffers::Table<'a>, +} + +impl<'a> ::flatbuffers::Follow<'a> for SpectraDetectorMapping<'a> { + type Inner = SpectraDetectorMapping<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { _tab: unsafe { ::flatbuffers::Table::new(buf, loc) } } + } +} + +impl<'a> SpectraDetectorMapping<'a> { + pub const VT_SPECTRUM: ::flatbuffers::VOffsetT = 4; + pub const VT_DETECTOR_ID: ::flatbuffers::VOffsetT = 6; + pub const VT_N_SPECTRA: ::flatbuffers::VOffsetT = 8; + + #[inline] + pub unsafe fn init_from_table(table: ::flatbuffers::Table<'a>) -> Self { + SpectraDetectorMapping { _tab: table } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: ::flatbuffers::Allocator + 'bldr>( + _fbb: &'mut_bldr mut ::flatbuffers::FlatBufferBuilder<'bldr, A>, + args: &'args SpectraDetectorMappingArgs<'args> + ) -> ::flatbuffers::WIPOffset> { + let mut builder = SpectraDetectorMappingBuilder::new(_fbb); + builder.add_n_spectra(args.n_spectra); + if let Some(x) = args.detector_id { builder.add_detector_id(x); } + if let Some(x) = args.spectrum { builder.add_spectrum(x); } + builder.finish() + } + + + #[inline] + pub fn spectrum(&self) -> Option<::flatbuffers::Vector<'a, i32>> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'a, i32>>>(SpectraDetectorMapping::VT_SPECTRUM, None)} + } + #[inline] + pub fn detector_id(&self) -> Option<::flatbuffers::Vector<'a, i32>> 
{ + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'a, i32>>>(SpectraDetectorMapping::VT_DETECTOR_ID, None)} + } + #[inline] + pub fn n_spectra(&self) -> i32 { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::(SpectraDetectorMapping::VT_N_SPECTRA, Some(0)).unwrap()} + } +} + +impl ::flatbuffers::Verifiable for SpectraDetectorMapping<'_> { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + v.visit_table(pos)? + .visit_field::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'_, i32>>>("spectrum", Self::VT_SPECTRUM, false)? + .visit_field::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'_, i32>>>("detector_id", Self::VT_DETECTOR_ID, false)? + .visit_field::("n_spectra", Self::VT_N_SPECTRA, false)? 
+ .finish(); + Ok(()) + } +} +pub struct SpectraDetectorMappingArgs<'a> { + pub spectrum: Option<::flatbuffers::WIPOffset<::flatbuffers::Vector<'a, i32>>>, + pub detector_id: Option<::flatbuffers::WIPOffset<::flatbuffers::Vector<'a, i32>>>, + pub n_spectra: i32, +} +impl<'a> Default for SpectraDetectorMappingArgs<'a> { + #[inline] + fn default() -> Self { + SpectraDetectorMappingArgs { + spectrum: None, + detector_id: None, + n_spectra: 0, + } + } +} + +pub struct SpectraDetectorMappingBuilder<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> { + fbb_: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + start_: ::flatbuffers::WIPOffset<::flatbuffers::TableUnfinishedWIPOffset>, +} +impl<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> SpectraDetectorMappingBuilder<'a, 'b, A> { + #[inline] + pub fn add_spectrum(&mut self, spectrum: ::flatbuffers::WIPOffset<::flatbuffers::Vector<'b , i32>>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(SpectraDetectorMapping::VT_SPECTRUM, spectrum); + } + #[inline] + pub fn add_detector_id(&mut self, detector_id: ::flatbuffers::WIPOffset<::flatbuffers::Vector<'b , i32>>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(SpectraDetectorMapping::VT_DETECTOR_ID, detector_id); + } + #[inline] + pub fn add_n_spectra(&mut self, n_spectra: i32) { + self.fbb_.push_slot::(SpectraDetectorMapping::VT_N_SPECTRA, n_spectra, 0); + } + #[inline] + pub fn new(_fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>) -> SpectraDetectorMappingBuilder<'a, 'b, A> { + let start = _fbb.start_table(); + SpectraDetectorMappingBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub fn finish(self) -> ::flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + ::flatbuffers::WIPOffset::new(o.value()) + } +} + +impl ::core::fmt::Debug for SpectraDetectorMapping<'_> { + fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + let mut ds = f.debug_struct("SpectraDetectorMapping"); + 
ds.field("spectrum", &self.spectrum()); + ds.field("detector_id", &self.detector_id()); + ds.field("n_spectra", &self.n_spectra()); + ds.finish() + } +} +#[inline] +/// Verifies that a buffer of bytes contains a `SpectraDetectorMapping` +/// and returns it. +/// Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `root_as_spectra_detector_mapping_unchecked`. +pub fn root_as_spectra_detector_mapping(buf: &[u8]) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::root::(buf) +} +#[inline] +/// Verifies that a buffer of bytes contains a size prefixed +/// `SpectraDetectorMapping` and returns it. +/// Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `size_prefixed_root_as_spectra_detector_mapping_unchecked`. +pub fn size_prefixed_root_as_spectra_detector_mapping(buf: &[u8]) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::size_prefixed_root::(buf) +} +#[inline] +/// Verifies, with the given options, that a buffer of bytes +/// contains a `SpectraDetectorMapping` and returns it. +/// Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `root_as_spectra_detector_mapping_unchecked`. +pub fn root_as_spectra_detector_mapping_with_opts<'b, 'o>( + opts: &'o ::flatbuffers::VerifierOptions, + buf: &'b [u8], +) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::root_with_opts::>(opts, buf) +} +#[inline] +/// Verifies, with the given verifier options, that a buffer of +/// bytes contains a size prefixed `SpectraDetectorMapping` and returns +/// it. Note that verification is still experimental and may not +/// catch every error, or be maximally performant. 
For the +/// previous, unchecked, behavior use +/// `root_as_spectra_detector_mapping_unchecked`. +pub fn size_prefixed_root_as_spectra_detector_mapping_with_opts<'b, 'o>( + opts: &'o ::flatbuffers::VerifierOptions, + buf: &'b [u8], +) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::size_prefixed_root_with_opts::>(opts, buf) +} +#[inline] +/// Assumes, without verification, that a buffer of bytes contains a SpectraDetectorMapping and returns it. +/// # Safety +/// Callers must trust the given bytes do indeed contain a valid `SpectraDetectorMapping`. +pub unsafe fn root_as_spectra_detector_mapping_unchecked(buf: &[u8]) -> SpectraDetectorMapping<'_> { + unsafe { ::flatbuffers::root_unchecked::(buf) } +} +#[inline] +/// Assumes, without verification, that a buffer of bytes contains a size prefixed SpectraDetectorMapping and returns it. +/// # Safety +/// Callers must trust the given bytes do indeed contain a valid size prefixed `SpectraDetectorMapping`. +pub unsafe fn size_prefixed_root_as_spectra_detector_mapping_unchecked(buf: &[u8]) -> SpectraDetectorMapping<'_> { + unsafe { ::flatbuffers::size_prefixed_root_unchecked::(buf) } +} +pub const SPECTRA_DETECTOR_MAPPING_IDENTIFIER: &str = "df12"; + +#[inline] +pub fn spectra_detector_mapping_buffer_has_identifier(buf: &[u8]) -> bool { + ::flatbuffers::buffer_has_identifier(buf, SPECTRA_DETECTOR_MAPPING_IDENTIFIER, false) +} + +#[inline] +pub fn spectra_detector_mapping_size_prefixed_buffer_has_identifier(buf: &[u8]) -> bool { + ::flatbuffers::buffer_has_identifier(buf, SPECTRA_DETECTOR_MAPPING_IDENTIFIER, true) +} + +#[inline] +pub fn finish_spectra_detector_mapping_buffer<'a, 'b, A: ::flatbuffers::Allocator + 'a>( + fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + root: ::flatbuffers::WIPOffset>) { + fbb.finish(root, Some(SPECTRA_DETECTOR_MAPPING_IDENTIFIER)); +} + +#[inline] +pub fn finish_size_prefixed_spectra_detector_mapping_buffer<'a, 'b, A: ::flatbuffers::Allocator + 'a>(fbb: &'b mut 
::flatbuffers::FlatBufferBuilder<'a, A>, root: ::flatbuffers::WIPOffset>) { + fbb.finish_size_prefixed(root, Some(SPECTRA_DETECTOR_MAPPING_IDENTIFIER)); +} diff --git a/rust/src/flatbuffers_generated/ep01_epics_connection.rs b/rust/src/flatbuffers_generated/ep01_epics_connection.rs new file mode 100644 index 0000000..6937886 --- /dev/null +++ b/rust/src/flatbuffers_generated/ep01_epics_connection.rs @@ -0,0 +1,344 @@ +// automatically generated by the FlatBuffers compiler, do not modify +// @generated + +extern crate alloc; + +#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")] +pub const ENUM_MIN_CONNECTION_INFO: i16 = 0; +#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")] +pub const ENUM_MAX_CONNECTION_INFO: i16 = 7; +#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")] +#[allow(non_camel_case_types)] +pub const ENUM_VALUES_CONNECTION_INFO: [ConnectionInfo; 8] = [ + ConnectionInfo::UNKNOWN, + ConnectionInfo::NEVER_CONNECTED, + ConnectionInfo::CONNECTED, + ConnectionInfo::DISCONNECTED, + ConnectionInfo::DESTROYED, + ConnectionInfo::CANCELLED, + ConnectionInfo::FINISHED, + ConnectionInfo::REMOTE_ERROR, +]; + +#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] +#[repr(transparent)] +pub struct ConnectionInfo(pub i16); +#[allow(non_upper_case_globals)] +impl ConnectionInfo { + pub const UNKNOWN: Self = Self(0); + pub const NEVER_CONNECTED: Self = Self(1); + pub const CONNECTED: Self = Self(2); + pub const DISCONNECTED: Self = Self(3); + pub const DESTROYED: Self = Self(4); + pub const CANCELLED: Self = Self(5); + pub const FINISHED: Self = Self(6); + pub const REMOTE_ERROR: Self = Self(7); + + pub const ENUM_MIN: i16 = 0; + pub const ENUM_MAX: i16 = 7; + pub const ENUM_VALUES: &'static [Self] = &[ + Self::UNKNOWN, + Self::NEVER_CONNECTED, + Self::CONNECTED, 
+ Self::DISCONNECTED, + Self::DESTROYED, + Self::CANCELLED, + Self::FINISHED, + Self::REMOTE_ERROR, + ]; + /// Returns the variant's name or "" if unknown. + pub fn variant_name(self) -> Option<&'static str> { + match self { + Self::UNKNOWN => Some("UNKNOWN"), + Self::NEVER_CONNECTED => Some("NEVER_CONNECTED"), + Self::CONNECTED => Some("CONNECTED"), + Self::DISCONNECTED => Some("DISCONNECTED"), + Self::DESTROYED => Some("DESTROYED"), + Self::CANCELLED => Some("CANCELLED"), + Self::FINISHED => Some("FINISHED"), + Self::REMOTE_ERROR => Some("REMOTE_ERROR"), + _ => None, + } + } +} +impl ::core::fmt::Debug for ConnectionInfo { + fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result { + if let Some(name) = self.variant_name() { + f.write_str(name) + } else { + f.write_fmt(format_args!("", self.0)) + } + } +} +impl<'a> ::flatbuffers::Follow<'a> for ConnectionInfo { + type Inner = Self; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + let b = unsafe { ::flatbuffers::read_scalar_at::(buf, loc) }; + Self(b) + } +} + +impl ::flatbuffers::Push for ConnectionInfo { + type Output = ConnectionInfo; + #[inline] + unsafe fn push(&self, dst: &mut [u8], _written_len: usize) { + unsafe { ::flatbuffers::emplace_scalar::(dst, self.0) }; + } +} + +impl ::flatbuffers::EndianScalar for ConnectionInfo { + type Scalar = i16; + #[inline] + fn to_little_endian(self) -> i16 { + self.0.to_le() + } + #[inline] + #[allow(clippy::wrong_self_convention)] + fn from_little_endian(v: i16) -> Self { + let b = i16::from_le(v); + Self(b) + } +} + +impl<'a> ::flatbuffers::Verifiable for ConnectionInfo { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + i16::run_verifier(v, pos) + } +} + +impl ::flatbuffers::SimpleToVerifyInSlice for ConnectionInfo {} +pub enum EpicsPVConnectionInfoOffset {} +#[derive(Copy, Clone, PartialEq)] + +pub struct EpicsPVConnectionInfo<'a> { + pub _tab: 
::flatbuffers::Table<'a>, +} + +impl<'a> ::flatbuffers::Follow<'a> for EpicsPVConnectionInfo<'a> { + type Inner = EpicsPVConnectionInfo<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { _tab: unsafe { ::flatbuffers::Table::new(buf, loc) } } + } +} + +impl<'a> EpicsPVConnectionInfo<'a> { + pub const VT_TIMESTAMP: ::flatbuffers::VOffsetT = 4; + pub const VT_STATUS: ::flatbuffers::VOffsetT = 6; + pub const VT_SOURCE_NAME: ::flatbuffers::VOffsetT = 8; + pub const VT_SERVICE_ID: ::flatbuffers::VOffsetT = 10; + + #[inline] + pub unsafe fn init_from_table(table: ::flatbuffers::Table<'a>) -> Self { + EpicsPVConnectionInfo { _tab: table } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: ::flatbuffers::Allocator + 'bldr>( + _fbb: &'mut_bldr mut ::flatbuffers::FlatBufferBuilder<'bldr, A>, + args: &'args EpicsPVConnectionInfoArgs<'args> + ) -> ::flatbuffers::WIPOffset> { + let mut builder = EpicsPVConnectionInfoBuilder::new(_fbb); + builder.add_timestamp(args.timestamp); + if let Some(x) = args.service_id { builder.add_service_id(x); } + if let Some(x) = args.source_name { builder.add_source_name(x); } + builder.add_status(args.status); + builder.finish() + } + + + #[inline] + pub fn timestamp(&self) -> i64 { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::(EpicsPVConnectionInfo::VT_TIMESTAMP, Some(0)).unwrap()} + } + #[inline] + pub fn status(&self) -> ConnectionInfo { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::(EpicsPVConnectionInfo::VT_STATUS, Some(ConnectionInfo::UNKNOWN)).unwrap()} + } + #[inline] + pub fn source_name(&self) -> &'a str { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { 
self._tab.get::<::flatbuffers::ForwardsUOffset<&str>>(EpicsPVConnectionInfo::VT_SOURCE_NAME, None).unwrap()} + } + #[inline] + pub fn service_id(&self) -> Option<&'a str> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<&str>>(EpicsPVConnectionInfo::VT_SERVICE_ID, None)} + } +} + +impl ::flatbuffers::Verifiable for EpicsPVConnectionInfo<'_> { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + v.visit_table(pos)? + .visit_field::("timestamp", Self::VT_TIMESTAMP, false)? + .visit_field::("status", Self::VT_STATUS, false)? + .visit_field::<::flatbuffers::ForwardsUOffset<&str>>("source_name", Self::VT_SOURCE_NAME, true)? + .visit_field::<::flatbuffers::ForwardsUOffset<&str>>("service_id", Self::VT_SERVICE_ID, false)? + .finish(); + Ok(()) + } +} +pub struct EpicsPVConnectionInfoArgs<'a> { + pub timestamp: i64, + pub status: ConnectionInfo, + pub source_name: Option<::flatbuffers::WIPOffset<&'a str>>, + pub service_id: Option<::flatbuffers::WIPOffset<&'a str>>, +} +impl<'a> Default for EpicsPVConnectionInfoArgs<'a> { + #[inline] + fn default() -> Self { + EpicsPVConnectionInfoArgs { + timestamp: 0, + status: ConnectionInfo::UNKNOWN, + source_name: None, // required field + service_id: None, + } + } +} + +pub struct EpicsPVConnectionInfoBuilder<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> { + fbb_: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + start_: ::flatbuffers::WIPOffset<::flatbuffers::TableUnfinishedWIPOffset>, +} +impl<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> EpicsPVConnectionInfoBuilder<'a, 'b, A> { + #[inline] + pub fn add_timestamp(&mut self, timestamp: i64) { + self.fbb_.push_slot::(EpicsPVConnectionInfo::VT_TIMESTAMP, timestamp, 0); + } + #[inline] + pub fn add_status(&mut self, status: ConnectionInfo) { + 
self.fbb_.push_slot::(EpicsPVConnectionInfo::VT_STATUS, status, ConnectionInfo::UNKNOWN); + } + #[inline] + pub fn add_source_name(&mut self, source_name: ::flatbuffers::WIPOffset<&'b str>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(EpicsPVConnectionInfo::VT_SOURCE_NAME, source_name); + } + #[inline] + pub fn add_service_id(&mut self, service_id: ::flatbuffers::WIPOffset<&'b str>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(EpicsPVConnectionInfo::VT_SERVICE_ID, service_id); + } + #[inline] + pub fn new(_fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>) -> EpicsPVConnectionInfoBuilder<'a, 'b, A> { + let start = _fbb.start_table(); + EpicsPVConnectionInfoBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub fn finish(self) -> ::flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + self.fbb_.required(o, EpicsPVConnectionInfo::VT_SOURCE_NAME,"source_name"); + ::flatbuffers::WIPOffset::new(o.value()) + } +} + +impl ::core::fmt::Debug for EpicsPVConnectionInfo<'_> { + fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + let mut ds = f.debug_struct("EpicsPVConnectionInfo"); + ds.field("timestamp", &self.timestamp()); + ds.field("status", &self.status()); + ds.field("source_name", &self.source_name()); + ds.field("service_id", &self.service_id()); + ds.finish() + } +} +#[inline] +/// Verifies that a buffer of bytes contains a `EpicsPVConnectionInfo` +/// and returns it. +/// Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `root_as_epics_pvconnection_info_unchecked`. +pub fn root_as_epics_pvconnection_info(buf: &[u8]) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::root::(buf) +} +#[inline] +/// Verifies that a buffer of bytes contains a size prefixed +/// `EpicsPVConnectionInfo` and returns it. 
+/// Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `size_prefixed_root_as_epics_pvconnection_info_unchecked`. +pub fn size_prefixed_root_as_epics_pvconnection_info(buf: &[u8]) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::size_prefixed_root::(buf) +} +#[inline] +/// Verifies, with the given options, that a buffer of bytes +/// contains a `EpicsPVConnectionInfo` and returns it. +/// Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `root_as_epics_pvconnection_info_unchecked`. +pub fn root_as_epics_pvconnection_info_with_opts<'b, 'o>( + opts: &'o ::flatbuffers::VerifierOptions, + buf: &'b [u8], +) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::root_with_opts::>(opts, buf) +} +#[inline] +/// Verifies, with the given verifier options, that a buffer of +/// bytes contains a size prefixed `EpicsPVConnectionInfo` and returns +/// it. Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `root_as_epics_pvconnection_info_unchecked`. +pub fn size_prefixed_root_as_epics_pvconnection_info_with_opts<'b, 'o>( + opts: &'o ::flatbuffers::VerifierOptions, + buf: &'b [u8], +) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::size_prefixed_root_with_opts::>(opts, buf) +} +#[inline] +/// Assumes, without verification, that a buffer of bytes contains a EpicsPVConnectionInfo and returns it. +/// # Safety +/// Callers must trust the given bytes do indeed contain a valid `EpicsPVConnectionInfo`. 
+pub unsafe fn root_as_epics_pvconnection_info_unchecked(buf: &[u8]) -> EpicsPVConnectionInfo<'_> { + unsafe { ::flatbuffers::root_unchecked::(buf) } +} +#[inline] +/// Assumes, without verification, that a buffer of bytes contains a size prefixed EpicsPVConnectionInfo and returns it. +/// # Safety +/// Callers must trust the given bytes do indeed contain a valid size prefixed `EpicsPVConnectionInfo`. +pub unsafe fn size_prefixed_root_as_epics_pvconnection_info_unchecked(buf: &[u8]) -> EpicsPVConnectionInfo<'_> { + unsafe { ::flatbuffers::size_prefixed_root_unchecked::(buf) } +} +pub const EPICS_PVCONNECTION_INFO_IDENTIFIER: &str = "ep01"; + +#[inline] +pub fn epics_pvconnection_info_buffer_has_identifier(buf: &[u8]) -> bool { + ::flatbuffers::buffer_has_identifier(buf, EPICS_PVCONNECTION_INFO_IDENTIFIER, false) +} + +#[inline] +pub fn epics_pvconnection_info_size_prefixed_buffer_has_identifier(buf: &[u8]) -> bool { + ::flatbuffers::buffer_has_identifier(buf, EPICS_PVCONNECTION_INFO_IDENTIFIER, true) +} + +#[inline] +pub fn finish_epics_pvconnection_info_buffer<'a, 'b, A: ::flatbuffers::Allocator + 'a>( + fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + root: ::flatbuffers::WIPOffset>) { + fbb.finish(root, Some(EPICS_PVCONNECTION_INFO_IDENTIFIER)); +} + +#[inline] +pub fn finish_size_prefixed_epics_pvconnection_info_buffer<'a, 'b, A: ::flatbuffers::Allocator + 'a>(fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, root: ::flatbuffers::WIPOffset>) { + fbb.finish_size_prefixed(root, Some(EPICS_PVCONNECTION_INFO_IDENTIFIER)); +} diff --git a/rust/src/flatbuffers_generated/ev44_events.rs b/rust/src/flatbuffers_generated/ev44_events.rs new file mode 100644 index 0000000..fa18ab4 --- /dev/null +++ b/rust/src/flatbuffers_generated/ev44_events.rs @@ -0,0 +1,272 @@ +// automatically generated by the FlatBuffers compiler, do not modify +// @generated + +extern crate alloc; + +pub enum Event44MessageOffset {} +#[derive(Copy, Clone, PartialEq)] + +pub struct 
Event44Message<'a> { + pub _tab: ::flatbuffers::Table<'a>, +} + +impl<'a> ::flatbuffers::Follow<'a> for Event44Message<'a> { + type Inner = Event44Message<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { _tab: unsafe { ::flatbuffers::Table::new(buf, loc) } } + } +} + +impl<'a> Event44Message<'a> { + pub const VT_SOURCE_NAME: ::flatbuffers::VOffsetT = 4; + pub const VT_MESSAGE_ID: ::flatbuffers::VOffsetT = 6; + pub const VT_REFERENCE_TIME: ::flatbuffers::VOffsetT = 8; + pub const VT_REFERENCE_TIME_INDEX: ::flatbuffers::VOffsetT = 10; + pub const VT_TIME_OF_FLIGHT: ::flatbuffers::VOffsetT = 12; + pub const VT_PIXEL_ID: ::flatbuffers::VOffsetT = 14; + + #[inline] + pub unsafe fn init_from_table(table: ::flatbuffers::Table<'a>) -> Self { + Event44Message { _tab: table } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: ::flatbuffers::Allocator + 'bldr>( + _fbb: &'mut_bldr mut ::flatbuffers::FlatBufferBuilder<'bldr, A>, + args: &'args Event44MessageArgs<'args> + ) -> ::flatbuffers::WIPOffset> { + let mut builder = Event44MessageBuilder::new(_fbb); + builder.add_message_id(args.message_id); + if let Some(x) = args.pixel_id { builder.add_pixel_id(x); } + if let Some(x) = args.time_of_flight { builder.add_time_of_flight(x); } + if let Some(x) = args.reference_time_index { builder.add_reference_time_index(x); } + if let Some(x) = args.reference_time { builder.add_reference_time(x); } + if let Some(x) = args.source_name { builder.add_source_name(x); } + builder.finish() + } + + + #[inline] + pub fn source_name(&self) -> &'a str { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<&str>>(Event44Message::VT_SOURCE_NAME, None).unwrap()} + } + #[inline] + pub fn message_id(&self) -> i64 { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this 
slot + unsafe { self._tab.get::(Event44Message::VT_MESSAGE_ID, Some(0)).unwrap()} + } + #[inline] + pub fn reference_time(&self) -> ::flatbuffers::Vector<'a, i64> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'a, i64>>>(Event44Message::VT_REFERENCE_TIME, None).unwrap()} + } + #[inline] + pub fn reference_time_index(&self) -> ::flatbuffers::Vector<'a, i32> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'a, i32>>>(Event44Message::VT_REFERENCE_TIME_INDEX, None).unwrap()} + } + #[inline] + pub fn time_of_flight(&self) -> Option<::flatbuffers::Vector<'a, i32>> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'a, i32>>>(Event44Message::VT_TIME_OF_FLIGHT, None)} + } + #[inline] + pub fn pixel_id(&self) -> Option<::flatbuffers::Vector<'a, i32>> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'a, i32>>>(Event44Message::VT_PIXEL_ID, None)} + } +} + +impl ::flatbuffers::Verifiable for Event44Message<'_> { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + v.visit_table(pos)? + .visit_field::<::flatbuffers::ForwardsUOffset<&str>>("source_name", Self::VT_SOURCE_NAME, true)? + .visit_field::("message_id", Self::VT_MESSAGE_ID, false)? + .visit_field::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'_, i64>>>("reference_time", Self::VT_REFERENCE_TIME, true)? 
+ .visit_field::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'_, i32>>>("reference_time_index", Self::VT_REFERENCE_TIME_INDEX, true)? + .visit_field::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'_, i32>>>("time_of_flight", Self::VT_TIME_OF_FLIGHT, false)? + .visit_field::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'_, i32>>>("pixel_id", Self::VT_PIXEL_ID, false)? + .finish(); + Ok(()) + } +} +pub struct Event44MessageArgs<'a> { + pub source_name: Option<::flatbuffers::WIPOffset<&'a str>>, + pub message_id: i64, + pub reference_time: Option<::flatbuffers::WIPOffset<::flatbuffers::Vector<'a, i64>>>, + pub reference_time_index: Option<::flatbuffers::WIPOffset<::flatbuffers::Vector<'a, i32>>>, + pub time_of_flight: Option<::flatbuffers::WIPOffset<::flatbuffers::Vector<'a, i32>>>, + pub pixel_id: Option<::flatbuffers::WIPOffset<::flatbuffers::Vector<'a, i32>>>, +} +impl<'a> Default for Event44MessageArgs<'a> { + #[inline] + fn default() -> Self { + Event44MessageArgs { + source_name: None, // required field + message_id: 0, + reference_time: None, // required field + reference_time_index: None, // required field + time_of_flight: None, + pixel_id: None, + } + } +} + +pub struct Event44MessageBuilder<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> { + fbb_: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + start_: ::flatbuffers::WIPOffset<::flatbuffers::TableUnfinishedWIPOffset>, +} +impl<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> Event44MessageBuilder<'a, 'b, A> { + #[inline] + pub fn add_source_name(&mut self, source_name: ::flatbuffers::WIPOffset<&'b str>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(Event44Message::VT_SOURCE_NAME, source_name); + } + #[inline] + pub fn add_message_id(&mut self, message_id: i64) { + self.fbb_.push_slot::(Event44Message::VT_MESSAGE_ID, message_id, 0); + } + #[inline] + pub fn add_reference_time(&mut self, reference_time: ::flatbuffers::WIPOffset<::flatbuffers::Vector<'b , i64>>) { + 
self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(Event44Message::VT_REFERENCE_TIME, reference_time); + } + #[inline] + pub fn add_reference_time_index(&mut self, reference_time_index: ::flatbuffers::WIPOffset<::flatbuffers::Vector<'b , i32>>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(Event44Message::VT_REFERENCE_TIME_INDEX, reference_time_index); + } + #[inline] + pub fn add_time_of_flight(&mut self, time_of_flight: ::flatbuffers::WIPOffset<::flatbuffers::Vector<'b , i32>>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(Event44Message::VT_TIME_OF_FLIGHT, time_of_flight); + } + #[inline] + pub fn add_pixel_id(&mut self, pixel_id: ::flatbuffers::WIPOffset<::flatbuffers::Vector<'b , i32>>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(Event44Message::VT_PIXEL_ID, pixel_id); + } + #[inline] + pub fn new(_fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>) -> Event44MessageBuilder<'a, 'b, A> { + let start = _fbb.start_table(); + Event44MessageBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub fn finish(self) -> ::flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + self.fbb_.required(o, Event44Message::VT_SOURCE_NAME,"source_name"); + self.fbb_.required(o, Event44Message::VT_REFERENCE_TIME,"reference_time"); + self.fbb_.required(o, Event44Message::VT_REFERENCE_TIME_INDEX,"reference_time_index"); + ::flatbuffers::WIPOffset::new(o.value()) + } +} + +impl ::core::fmt::Debug for Event44Message<'_> { + fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + let mut ds = f.debug_struct("Event44Message"); + ds.field("source_name", &self.source_name()); + ds.field("message_id", &self.message_id()); + ds.field("reference_time", &self.reference_time()); + ds.field("reference_time_index", &self.reference_time_index()); + ds.field("time_of_flight", &self.time_of_flight()); + ds.field("pixel_id", &self.pixel_id()); + ds.finish() + } +} +#[inline] +/// Verifies that a 
buffer of bytes contains a `Event44Message` +/// and returns it. +/// Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `root_as_event_44_message_unchecked`. +pub fn root_as_event_44_message(buf: &[u8]) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::root::(buf) +} +#[inline] +/// Verifies that a buffer of bytes contains a size prefixed +/// `Event44Message` and returns it. +/// Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `size_prefixed_root_as_event_44_message_unchecked`. +pub fn size_prefixed_root_as_event_44_message(buf: &[u8]) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::size_prefixed_root::(buf) +} +#[inline] +/// Verifies, with the given options, that a buffer of bytes +/// contains a `Event44Message` and returns it. +/// Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `root_as_event_44_message_unchecked`. +pub fn root_as_event_44_message_with_opts<'b, 'o>( + opts: &'o ::flatbuffers::VerifierOptions, + buf: &'b [u8], +) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::root_with_opts::>(opts, buf) +} +#[inline] +/// Verifies, with the given verifier options, that a buffer of +/// bytes contains a size prefixed `Event44Message` and returns +/// it. Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `root_as_event_44_message_unchecked`. 
+pub fn size_prefixed_root_as_event_44_message_with_opts<'b, 'o>( + opts: &'o ::flatbuffers::VerifierOptions, + buf: &'b [u8], +) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::size_prefixed_root_with_opts::>(opts, buf) +} +#[inline] +/// Assumes, without verification, that a buffer of bytes contains a Event44Message and returns it. +/// # Safety +/// Callers must trust the given bytes do indeed contain a valid `Event44Message`. +pub unsafe fn root_as_event_44_message_unchecked(buf: &[u8]) -> Event44Message<'_> { + unsafe { ::flatbuffers::root_unchecked::(buf) } +} +#[inline] +/// Assumes, without verification, that a buffer of bytes contains a size prefixed Event44Message and returns it. +/// # Safety +/// Callers must trust the given bytes do indeed contain a valid size prefixed `Event44Message`. +pub unsafe fn size_prefixed_root_as_event_44_message_unchecked(buf: &[u8]) -> Event44Message<'_> { + unsafe { ::flatbuffers::size_prefixed_root_unchecked::(buf) } +} +pub const EVENT_44_MESSAGE_IDENTIFIER: &str = "ev44"; + +#[inline] +pub fn event_44_message_buffer_has_identifier(buf: &[u8]) -> bool { + ::flatbuffers::buffer_has_identifier(buf, EVENT_44_MESSAGE_IDENTIFIER, false) +} + +#[inline] +pub fn event_44_message_size_prefixed_buffer_has_identifier(buf: &[u8]) -> bool { + ::flatbuffers::buffer_has_identifier(buf, EVENT_44_MESSAGE_IDENTIFIER, true) +} + +#[inline] +pub fn finish_event_44_message_buffer<'a, 'b, A: ::flatbuffers::Allocator + 'a>( + fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + root: ::flatbuffers::WIPOffset>) { + fbb.finish(root, Some(EVENT_44_MESSAGE_IDENTIFIER)); +} + +#[inline] +pub fn finish_size_prefixed_event_44_message_buffer<'a, 'b, A: ::flatbuffers::Allocator + 'a>(fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, root: ::flatbuffers::WIPOffset>) { + fbb.finish_size_prefixed(root, Some(EVENT_44_MESSAGE_IDENTIFIER)); +} diff --git a/rust/src/flatbuffers_generated/f144_logdata.rs 
b/rust/src/flatbuffers_generated/f144_logdata.rs new file mode 100644 index 0000000..6e05b3a --- /dev/null +++ b/rust/src/flatbuffers_generated/f144_logdata.rs @@ -0,0 +1,2767 @@ +// automatically generated by the FlatBuffers compiler, do not modify +// @generated + +extern crate alloc; + +#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")] +pub const ENUM_MIN_VALUE: u8 = 0; +#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")] +pub const ENUM_MAX_VALUE: u8 = 20; +#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")] +#[allow(non_camel_case_types)] +pub const ENUM_VALUES_VALUE: [Value; 21] = [ + Value::NONE, + Value::Byte, + Value::UByte, + Value::Short, + Value::UShort, + Value::Int, + Value::UInt, + Value::Long, + Value::ULong, + Value::Float, + Value::Double, + Value::ArrayByte, + Value::ArrayUByte, + Value::ArrayShort, + Value::ArrayUShort, + Value::ArrayInt, + Value::ArrayUInt, + Value::ArrayLong, + Value::ArrayULong, + Value::ArrayFloat, + Value::ArrayDouble, +]; + +#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] +#[repr(transparent)] +pub struct Value(pub u8); +#[allow(non_upper_case_globals)] +impl Value { + pub const NONE: Self = Self(0); + pub const Byte: Self = Self(1); + pub const UByte: Self = Self(2); + pub const Short: Self = Self(3); + pub const UShort: Self = Self(4); + pub const Int: Self = Self(5); + pub const UInt: Self = Self(6); + pub const Long: Self = Self(7); + pub const ULong: Self = Self(8); + pub const Float: Self = Self(9); + pub const Double: Self = Self(10); + pub const ArrayByte: Self = Self(11); + pub const ArrayUByte: Self = Self(12); + pub const ArrayShort: Self = Self(13); + pub const ArrayUShort: Self = Self(14); + pub const ArrayInt: Self = Self(15); + pub const ArrayUInt: Self = Self(16); + pub const ArrayLong: Self 
= Self(17); + pub const ArrayULong: Self = Self(18); + pub const ArrayFloat: Self = Self(19); + pub const ArrayDouble: Self = Self(20); + + pub const ENUM_MIN: u8 = 0; + pub const ENUM_MAX: u8 = 20; + pub const ENUM_VALUES: &'static [Self] = &[ + Self::NONE, + Self::Byte, + Self::UByte, + Self::Short, + Self::UShort, + Self::Int, + Self::UInt, + Self::Long, + Self::ULong, + Self::Float, + Self::Double, + Self::ArrayByte, + Self::ArrayUByte, + Self::ArrayShort, + Self::ArrayUShort, + Self::ArrayInt, + Self::ArrayUInt, + Self::ArrayLong, + Self::ArrayULong, + Self::ArrayFloat, + Self::ArrayDouble, + ]; + /// Returns the variant's name or "" if unknown. + pub fn variant_name(self) -> Option<&'static str> { + match self { + Self::NONE => Some("NONE"), + Self::Byte => Some("Byte"), + Self::UByte => Some("UByte"), + Self::Short => Some("Short"), + Self::UShort => Some("UShort"), + Self::Int => Some("Int"), + Self::UInt => Some("UInt"), + Self::Long => Some("Long"), + Self::ULong => Some("ULong"), + Self::Float => Some("Float"), + Self::Double => Some("Double"), + Self::ArrayByte => Some("ArrayByte"), + Self::ArrayUByte => Some("ArrayUByte"), + Self::ArrayShort => Some("ArrayShort"), + Self::ArrayUShort => Some("ArrayUShort"), + Self::ArrayInt => Some("ArrayInt"), + Self::ArrayUInt => Some("ArrayUInt"), + Self::ArrayLong => Some("ArrayLong"), + Self::ArrayULong => Some("ArrayULong"), + Self::ArrayFloat => Some("ArrayFloat"), + Self::ArrayDouble => Some("ArrayDouble"), + _ => None, + } + } +} +impl ::core::fmt::Debug for Value { + fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result { + if let Some(name) = self.variant_name() { + f.write_str(name) + } else { + f.write_fmt(format_args!("", self.0)) + } + } +} +impl<'a> ::flatbuffers::Follow<'a> for Value { + type Inner = Self; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + let b = unsafe { ::flatbuffers::read_scalar_at::(buf, loc) }; + Self(b) + } +} + +impl ::flatbuffers::Push 
for Value { + type Output = Value; + #[inline] + unsafe fn push(&self, dst: &mut [u8], _written_len: usize) { + unsafe { ::flatbuffers::emplace_scalar::(dst, self.0) }; + } +} + +impl ::flatbuffers::EndianScalar for Value { + type Scalar = u8; + #[inline] + fn to_little_endian(self) -> u8 { + self.0.to_le() + } + #[inline] + #[allow(clippy::wrong_self_convention)] + fn from_little_endian(v: u8) -> Self { + let b = u8::from_le(v); + Self(b) + } +} + +impl<'a> ::flatbuffers::Verifiable for Value { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + u8::run_verifier(v, pos) + } +} + +impl ::flatbuffers::SimpleToVerifyInSlice for Value {} +pub struct ValueUnionTableOffset {} + +pub enum ByteOffset {} +#[derive(Copy, Clone, PartialEq)] + +pub struct Byte<'a> { + pub _tab: ::flatbuffers::Table<'a>, +} + +impl<'a> ::flatbuffers::Follow<'a> for Byte<'a> { + type Inner = Byte<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { _tab: unsafe { ::flatbuffers::Table::new(buf, loc) } } + } +} + +impl<'a> Byte<'a> { + pub const VT_VALUE: ::flatbuffers::VOffsetT = 4; + + #[inline] + pub unsafe fn init_from_table(table: ::flatbuffers::Table<'a>) -> Self { + Byte { _tab: table } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: ::flatbuffers::Allocator + 'bldr>( + _fbb: &'mut_bldr mut ::flatbuffers::FlatBufferBuilder<'bldr, A>, + args: &'args ByteArgs + ) -> ::flatbuffers::WIPOffset> { + let mut builder = ByteBuilder::new(_fbb); + builder.add_value(args.value); + builder.finish() + } + + + #[inline] + pub fn value(&self) -> i8 { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::(Byte::VT_VALUE, Some(0)).unwrap()} + } +} + +impl ::flatbuffers::Verifiable for Byte<'_> { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) 
-> Result<(), ::flatbuffers::InvalidFlatbuffer> { + v.visit_table(pos)? + .visit_field::("value", Self::VT_VALUE, false)? + .finish(); + Ok(()) + } +} +pub struct ByteArgs { + pub value: i8, +} +impl<'a> Default for ByteArgs { + #[inline] + fn default() -> Self { + ByteArgs { + value: 0, + } + } +} + +pub struct ByteBuilder<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> { + fbb_: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + start_: ::flatbuffers::WIPOffset<::flatbuffers::TableUnfinishedWIPOffset>, +} +impl<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> ByteBuilder<'a, 'b, A> { + #[inline] + pub fn add_value(&mut self, value: i8) { + self.fbb_.push_slot::(Byte::VT_VALUE, value, 0); + } + #[inline] + pub fn new(_fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>) -> ByteBuilder<'a, 'b, A> { + let start = _fbb.start_table(); + ByteBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub fn finish(self) -> ::flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + ::flatbuffers::WIPOffset::new(o.value()) + } +} + +impl ::core::fmt::Debug for Byte<'_> { + fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + let mut ds = f.debug_struct("Byte"); + ds.field("value", &self.value()); + ds.finish() + } +} +pub enum UByteOffset {} +#[derive(Copy, Clone, PartialEq)] + +pub struct UByte<'a> { + pub _tab: ::flatbuffers::Table<'a>, +} + +impl<'a> ::flatbuffers::Follow<'a> for UByte<'a> { + type Inner = UByte<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { _tab: unsafe { ::flatbuffers::Table::new(buf, loc) } } + } +} + +impl<'a> UByte<'a> { + pub const VT_VALUE: ::flatbuffers::VOffsetT = 4; + + #[inline] + pub unsafe fn init_from_table(table: ::flatbuffers::Table<'a>) -> Self { + UByte { _tab: table } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: ::flatbuffers::Allocator + 'bldr>( + _fbb: &'mut_bldr mut ::flatbuffers::FlatBufferBuilder<'bldr, 
A>, + args: &'args UByteArgs + ) -> ::flatbuffers::WIPOffset> { + let mut builder = UByteBuilder::new(_fbb); + builder.add_value(args.value); + builder.finish() + } + + + #[inline] + pub fn value(&self) -> u8 { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::(UByte::VT_VALUE, Some(0)).unwrap()} + } +} + +impl ::flatbuffers::Verifiable for UByte<'_> { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + v.visit_table(pos)? + .visit_field::("value", Self::VT_VALUE, false)? + .finish(); + Ok(()) + } +} +pub struct UByteArgs { + pub value: u8, +} +impl<'a> Default for UByteArgs { + #[inline] + fn default() -> Self { + UByteArgs { + value: 0, + } + } +} + +pub struct UByteBuilder<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> { + fbb_: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + start_: ::flatbuffers::WIPOffset<::flatbuffers::TableUnfinishedWIPOffset>, +} +impl<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> UByteBuilder<'a, 'b, A> { + #[inline] + pub fn add_value(&mut self, value: u8) { + self.fbb_.push_slot::(UByte::VT_VALUE, value, 0); + } + #[inline] + pub fn new(_fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>) -> UByteBuilder<'a, 'b, A> { + let start = _fbb.start_table(); + UByteBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub fn finish(self) -> ::flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + ::flatbuffers::WIPOffset::new(o.value()) + } +} + +impl ::core::fmt::Debug for UByte<'_> { + fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + let mut ds = f.debug_struct("UByte"); + ds.field("value", &self.value()); + ds.finish() + } +} +pub enum ShortOffset {} +#[derive(Copy, Clone, PartialEq)] + +pub struct Short<'a> { + pub _tab: ::flatbuffers::Table<'a>, +} + +impl<'a> ::flatbuffers::Follow<'a> for Short<'a> { + type Inner = 
Short<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { _tab: unsafe { ::flatbuffers::Table::new(buf, loc) } } + } +} + +impl<'a> Short<'a> { + pub const VT_VALUE: ::flatbuffers::VOffsetT = 4; + + #[inline] + pub unsafe fn init_from_table(table: ::flatbuffers::Table<'a>) -> Self { + Short { _tab: table } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: ::flatbuffers::Allocator + 'bldr>( + _fbb: &'mut_bldr mut ::flatbuffers::FlatBufferBuilder<'bldr, A>, + args: &'args ShortArgs + ) -> ::flatbuffers::WIPOffset> { + let mut builder = ShortBuilder::new(_fbb); + builder.add_value(args.value); + builder.finish() + } + + + #[inline] + pub fn value(&self) -> i16 { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::(Short::VT_VALUE, Some(0)).unwrap()} + } +} + +impl ::flatbuffers::Verifiable for Short<'_> { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + v.visit_table(pos)? + .visit_field::("value", Self::VT_VALUE, false)? 
+ .finish(); + Ok(()) + } +} +pub struct ShortArgs { + pub value: i16, +} +impl<'a> Default for ShortArgs { + #[inline] + fn default() -> Self { + ShortArgs { + value: 0, + } + } +} + +pub struct ShortBuilder<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> { + fbb_: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + start_: ::flatbuffers::WIPOffset<::flatbuffers::TableUnfinishedWIPOffset>, +} +impl<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> ShortBuilder<'a, 'b, A> { + #[inline] + pub fn add_value(&mut self, value: i16) { + self.fbb_.push_slot::(Short::VT_VALUE, value, 0); + } + #[inline] + pub fn new(_fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>) -> ShortBuilder<'a, 'b, A> { + let start = _fbb.start_table(); + ShortBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub fn finish(self) -> ::flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + ::flatbuffers::WIPOffset::new(o.value()) + } +} + +impl ::core::fmt::Debug for Short<'_> { + fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + let mut ds = f.debug_struct("Short"); + ds.field("value", &self.value()); + ds.finish() + } +} +pub enum UShortOffset {} +#[derive(Copy, Clone, PartialEq)] + +pub struct UShort<'a> { + pub _tab: ::flatbuffers::Table<'a>, +} + +impl<'a> ::flatbuffers::Follow<'a> for UShort<'a> { + type Inner = UShort<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { _tab: unsafe { ::flatbuffers::Table::new(buf, loc) } } + } +} + +impl<'a> UShort<'a> { + pub const VT_VALUE: ::flatbuffers::VOffsetT = 4; + + #[inline] + pub unsafe fn init_from_table(table: ::flatbuffers::Table<'a>) -> Self { + UShort { _tab: table } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: ::flatbuffers::Allocator + 'bldr>( + _fbb: &'mut_bldr mut ::flatbuffers::FlatBufferBuilder<'bldr, A>, + args: &'args UShortArgs + ) -> ::flatbuffers::WIPOffset> { + let mut builder = 
UShortBuilder::new(_fbb); + builder.add_value(args.value); + builder.finish() + } + + + #[inline] + pub fn value(&self) -> u16 { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::(UShort::VT_VALUE, Some(0)).unwrap()} + } +} + +impl ::flatbuffers::Verifiable for UShort<'_> { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + v.visit_table(pos)? + .visit_field::("value", Self::VT_VALUE, false)? + .finish(); + Ok(()) + } +} +pub struct UShortArgs { + pub value: u16, +} +impl<'a> Default for UShortArgs { + #[inline] + fn default() -> Self { + UShortArgs { + value: 0, + } + } +} + +pub struct UShortBuilder<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> { + fbb_: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + start_: ::flatbuffers::WIPOffset<::flatbuffers::TableUnfinishedWIPOffset>, +} +impl<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> UShortBuilder<'a, 'b, A> { + #[inline] + pub fn add_value(&mut self, value: u16) { + self.fbb_.push_slot::(UShort::VT_VALUE, value, 0); + } + #[inline] + pub fn new(_fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>) -> UShortBuilder<'a, 'b, A> { + let start = _fbb.start_table(); + UShortBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub fn finish(self) -> ::flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + ::flatbuffers::WIPOffset::new(o.value()) + } +} + +impl ::core::fmt::Debug for UShort<'_> { + fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + let mut ds = f.debug_struct("UShort"); + ds.field("value", &self.value()); + ds.finish() + } +} +pub enum IntOffset {} +#[derive(Copy, Clone, PartialEq)] + +pub struct Int<'a> { + pub _tab: ::flatbuffers::Table<'a>, +} + +impl<'a> ::flatbuffers::Follow<'a> for Int<'a> { + type Inner = Int<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> 
Self::Inner { + Self { _tab: unsafe { ::flatbuffers::Table::new(buf, loc) } } + } +} + +impl<'a> Int<'a> { + pub const VT_VALUE: ::flatbuffers::VOffsetT = 4; + + #[inline] + pub unsafe fn init_from_table(table: ::flatbuffers::Table<'a>) -> Self { + Int { _tab: table } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: ::flatbuffers::Allocator + 'bldr>( + _fbb: &'mut_bldr mut ::flatbuffers::FlatBufferBuilder<'bldr, A>, + args: &'args IntArgs + ) -> ::flatbuffers::WIPOffset> { + let mut builder = IntBuilder::new(_fbb); + builder.add_value(args.value); + builder.finish() + } + + + #[inline] + pub fn value(&self) -> i32 { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::(Int::VT_VALUE, Some(0)).unwrap()} + } +} + +impl ::flatbuffers::Verifiable for Int<'_> { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + v.visit_table(pos)? + .visit_field::("value", Self::VT_VALUE, false)? 
+ .finish(); + Ok(()) + } +} +pub struct IntArgs { + pub value: i32, +} +impl<'a> Default for IntArgs { + #[inline] + fn default() -> Self { + IntArgs { + value: 0, + } + } +} + +pub struct IntBuilder<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> { + fbb_: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + start_: ::flatbuffers::WIPOffset<::flatbuffers::TableUnfinishedWIPOffset>, +} +impl<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> IntBuilder<'a, 'b, A> { + #[inline] + pub fn add_value(&mut self, value: i32) { + self.fbb_.push_slot::(Int::VT_VALUE, value, 0); + } + #[inline] + pub fn new(_fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>) -> IntBuilder<'a, 'b, A> { + let start = _fbb.start_table(); + IntBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub fn finish(self) -> ::flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + ::flatbuffers::WIPOffset::new(o.value()) + } +} + +impl ::core::fmt::Debug for Int<'_> { + fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + let mut ds = f.debug_struct("Int"); + ds.field("value", &self.value()); + ds.finish() + } +} +pub enum UIntOffset {} +#[derive(Copy, Clone, PartialEq)] + +pub struct UInt<'a> { + pub _tab: ::flatbuffers::Table<'a>, +} + +impl<'a> ::flatbuffers::Follow<'a> for UInt<'a> { + type Inner = UInt<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { _tab: unsafe { ::flatbuffers::Table::new(buf, loc) } } + } +} + +impl<'a> UInt<'a> { + pub const VT_VALUE: ::flatbuffers::VOffsetT = 4; + + #[inline] + pub unsafe fn init_from_table(table: ::flatbuffers::Table<'a>) -> Self { + UInt { _tab: table } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: ::flatbuffers::Allocator + 'bldr>( + _fbb: &'mut_bldr mut ::flatbuffers::FlatBufferBuilder<'bldr, A>, + args: &'args UIntArgs + ) -> ::flatbuffers::WIPOffset> { + let mut builder = UIntBuilder::new(_fbb); + 
builder.add_value(args.value); + builder.finish() + } + + + #[inline] + pub fn value(&self) -> u32 { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::(UInt::VT_VALUE, Some(0)).unwrap()} + } +} + +impl ::flatbuffers::Verifiable for UInt<'_> { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + v.visit_table(pos)? + .visit_field::("value", Self::VT_VALUE, false)? + .finish(); + Ok(()) + } +} +pub struct UIntArgs { + pub value: u32, +} +impl<'a> Default for UIntArgs { + #[inline] + fn default() -> Self { + UIntArgs { + value: 0, + } + } +} + +pub struct UIntBuilder<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> { + fbb_: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + start_: ::flatbuffers::WIPOffset<::flatbuffers::TableUnfinishedWIPOffset>, +} +impl<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> UIntBuilder<'a, 'b, A> { + #[inline] + pub fn add_value(&mut self, value: u32) { + self.fbb_.push_slot::(UInt::VT_VALUE, value, 0); + } + #[inline] + pub fn new(_fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>) -> UIntBuilder<'a, 'b, A> { + let start = _fbb.start_table(); + UIntBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub fn finish(self) -> ::flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + ::flatbuffers::WIPOffset::new(o.value()) + } +} + +impl ::core::fmt::Debug for UInt<'_> { + fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + let mut ds = f.debug_struct("UInt"); + ds.field("value", &self.value()); + ds.finish() + } +} +pub enum LongOffset {} +#[derive(Copy, Clone, PartialEq)] + +pub struct Long<'a> { + pub _tab: ::flatbuffers::Table<'a>, +} + +impl<'a> ::flatbuffers::Follow<'a> for Long<'a> { + type Inner = Long<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { _tab: unsafe { 
::flatbuffers::Table::new(buf, loc) } }
  }
}

impl<'a> Long<'a> {
  pub const VT_VALUE: ::flatbuffers::VOffsetT = 4;

  #[inline]
  pub unsafe fn init_from_table(table: ::flatbuffers::Table<'a>) -> Self {
    Long { _tab: table }
  }
  /// Build a `Long` table from `args` and return its offset in `_fbb`.
  #[allow(unused_mut)]
  pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: ::flatbuffers::Allocator + 'bldr>(
    _fbb: &'mut_bldr mut ::flatbuffers::FlatBufferBuilder<'bldr, A>,
    args: &'args LongArgs
  ) -> ::flatbuffers::WIPOffset<Long<'bldr>> {
    let mut builder = LongBuilder::new(_fbb);
    builder.add_value(args.value);
    builder.finish()
  }

  /// The scalar `value` field; 0 when the slot is absent.
  #[inline]
  pub fn value(&self) -> i64 {
    // Safety:
    // Created from valid Table for this object
    // which contains a valid value in this slot
    unsafe { self._tab.get::<i64>(Long::VT_VALUE, Some(0)).unwrap() }
  }
}

impl ::flatbuffers::Verifiable for Long<'_> {
  #[inline]
  fn run_verifier(
    v: &mut ::flatbuffers::Verifier, pos: usize
  ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> {
    v.visit_table(pos)?
     .visit_field::<i64>("value", Self::VT_VALUE, false)?
     .finish();
    Ok(())
  }
}
/// Arguments for [`Long::create`].
pub struct LongArgs {
  pub value: i64,
}
impl<'a> Default for LongArgs {
  #[inline]
  fn default() -> Self {
    LongArgs {
      value: 0,
    }
  }
}

/// Incremental builder for a `Long` table.
pub struct LongBuilder<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> {
  fbb_: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>,
  start_: ::flatbuffers::WIPOffset<::flatbuffers::TableUnfinishedWIPOffset>,
}
impl<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> LongBuilder<'a, 'b, A> {
  #[inline]
  pub fn add_value(&mut self, value: i64) {
    // The slot is elided when `value` equals the schema default (0).
    self.fbb_.push_slot::<i64>(Long::VT_VALUE, value, 0);
  }
  #[inline]
  pub fn new(_fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>) -> LongBuilder<'a, 'b, A> {
    let start = _fbb.start_table();
    LongBuilder {
      fbb_: _fbb,
      start_: start,
    }
  }
  #[inline]
  pub fn finish(self) -> ::flatbuffers::WIPOffset<Long<'a>> {
    let o = self.fbb_.end_table(self.start_);
    ::flatbuffers::WIPOffset::new(o.value())
  }
}

impl ::core::fmt::Debug for Long<'_> {
  fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result {
    let mut ds = f.debug_struct("Long");
    ds.field("value", &self.value());
    ds.finish()
  }
}
/// Generated table wrapping a single `u64` scalar `value`.
pub enum ULongOffset {}
#[derive(Copy, Clone, PartialEq)]

pub struct ULong<'a> {
  pub _tab: ::flatbuffers::Table<'a>,
}

impl<'a> ::flatbuffers::Follow<'a> for ULong<'a> {
  type Inner = ULong<'a>;
  #[inline]
  unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
    Self { _tab: unsafe { ::flatbuffers::Table::new(buf, loc) } }
  }
}

impl<'a> ULong<'a> {
  pub const VT_VALUE: ::flatbuffers::VOffsetT = 4;

  #[inline]
  pub unsafe fn init_from_table(table: ::flatbuffers::Table<'a>) -> Self {
    ULong { _tab: table }
  }
  /// Build a `ULong` table from `args` and return its offset in `_fbb`.
  #[allow(unused_mut)]
  pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: ::flatbuffers::Allocator + 'bldr>(
    _fbb: &'mut_bldr mut ::flatbuffers::FlatBufferBuilder<'bldr, A>,
    args: &'args ULongArgs
  ) -> ::flatbuffers::WIPOffset<ULong<'bldr>> {
    let mut builder = ULongBuilder::new(_fbb);
    builder.add_value(args.value);
    builder.finish()
  }

  /// The scalar `value` field; 0 when the slot is absent.
  #[inline]
  pub fn value(&self) -> u64 {
    // Safety:
    // Created from valid Table for this object
    // which contains a valid value in this slot
    unsafe { self._tab.get::<u64>(ULong::VT_VALUE, Some(0)).unwrap() }
  }
}

impl ::flatbuffers::Verifiable for ULong<'_> {
  #[inline]
  fn run_verifier(
    v: &mut ::flatbuffers::Verifier, pos: usize
  ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> {
    v.visit_table(pos)?
     .visit_field::<u64>("value", Self::VT_VALUE, false)?
     .finish();
    Ok(())
  }
}
/// Arguments for [`ULong::create`].
pub struct ULongArgs {
  pub value: u64,
}
impl<'a> Default for ULongArgs {
  #[inline]
  fn default() -> Self {
    ULongArgs {
      value: 0,
    }
  }
}

/// Incremental builder for a `ULong` table.
pub struct ULongBuilder<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> {
  fbb_: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>,
  start_: ::flatbuffers::WIPOffset<::flatbuffers::TableUnfinishedWIPOffset>,
}
impl<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> ULongBuilder<'a, 'b, A> {
  #[inline]
  pub fn add_value(&mut self, value: u64) {
    self.fbb_.push_slot::<u64>(ULong::VT_VALUE, value, 0);
  }
  #[inline]
  pub fn new(_fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>) -> ULongBuilder<'a, 'b, A> {
    let start = _fbb.start_table();
    ULongBuilder {
      fbb_: _fbb,
      start_: start,
    }
  }
  #[inline]
  pub fn finish(self) -> ::flatbuffers::WIPOffset<ULong<'a>> {
    let o = self.fbb_.end_table(self.start_);
    ::flatbuffers::WIPOffset::new(o.value())
  }
}

impl ::core::fmt::Debug for ULong<'_> {
  fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result {
    let mut ds = f.debug_struct("ULong");
    ds.field("value", &self.value());
    ds.finish()
  }
}
/// Generated table wrapping a single `f32` scalar `value`.
pub enum FloatOffset {}
#[derive(Copy, Clone, PartialEq)]

pub struct Float<'a> {
  pub _tab: ::flatbuffers::Table<'a>,
}

impl<'a> ::flatbuffers::Follow<'a> for Float<'a> {
  type Inner = Float<'a>;
  #[inline]
  unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
    Self { _tab: unsafe {
::flatbuffers::Table::new(buf, loc) } }
  }
}

impl<'a> Float<'a> {
  pub const VT_VALUE: ::flatbuffers::VOffsetT = 4;

  #[inline]
  pub unsafe fn init_from_table(table: ::flatbuffers::Table<'a>) -> Self {
    Float { _tab: table }
  }
  /// Build a `Float` table from `args` and return its offset in `_fbb`.
  #[allow(unused_mut)]
  pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: ::flatbuffers::Allocator + 'bldr>(
    _fbb: &'mut_bldr mut ::flatbuffers::FlatBufferBuilder<'bldr, A>,
    args: &'args FloatArgs
  ) -> ::flatbuffers::WIPOffset<Float<'bldr>> {
    let mut builder = FloatBuilder::new(_fbb);
    builder.add_value(args.value);
    builder.finish()
  }

  /// The scalar `value` field; 0.0 when the slot is absent.
  #[inline]
  pub fn value(&self) -> f32 {
    // Safety:
    // Created from valid Table for this object
    // which contains a valid value in this slot
    unsafe { self._tab.get::<f32>(Float::VT_VALUE, Some(0.0)).unwrap() }
  }
}

impl ::flatbuffers::Verifiable for Float<'_> {
  #[inline]
  fn run_verifier(
    v: &mut ::flatbuffers::Verifier, pos: usize
  ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> {
    v.visit_table(pos)?
     .visit_field::<f32>("value", Self::VT_VALUE, false)?
     .finish();
    Ok(())
  }
}
/// Arguments for [`Float::create`].
pub struct FloatArgs {
  pub value: f32,
}
impl<'a> Default for FloatArgs {
  #[inline]
  fn default() -> Self {
    FloatArgs {
      value: 0.0,
    }
  }
}

/// Incremental builder for a `Float` table.
pub struct FloatBuilder<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> {
  fbb_: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>,
  start_: ::flatbuffers::WIPOffset<::flatbuffers::TableUnfinishedWIPOffset>,
}
impl<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> FloatBuilder<'a, 'b, A> {
  #[inline]
  pub fn add_value(&mut self, value: f32) {
    self.fbb_.push_slot::<f32>(Float::VT_VALUE, value, 0.0);
  }
  #[inline]
  pub fn new(_fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>) -> FloatBuilder<'a, 'b, A> {
    let start = _fbb.start_table();
    FloatBuilder {
      fbb_: _fbb,
      start_: start,
    }
  }
  #[inline]
  pub fn finish(self) -> ::flatbuffers::WIPOffset<Float<'a>> {
    let o = self.fbb_.end_table(self.start_);
    ::flatbuffers::WIPOffset::new(o.value())
  }
}

impl ::core::fmt::Debug for Float<'_> {
  fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result {
    let mut ds = f.debug_struct("Float");
    ds.field("value", &self.value());
    ds.finish()
  }
}
/// Generated table wrapping a single `f64` scalar `value`.
pub enum DoubleOffset {}
#[derive(Copy, Clone, PartialEq)]

pub struct Double<'a> {
  pub _tab: ::flatbuffers::Table<'a>,
}

impl<'a> ::flatbuffers::Follow<'a> for Double<'a> {
  type Inner = Double<'a>;
  #[inline]
  unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
    Self { _tab: unsafe { ::flatbuffers::Table::new(buf, loc) } }
  }
}

impl<'a> Double<'a> {
  pub const VT_VALUE: ::flatbuffers::VOffsetT = 4;

  #[inline]
  pub unsafe fn init_from_table(table: ::flatbuffers::Table<'a>) -> Self {
    Double { _tab: table }
  }
  /// Build a `Double` table from `args` and return its offset in `_fbb`.
  #[allow(unused_mut)]
  pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: ::flatbuffers::Allocator + 'bldr>(
    _fbb: &'mut_bldr mut ::flatbuffers::FlatBufferBuilder<'bldr, A>,
    args: &'args DoubleArgs
  ) -> ::flatbuffers::WIPOffset<Double<'bldr>> {
    let mut builder = DoubleBuilder::new(_fbb);
    builder.add_value(args.value);
    builder.finish()
  }

  /// The scalar `value` field; 0.0 when the slot is absent.
  #[inline]
  pub fn value(&self) -> f64 {
    // Safety:
    // Created from valid Table for this object
    // which contains a valid value in this slot
    unsafe { self._tab.get::<f64>(Double::VT_VALUE, Some(0.0)).unwrap() }
  }
}

impl ::flatbuffers::Verifiable for Double<'_> {
  #[inline]
  fn run_verifier(
    v: &mut ::flatbuffers::Verifier, pos: usize
  ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> {
    v.visit_table(pos)?
     .visit_field::<f64>("value", Self::VT_VALUE, false)?
     .finish();
    Ok(())
  }
}
/// Arguments for [`Double::create`].
pub struct DoubleArgs {
  pub value: f64,
}
impl<'a> Default for DoubleArgs {
  #[inline]
  fn default() -> Self {
    DoubleArgs {
      value: 0.0,
    }
  }
}

/// Incremental builder for a `Double` table.
pub struct DoubleBuilder<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> {
  fbb_: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>,
  start_: ::flatbuffers::WIPOffset<::flatbuffers::TableUnfinishedWIPOffset>,
}
impl<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> DoubleBuilder<'a, 'b, A> {
  #[inline]
  pub fn add_value(&mut self, value: f64) {
    self.fbb_.push_slot::<f64>(Double::VT_VALUE, value, 0.0);
  }
  #[inline]
  pub fn new(_fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>) -> DoubleBuilder<'a, 'b, A> {
    let start = _fbb.start_table();
    DoubleBuilder {
      fbb_: _fbb,
      start_: start,
    }
  }
  #[inline]
  pub fn finish(self) -> ::flatbuffers::WIPOffset<Double<'a>> {
    let o = self.fbb_.end_table(self.start_);
    ::flatbuffers::WIPOffset::new(o.value())
  }
}

impl ::core::fmt::Debug for Double<'_> {
  fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result {
    let mut ds = f.debug_struct("Double");
    ds.field("value", &self.value());
    ds.finish()
  }
}
/// Generated table wrapping an optional vector of `i8` (`[byte]` in the schema).
pub enum ArrayByteOffset {}
#[derive(Copy, Clone, PartialEq)]

pub struct ArrayByte<'a> {
  pub _tab: ::flatbuffers::Table<'a>,
}

impl<'a> ::flatbuffers::Follow<'a> for ArrayByte<'a> {
  type Inner = ArrayByte<'a>;
  #[inline]
  unsafe fn follow(buf: &'a
[u8], loc: usize) -> Self::Inner {
    Self { _tab: unsafe { ::flatbuffers::Table::new(buf, loc) } }
  }
}

impl<'a> ArrayByte<'a> {
  pub const VT_VALUE: ::flatbuffers::VOffsetT = 4;

  #[inline]
  pub unsafe fn init_from_table(table: ::flatbuffers::Table<'a>) -> Self {
    ArrayByte { _tab: table }
  }
  /// Build an `ArrayByte` table from `args` and return its offset in `_fbb`.
  #[allow(unused_mut)]
  pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: ::flatbuffers::Allocator + 'bldr>(
    _fbb: &'mut_bldr mut ::flatbuffers::FlatBufferBuilder<'bldr, A>,
    args: &'args ArrayByteArgs<'args>
  ) -> ::flatbuffers::WIPOffset<ArrayByte<'bldr>> {
    let mut builder = ArrayByteBuilder::new(_fbb);
    if let Some(x) = args.value { builder.add_value(x); }
    builder.finish()
  }

  /// The optional `value` vector; `None` when the slot is absent.
  #[inline]
  pub fn value(&self) -> Option<::flatbuffers::Vector<'a, i8>> {
    // Safety:
    // Created from valid Table for this object
    // which contains a valid value in this slot
    unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'a, i8>>>(ArrayByte::VT_VALUE, None) }
  }
}

impl ::flatbuffers::Verifiable for ArrayByte<'_> {
  #[inline]
  fn run_verifier(
    v: &mut ::flatbuffers::Verifier, pos: usize
  ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> {
    v.visit_table(pos)?
     .visit_field::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'_, i8>>>("value", Self::VT_VALUE, false)?
     .finish();
    Ok(())
  }
}
/// Arguments for [`ArrayByte::create`].
pub struct ArrayByteArgs<'a> {
  pub value: Option<::flatbuffers::WIPOffset<::flatbuffers::Vector<'a, i8>>>,
}
impl<'a> Default for ArrayByteArgs<'a> {
  #[inline]
  fn default() -> Self {
    ArrayByteArgs {
      value: None,
    }
  }
}

/// Incremental builder for an `ArrayByte` table.
pub struct ArrayByteBuilder<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> {
  fbb_: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>,
  start_: ::flatbuffers::WIPOffset<::flatbuffers::TableUnfinishedWIPOffset>,
}
impl<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> ArrayByteBuilder<'a, 'b, A> {
  #[inline]
  pub fn add_value(&mut self, value: ::flatbuffers::WIPOffset<::flatbuffers::Vector<'b, i8>>) {
    self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(ArrayByte::VT_VALUE, value);
  }
  #[inline]
  pub fn new(_fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>) -> ArrayByteBuilder<'a, 'b, A> {
    let start = _fbb.start_table();
    ArrayByteBuilder {
      fbb_: _fbb,
      start_: start,
    }
  }
  #[inline]
  pub fn finish(self) -> ::flatbuffers::WIPOffset<ArrayByte<'a>> {
    let o = self.fbb_.end_table(self.start_);
    ::flatbuffers::WIPOffset::new(o.value())
  }
}

impl ::core::fmt::Debug for ArrayByte<'_> {
  fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result {
    let mut ds = f.debug_struct("ArrayByte");
    ds.field("value", &self.value());
    ds.finish()
  }
}
/// Generated table wrapping an optional vector of `u8` (`[ubyte]` in the schema).
pub enum ArrayUByteOffset {}
#[derive(Copy, Clone, PartialEq)]

pub struct ArrayUByte<'a> {
  pub _tab: ::flatbuffers::Table<'a>,
}

impl<'a> ::flatbuffers::Follow<'a> for ArrayUByte<'a> {
  type Inner = ArrayUByte<'a>;
  #[inline]
  unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
    Self { _tab: unsafe { ::flatbuffers::Table::new(buf, loc) } }
  }
}

impl<'a> ArrayUByte<'a> {
  pub const VT_VALUE: ::flatbuffers::VOffsetT = 4;

  #[inline]
  pub unsafe fn init_from_table(table: ::flatbuffers::Table<'a>) -> Self {
    ArrayUByte { _tab: table }
  }
  /// Build an `ArrayUByte` table from `args` and return its offset in `_fbb`.
  #[allow(unused_mut)]
  pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: ::flatbuffers::Allocator + 'bldr>(
    _fbb: &'mut_bldr mut ::flatbuffers::FlatBufferBuilder<'bldr, A>,
    args: &'args ArrayUByteArgs<'args>
  ) -> ::flatbuffers::WIPOffset<ArrayUByte<'bldr>> {
    let mut builder = ArrayUByteBuilder::new(_fbb);
    if let Some(x) = args.value { builder.add_value(x); }
    builder.finish()
  }

  /// The optional `value` vector; `None` when the slot is absent.
  #[inline]
  pub fn value(&self) -> Option<::flatbuffers::Vector<'a, u8>> {
    // Safety:
    // Created from valid Table for this object
    // which contains a valid value in this slot
    unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'a, u8>>>(ArrayUByte::VT_VALUE, None) }
  }
}

impl ::flatbuffers::Verifiable for ArrayUByte<'_> {
  #[inline]
  fn run_verifier(
    v: &mut ::flatbuffers::Verifier, pos: usize
  ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> {
    v.visit_table(pos)?
     .visit_field::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'_, u8>>>("value", Self::VT_VALUE, false)?
     .finish();
    Ok(())
  }
}
/// Arguments for [`ArrayUByte::create`].
pub struct ArrayUByteArgs<'a> {
  pub value: Option<::flatbuffers::WIPOffset<::flatbuffers::Vector<'a, u8>>>,
}
impl<'a> Default for ArrayUByteArgs<'a> {
  #[inline]
  fn default() -> Self {
    ArrayUByteArgs {
      value: None,
    }
  }
}

/// Incremental builder for an `ArrayUByte` table.
pub struct ArrayUByteBuilder<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> {
  fbb_: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>,
  start_: ::flatbuffers::WIPOffset<::flatbuffers::TableUnfinishedWIPOffset>,
}
impl<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> ArrayUByteBuilder<'a, 'b, A> {
  #[inline]
  pub fn add_value(&mut self, value: ::flatbuffers::WIPOffset<::flatbuffers::Vector<'b, u8>>) {
    self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(ArrayUByte::VT_VALUE, value);
  }
  #[inline]
  pub fn new(_fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>) -> ArrayUByteBuilder<'a, 'b, A> {
    let start = _fbb.start_table();
    ArrayUByteBuilder {
      fbb_: _fbb,
      start_: start,
    }
  }
  #[inline]
  pub fn finish(self) -> ::flatbuffers::WIPOffset<ArrayUByte<'a>> {
    let o = self.fbb_.end_table(self.start_);
    ::flatbuffers::WIPOffset::new(o.value())
  }
}

impl ::core::fmt::Debug for ArrayUByte<'_> {
  fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result {
    let mut ds = f.debug_struct("ArrayUByte");
    ds.field("value", &self.value());
    ds.finish()
  }
}
/// Generated table wrapping an optional vector of `i16` (`[short]` in the schema).
pub enum ArrayShortOffset {}
#[derive(Copy, Clone, PartialEq)]

pub struct ArrayShort<'a> {
  pub _tab: ::flatbuffers::Table<'a>,
}

impl<'a> ::flatbuffers::Follow<'a> for ArrayShort<'a> {
  type Inner = ArrayShort<'a>;
  #[inline]
  unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
    Self { _tab: unsafe { ::flatbuffers::Table::new(buf, loc) } }
  }
}

impl<'a> ArrayShort<'a> {
  pub const VT_VALUE: ::flatbuffers::VOffsetT = 4;

  #[inline]
  pub unsafe fn init_from_table(table: ::flatbuffers::Table<'a>) -> Self {
    ArrayShort { _tab: table }
  }
  /// Build an `ArrayShort` table from `args` and return its offset in `_fbb`.
  #[allow(unused_mut)]
  pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: ::flatbuffers::Allocator + 'bldr>(
    _fbb: &'mut_bldr mut ::flatbuffers::FlatBufferBuilder<'bldr, A>,
    args: &'args ArrayShortArgs<'args>
  ) -> ::flatbuffers::WIPOffset<ArrayShort<'bldr>> {
    let mut builder = ArrayShortBuilder::new(_fbb);
    if let Some(x) = args.value { builder.add_value(x); }
    builder.finish()
  }

  /// The optional `value` vector; `None` when the slot is absent.
  #[inline]
  pub fn value(&self) -> Option<::flatbuffers::Vector<'a, i16>> {
    // Safety:
    // Created from valid Table for this object
    // which contains a valid value in this slot
    unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'a, i16>>>(ArrayShort::VT_VALUE, None) }
  }
}

impl ::flatbuffers::Verifiable for ArrayShort<'_> {
  #[inline]
  fn run_verifier(
    v: &mut ::flatbuffers::Verifier, pos: usize
  ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> {
    v.visit_table(pos)?
     .visit_field::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'_, i16>>>("value", Self::VT_VALUE, false)?
+ .finish(); + Ok(()) + } +} +pub struct ArrayShortArgs<'a> { + pub value: Option<::flatbuffers::WIPOffset<::flatbuffers::Vector<'a, i16>>>, +} +impl<'a> Default for ArrayShortArgs<'a> { + #[inline] + fn default() -> Self { + ArrayShortArgs { + value: None, + } + } +} + +pub struct ArrayShortBuilder<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> { + fbb_: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + start_: ::flatbuffers::WIPOffset<::flatbuffers::TableUnfinishedWIPOffset>, +} +impl<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> ArrayShortBuilder<'a, 'b, A> { + #[inline] + pub fn add_value(&mut self, value: ::flatbuffers::WIPOffset<::flatbuffers::Vector<'b , i16>>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(ArrayShort::VT_VALUE, value); + } + #[inline] + pub fn new(_fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>) -> ArrayShortBuilder<'a, 'b, A> { + let start = _fbb.start_table(); + ArrayShortBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub fn finish(self) -> ::flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + ::flatbuffers::WIPOffset::new(o.value()) + } +} + +impl ::core::fmt::Debug for ArrayShort<'_> { + fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + let mut ds = f.debug_struct("ArrayShort"); + ds.field("value", &self.value()); + ds.finish() + } +} +pub enum ArrayUShortOffset {} +#[derive(Copy, Clone, PartialEq)] + +pub struct ArrayUShort<'a> { + pub _tab: ::flatbuffers::Table<'a>, +} + +impl<'a> ::flatbuffers::Follow<'a> for ArrayUShort<'a> { + type Inner = ArrayUShort<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { _tab: unsafe { ::flatbuffers::Table::new(buf, loc) } } + } +} + +impl<'a> ArrayUShort<'a> { + pub const VT_VALUE: ::flatbuffers::VOffsetT = 4; + + #[inline] + pub unsafe fn init_from_table(table: ::flatbuffers::Table<'a>) -> Self { + ArrayUShort { _tab: table } + } + #[allow(unused_mut)] + pub fn create<'bldr: 
'args, 'args: 'mut_bldr, 'mut_bldr, A: ::flatbuffers::Allocator + 'bldr>( + _fbb: &'mut_bldr mut ::flatbuffers::FlatBufferBuilder<'bldr, A>, + args: &'args ArrayUShortArgs<'args> + ) -> ::flatbuffers::WIPOffset> { + let mut builder = ArrayUShortBuilder::new(_fbb); + if let Some(x) = args.value { builder.add_value(x); } + builder.finish() + } + + + #[inline] + pub fn value(&self) -> Option<::flatbuffers::Vector<'a, u16>> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'a, u16>>>(ArrayUShort::VT_VALUE, None)} + } +} + +impl ::flatbuffers::Verifiable for ArrayUShort<'_> { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + v.visit_table(pos)? + .visit_field::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'_, u16>>>("value", Self::VT_VALUE, false)? + .finish(); + Ok(()) + } +} +pub struct ArrayUShortArgs<'a> { + pub value: Option<::flatbuffers::WIPOffset<::flatbuffers::Vector<'a, u16>>>, +} +impl<'a> Default for ArrayUShortArgs<'a> { + #[inline] + fn default() -> Self { + ArrayUShortArgs { + value: None, + } + } +} + +pub struct ArrayUShortBuilder<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> { + fbb_: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + start_: ::flatbuffers::WIPOffset<::flatbuffers::TableUnfinishedWIPOffset>, +} +impl<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> ArrayUShortBuilder<'a, 'b, A> { + #[inline] + pub fn add_value(&mut self, value: ::flatbuffers::WIPOffset<::flatbuffers::Vector<'b , u16>>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(ArrayUShort::VT_VALUE, value); + } + #[inline] + pub fn new(_fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>) -> ArrayUShortBuilder<'a, 'b, A> { + let start = _fbb.start_table(); + ArrayUShortBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub fn 
finish(self) -> ::flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + ::flatbuffers::WIPOffset::new(o.value()) + } +} + +impl ::core::fmt::Debug for ArrayUShort<'_> { + fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + let mut ds = f.debug_struct("ArrayUShort"); + ds.field("value", &self.value()); + ds.finish() + } +} +pub enum ArrayIntOffset {} +#[derive(Copy, Clone, PartialEq)] + +pub struct ArrayInt<'a> { + pub _tab: ::flatbuffers::Table<'a>, +} + +impl<'a> ::flatbuffers::Follow<'a> for ArrayInt<'a> { + type Inner = ArrayInt<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { _tab: unsafe { ::flatbuffers::Table::new(buf, loc) } } + } +} + +impl<'a> ArrayInt<'a> { + pub const VT_VALUE: ::flatbuffers::VOffsetT = 4; + + #[inline] + pub unsafe fn init_from_table(table: ::flatbuffers::Table<'a>) -> Self { + ArrayInt { _tab: table } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: ::flatbuffers::Allocator + 'bldr>( + _fbb: &'mut_bldr mut ::flatbuffers::FlatBufferBuilder<'bldr, A>, + args: &'args ArrayIntArgs<'args> + ) -> ::flatbuffers::WIPOffset> { + let mut builder = ArrayIntBuilder::new(_fbb); + if let Some(x) = args.value { builder.add_value(x); } + builder.finish() + } + + + #[inline] + pub fn value(&self) -> Option<::flatbuffers::Vector<'a, i32>> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'a, i32>>>(ArrayInt::VT_VALUE, None)} + } +} + +impl ::flatbuffers::Verifiable for ArrayInt<'_> { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + v.visit_table(pos)? + .visit_field::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'_, i32>>>("value", Self::VT_VALUE, false)? 
+ .finish(); + Ok(()) + } +} +pub struct ArrayIntArgs<'a> { + pub value: Option<::flatbuffers::WIPOffset<::flatbuffers::Vector<'a, i32>>>, +} +impl<'a> Default for ArrayIntArgs<'a> { + #[inline] + fn default() -> Self { + ArrayIntArgs { + value: None, + } + } +} + +pub struct ArrayIntBuilder<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> { + fbb_: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + start_: ::flatbuffers::WIPOffset<::flatbuffers::TableUnfinishedWIPOffset>, +} +impl<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> ArrayIntBuilder<'a, 'b, A> { + #[inline] + pub fn add_value(&mut self, value: ::flatbuffers::WIPOffset<::flatbuffers::Vector<'b , i32>>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(ArrayInt::VT_VALUE, value); + } + #[inline] + pub fn new(_fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>) -> ArrayIntBuilder<'a, 'b, A> { + let start = _fbb.start_table(); + ArrayIntBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub fn finish(self) -> ::flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + ::flatbuffers::WIPOffset::new(o.value()) + } +} + +impl ::core::fmt::Debug for ArrayInt<'_> { + fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + let mut ds = f.debug_struct("ArrayInt"); + ds.field("value", &self.value()); + ds.finish() + } +} +pub enum ArrayUIntOffset {} +#[derive(Copy, Clone, PartialEq)] + +pub struct ArrayUInt<'a> { + pub _tab: ::flatbuffers::Table<'a>, +} + +impl<'a> ::flatbuffers::Follow<'a> for ArrayUInt<'a> { + type Inner = ArrayUInt<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { _tab: unsafe { ::flatbuffers::Table::new(buf, loc) } } + } +} + +impl<'a> ArrayUInt<'a> { + pub const VT_VALUE: ::flatbuffers::VOffsetT = 4; + + #[inline] + pub unsafe fn init_from_table(table: ::flatbuffers::Table<'a>) -> Self { + ArrayUInt { _tab: table } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, 
A: ::flatbuffers::Allocator + 'bldr>( + _fbb: &'mut_bldr mut ::flatbuffers::FlatBufferBuilder<'bldr, A>, + args: &'args ArrayUIntArgs<'args> + ) -> ::flatbuffers::WIPOffset> { + let mut builder = ArrayUIntBuilder::new(_fbb); + if let Some(x) = args.value { builder.add_value(x); } + builder.finish() + } + + + #[inline] + pub fn value(&self) -> Option<::flatbuffers::Vector<'a, u32>> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'a, u32>>>(ArrayUInt::VT_VALUE, None)} + } +} + +impl ::flatbuffers::Verifiable for ArrayUInt<'_> { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + v.visit_table(pos)? + .visit_field::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'_, u32>>>("value", Self::VT_VALUE, false)? + .finish(); + Ok(()) + } +} +pub struct ArrayUIntArgs<'a> { + pub value: Option<::flatbuffers::WIPOffset<::flatbuffers::Vector<'a, u32>>>, +} +impl<'a> Default for ArrayUIntArgs<'a> { + #[inline] + fn default() -> Self { + ArrayUIntArgs { + value: None, + } + } +} + +pub struct ArrayUIntBuilder<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> { + fbb_: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + start_: ::flatbuffers::WIPOffset<::flatbuffers::TableUnfinishedWIPOffset>, +} +impl<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> ArrayUIntBuilder<'a, 'b, A> { + #[inline] + pub fn add_value(&mut self, value: ::flatbuffers::WIPOffset<::flatbuffers::Vector<'b , u32>>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(ArrayUInt::VT_VALUE, value); + } + #[inline] + pub fn new(_fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>) -> ArrayUIntBuilder<'a, 'b, A> { + let start = _fbb.start_table(); + ArrayUIntBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub fn finish(self) -> ::flatbuffers::WIPOffset> { + let o = 
self.fbb_.end_table(self.start_); + ::flatbuffers::WIPOffset::new(o.value()) + } +} + +impl ::core::fmt::Debug for ArrayUInt<'_> { + fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + let mut ds = f.debug_struct("ArrayUInt"); + ds.field("value", &self.value()); + ds.finish() + } +} +pub enum ArrayLongOffset {} +#[derive(Copy, Clone, PartialEq)] + +pub struct ArrayLong<'a> { + pub _tab: ::flatbuffers::Table<'a>, +} + +impl<'a> ::flatbuffers::Follow<'a> for ArrayLong<'a> { + type Inner = ArrayLong<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { _tab: unsafe { ::flatbuffers::Table::new(buf, loc) } } + } +} + +impl<'a> ArrayLong<'a> { + pub const VT_VALUE: ::flatbuffers::VOffsetT = 4; + + #[inline] + pub unsafe fn init_from_table(table: ::flatbuffers::Table<'a>) -> Self { + ArrayLong { _tab: table } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: ::flatbuffers::Allocator + 'bldr>( + _fbb: &'mut_bldr mut ::flatbuffers::FlatBufferBuilder<'bldr, A>, + args: &'args ArrayLongArgs<'args> + ) -> ::flatbuffers::WIPOffset> { + let mut builder = ArrayLongBuilder::new(_fbb); + if let Some(x) = args.value { builder.add_value(x); } + builder.finish() + } + + + #[inline] + pub fn value(&self) -> Option<::flatbuffers::Vector<'a, i64>> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'a, i64>>>(ArrayLong::VT_VALUE, None)} + } +} + +impl ::flatbuffers::Verifiable for ArrayLong<'_> { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + v.visit_table(pos)? + .visit_field::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'_, i64>>>("value", Self::VT_VALUE, false)? 
+ .finish(); + Ok(()) + } +} +pub struct ArrayLongArgs<'a> { + pub value: Option<::flatbuffers::WIPOffset<::flatbuffers::Vector<'a, i64>>>, +} +impl<'a> Default for ArrayLongArgs<'a> { + #[inline] + fn default() -> Self { + ArrayLongArgs { + value: None, + } + } +} + +pub struct ArrayLongBuilder<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> { + fbb_: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + start_: ::flatbuffers::WIPOffset<::flatbuffers::TableUnfinishedWIPOffset>, +} +impl<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> ArrayLongBuilder<'a, 'b, A> { + #[inline] + pub fn add_value(&mut self, value: ::flatbuffers::WIPOffset<::flatbuffers::Vector<'b , i64>>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(ArrayLong::VT_VALUE, value); + } + #[inline] + pub fn new(_fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>) -> ArrayLongBuilder<'a, 'b, A> { + let start = _fbb.start_table(); + ArrayLongBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub fn finish(self) -> ::flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + ::flatbuffers::WIPOffset::new(o.value()) + } +} + +impl ::core::fmt::Debug for ArrayLong<'_> { + fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + let mut ds = f.debug_struct("ArrayLong"); + ds.field("value", &self.value()); + ds.finish() + } +} +pub enum ArrayULongOffset {} +#[derive(Copy, Clone, PartialEq)] + +pub struct ArrayULong<'a> { + pub _tab: ::flatbuffers::Table<'a>, +} + +impl<'a> ::flatbuffers::Follow<'a> for ArrayULong<'a> { + type Inner = ArrayULong<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { _tab: unsafe { ::flatbuffers::Table::new(buf, loc) } } + } +} + +impl<'a> ArrayULong<'a> { + pub const VT_VALUE: ::flatbuffers::VOffsetT = 4; + + #[inline] + pub unsafe fn init_from_table(table: ::flatbuffers::Table<'a>) -> Self { + ArrayULong { _tab: table } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 
'mut_bldr, 'mut_bldr, A: ::flatbuffers::Allocator + 'bldr>( + _fbb: &'mut_bldr mut ::flatbuffers::FlatBufferBuilder<'bldr, A>, + args: &'args ArrayULongArgs<'args> + ) -> ::flatbuffers::WIPOffset> { + let mut builder = ArrayULongBuilder::new(_fbb); + if let Some(x) = args.value { builder.add_value(x); } + builder.finish() + } + + + #[inline] + pub fn value(&self) -> Option<::flatbuffers::Vector<'a, u64>> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'a, u64>>>(ArrayULong::VT_VALUE, None)} + } +} + +impl ::flatbuffers::Verifiable for ArrayULong<'_> { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + v.visit_table(pos)? + .visit_field::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'_, u64>>>("value", Self::VT_VALUE, false)? + .finish(); + Ok(()) + } +} +pub struct ArrayULongArgs<'a> { + pub value: Option<::flatbuffers::WIPOffset<::flatbuffers::Vector<'a, u64>>>, +} +impl<'a> Default for ArrayULongArgs<'a> { + #[inline] + fn default() -> Self { + ArrayULongArgs { + value: None, + } + } +} + +pub struct ArrayULongBuilder<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> { + fbb_: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + start_: ::flatbuffers::WIPOffset<::flatbuffers::TableUnfinishedWIPOffset>, +} +impl<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> ArrayULongBuilder<'a, 'b, A> { + #[inline] + pub fn add_value(&mut self, value: ::flatbuffers::WIPOffset<::flatbuffers::Vector<'b , u64>>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(ArrayULong::VT_VALUE, value); + } + #[inline] + pub fn new(_fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>) -> ArrayULongBuilder<'a, 'b, A> { + let start = _fbb.start_table(); + ArrayULongBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub fn finish(self) -> 
::flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + ::flatbuffers::WIPOffset::new(o.value()) + } +} + +impl ::core::fmt::Debug for ArrayULong<'_> { + fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + let mut ds = f.debug_struct("ArrayULong"); + ds.field("value", &self.value()); + ds.finish() + } +} +pub enum ArrayFloatOffset {} +#[derive(Copy, Clone, PartialEq)] + +pub struct ArrayFloat<'a> { + pub _tab: ::flatbuffers::Table<'a>, +} + +impl<'a> ::flatbuffers::Follow<'a> for ArrayFloat<'a> { + type Inner = ArrayFloat<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { _tab: unsafe { ::flatbuffers::Table::new(buf, loc) } } + } +} + +impl<'a> ArrayFloat<'a> { + pub const VT_VALUE: ::flatbuffers::VOffsetT = 4; + + #[inline] + pub unsafe fn init_from_table(table: ::flatbuffers::Table<'a>) -> Self { + ArrayFloat { _tab: table } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: ::flatbuffers::Allocator + 'bldr>( + _fbb: &'mut_bldr mut ::flatbuffers::FlatBufferBuilder<'bldr, A>, + args: &'args ArrayFloatArgs<'args> + ) -> ::flatbuffers::WIPOffset> { + let mut builder = ArrayFloatBuilder::new(_fbb); + if let Some(x) = args.value { builder.add_value(x); } + builder.finish() + } + + + #[inline] + pub fn value(&self) -> Option<::flatbuffers::Vector<'a, f32>> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'a, f32>>>(ArrayFloat::VT_VALUE, None)} + } +} + +impl ::flatbuffers::Verifiable for ArrayFloat<'_> { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + v.visit_table(pos)? + .visit_field::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'_, f32>>>("value", Self::VT_VALUE, false)? 
+ .finish(); + Ok(()) + } +} +pub struct ArrayFloatArgs<'a> { + pub value: Option<::flatbuffers::WIPOffset<::flatbuffers::Vector<'a, f32>>>, +} +impl<'a> Default for ArrayFloatArgs<'a> { + #[inline] + fn default() -> Self { + ArrayFloatArgs { + value: None, + } + } +} + +pub struct ArrayFloatBuilder<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> { + fbb_: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + start_: ::flatbuffers::WIPOffset<::flatbuffers::TableUnfinishedWIPOffset>, +} +impl<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> ArrayFloatBuilder<'a, 'b, A> { + #[inline] + pub fn add_value(&mut self, value: ::flatbuffers::WIPOffset<::flatbuffers::Vector<'b , f32>>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(ArrayFloat::VT_VALUE, value); + } + #[inline] + pub fn new(_fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>) -> ArrayFloatBuilder<'a, 'b, A> { + let start = _fbb.start_table(); + ArrayFloatBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub fn finish(self) -> ::flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + ::flatbuffers::WIPOffset::new(o.value()) + } +} + +impl ::core::fmt::Debug for ArrayFloat<'_> { + fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + let mut ds = f.debug_struct("ArrayFloat"); + ds.field("value", &self.value()); + ds.finish() + } +} +pub enum ArrayDoubleOffset {} +#[derive(Copy, Clone, PartialEq)] + +pub struct ArrayDouble<'a> { + pub _tab: ::flatbuffers::Table<'a>, +} + +impl<'a> ::flatbuffers::Follow<'a> for ArrayDouble<'a> { + type Inner = ArrayDouble<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { _tab: unsafe { ::flatbuffers::Table::new(buf, loc) } } + } +} + +impl<'a> ArrayDouble<'a> { + pub const VT_VALUE: ::flatbuffers::VOffsetT = 4; + + #[inline] + pub unsafe fn init_from_table(table: ::flatbuffers::Table<'a>) -> Self { + ArrayDouble { _tab: table } + } + #[allow(unused_mut)] + pub fn create<'bldr: 
'args, 'args: 'mut_bldr, 'mut_bldr, A: ::flatbuffers::Allocator + 'bldr>( + _fbb: &'mut_bldr mut ::flatbuffers::FlatBufferBuilder<'bldr, A>, + args: &'args ArrayDoubleArgs<'args> + ) -> ::flatbuffers::WIPOffset> { + let mut builder = ArrayDoubleBuilder::new(_fbb); + if let Some(x) = args.value { builder.add_value(x); } + builder.finish() + } + + + #[inline] + pub fn value(&self) -> Option<::flatbuffers::Vector<'a, f64>> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'a, f64>>>(ArrayDouble::VT_VALUE, None)} + } +} + +impl ::flatbuffers::Verifiable for ArrayDouble<'_> { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + v.visit_table(pos)? + .visit_field::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'_, f64>>>("value", Self::VT_VALUE, false)? + .finish(); + Ok(()) + } +} +pub struct ArrayDoubleArgs<'a> { + pub value: Option<::flatbuffers::WIPOffset<::flatbuffers::Vector<'a, f64>>>, +} +impl<'a> Default for ArrayDoubleArgs<'a> { + #[inline] + fn default() -> Self { + ArrayDoubleArgs { + value: None, + } + } +} + +pub struct ArrayDoubleBuilder<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> { + fbb_: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + start_: ::flatbuffers::WIPOffset<::flatbuffers::TableUnfinishedWIPOffset>, +} +impl<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> ArrayDoubleBuilder<'a, 'b, A> { + #[inline] + pub fn add_value(&mut self, value: ::flatbuffers::WIPOffset<::flatbuffers::Vector<'b , f64>>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(ArrayDouble::VT_VALUE, value); + } + #[inline] + pub fn new(_fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>) -> ArrayDoubleBuilder<'a, 'b, A> { + let start = _fbb.start_table(); + ArrayDoubleBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub fn 
finish(self) -> ::flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + ::flatbuffers::WIPOffset::new(o.value()) + } +} + +impl ::core::fmt::Debug for ArrayDouble<'_> { + fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + let mut ds = f.debug_struct("ArrayDouble"); + ds.field("value", &self.value()); + ds.finish() + } +} +pub enum f144_LogDataOffset {} +#[derive(Copy, Clone, PartialEq)] + +pub struct f144_LogData<'a> { + pub _tab: ::flatbuffers::Table<'a>, +} + +impl<'a> ::flatbuffers::Follow<'a> for f144_LogData<'a> { + type Inner = f144_LogData<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { _tab: unsafe { ::flatbuffers::Table::new(buf, loc) } } + } +} + +impl<'a> f144_LogData<'a> { + pub const VT_SOURCE_NAME: ::flatbuffers::VOffsetT = 4; + pub const VT_TIMESTAMP: ::flatbuffers::VOffsetT = 6; + pub const VT_VALUE_TYPE: ::flatbuffers::VOffsetT = 8; + pub const VT_VALUE: ::flatbuffers::VOffsetT = 10; + + #[inline] + pub unsafe fn init_from_table(table: ::flatbuffers::Table<'a>) -> Self { + f144_LogData { _tab: table } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: ::flatbuffers::Allocator + 'bldr>( + _fbb: &'mut_bldr mut ::flatbuffers::FlatBufferBuilder<'bldr, A>, + args: &'args f144_LogDataArgs<'args> + ) -> ::flatbuffers::WIPOffset> { + let mut builder = f144_LogDataBuilder::new(_fbb); + builder.add_timestamp(args.timestamp); + if let Some(x) = args.value { builder.add_value(x); } + if let Some(x) = args.source_name { builder.add_source_name(x); } + builder.add_value_type(args.value_type); + builder.finish() + } + + + #[inline] + pub fn source_name(&self) -> &'a str { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<&str>>(f144_LogData::VT_SOURCE_NAME, None).unwrap()} + } + #[inline] + pub fn timestamp(&self) -> i64 { + // 
Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::(f144_LogData::VT_TIMESTAMP, Some(0)).unwrap()} + } + #[inline] + pub fn value_type(&self) -> Value { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::(f144_LogData::VT_VALUE_TYPE, Some(Value::NONE)).unwrap()} + } + #[inline] + pub fn value(&self) -> ::flatbuffers::Table<'a> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<::flatbuffers::Table<'a>>>(f144_LogData::VT_VALUE, None).unwrap()} + } + #[inline] + #[allow(non_snake_case)] + pub fn value_as_byte(&self) -> Option> { + if self.value_type() == Value::Byte { + let u = self.value(); + // Safety: + // Created from a valid Table for this object + // Which contains a valid union in this slot + Some(unsafe { Byte::init_from_table(u) }) + } else { + None + } + } + + #[inline] + #[allow(non_snake_case)] + pub fn value_as_ubyte(&self) -> Option> { + if self.value_type() == Value::UByte { + let u = self.value(); + // Safety: + // Created from a valid Table for this object + // Which contains a valid union in this slot + Some(unsafe { UByte::init_from_table(u) }) + } else { + None + } + } + + #[inline] + #[allow(non_snake_case)] + pub fn value_as_short(&self) -> Option> { + if self.value_type() == Value::Short { + let u = self.value(); + // Safety: + // Created from a valid Table for this object + // Which contains a valid union in this slot + Some(unsafe { Short::init_from_table(u) }) + } else { + None + } + } + + #[inline] + #[allow(non_snake_case)] + pub fn value_as_ushort(&self) -> Option> { + if self.value_type() == Value::UShort { + let u = self.value(); + // Safety: + // Created from a valid Table for this object + // Which contains a valid union in this slot + Some(unsafe { 
UShort::init_from_table(u) }) + } else { + None + } + } + + #[inline] + #[allow(non_snake_case)] + pub fn value_as_int(&self) -> Option> { + if self.value_type() == Value::Int { + let u = self.value(); + // Safety: + // Created from a valid Table for this object + // Which contains a valid union in this slot + Some(unsafe { Int::init_from_table(u) }) + } else { + None + } + } + + #[inline] + #[allow(non_snake_case)] + pub fn value_as_uint(&self) -> Option> { + if self.value_type() == Value::UInt { + let u = self.value(); + // Safety: + // Created from a valid Table for this object + // Which contains a valid union in this slot + Some(unsafe { UInt::init_from_table(u) }) + } else { + None + } + } + + #[inline] + #[allow(non_snake_case)] + pub fn value_as_long(&self) -> Option> { + if self.value_type() == Value::Long { + let u = self.value(); + // Safety: + // Created from a valid Table for this object + // Which contains a valid union in this slot + Some(unsafe { Long::init_from_table(u) }) + } else { + None + } + } + + #[inline] + #[allow(non_snake_case)] + pub fn value_as_ulong(&self) -> Option> { + if self.value_type() == Value::ULong { + let u = self.value(); + // Safety: + // Created from a valid Table for this object + // Which contains a valid union in this slot + Some(unsafe { ULong::init_from_table(u) }) + } else { + None + } + } + + #[inline] + #[allow(non_snake_case)] + pub fn value_as_float(&self) -> Option> { + if self.value_type() == Value::Float { + let u = self.value(); + // Safety: + // Created from a valid Table for this object + // Which contains a valid union in this slot + Some(unsafe { Float::init_from_table(u) }) + } else { + None + } + } + + #[inline] + #[allow(non_snake_case)] + pub fn value_as_double(&self) -> Option> { + if self.value_type() == Value::Double { + let u = self.value(); + // Safety: + // Created from a valid Table for this object + // Which contains a valid union in this slot + Some(unsafe { Double::init_from_table(u) }) + } 
else { + None + } + } + + #[inline] + #[allow(non_snake_case)] + pub fn value_as_array_byte(&self) -> Option> { + if self.value_type() == Value::ArrayByte { + let u = self.value(); + // Safety: + // Created from a valid Table for this object + // Which contains a valid union in this slot + Some(unsafe { ArrayByte::init_from_table(u) }) + } else { + None + } + } + + #[inline] + #[allow(non_snake_case)] + pub fn value_as_array_ubyte(&self) -> Option> { + if self.value_type() == Value::ArrayUByte { + let u = self.value(); + // Safety: + // Created from a valid Table for this object + // Which contains a valid union in this slot + Some(unsafe { ArrayUByte::init_from_table(u) }) + } else { + None + } + } + + #[inline] + #[allow(non_snake_case)] + pub fn value_as_array_short(&self) -> Option> { + if self.value_type() == Value::ArrayShort { + let u = self.value(); + // Safety: + // Created from a valid Table for this object + // Which contains a valid union in this slot + Some(unsafe { ArrayShort::init_from_table(u) }) + } else { + None + } + } + + #[inline] + #[allow(non_snake_case)] + pub fn value_as_array_ushort(&self) -> Option> { + if self.value_type() == Value::ArrayUShort { + let u = self.value(); + // Safety: + // Created from a valid Table for this object + // Which contains a valid union in this slot + Some(unsafe { ArrayUShort::init_from_table(u) }) + } else { + None + } + } + + #[inline] + #[allow(non_snake_case)] + pub fn value_as_array_int(&self) -> Option> { + if self.value_type() == Value::ArrayInt { + let u = self.value(); + // Safety: + // Created from a valid Table for this object + // Which contains a valid union in this slot + Some(unsafe { ArrayInt::init_from_table(u) }) + } else { + None + } + } + + #[inline] + #[allow(non_snake_case)] + pub fn value_as_array_uint(&self) -> Option> { + if self.value_type() == Value::ArrayUInt { + let u = self.value(); + // Safety: + // Created from a valid Table for this object + // Which contains a valid union in 
this slot + Some(unsafe { ArrayUInt::init_from_table(u) }) + } else { + None + } + } + + #[inline] + #[allow(non_snake_case)] + pub fn value_as_array_long(&self) -> Option> { + if self.value_type() == Value::ArrayLong { + let u = self.value(); + // Safety: + // Created from a valid Table for this object + // Which contains a valid union in this slot + Some(unsafe { ArrayLong::init_from_table(u) }) + } else { + None + } + } + + #[inline] + #[allow(non_snake_case)] + pub fn value_as_array_ulong(&self) -> Option> { + if self.value_type() == Value::ArrayULong { + let u = self.value(); + // Safety: + // Created from a valid Table for this object + // Which contains a valid union in this slot + Some(unsafe { ArrayULong::init_from_table(u) }) + } else { + None + } + } + + #[inline] + #[allow(non_snake_case)] + pub fn value_as_array_float(&self) -> Option> { + if self.value_type() == Value::ArrayFloat { + let u = self.value(); + // Safety: + // Created from a valid Table for this object + // Which contains a valid union in this slot + Some(unsafe { ArrayFloat::init_from_table(u) }) + } else { + None + } + } + + #[inline] + #[allow(non_snake_case)] + pub fn value_as_array_double(&self) -> Option> { + if self.value_type() == Value::ArrayDouble { + let u = self.value(); + // Safety: + // Created from a valid Table for this object + // Which contains a valid union in this slot + Some(unsafe { ArrayDouble::init_from_table(u) }) + } else { + None + } + } + +} + +impl ::flatbuffers::Verifiable for f144_LogData<'_> { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + v.visit_table(pos)? + .visit_field::<::flatbuffers::ForwardsUOffset<&str>>("source_name", Self::VT_SOURCE_NAME, true)? + .visit_field::("timestamp", Self::VT_TIMESTAMP, false)? 
+ .visit_union::("value_type", Self::VT_VALUE_TYPE, "value", Self::VT_VALUE, true, |key, v, pos| { + match key { + Value::Byte => v.verify_union_variant::<::flatbuffers::ForwardsUOffset>("Value::Byte", pos), + Value::UByte => v.verify_union_variant::<::flatbuffers::ForwardsUOffset>("Value::UByte", pos), + Value::Short => v.verify_union_variant::<::flatbuffers::ForwardsUOffset>("Value::Short", pos), + Value::UShort => v.verify_union_variant::<::flatbuffers::ForwardsUOffset>("Value::UShort", pos), + Value::Int => v.verify_union_variant::<::flatbuffers::ForwardsUOffset>("Value::Int", pos), + Value::UInt => v.verify_union_variant::<::flatbuffers::ForwardsUOffset>("Value::UInt", pos), + Value::Long => v.verify_union_variant::<::flatbuffers::ForwardsUOffset>("Value::Long", pos), + Value::ULong => v.verify_union_variant::<::flatbuffers::ForwardsUOffset>("Value::ULong", pos), + Value::Float => v.verify_union_variant::<::flatbuffers::ForwardsUOffset>("Value::Float", pos), + Value::Double => v.verify_union_variant::<::flatbuffers::ForwardsUOffset>("Value::Double", pos), + Value::ArrayByte => v.verify_union_variant::<::flatbuffers::ForwardsUOffset>("Value::ArrayByte", pos), + Value::ArrayUByte => v.verify_union_variant::<::flatbuffers::ForwardsUOffset>("Value::ArrayUByte", pos), + Value::ArrayShort => v.verify_union_variant::<::flatbuffers::ForwardsUOffset>("Value::ArrayShort", pos), + Value::ArrayUShort => v.verify_union_variant::<::flatbuffers::ForwardsUOffset>("Value::ArrayUShort", pos), + Value::ArrayInt => v.verify_union_variant::<::flatbuffers::ForwardsUOffset>("Value::ArrayInt", pos), + Value::ArrayUInt => v.verify_union_variant::<::flatbuffers::ForwardsUOffset>("Value::ArrayUInt", pos), + Value::ArrayLong => v.verify_union_variant::<::flatbuffers::ForwardsUOffset>("Value::ArrayLong", pos), + Value::ArrayULong => v.verify_union_variant::<::flatbuffers::ForwardsUOffset>("Value::ArrayULong", pos), + Value::ArrayFloat => 
v.verify_union_variant::<::flatbuffers::ForwardsUOffset>("Value::ArrayFloat", pos), + Value::ArrayDouble => v.verify_union_variant::<::flatbuffers::ForwardsUOffset>("Value::ArrayDouble", pos), + _ => Ok(()), + } + })? + .finish(); + Ok(()) + } +} +pub struct f144_LogDataArgs<'a> { + pub source_name: Option<::flatbuffers::WIPOffset<&'a str>>, + pub timestamp: i64, + pub value_type: Value, + pub value: Option<::flatbuffers::WIPOffset<::flatbuffers::UnionWIPOffset>>, +} +impl<'a> Default for f144_LogDataArgs<'a> { + #[inline] + fn default() -> Self { + f144_LogDataArgs { + source_name: None, // required field + timestamp: 0, + value_type: Value::NONE, + value: None, // required field + } + } +} + +pub struct f144_LogDataBuilder<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> { + fbb_: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + start_: ::flatbuffers::WIPOffset<::flatbuffers::TableUnfinishedWIPOffset>, +} +impl<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> f144_LogDataBuilder<'a, 'b, A> { + #[inline] + pub fn add_source_name(&mut self, source_name: ::flatbuffers::WIPOffset<&'b str>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(f144_LogData::VT_SOURCE_NAME, source_name); + } + #[inline] + pub fn add_timestamp(&mut self, timestamp: i64) { + self.fbb_.push_slot::(f144_LogData::VT_TIMESTAMP, timestamp, 0); + } + #[inline] + pub fn add_value_type(&mut self, value_type: Value) { + self.fbb_.push_slot::(f144_LogData::VT_VALUE_TYPE, value_type, Value::NONE); + } + #[inline] + pub fn add_value(&mut self, value: ::flatbuffers::WIPOffset<::flatbuffers::UnionWIPOffset>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(f144_LogData::VT_VALUE, value); + } + #[inline] + pub fn new(_fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>) -> f144_LogDataBuilder<'a, 'b, A> { + let start = _fbb.start_table(); + f144_LogDataBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub fn finish(self) -> ::flatbuffers::WIPOffset> { + let o = 
self.fbb_.end_table(self.start_); + self.fbb_.required(o, f144_LogData::VT_SOURCE_NAME,"source_name"); + self.fbb_.required(o, f144_LogData::VT_VALUE,"value"); + ::flatbuffers::WIPOffset::new(o.value()) + } +} + +impl ::core::fmt::Debug for f144_LogData<'_> { + fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + let mut ds = f.debug_struct("f144_LogData"); + ds.field("source_name", &self.source_name()); + ds.field("timestamp", &self.timestamp()); + ds.field("value_type", &self.value_type()); + match self.value_type() { + Value::Byte => { + if let Some(x) = self.value_as_byte() { + ds.field("value", &x) + } else { + ds.field("value", &"InvalidFlatbuffer: Union discriminant does not match value.") + } + }, + Value::UByte => { + if let Some(x) = self.value_as_ubyte() { + ds.field("value", &x) + } else { + ds.field("value", &"InvalidFlatbuffer: Union discriminant does not match value.") + } + }, + Value::Short => { + if let Some(x) = self.value_as_short() { + ds.field("value", &x) + } else { + ds.field("value", &"InvalidFlatbuffer: Union discriminant does not match value.") + } + }, + Value::UShort => { + if let Some(x) = self.value_as_ushort() { + ds.field("value", &x) + } else { + ds.field("value", &"InvalidFlatbuffer: Union discriminant does not match value.") + } + }, + Value::Int => { + if let Some(x) = self.value_as_int() { + ds.field("value", &x) + } else { + ds.field("value", &"InvalidFlatbuffer: Union discriminant does not match value.") + } + }, + Value::UInt => { + if let Some(x) = self.value_as_uint() { + ds.field("value", &x) + } else { + ds.field("value", &"InvalidFlatbuffer: Union discriminant does not match value.") + } + }, + Value::Long => { + if let Some(x) = self.value_as_long() { + ds.field("value", &x) + } else { + ds.field("value", &"InvalidFlatbuffer: Union discriminant does not match value.") + } + }, + Value::ULong => { + if let Some(x) = self.value_as_ulong() { + ds.field("value", &x) + } else { + ds.field("value", 
&"InvalidFlatbuffer: Union discriminant does not match value.") + } + }, + Value::Float => { + if let Some(x) = self.value_as_float() { + ds.field("value", &x) + } else { + ds.field("value", &"InvalidFlatbuffer: Union discriminant does not match value.") + } + }, + Value::Double => { + if let Some(x) = self.value_as_double() { + ds.field("value", &x) + } else { + ds.field("value", &"InvalidFlatbuffer: Union discriminant does not match value.") + } + }, + Value::ArrayByte => { + if let Some(x) = self.value_as_array_byte() { + ds.field("value", &x) + } else { + ds.field("value", &"InvalidFlatbuffer: Union discriminant does not match value.") + } + }, + Value::ArrayUByte => { + if let Some(x) = self.value_as_array_ubyte() { + ds.field("value", &x) + } else { + ds.field("value", &"InvalidFlatbuffer: Union discriminant does not match value.") + } + }, + Value::ArrayShort => { + if let Some(x) = self.value_as_array_short() { + ds.field("value", &x) + } else { + ds.field("value", &"InvalidFlatbuffer: Union discriminant does not match value.") + } + }, + Value::ArrayUShort => { + if let Some(x) = self.value_as_array_ushort() { + ds.field("value", &x) + } else { + ds.field("value", &"InvalidFlatbuffer: Union discriminant does not match value.") + } + }, + Value::ArrayInt => { + if let Some(x) = self.value_as_array_int() { + ds.field("value", &x) + } else { + ds.field("value", &"InvalidFlatbuffer: Union discriminant does not match value.") + } + }, + Value::ArrayUInt => { + if let Some(x) = self.value_as_array_uint() { + ds.field("value", &x) + } else { + ds.field("value", &"InvalidFlatbuffer: Union discriminant does not match value.") + } + }, + Value::ArrayLong => { + if let Some(x) = self.value_as_array_long() { + ds.field("value", &x) + } else { + ds.field("value", &"InvalidFlatbuffer: Union discriminant does not match value.") + } + }, + Value::ArrayULong => { + if let Some(x) = self.value_as_array_ulong() { + ds.field("value", &x) + } else { + ds.field("value", 
&"InvalidFlatbuffer: Union discriminant does not match value.") + } + }, + Value::ArrayFloat => { + if let Some(x) = self.value_as_array_float() { + ds.field("value", &x) + } else { + ds.field("value", &"InvalidFlatbuffer: Union discriminant does not match value.") + } + }, + Value::ArrayDouble => { + if let Some(x) = self.value_as_array_double() { + ds.field("value", &x) + } else { + ds.field("value", &"InvalidFlatbuffer: Union discriminant does not match value.") + } + }, + _ => { + let x: Option<()> = None; + ds.field("value", &x) + }, + }; + ds.finish() + } +} +#[inline] +/// Verifies that a buffer of bytes contains a `f144_LogData` +/// and returns it. +/// Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `root_as_f_144_log_data_unchecked`. +pub fn root_as_f_144_log_data(buf: &[u8]) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::root::(buf) +} +#[inline] +/// Verifies that a buffer of bytes contains a size prefixed +/// `f144_LogData` and returns it. +/// Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `size_prefixed_root_as_f_144_log_data_unchecked`. +pub fn size_prefixed_root_as_f_144_log_data(buf: &[u8]) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::size_prefixed_root::(buf) +} +#[inline] +/// Verifies, with the given options, that a buffer of bytes +/// contains a `f144_LogData` and returns it. +/// Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `root_as_f_144_log_data_unchecked`. 
+pub fn root_as_f_144_log_data_with_opts<'b, 'o>( + opts: &'o ::flatbuffers::VerifierOptions, + buf: &'b [u8], +) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::root_with_opts::>(opts, buf) +} +#[inline] +/// Verifies, with the given verifier options, that a buffer of +/// bytes contains a size prefixed `f144_LogData` and returns +/// it. Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `root_as_f_144_log_data_unchecked`. +pub fn size_prefixed_root_as_f_144_log_data_with_opts<'b, 'o>( + opts: &'o ::flatbuffers::VerifierOptions, + buf: &'b [u8], +) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::size_prefixed_root_with_opts::>(opts, buf) +} +#[inline] +/// Assumes, without verification, that a buffer of bytes contains a f144_LogData and returns it. +/// # Safety +/// Callers must trust the given bytes do indeed contain a valid `f144_LogData`. +pub unsafe fn root_as_f_144_log_data_unchecked(buf: &[u8]) -> f144_LogData<'_> { + unsafe { ::flatbuffers::root_unchecked::(buf) } +} +#[inline] +/// Assumes, without verification, that a buffer of bytes contains a size prefixed f144_LogData and returns it. +/// # Safety +/// Callers must trust the given bytes do indeed contain a valid size prefixed `f144_LogData`. 
+pub unsafe fn size_prefixed_root_as_f_144_log_data_unchecked(buf: &[u8]) -> f144_LogData<'_> { + unsafe { ::flatbuffers::size_prefixed_root_unchecked::(buf) } +} +pub const F_144_LOG_DATA_IDENTIFIER: &str = "f144"; + +#[inline] +pub fn f_144_log_data_buffer_has_identifier(buf: &[u8]) -> bool { + ::flatbuffers::buffer_has_identifier(buf, F_144_LOG_DATA_IDENTIFIER, false) +} + +#[inline] +pub fn f_144_log_data_size_prefixed_buffer_has_identifier(buf: &[u8]) -> bool { + ::flatbuffers::buffer_has_identifier(buf, F_144_LOG_DATA_IDENTIFIER, true) +} + +#[inline] +pub fn finish_f_144_log_data_buffer<'a, 'b, A: ::flatbuffers::Allocator + 'a>( + fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + root: ::flatbuffers::WIPOffset>) { + fbb.finish(root, Some(F_144_LOG_DATA_IDENTIFIER)); +} + +#[inline] +pub fn finish_size_prefixed_f_144_log_data_buffer<'a, 'b, A: ::flatbuffers::Allocator + 'a>(fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, root: ::flatbuffers::WIPOffset>) { + fbb.finish_size_prefixed(root, Some(F_144_LOG_DATA_IDENTIFIER)); +} diff --git a/rust/src/flatbuffers_generated/fc00_forwarder_config.rs b/rust/src/flatbuffers_generated/fc00_forwarder_config.rs new file mode 100644 index 0000000..144abb0 --- /dev/null +++ b/rust/src/flatbuffers_generated/fc00_forwarder_config.rs @@ -0,0 +1,545 @@ +// automatically generated by the FlatBuffers compiler, do not modify +// @generated + +extern crate alloc; + +#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")] +pub const ENUM_MIN_UPDATE_TYPE: u16 = 0; +#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")] +pub const ENUM_MAX_UPDATE_TYPE: u16 = 3; +#[deprecated(since = "2.0.0", note = "Use associated constants instead. 
This will no longer be generated in 2021.")] +#[allow(non_camel_case_types)] +pub const ENUM_VALUES_UPDATE_TYPE: [UpdateType; 4] = [ + UpdateType::ADD, + UpdateType::REMOVE, + UpdateType::REMOVEALL, + UpdateType::REPLACE, +]; + +#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] +#[repr(transparent)] +pub struct UpdateType(pub u16); +#[allow(non_upper_case_globals)] +impl UpdateType { + pub const ADD: Self = Self(0); + pub const REMOVE: Self = Self(1); + pub const REMOVEALL: Self = Self(2); + pub const REPLACE: Self = Self(3); + + pub const ENUM_MIN: u16 = 0; + pub const ENUM_MAX: u16 = 3; + pub const ENUM_VALUES: &'static [Self] = &[ + Self::ADD, + Self::REMOVE, + Self::REMOVEALL, + Self::REPLACE, + ]; + /// Returns the variant's name or "" if unknown. + pub fn variant_name(self) -> Option<&'static str> { + match self { + Self::ADD => Some("ADD"), + Self::REMOVE => Some("REMOVE"), + Self::REMOVEALL => Some("REMOVEALL"), + Self::REPLACE => Some("REPLACE"), + _ => None, + } + } +} +impl ::core::fmt::Debug for UpdateType { + fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result { + if let Some(name) = self.variant_name() { + f.write_str(name) + } else { + f.write_fmt(format_args!("", self.0)) + } + } +} +impl<'a> ::flatbuffers::Follow<'a> for UpdateType { + type Inner = Self; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + let b = unsafe { ::flatbuffers::read_scalar_at::(buf, loc) }; + Self(b) + } +} + +impl ::flatbuffers::Push for UpdateType { + type Output = UpdateType; + #[inline] + unsafe fn push(&self, dst: &mut [u8], _written_len: usize) { + unsafe { ::flatbuffers::emplace_scalar::(dst, self.0) }; + } +} + +impl ::flatbuffers::EndianScalar for UpdateType { + type Scalar = u16; + #[inline] + fn to_little_endian(self) -> u16 { + self.0.to_le() + } + #[inline] + #[allow(clippy::wrong_self_convention)] + fn from_little_endian(v: u16) -> Self { + let b = u16::from_le(v); + Self(b) + } +} + +impl<'a> 
::flatbuffers::Verifiable for UpdateType { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + u16::run_verifier(v, pos) + } +} + +impl ::flatbuffers::SimpleToVerifyInSlice for UpdateType {} +#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")] +pub const ENUM_MIN_PROTOCOL: u16 = 0; +#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")] +pub const ENUM_MAX_PROTOCOL: u16 = 2; +#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")] +#[allow(non_camel_case_types)] +pub const ENUM_VALUES_PROTOCOL: [Protocol; 3] = [ + Protocol::PVA, + Protocol::CA, + Protocol::FAKE, +]; + +#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] +#[repr(transparent)] +pub struct Protocol(pub u16); +#[allow(non_upper_case_globals)] +impl Protocol { + pub const PVA: Self = Self(0); + pub const CA: Self = Self(1); + pub const FAKE: Self = Self(2); + + pub const ENUM_MIN: u16 = 0; + pub const ENUM_MAX: u16 = 2; + pub const ENUM_VALUES: &'static [Self] = &[ + Self::PVA, + Self::CA, + Self::FAKE, + ]; + /// Returns the variant's name or "" if unknown. 
+ pub fn variant_name(self) -> Option<&'static str> { + match self { + Self::PVA => Some("PVA"), + Self::CA => Some("CA"), + Self::FAKE => Some("FAKE"), + _ => None, + } + } +} +impl ::core::fmt::Debug for Protocol { + fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result { + if let Some(name) = self.variant_name() { + f.write_str(name) + } else { + f.write_fmt(format_args!("", self.0)) + } + } +} +impl<'a> ::flatbuffers::Follow<'a> for Protocol { + type Inner = Self; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + let b = unsafe { ::flatbuffers::read_scalar_at::(buf, loc) }; + Self(b) + } +} + +impl ::flatbuffers::Push for Protocol { + type Output = Protocol; + #[inline] + unsafe fn push(&self, dst: &mut [u8], _written_len: usize) { + unsafe { ::flatbuffers::emplace_scalar::(dst, self.0) }; + } +} + +impl ::flatbuffers::EndianScalar for Protocol { + type Scalar = u16; + #[inline] + fn to_little_endian(self) -> u16 { + self.0.to_le() + } + #[inline] + #[allow(clippy::wrong_self_convention)] + fn from_little_endian(v: u16) -> Self { + let b = u16::from_le(v); + Self(b) + } +} + +impl<'a> ::flatbuffers::Verifiable for Protocol { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + u16::run_verifier(v, pos) + } +} + +impl ::flatbuffers::SimpleToVerifyInSlice for Protocol {} +pub enum StreamOffset {} +#[derive(Copy, Clone, PartialEq)] + +pub struct Stream<'a> { + pub _tab: ::flatbuffers::Table<'a>, +} + +impl<'a> ::flatbuffers::Follow<'a> for Stream<'a> { + type Inner = Stream<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { _tab: unsafe { ::flatbuffers::Table::new(buf, loc) } } + } +} + +impl<'a> Stream<'a> { + pub const VT_CHANNEL: ::flatbuffers::VOffsetT = 4; + pub const VT_SCHEMA: ::flatbuffers::VOffsetT = 6; + pub const VT_TOPIC: ::flatbuffers::VOffsetT = 8; + pub const VT_PROTOCOL: 
::flatbuffers::VOffsetT = 10; + pub const VT_PERIODIC: ::flatbuffers::VOffsetT = 12; + + #[inline] + pub unsafe fn init_from_table(table: ::flatbuffers::Table<'a>) -> Self { + Stream { _tab: table } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: ::flatbuffers::Allocator + 'bldr>( + _fbb: &'mut_bldr mut ::flatbuffers::FlatBufferBuilder<'bldr, A>, + args: &'args StreamArgs<'args> + ) -> ::flatbuffers::WIPOffset> { + let mut builder = StreamBuilder::new(_fbb); + builder.add_periodic(args.periodic); + if let Some(x) = args.topic { builder.add_topic(x); } + if let Some(x) = args.schema { builder.add_schema(x); } + if let Some(x) = args.channel { builder.add_channel(x); } + builder.add_protocol(args.protocol); + builder.finish() + } + + + #[inline] + pub fn channel(&self) -> Option<&'a str> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<&str>>(Stream::VT_CHANNEL, None)} + } + #[inline] + pub fn schema(&self) -> Option<&'a str> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<&str>>(Stream::VT_SCHEMA, None)} + } + #[inline] + pub fn topic(&self) -> Option<&'a str> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<&str>>(Stream::VT_TOPIC, None)} + } + #[inline] + pub fn protocol(&self) -> Protocol { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::(Stream::VT_PROTOCOL, Some(Protocol::PVA)).unwrap()} + } + #[inline] + pub fn periodic(&self) -> i32 { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::(Stream::VT_PERIODIC, 
Some(0)).unwrap()} + } +} + +impl ::flatbuffers::Verifiable for Stream<'_> { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + v.visit_table(pos)? + .visit_field::<::flatbuffers::ForwardsUOffset<&str>>("channel", Self::VT_CHANNEL, false)? + .visit_field::<::flatbuffers::ForwardsUOffset<&str>>("schema", Self::VT_SCHEMA, false)? + .visit_field::<::flatbuffers::ForwardsUOffset<&str>>("topic", Self::VT_TOPIC, false)? + .visit_field::("protocol", Self::VT_PROTOCOL, false)? + .visit_field::("periodic", Self::VT_PERIODIC, false)? + .finish(); + Ok(()) + } +} +pub struct StreamArgs<'a> { + pub channel: Option<::flatbuffers::WIPOffset<&'a str>>, + pub schema: Option<::flatbuffers::WIPOffset<&'a str>>, + pub topic: Option<::flatbuffers::WIPOffset<&'a str>>, + pub protocol: Protocol, + pub periodic: i32, +} +impl<'a> Default for StreamArgs<'a> { + #[inline] + fn default() -> Self { + StreamArgs { + channel: None, + schema: None, + topic: None, + protocol: Protocol::PVA, + periodic: 0, + } + } +} + +pub struct StreamBuilder<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> { + fbb_: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + start_: ::flatbuffers::WIPOffset<::flatbuffers::TableUnfinishedWIPOffset>, +} +impl<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> StreamBuilder<'a, 'b, A> { + #[inline] + pub fn add_channel(&mut self, channel: ::flatbuffers::WIPOffset<&'b str>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(Stream::VT_CHANNEL, channel); + } + #[inline] + pub fn add_schema(&mut self, schema: ::flatbuffers::WIPOffset<&'b str>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(Stream::VT_SCHEMA, schema); + } + #[inline] + pub fn add_topic(&mut self, topic: ::flatbuffers::WIPOffset<&'b str>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(Stream::VT_TOPIC, topic); + } + #[inline] + pub fn add_protocol(&mut self, protocol: Protocol) { + 
self.fbb_.push_slot::(Stream::VT_PROTOCOL, protocol, Protocol::PVA); + } + #[inline] + pub fn add_periodic(&mut self, periodic: i32) { + self.fbb_.push_slot::(Stream::VT_PERIODIC, periodic, 0); + } + #[inline] + pub fn new(_fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>) -> StreamBuilder<'a, 'b, A> { + let start = _fbb.start_table(); + StreamBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub fn finish(self) -> ::flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + ::flatbuffers::WIPOffset::new(o.value()) + } +} + +impl ::core::fmt::Debug for Stream<'_> { + fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + let mut ds = f.debug_struct("Stream"); + ds.field("channel", &self.channel()); + ds.field("schema", &self.schema()); + ds.field("topic", &self.topic()); + ds.field("protocol", &self.protocol()); + ds.field("periodic", &self.periodic()); + ds.finish() + } +} +pub enum fc00_ConfigUpdateOffset {} +#[derive(Copy, Clone, PartialEq)] + +pub struct fc00_ConfigUpdate<'a> { + pub _tab: ::flatbuffers::Table<'a>, +} + +impl<'a> ::flatbuffers::Follow<'a> for fc00_ConfigUpdate<'a> { + type Inner = fc00_ConfigUpdate<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { _tab: unsafe { ::flatbuffers::Table::new(buf, loc) } } + } +} + +impl<'a> fc00_ConfigUpdate<'a> { + pub const VT_CONFIG_CHANGE: ::flatbuffers::VOffsetT = 4; + pub const VT_STREAMS: ::flatbuffers::VOffsetT = 6; + + #[inline] + pub unsafe fn init_from_table(table: ::flatbuffers::Table<'a>) -> Self { + fc00_ConfigUpdate { _tab: table } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: ::flatbuffers::Allocator + 'bldr>( + _fbb: &'mut_bldr mut ::flatbuffers::FlatBufferBuilder<'bldr, A>, + args: &'args fc00_ConfigUpdateArgs<'args> + ) -> ::flatbuffers::WIPOffset> { + let mut builder = fc00_ConfigUpdateBuilder::new(_fbb); + if let Some(x) = args.streams { builder.add_streams(x); } 
+ builder.add_config_change(args.config_change); + builder.finish() + } + + + #[inline] + pub fn config_change(&self) -> UpdateType { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::(fc00_ConfigUpdate::VT_CONFIG_CHANGE, Some(UpdateType::ADD)).unwrap()} + } + #[inline] + pub fn streams(&self) -> Option<::flatbuffers::Vector<'a, ::flatbuffers::ForwardsUOffset>>> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'a, ::flatbuffers::ForwardsUOffset>>>(fc00_ConfigUpdate::VT_STREAMS, None)} + } +} + +impl ::flatbuffers::Verifiable for fc00_ConfigUpdate<'_> { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + v.visit_table(pos)? + .visit_field::("config_change", Self::VT_CONFIG_CHANGE, false)? + .visit_field::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'_, ::flatbuffers::ForwardsUOffset>>>("streams", Self::VT_STREAMS, false)? 
+ .finish(); + Ok(()) + } +} +pub struct fc00_ConfigUpdateArgs<'a> { + pub config_change: UpdateType, + pub streams: Option<::flatbuffers::WIPOffset<::flatbuffers::Vector<'a, ::flatbuffers::ForwardsUOffset>>>>, +} +impl<'a> Default for fc00_ConfigUpdateArgs<'a> { + #[inline] + fn default() -> Self { + fc00_ConfigUpdateArgs { + config_change: UpdateType::ADD, + streams: None, + } + } +} + +pub struct fc00_ConfigUpdateBuilder<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> { + fbb_: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + start_: ::flatbuffers::WIPOffset<::flatbuffers::TableUnfinishedWIPOffset>, +} +impl<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> fc00_ConfigUpdateBuilder<'a, 'b, A> { + #[inline] + pub fn add_config_change(&mut self, config_change: UpdateType) { + self.fbb_.push_slot::(fc00_ConfigUpdate::VT_CONFIG_CHANGE, config_change, UpdateType::ADD); + } + #[inline] + pub fn add_streams(&mut self, streams: ::flatbuffers::WIPOffset<::flatbuffers::Vector<'b , ::flatbuffers::ForwardsUOffset>>>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(fc00_ConfigUpdate::VT_STREAMS, streams); + } + #[inline] + pub fn new(_fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>) -> fc00_ConfigUpdateBuilder<'a, 'b, A> { + let start = _fbb.start_table(); + fc00_ConfigUpdateBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub fn finish(self) -> ::flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + ::flatbuffers::WIPOffset::new(o.value()) + } +} + +impl ::core::fmt::Debug for fc00_ConfigUpdate<'_> { + fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + let mut ds = f.debug_struct("fc00_ConfigUpdate"); + ds.field("config_change", &self.config_change()); + ds.field("streams", &self.streams()); + ds.finish() + } +} +#[inline] +/// Verifies that a buffer of bytes contains a `fc00_ConfigUpdate` +/// and returns it. 
+/// Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `root_as_fc_00_config_update_unchecked`. +pub fn root_as_fc_00_config_update(buf: &[u8]) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::root::(buf) +} +#[inline] +/// Verifies that a buffer of bytes contains a size prefixed +/// `fc00_ConfigUpdate` and returns it. +/// Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `size_prefixed_root_as_fc_00_config_update_unchecked`. +pub fn size_prefixed_root_as_fc_00_config_update(buf: &[u8]) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::size_prefixed_root::(buf) +} +#[inline] +/// Verifies, with the given options, that a buffer of bytes +/// contains a `fc00_ConfigUpdate` and returns it. +/// Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `root_as_fc_00_config_update_unchecked`. +pub fn root_as_fc_00_config_update_with_opts<'b, 'o>( + opts: &'o ::flatbuffers::VerifierOptions, + buf: &'b [u8], +) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::root_with_opts::>(opts, buf) +} +#[inline] +/// Verifies, with the given verifier options, that a buffer of +/// bytes contains a size prefixed `fc00_ConfigUpdate` and returns +/// it. Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `root_as_fc_00_config_update_unchecked`. 
+pub fn size_prefixed_root_as_fc_00_config_update_with_opts<'b, 'o>( + opts: &'o ::flatbuffers::VerifierOptions, + buf: &'b [u8], +) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::size_prefixed_root_with_opts::>(opts, buf) +} +#[inline] +/// Assumes, without verification, that a buffer of bytes contains a fc00_ConfigUpdate and returns it. +/// # Safety +/// Callers must trust the given bytes do indeed contain a valid `fc00_ConfigUpdate`. +pub unsafe fn root_as_fc_00_config_update_unchecked(buf: &[u8]) -> fc00_ConfigUpdate<'_> { + unsafe { ::flatbuffers::root_unchecked::(buf) } +} +#[inline] +/// Assumes, without verification, that a buffer of bytes contains a size prefixed fc00_ConfigUpdate and returns it. +/// # Safety +/// Callers must trust the given bytes do indeed contain a valid size prefixed `fc00_ConfigUpdate`. +pub unsafe fn size_prefixed_root_as_fc_00_config_update_unchecked(buf: &[u8]) -> fc00_ConfigUpdate<'_> { + unsafe { ::flatbuffers::size_prefixed_root_unchecked::(buf) } +} +pub const FC_00_CONFIG_UPDATE_IDENTIFIER: &str = "fc00"; + +#[inline] +pub fn fc_00_config_update_buffer_has_identifier(buf: &[u8]) -> bool { + ::flatbuffers::buffer_has_identifier(buf, FC_00_CONFIG_UPDATE_IDENTIFIER, false) +} + +#[inline] +pub fn fc_00_config_update_size_prefixed_buffer_has_identifier(buf: &[u8]) -> bool { + ::flatbuffers::buffer_has_identifier(buf, FC_00_CONFIG_UPDATE_IDENTIFIER, true) +} + +#[inline] +pub fn finish_fc_00_config_update_buffer<'a, 'b, A: ::flatbuffers::Allocator + 'a>( + fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + root: ::flatbuffers::WIPOffset>) { + fbb.finish(root, Some(FC_00_CONFIG_UPDATE_IDENTIFIER)); +} + +#[inline] +pub fn finish_size_prefixed_fc_00_config_update_buffer<'a, 'b, A: ::flatbuffers::Allocator + 'a>(fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, root: ::flatbuffers::WIPOffset>) { + fbb.finish_size_prefixed(root, Some(FC_00_CONFIG_UPDATE_IDENTIFIER)); +} diff --git 
a/rust/src/flatbuffers_generated/hs01_event_histogram.rs b/rust/src/flatbuffers_generated/hs01_event_histogram.rs new file mode 100644 index 0000000..ed032bf --- /dev/null +++ b/rust/src/flatbuffers_generated/hs01_event_histogram.rs @@ -0,0 +1,1301 @@ +// automatically generated by the FlatBuffers compiler, do not modify +// @generated + +extern crate alloc; + +#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")] +pub const ENUM_MIN_ARRAY: u8 = 0; +#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")] +pub const ENUM_MAX_ARRAY: u8 = 4; +#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")] +#[allow(non_camel_case_types)] +pub const ENUM_VALUES_ARRAY: [Array; 5] = [ + Array::NONE, + Array::ArrayInt, + Array::ArrayLong, + Array::ArrayDouble, + Array::ArrayFloat, +]; + +#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] +#[repr(transparent)] +pub struct Array(pub u8); +#[allow(non_upper_case_globals)] +impl Array { + pub const NONE: Self = Self(0); + pub const ArrayInt: Self = Self(1); + pub const ArrayLong: Self = Self(2); + pub const ArrayDouble: Self = Self(3); + pub const ArrayFloat: Self = Self(4); + + pub const ENUM_MIN: u8 = 0; + pub const ENUM_MAX: u8 = 4; + pub const ENUM_VALUES: &'static [Self] = &[ + Self::NONE, + Self::ArrayInt, + Self::ArrayLong, + Self::ArrayDouble, + Self::ArrayFloat, + ]; + /// Returns the variant's name or "" if unknown. 
+ pub fn variant_name(self) -> Option<&'static str> { + match self { + Self::NONE => Some("NONE"), + Self::ArrayInt => Some("ArrayInt"), + Self::ArrayLong => Some("ArrayLong"), + Self::ArrayDouble => Some("ArrayDouble"), + Self::ArrayFloat => Some("ArrayFloat"), + _ => None, + } + } +} +impl ::core::fmt::Debug for Array { + fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result { + if let Some(name) = self.variant_name() { + f.write_str(name) + } else { + f.write_fmt(format_args!("", self.0)) + } + } +} +impl<'a> ::flatbuffers::Follow<'a> for Array { + type Inner = Self; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + let b = unsafe { ::flatbuffers::read_scalar_at::(buf, loc) }; + Self(b) + } +} + +impl ::flatbuffers::Push for Array { + type Output = Array; + #[inline] + unsafe fn push(&self, dst: &mut [u8], _written_len: usize) { + unsafe { ::flatbuffers::emplace_scalar::(dst, self.0) }; + } +} + +impl ::flatbuffers::EndianScalar for Array { + type Scalar = u8; + #[inline] + fn to_little_endian(self) -> u8 { + self.0.to_le() + } + #[inline] + #[allow(clippy::wrong_self_convention)] + fn from_little_endian(v: u8) -> Self { + let b = u8::from_le(v); + Self(b) + } +} + +impl<'a> ::flatbuffers::Verifiable for Array { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + u8::run_verifier(v, pos) + } +} + +impl ::flatbuffers::SimpleToVerifyInSlice for Array {} +pub struct ArrayUnionTableOffset {} + +pub enum ArrayIntOffset {} +#[derive(Copy, Clone, PartialEq)] + +pub struct ArrayInt<'a> { + pub _tab: ::flatbuffers::Table<'a>, +} + +impl<'a> ::flatbuffers::Follow<'a> for ArrayInt<'a> { + type Inner = ArrayInt<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { _tab: unsafe { ::flatbuffers::Table::new(buf, loc) } } + } +} + +impl<'a> ArrayInt<'a> { + pub const VT_VALUE: ::flatbuffers::VOffsetT = 4; + + #[inline] + 
pub unsafe fn init_from_table(table: ::flatbuffers::Table<'a>) -> Self { + ArrayInt { _tab: table } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: ::flatbuffers::Allocator + 'bldr>( + _fbb: &'mut_bldr mut ::flatbuffers::FlatBufferBuilder<'bldr, A>, + args: &'args ArrayIntArgs<'args> + ) -> ::flatbuffers::WIPOffset> { + let mut builder = ArrayIntBuilder::new(_fbb); + if let Some(x) = args.value { builder.add_value(x); } + builder.finish() + } + + + #[inline] + pub fn value(&self) -> Option<::flatbuffers::Vector<'a, i32>> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'a, i32>>>(ArrayInt::VT_VALUE, None)} + } +} + +impl ::flatbuffers::Verifiable for ArrayInt<'_> { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + v.visit_table(pos)? + .visit_field::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'_, i32>>>("value", Self::VT_VALUE, false)? 
+ .finish(); + Ok(()) + } +} +pub struct ArrayIntArgs<'a> { + pub value: Option<::flatbuffers::WIPOffset<::flatbuffers::Vector<'a, i32>>>, +} +impl<'a> Default for ArrayIntArgs<'a> { + #[inline] + fn default() -> Self { + ArrayIntArgs { + value: None, + } + } +} + +pub struct ArrayIntBuilder<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> { + fbb_: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + start_: ::flatbuffers::WIPOffset<::flatbuffers::TableUnfinishedWIPOffset>, +} +impl<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> ArrayIntBuilder<'a, 'b, A> { + #[inline] + pub fn add_value(&mut self, value: ::flatbuffers::WIPOffset<::flatbuffers::Vector<'b , i32>>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(ArrayInt::VT_VALUE, value); + } + #[inline] + pub fn new(_fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>) -> ArrayIntBuilder<'a, 'b, A> { + let start = _fbb.start_table(); + ArrayIntBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub fn finish(self) -> ::flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + ::flatbuffers::WIPOffset::new(o.value()) + } +} + +impl ::core::fmt::Debug for ArrayInt<'_> { + fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + let mut ds = f.debug_struct("ArrayInt"); + ds.field("value", &self.value()); + ds.finish() + } +} +pub enum ArrayLongOffset {} +#[derive(Copy, Clone, PartialEq)] + +pub struct ArrayLong<'a> { + pub _tab: ::flatbuffers::Table<'a>, +} + +impl<'a> ::flatbuffers::Follow<'a> for ArrayLong<'a> { + type Inner = ArrayLong<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { _tab: unsafe { ::flatbuffers::Table::new(buf, loc) } } + } +} + +impl<'a> ArrayLong<'a> { + pub const VT_VALUE: ::flatbuffers::VOffsetT = 4; + + #[inline] + pub unsafe fn init_from_table(table: ::flatbuffers::Table<'a>) -> Self { + ArrayLong { _tab: table } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, 
A: ::flatbuffers::Allocator + 'bldr>( + _fbb: &'mut_bldr mut ::flatbuffers::FlatBufferBuilder<'bldr, A>, + args: &'args ArrayLongArgs<'args> + ) -> ::flatbuffers::WIPOffset> { + let mut builder = ArrayLongBuilder::new(_fbb); + if let Some(x) = args.value { builder.add_value(x); } + builder.finish() + } + + + #[inline] + pub fn value(&self) -> Option<::flatbuffers::Vector<'a, i64>> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'a, i64>>>(ArrayLong::VT_VALUE, None)} + } +} + +impl ::flatbuffers::Verifiable for ArrayLong<'_> { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + v.visit_table(pos)? + .visit_field::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'_, i64>>>("value", Self::VT_VALUE, false)? + .finish(); + Ok(()) + } +} +pub struct ArrayLongArgs<'a> { + pub value: Option<::flatbuffers::WIPOffset<::flatbuffers::Vector<'a, i64>>>, +} +impl<'a> Default for ArrayLongArgs<'a> { + #[inline] + fn default() -> Self { + ArrayLongArgs { + value: None, + } + } +} + +pub struct ArrayLongBuilder<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> { + fbb_: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + start_: ::flatbuffers::WIPOffset<::flatbuffers::TableUnfinishedWIPOffset>, +} +impl<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> ArrayLongBuilder<'a, 'b, A> { + #[inline] + pub fn add_value(&mut self, value: ::flatbuffers::WIPOffset<::flatbuffers::Vector<'b , i64>>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(ArrayLong::VT_VALUE, value); + } + #[inline] + pub fn new(_fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>) -> ArrayLongBuilder<'a, 'b, A> { + let start = _fbb.start_table(); + ArrayLongBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub fn finish(self) -> ::flatbuffers::WIPOffset> { + let o = 
self.fbb_.end_table(self.start_); + ::flatbuffers::WIPOffset::new(o.value()) + } +} + +impl ::core::fmt::Debug for ArrayLong<'_> { + fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + let mut ds = f.debug_struct("ArrayLong"); + ds.field("value", &self.value()); + ds.finish() + } +} +pub enum ArrayDoubleOffset {} +#[derive(Copy, Clone, PartialEq)] + +pub struct ArrayDouble<'a> { + pub _tab: ::flatbuffers::Table<'a>, +} + +impl<'a> ::flatbuffers::Follow<'a> for ArrayDouble<'a> { + type Inner = ArrayDouble<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { _tab: unsafe { ::flatbuffers::Table::new(buf, loc) } } + } +} + +impl<'a> ArrayDouble<'a> { + pub const VT_VALUE: ::flatbuffers::VOffsetT = 4; + + #[inline] + pub unsafe fn init_from_table(table: ::flatbuffers::Table<'a>) -> Self { + ArrayDouble { _tab: table } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: ::flatbuffers::Allocator + 'bldr>( + _fbb: &'mut_bldr mut ::flatbuffers::FlatBufferBuilder<'bldr, A>, + args: &'args ArrayDoubleArgs<'args> + ) -> ::flatbuffers::WIPOffset> { + let mut builder = ArrayDoubleBuilder::new(_fbb); + if let Some(x) = args.value { builder.add_value(x); } + builder.finish() + } + + + #[inline] + pub fn value(&self) -> Option<::flatbuffers::Vector<'a, f64>> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'a, f64>>>(ArrayDouble::VT_VALUE, None)} + } +} + +impl ::flatbuffers::Verifiable for ArrayDouble<'_> { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + v.visit_table(pos)? + .visit_field::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'_, f64>>>("value", Self::VT_VALUE, false)? 
+ .finish(); + Ok(()) + } +} +pub struct ArrayDoubleArgs<'a> { + pub value: Option<::flatbuffers::WIPOffset<::flatbuffers::Vector<'a, f64>>>, +} +impl<'a> Default for ArrayDoubleArgs<'a> { + #[inline] + fn default() -> Self { + ArrayDoubleArgs { + value: None, + } + } +} + +pub struct ArrayDoubleBuilder<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> { + fbb_: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + start_: ::flatbuffers::WIPOffset<::flatbuffers::TableUnfinishedWIPOffset>, +} +impl<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> ArrayDoubleBuilder<'a, 'b, A> { + #[inline] + pub fn add_value(&mut self, value: ::flatbuffers::WIPOffset<::flatbuffers::Vector<'b , f64>>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(ArrayDouble::VT_VALUE, value); + } + #[inline] + pub fn new(_fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>) -> ArrayDoubleBuilder<'a, 'b, A> { + let start = _fbb.start_table(); + ArrayDoubleBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub fn finish(self) -> ::flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + ::flatbuffers::WIPOffset::new(o.value()) + } +} + +impl ::core::fmt::Debug for ArrayDouble<'_> { + fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + let mut ds = f.debug_struct("ArrayDouble"); + ds.field("value", &self.value()); + ds.finish() + } +} +pub enum ArrayFloatOffset {} +#[derive(Copy, Clone, PartialEq)] + +pub struct ArrayFloat<'a> { + pub _tab: ::flatbuffers::Table<'a>, +} + +impl<'a> ::flatbuffers::Follow<'a> for ArrayFloat<'a> { + type Inner = ArrayFloat<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { _tab: unsafe { ::flatbuffers::Table::new(buf, loc) } } + } +} + +impl<'a> ArrayFloat<'a> { + pub const VT_VALUE: ::flatbuffers::VOffsetT = 4; + + #[inline] + pub unsafe fn init_from_table(table: ::flatbuffers::Table<'a>) -> Self { + ArrayFloat { _tab: table } + } + #[allow(unused_mut)] + pub fn create<'bldr: 
'args, 'args: 'mut_bldr, 'mut_bldr, A: ::flatbuffers::Allocator + 'bldr>( + _fbb: &'mut_bldr mut ::flatbuffers::FlatBufferBuilder<'bldr, A>, + args: &'args ArrayFloatArgs<'args> + ) -> ::flatbuffers::WIPOffset> { + let mut builder = ArrayFloatBuilder::new(_fbb); + if let Some(x) = args.value { builder.add_value(x); } + builder.finish() + } + + + #[inline] + pub fn value(&self) -> Option<::flatbuffers::Vector<'a, f32>> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'a, f32>>>(ArrayFloat::VT_VALUE, None)} + } +} + +impl ::flatbuffers::Verifiable for ArrayFloat<'_> { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + v.visit_table(pos)? + .visit_field::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'_, f32>>>("value", Self::VT_VALUE, false)? + .finish(); + Ok(()) + } +} +pub struct ArrayFloatArgs<'a> { + pub value: Option<::flatbuffers::WIPOffset<::flatbuffers::Vector<'a, f32>>>, +} +impl<'a> Default for ArrayFloatArgs<'a> { + #[inline] + fn default() -> Self { + ArrayFloatArgs { + value: None, + } + } +} + +pub struct ArrayFloatBuilder<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> { + fbb_: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + start_: ::flatbuffers::WIPOffset<::flatbuffers::TableUnfinishedWIPOffset>, +} +impl<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> ArrayFloatBuilder<'a, 'b, A> { + #[inline] + pub fn add_value(&mut self, value: ::flatbuffers::WIPOffset<::flatbuffers::Vector<'b , f32>>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(ArrayFloat::VT_VALUE, value); + } + #[inline] + pub fn new(_fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>) -> ArrayFloatBuilder<'a, 'b, A> { + let start = _fbb.start_table(); + ArrayFloatBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub fn finish(self) -> 
::flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + ::flatbuffers::WIPOffset::new(o.value()) + } +} + +impl ::core::fmt::Debug for ArrayFloat<'_> { + fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + let mut ds = f.debug_struct("ArrayFloat"); + ds.field("value", &self.value()); + ds.finish() + } +} +pub enum DimensionMetaDataOffset {} +#[derive(Copy, Clone, PartialEq)] + +pub struct DimensionMetaData<'a> { + pub _tab: ::flatbuffers::Table<'a>, +} + +impl<'a> ::flatbuffers::Follow<'a> for DimensionMetaData<'a> { + type Inner = DimensionMetaData<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { _tab: unsafe { ::flatbuffers::Table::new(buf, loc) } } + } +} + +impl<'a> DimensionMetaData<'a> { + pub const VT_LENGTH: ::flatbuffers::VOffsetT = 4; + pub const VT_UNIT: ::flatbuffers::VOffsetT = 6; + pub const VT_LABEL: ::flatbuffers::VOffsetT = 8; + pub const VT_BIN_BOUNDARIES_TYPE: ::flatbuffers::VOffsetT = 10; + pub const VT_BIN_BOUNDARIES: ::flatbuffers::VOffsetT = 12; + + #[inline] + pub unsafe fn init_from_table(table: ::flatbuffers::Table<'a>) -> Self { + DimensionMetaData { _tab: table } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: ::flatbuffers::Allocator + 'bldr>( + _fbb: &'mut_bldr mut ::flatbuffers::FlatBufferBuilder<'bldr, A>, + args: &'args DimensionMetaDataArgs<'args> + ) -> ::flatbuffers::WIPOffset> { + let mut builder = DimensionMetaDataBuilder::new(_fbb); + if let Some(x) = args.bin_boundaries { builder.add_bin_boundaries(x); } + if let Some(x) = args.label { builder.add_label(x); } + if let Some(x) = args.unit { builder.add_unit(x); } + builder.add_length(args.length); + builder.add_bin_boundaries_type(args.bin_boundaries_type); + builder.finish() + } + + + #[inline] + pub fn length(&self) -> i32 { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { 
self._tab.get::(DimensionMetaData::VT_LENGTH, Some(0)).unwrap()} + } + #[inline] + pub fn unit(&self) -> Option<&'a str> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<&str>>(DimensionMetaData::VT_UNIT, None)} + } + #[inline] + pub fn label(&self) -> Option<&'a str> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<&str>>(DimensionMetaData::VT_LABEL, None)} + } + #[inline] + pub fn bin_boundaries_type(&self) -> Array { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::(DimensionMetaData::VT_BIN_BOUNDARIES_TYPE, Some(Array::NONE)).unwrap()} + } + #[inline] + pub fn bin_boundaries(&self) -> Option<::flatbuffers::Table<'a>> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<::flatbuffers::Table<'a>>>(DimensionMetaData::VT_BIN_BOUNDARIES, None)} + } + #[inline] + #[allow(non_snake_case)] + pub fn bin_boundaries_as_array_int(&self) -> Option> { + if self.bin_boundaries_type() == Array::ArrayInt { + self.bin_boundaries().map(|t| { + // Safety: + // Created from a valid Table for this object + // Which contains a valid union in this slot + unsafe { ArrayInt::init_from_table(t) } + }) + } else { + None + } + } + + #[inline] + #[allow(non_snake_case)] + pub fn bin_boundaries_as_array_long(&self) -> Option> { + if self.bin_boundaries_type() == Array::ArrayLong { + self.bin_boundaries().map(|t| { + // Safety: + // Created from a valid Table for this object + // Which contains a valid union in this slot + unsafe { ArrayLong::init_from_table(t) } + }) + } else { + None + } + } + + #[inline] + #[allow(non_snake_case)] + pub fn 
bin_boundaries_as_array_double(&self) -> Option> { + if self.bin_boundaries_type() == Array::ArrayDouble { + self.bin_boundaries().map(|t| { + // Safety: + // Created from a valid Table for this object + // Which contains a valid union in this slot + unsafe { ArrayDouble::init_from_table(t) } + }) + } else { + None + } + } + + #[inline] + #[allow(non_snake_case)] + pub fn bin_boundaries_as_array_float(&self) -> Option> { + if self.bin_boundaries_type() == Array::ArrayFloat { + self.bin_boundaries().map(|t| { + // Safety: + // Created from a valid Table for this object + // Which contains a valid union in this slot + unsafe { ArrayFloat::init_from_table(t) } + }) + } else { + None + } + } + +} + +impl ::flatbuffers::Verifiable for DimensionMetaData<'_> { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + v.visit_table(pos)? + .visit_field::("length", Self::VT_LENGTH, false)? + .visit_field::<::flatbuffers::ForwardsUOffset<&str>>("unit", Self::VT_UNIT, false)? + .visit_field::<::flatbuffers::ForwardsUOffset<&str>>("label", Self::VT_LABEL, false)? + .visit_union::("bin_boundaries_type", Self::VT_BIN_BOUNDARIES_TYPE, "bin_boundaries", Self::VT_BIN_BOUNDARIES, false, |key, v, pos| { + match key { + Array::ArrayInt => v.verify_union_variant::<::flatbuffers::ForwardsUOffset>("Array::ArrayInt", pos), + Array::ArrayLong => v.verify_union_variant::<::flatbuffers::ForwardsUOffset>("Array::ArrayLong", pos), + Array::ArrayDouble => v.verify_union_variant::<::flatbuffers::ForwardsUOffset>("Array::ArrayDouble", pos), + Array::ArrayFloat => v.verify_union_variant::<::flatbuffers::ForwardsUOffset>("Array::ArrayFloat", pos), + _ => Ok(()), + } + })? 
+ .finish(); + Ok(()) + } +} +pub struct DimensionMetaDataArgs<'a> { + pub length: i32, + pub unit: Option<::flatbuffers::WIPOffset<&'a str>>, + pub label: Option<::flatbuffers::WIPOffset<&'a str>>, + pub bin_boundaries_type: Array, + pub bin_boundaries: Option<::flatbuffers::WIPOffset<::flatbuffers::UnionWIPOffset>>, +} +impl<'a> Default for DimensionMetaDataArgs<'a> { + #[inline] + fn default() -> Self { + DimensionMetaDataArgs { + length: 0, + unit: None, + label: None, + bin_boundaries_type: Array::NONE, + bin_boundaries: None, + } + } +} + +pub struct DimensionMetaDataBuilder<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> { + fbb_: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + start_: ::flatbuffers::WIPOffset<::flatbuffers::TableUnfinishedWIPOffset>, +} +impl<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> DimensionMetaDataBuilder<'a, 'b, A> { + #[inline] + pub fn add_length(&mut self, length: i32) { + self.fbb_.push_slot::(DimensionMetaData::VT_LENGTH, length, 0); + } + #[inline] + pub fn add_unit(&mut self, unit: ::flatbuffers::WIPOffset<&'b str>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(DimensionMetaData::VT_UNIT, unit); + } + #[inline] + pub fn add_label(&mut self, label: ::flatbuffers::WIPOffset<&'b str>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(DimensionMetaData::VT_LABEL, label); + } + #[inline] + pub fn add_bin_boundaries_type(&mut self, bin_boundaries_type: Array) { + self.fbb_.push_slot::(DimensionMetaData::VT_BIN_BOUNDARIES_TYPE, bin_boundaries_type, Array::NONE); + } + #[inline] + pub fn add_bin_boundaries(&mut self, bin_boundaries: ::flatbuffers::WIPOffset<::flatbuffers::UnionWIPOffset>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(DimensionMetaData::VT_BIN_BOUNDARIES, bin_boundaries); + } + #[inline] + pub fn new(_fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>) -> DimensionMetaDataBuilder<'a, 'b, A> { + let start = _fbb.start_table(); + DimensionMetaDataBuilder { + fbb_: _fbb, + 
start_: start, + } + } + #[inline] + pub fn finish(self) -> ::flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + ::flatbuffers::WIPOffset::new(o.value()) + } +} + +impl ::core::fmt::Debug for DimensionMetaData<'_> { + fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + let mut ds = f.debug_struct("DimensionMetaData"); + ds.field("length", &self.length()); + ds.field("unit", &self.unit()); + ds.field("label", &self.label()); + ds.field("bin_boundaries_type", &self.bin_boundaries_type()); + match self.bin_boundaries_type() { + Array::ArrayInt => { + if let Some(x) = self.bin_boundaries_as_array_int() { + ds.field("bin_boundaries", &x) + } else { + ds.field("bin_boundaries", &"InvalidFlatbuffer: Union discriminant does not match value.") + } + }, + Array::ArrayLong => { + if let Some(x) = self.bin_boundaries_as_array_long() { + ds.field("bin_boundaries", &x) + } else { + ds.field("bin_boundaries", &"InvalidFlatbuffer: Union discriminant does not match value.") + } + }, + Array::ArrayDouble => { + if let Some(x) = self.bin_boundaries_as_array_double() { + ds.field("bin_boundaries", &x) + } else { + ds.field("bin_boundaries", &"InvalidFlatbuffer: Union discriminant does not match value.") + } + }, + Array::ArrayFloat => { + if let Some(x) = self.bin_boundaries_as_array_float() { + ds.field("bin_boundaries", &x) + } else { + ds.field("bin_boundaries", &"InvalidFlatbuffer: Union discriminant does not match value.") + } + }, + _ => { + let x: Option<()> = None; + ds.field("bin_boundaries", &x) + }, + }; + ds.finish() + } +} +pub enum EventHistogramOffset {} +#[derive(Copy, Clone, PartialEq)] + +pub struct EventHistogram<'a> { + pub _tab: ::flatbuffers::Table<'a>, +} + +impl<'a> ::flatbuffers::Follow<'a> for EventHistogram<'a> { + type Inner = EventHistogram<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { _tab: unsafe { ::flatbuffers::Table::new(buf, loc) } } + } +} + +impl<'a> 
EventHistogram<'a> { + pub const VT_SOURCE: ::flatbuffers::VOffsetT = 4; + pub const VT_TIMESTAMP: ::flatbuffers::VOffsetT = 6; + pub const VT_DIM_METADATA: ::flatbuffers::VOffsetT = 8; + pub const VT_LAST_METADATA_TIMESTAMP: ::flatbuffers::VOffsetT = 10; + pub const VT_CURRENT_SHAPE: ::flatbuffers::VOffsetT = 12; + pub const VT_OFFSET: ::flatbuffers::VOffsetT = 14; + pub const VT_DATA_TYPE: ::flatbuffers::VOffsetT = 16; + pub const VT_DATA: ::flatbuffers::VOffsetT = 18; + pub const VT_ERRORS_TYPE: ::flatbuffers::VOffsetT = 20; + pub const VT_ERRORS: ::flatbuffers::VOffsetT = 22; + pub const VT_INFO: ::flatbuffers::VOffsetT = 24; + + #[inline] + pub unsafe fn init_from_table(table: ::flatbuffers::Table<'a>) -> Self { + EventHistogram { _tab: table } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: ::flatbuffers::Allocator + 'bldr>( + _fbb: &'mut_bldr mut ::flatbuffers::FlatBufferBuilder<'bldr, A>, + args: &'args EventHistogramArgs<'args> + ) -> ::flatbuffers::WIPOffset> { + let mut builder = EventHistogramBuilder::new(_fbb); + builder.add_last_metadata_timestamp(args.last_metadata_timestamp); + builder.add_timestamp(args.timestamp); + if let Some(x) = args.info { builder.add_info(x); } + if let Some(x) = args.errors { builder.add_errors(x); } + if let Some(x) = args.data { builder.add_data(x); } + if let Some(x) = args.offset { builder.add_offset(x); } + if let Some(x) = args.current_shape { builder.add_current_shape(x); } + if let Some(x) = args.dim_metadata { builder.add_dim_metadata(x); } + if let Some(x) = args.source { builder.add_source(x); } + builder.add_errors_type(args.errors_type); + builder.add_data_type(args.data_type); + builder.finish() + } + + + #[inline] + pub fn source(&self) -> Option<&'a str> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<&str>>(EventHistogram::VT_SOURCE, None)} + } + 
#[inline] + pub fn timestamp(&self) -> i64 { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::(EventHistogram::VT_TIMESTAMP, Some(0)).unwrap()} + } + #[inline] + pub fn dim_metadata(&self) -> Option<::flatbuffers::Vector<'a, ::flatbuffers::ForwardsUOffset>>> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'a, ::flatbuffers::ForwardsUOffset>>>(EventHistogram::VT_DIM_METADATA, None)} + } + #[inline] + pub fn last_metadata_timestamp(&self) -> i64 { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::(EventHistogram::VT_LAST_METADATA_TIMESTAMP, Some(0)).unwrap()} + } + #[inline] + pub fn current_shape(&self) -> ::flatbuffers::Vector<'a, i32> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'a, i32>>>(EventHistogram::VT_CURRENT_SHAPE, None).unwrap()} + } + #[inline] + pub fn offset(&self) -> Option<::flatbuffers::Vector<'a, i32>> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'a, i32>>>(EventHistogram::VT_OFFSET, None)} + } + #[inline] + pub fn data_type(&self) -> Array { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::(EventHistogram::VT_DATA_TYPE, Some(Array::NONE)).unwrap()} + } + #[inline] + pub fn data(&self) -> Option<::flatbuffers::Table<'a>> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { 
self._tab.get::<::flatbuffers::ForwardsUOffset<::flatbuffers::Table<'a>>>(EventHistogram::VT_DATA, None)} + } + #[inline] + pub fn errors_type(&self) -> Array { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::(EventHistogram::VT_ERRORS_TYPE, Some(Array::NONE)).unwrap()} + } + #[inline] + pub fn errors(&self) -> Option<::flatbuffers::Table<'a>> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<::flatbuffers::Table<'a>>>(EventHistogram::VT_ERRORS, None)} + } + #[inline] + pub fn info(&self) -> Option<&'a str> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<&str>>(EventHistogram::VT_INFO, None)} + } + #[inline] + #[allow(non_snake_case)] + pub fn data_as_array_int(&self) -> Option> { + if self.data_type() == Array::ArrayInt { + self.data().map(|t| { + // Safety: + // Created from a valid Table for this object + // Which contains a valid union in this slot + unsafe { ArrayInt::init_from_table(t) } + }) + } else { + None + } + } + + #[inline] + #[allow(non_snake_case)] + pub fn data_as_array_long(&self) -> Option> { + if self.data_type() == Array::ArrayLong { + self.data().map(|t| { + // Safety: + // Created from a valid Table for this object + // Which contains a valid union in this slot + unsafe { ArrayLong::init_from_table(t) } + }) + } else { + None + } + } + + #[inline] + #[allow(non_snake_case)] + pub fn data_as_array_double(&self) -> Option> { + if self.data_type() == Array::ArrayDouble { + self.data().map(|t| { + // Safety: + // Created from a valid Table for this object + // Which contains a valid union in this slot + unsafe { ArrayDouble::init_from_table(t) } + }) + } else { + None + } + } + + #[inline] + #[allow(non_snake_case)] + pub fn 
data_as_array_float(&self) -> Option> { + if self.data_type() == Array::ArrayFloat { + self.data().map(|t| { + // Safety: + // Created from a valid Table for this object + // Which contains a valid union in this slot + unsafe { ArrayFloat::init_from_table(t) } + }) + } else { + None + } + } + + #[inline] + #[allow(non_snake_case)] + pub fn errors_as_array_int(&self) -> Option> { + if self.errors_type() == Array::ArrayInt { + self.errors().map(|t| { + // Safety: + // Created from a valid Table for this object + // Which contains a valid union in this slot + unsafe { ArrayInt::init_from_table(t) } + }) + } else { + None + } + } + + #[inline] + #[allow(non_snake_case)] + pub fn errors_as_array_long(&self) -> Option> { + if self.errors_type() == Array::ArrayLong { + self.errors().map(|t| { + // Safety: + // Created from a valid Table for this object + // Which contains a valid union in this slot + unsafe { ArrayLong::init_from_table(t) } + }) + } else { + None + } + } + + #[inline] + #[allow(non_snake_case)] + pub fn errors_as_array_double(&self) -> Option> { + if self.errors_type() == Array::ArrayDouble { + self.errors().map(|t| { + // Safety: + // Created from a valid Table for this object + // Which contains a valid union in this slot + unsafe { ArrayDouble::init_from_table(t) } + }) + } else { + None + } + } + + #[inline] + #[allow(non_snake_case)] + pub fn errors_as_array_float(&self) -> Option> { + if self.errors_type() == Array::ArrayFloat { + self.errors().map(|t| { + // Safety: + // Created from a valid Table for this object + // Which contains a valid union in this slot + unsafe { ArrayFloat::init_from_table(t) } + }) + } else { + None + } + } + +} + +impl ::flatbuffers::Verifiable for EventHistogram<'_> { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + v.visit_table(pos)? + .visit_field::<::flatbuffers::ForwardsUOffset<&str>>("source", Self::VT_SOURCE, false)? 
+ .visit_field::("timestamp", Self::VT_TIMESTAMP, false)? + .visit_field::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'_, ::flatbuffers::ForwardsUOffset>>>("dim_metadata", Self::VT_DIM_METADATA, false)? + .visit_field::("last_metadata_timestamp", Self::VT_LAST_METADATA_TIMESTAMP, false)? + .visit_field::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'_, i32>>>("current_shape", Self::VT_CURRENT_SHAPE, true)? + .visit_field::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'_, i32>>>("offset", Self::VT_OFFSET, false)? + .visit_union::("data_type", Self::VT_DATA_TYPE, "data", Self::VT_DATA, false, |key, v, pos| { + match key { + Array::ArrayInt => v.verify_union_variant::<::flatbuffers::ForwardsUOffset>("Array::ArrayInt", pos), + Array::ArrayLong => v.verify_union_variant::<::flatbuffers::ForwardsUOffset>("Array::ArrayLong", pos), + Array::ArrayDouble => v.verify_union_variant::<::flatbuffers::ForwardsUOffset>("Array::ArrayDouble", pos), + Array::ArrayFloat => v.verify_union_variant::<::flatbuffers::ForwardsUOffset>("Array::ArrayFloat", pos), + _ => Ok(()), + } + })? + .visit_union::("errors_type", Self::VT_ERRORS_TYPE, "errors", Self::VT_ERRORS, false, |key, v, pos| { + match key { + Array::ArrayInt => v.verify_union_variant::<::flatbuffers::ForwardsUOffset>("Array::ArrayInt", pos), + Array::ArrayLong => v.verify_union_variant::<::flatbuffers::ForwardsUOffset>("Array::ArrayLong", pos), + Array::ArrayDouble => v.verify_union_variant::<::flatbuffers::ForwardsUOffset>("Array::ArrayDouble", pos), + Array::ArrayFloat => v.verify_union_variant::<::flatbuffers::ForwardsUOffset>("Array::ArrayFloat", pos), + _ => Ok(()), + } + })? + .visit_field::<::flatbuffers::ForwardsUOffset<&str>>("info", Self::VT_INFO, false)? 
+ .finish(); + Ok(()) + } +} +pub struct EventHistogramArgs<'a> { + pub source: Option<::flatbuffers::WIPOffset<&'a str>>, + pub timestamp: i64, + pub dim_metadata: Option<::flatbuffers::WIPOffset<::flatbuffers::Vector<'a, ::flatbuffers::ForwardsUOffset>>>>, + pub last_metadata_timestamp: i64, + pub current_shape: Option<::flatbuffers::WIPOffset<::flatbuffers::Vector<'a, i32>>>, + pub offset: Option<::flatbuffers::WIPOffset<::flatbuffers::Vector<'a, i32>>>, + pub data_type: Array, + pub data: Option<::flatbuffers::WIPOffset<::flatbuffers::UnionWIPOffset>>, + pub errors_type: Array, + pub errors: Option<::flatbuffers::WIPOffset<::flatbuffers::UnionWIPOffset>>, + pub info: Option<::flatbuffers::WIPOffset<&'a str>>, +} +impl<'a> Default for EventHistogramArgs<'a> { + #[inline] + fn default() -> Self { + EventHistogramArgs { + source: None, + timestamp: 0, + dim_metadata: None, + last_metadata_timestamp: 0, + current_shape: None, // required field + offset: None, + data_type: Array::NONE, + data: None, + errors_type: Array::NONE, + errors: None, + info: None, + } + } +} + +pub struct EventHistogramBuilder<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> { + fbb_: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + start_: ::flatbuffers::WIPOffset<::flatbuffers::TableUnfinishedWIPOffset>, +} +impl<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> EventHistogramBuilder<'a, 'b, A> { + #[inline] + pub fn add_source(&mut self, source: ::flatbuffers::WIPOffset<&'b str>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(EventHistogram::VT_SOURCE, source); + } + #[inline] + pub fn add_timestamp(&mut self, timestamp: i64) { + self.fbb_.push_slot::(EventHistogram::VT_TIMESTAMP, timestamp, 0); + } + #[inline] + pub fn add_dim_metadata(&mut self, dim_metadata: ::flatbuffers::WIPOffset<::flatbuffers::Vector<'b , ::flatbuffers::ForwardsUOffset>>>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(EventHistogram::VT_DIM_METADATA, dim_metadata); + } + #[inline] + pub 
fn add_last_metadata_timestamp(&mut self, last_metadata_timestamp: i64) { + self.fbb_.push_slot::(EventHistogram::VT_LAST_METADATA_TIMESTAMP, last_metadata_timestamp, 0); + } + #[inline] + pub fn add_current_shape(&mut self, current_shape: ::flatbuffers::WIPOffset<::flatbuffers::Vector<'b , i32>>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(EventHistogram::VT_CURRENT_SHAPE, current_shape); + } + #[inline] + pub fn add_offset(&mut self, offset: ::flatbuffers::WIPOffset<::flatbuffers::Vector<'b , i32>>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(EventHistogram::VT_OFFSET, offset); + } + #[inline] + pub fn add_data_type(&mut self, data_type: Array) { + self.fbb_.push_slot::(EventHistogram::VT_DATA_TYPE, data_type, Array::NONE); + } + #[inline] + pub fn add_data(&mut self, data: ::flatbuffers::WIPOffset<::flatbuffers::UnionWIPOffset>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(EventHistogram::VT_DATA, data); + } + #[inline] + pub fn add_errors_type(&mut self, errors_type: Array) { + self.fbb_.push_slot::(EventHistogram::VT_ERRORS_TYPE, errors_type, Array::NONE); + } + #[inline] + pub fn add_errors(&mut self, errors: ::flatbuffers::WIPOffset<::flatbuffers::UnionWIPOffset>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(EventHistogram::VT_ERRORS, errors); + } + #[inline] + pub fn add_info(&mut self, info: ::flatbuffers::WIPOffset<&'b str>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(EventHistogram::VT_INFO, info); + } + #[inline] + pub fn new(_fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>) -> EventHistogramBuilder<'a, 'b, A> { + let start = _fbb.start_table(); + EventHistogramBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub fn finish(self) -> ::flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + self.fbb_.required(o, EventHistogram::VT_CURRENT_SHAPE,"current_shape"); + ::flatbuffers::WIPOffset::new(o.value()) + } +} + +impl 
::core::fmt::Debug for EventHistogram<'_> { + fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + let mut ds = f.debug_struct("EventHistogram"); + ds.field("source", &self.source()); + ds.field("timestamp", &self.timestamp()); + ds.field("dim_metadata", &self.dim_metadata()); + ds.field("last_metadata_timestamp", &self.last_metadata_timestamp()); + ds.field("current_shape", &self.current_shape()); + ds.field("offset", &self.offset()); + ds.field("data_type", &self.data_type()); + match self.data_type() { + Array::ArrayInt => { + if let Some(x) = self.data_as_array_int() { + ds.field("data", &x) + } else { + ds.field("data", &"InvalidFlatbuffer: Union discriminant does not match value.") + } + }, + Array::ArrayLong => { + if let Some(x) = self.data_as_array_long() { + ds.field("data", &x) + } else { + ds.field("data", &"InvalidFlatbuffer: Union discriminant does not match value.") + } + }, + Array::ArrayDouble => { + if let Some(x) = self.data_as_array_double() { + ds.field("data", &x) + } else { + ds.field("data", &"InvalidFlatbuffer: Union discriminant does not match value.") + } + }, + Array::ArrayFloat => { + if let Some(x) = self.data_as_array_float() { + ds.field("data", &x) + } else { + ds.field("data", &"InvalidFlatbuffer: Union discriminant does not match value.") + } + }, + _ => { + let x: Option<()> = None; + ds.field("data", &x) + }, + }; + ds.field("errors_type", &self.errors_type()); + match self.errors_type() { + Array::ArrayInt => { + if let Some(x) = self.errors_as_array_int() { + ds.field("errors", &x) + } else { + ds.field("errors", &"InvalidFlatbuffer: Union discriminant does not match value.") + } + }, + Array::ArrayLong => { + if let Some(x) = self.errors_as_array_long() { + ds.field("errors", &x) + } else { + ds.field("errors", &"InvalidFlatbuffer: Union discriminant does not match value.") + } + }, + Array::ArrayDouble => { + if let Some(x) = self.errors_as_array_double() { + ds.field("errors", &x) + } else { + 
ds.field("errors", &"InvalidFlatbuffer: Union discriminant does not match value.") + } + }, + Array::ArrayFloat => { + if let Some(x) = self.errors_as_array_float() { + ds.field("errors", &x) + } else { + ds.field("errors", &"InvalidFlatbuffer: Union discriminant does not match value.") + } + }, + _ => { + let x: Option<()> = None; + ds.field("errors", &x) + }, + }; + ds.field("info", &self.info()); + ds.finish() + } +} +#[inline] +/// Verifies that a buffer of bytes contains a `EventHistogram` +/// and returns it. +/// Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `root_as_event_histogram_unchecked`. +pub fn root_as_event_histogram(buf: &[u8]) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::root::(buf) +} +#[inline] +/// Verifies that a buffer of bytes contains a size prefixed +/// `EventHistogram` and returns it. +/// Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `size_prefixed_root_as_event_histogram_unchecked`. +pub fn size_prefixed_root_as_event_histogram(buf: &[u8]) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::size_prefixed_root::(buf) +} +#[inline] +/// Verifies, with the given options, that a buffer of bytes +/// contains a `EventHistogram` and returns it. +/// Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `root_as_event_histogram_unchecked`. 
+pub fn root_as_event_histogram_with_opts<'b, 'o>( + opts: &'o ::flatbuffers::VerifierOptions, + buf: &'b [u8], +) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::root_with_opts::>(opts, buf) +} +#[inline] +/// Verifies, with the given verifier options, that a buffer of +/// bytes contains a size prefixed `EventHistogram` and returns +/// it. Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `root_as_event_histogram_unchecked`. +pub fn size_prefixed_root_as_event_histogram_with_opts<'b, 'o>( + opts: &'o ::flatbuffers::VerifierOptions, + buf: &'b [u8], +) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::size_prefixed_root_with_opts::>(opts, buf) +} +#[inline] +/// Assumes, without verification, that a buffer of bytes contains a EventHistogram and returns it. +/// # Safety +/// Callers must trust the given bytes do indeed contain a valid `EventHistogram`. +pub unsafe fn root_as_event_histogram_unchecked(buf: &[u8]) -> EventHistogram<'_> { + unsafe { ::flatbuffers::root_unchecked::(buf) } +} +#[inline] +/// Assumes, without verification, that a buffer of bytes contains a size prefixed EventHistogram and returns it. +/// # Safety +/// Callers must trust the given bytes do indeed contain a valid size prefixed `EventHistogram`. 
+pub unsafe fn size_prefixed_root_as_event_histogram_unchecked(buf: &[u8]) -> EventHistogram<'_> { + unsafe { ::flatbuffers::size_prefixed_root_unchecked::(buf) } +} +pub const EVENT_HISTOGRAM_IDENTIFIER: &str = "hs01"; + +#[inline] +pub fn event_histogram_buffer_has_identifier(buf: &[u8]) -> bool { + ::flatbuffers::buffer_has_identifier(buf, EVENT_HISTOGRAM_IDENTIFIER, false) +} + +#[inline] +pub fn event_histogram_size_prefixed_buffer_has_identifier(buf: &[u8]) -> bool { + ::flatbuffers::buffer_has_identifier(buf, EVENT_HISTOGRAM_IDENTIFIER, true) +} + +#[inline] +pub fn finish_event_histogram_buffer<'a, 'b, A: ::flatbuffers::Allocator + 'a>( + fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + root: ::flatbuffers::WIPOffset>) { + fbb.finish(root, Some(EVENT_HISTOGRAM_IDENTIFIER)); +} + +#[inline] +pub fn finish_size_prefixed_event_histogram_buffer<'a, 'b, A: ::flatbuffers::Allocator + 'a>(fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, root: ::flatbuffers::WIPOffset>) { + fbb.finish_size_prefixed(root, Some(EVENT_HISTOGRAM_IDENTIFIER)); +} diff --git a/rust/src/flatbuffers_generated/json_json.rs b/rust/src/flatbuffers_generated/json_json.rs new file mode 100644 index 0000000..1e77e5b --- /dev/null +++ b/rust/src/flatbuffers_generated/json_json.rs @@ -0,0 +1,184 @@ +// automatically generated by the FlatBuffers compiler, do not modify +// @generated + +extern crate alloc; + +pub enum JsonDataOffset {} +#[derive(Copy, Clone, PartialEq)] + +pub struct JsonData<'a> { + pub _tab: ::flatbuffers::Table<'a>, +} + +impl<'a> ::flatbuffers::Follow<'a> for JsonData<'a> { + type Inner = JsonData<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { _tab: unsafe { ::flatbuffers::Table::new(buf, loc) } } + } +} + +impl<'a> JsonData<'a> { + pub const VT_JSON: ::flatbuffers::VOffsetT = 4; + + #[inline] + pub unsafe fn init_from_table(table: ::flatbuffers::Table<'a>) -> Self { + JsonData { _tab: table } + } + #[allow(unused_mut)] + 
pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: ::flatbuffers::Allocator + 'bldr>( + _fbb: &'mut_bldr mut ::flatbuffers::FlatBufferBuilder<'bldr, A>, + args: &'args JsonDataArgs<'args> + ) -> ::flatbuffers::WIPOffset> { + let mut builder = JsonDataBuilder::new(_fbb); + if let Some(x) = args.json { builder.add_json(x); } + builder.finish() + } + + + #[inline] + pub fn json(&self) -> Option<&'a str> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<&str>>(JsonData::VT_JSON, None)} + } +} + +impl ::flatbuffers::Verifiable for JsonData<'_> { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + v.visit_table(pos)? + .visit_field::<::flatbuffers::ForwardsUOffset<&str>>("json", Self::VT_JSON, false)? + .finish(); + Ok(()) + } +} +pub struct JsonDataArgs<'a> { + pub json: Option<::flatbuffers::WIPOffset<&'a str>>, +} +impl<'a> Default for JsonDataArgs<'a> { + #[inline] + fn default() -> Self { + JsonDataArgs { + json: None, + } + } +} + +pub struct JsonDataBuilder<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> { + fbb_: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + start_: ::flatbuffers::WIPOffset<::flatbuffers::TableUnfinishedWIPOffset>, +} +impl<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> JsonDataBuilder<'a, 'b, A> { + #[inline] + pub fn add_json(&mut self, json: ::flatbuffers::WIPOffset<&'b str>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(JsonData::VT_JSON, json); + } + #[inline] + pub fn new(_fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>) -> JsonDataBuilder<'a, 'b, A> { + let start = _fbb.start_table(); + JsonDataBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub fn finish(self) -> ::flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + ::flatbuffers::WIPOffset::new(o.value()) + } +} + +impl 
::core::fmt::Debug for JsonData<'_> { + fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + let mut ds = f.debug_struct("JsonData"); + ds.field("json", &self.json()); + ds.finish() + } +} +#[inline] +/// Verifies that a buffer of bytes contains a `JsonData` +/// and returns it. +/// Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `root_as_json_data_unchecked`. +pub fn root_as_json_data(buf: &[u8]) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::root::(buf) +} +#[inline] +/// Verifies that a buffer of bytes contains a size prefixed +/// `JsonData` and returns it. +/// Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `size_prefixed_root_as_json_data_unchecked`. +pub fn size_prefixed_root_as_json_data(buf: &[u8]) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::size_prefixed_root::(buf) +} +#[inline] +/// Verifies, with the given options, that a buffer of bytes +/// contains a `JsonData` and returns it. +/// Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `root_as_json_data_unchecked`. +pub fn root_as_json_data_with_opts<'b, 'o>( + opts: &'o ::flatbuffers::VerifierOptions, + buf: &'b [u8], +) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::root_with_opts::>(opts, buf) +} +#[inline] +/// Verifies, with the given verifier options, that a buffer of +/// bytes contains a size prefixed `JsonData` and returns +/// it. Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `root_as_json_data_unchecked`. 
+pub fn size_prefixed_root_as_json_data_with_opts<'b, 'o>( + opts: &'o ::flatbuffers::VerifierOptions, + buf: &'b [u8], +) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::size_prefixed_root_with_opts::>(opts, buf) +} +#[inline] +/// Assumes, without verification, that a buffer of bytes contains a JsonData and returns it. +/// # Safety +/// Callers must trust the given bytes do indeed contain a valid `JsonData`. +pub unsafe fn root_as_json_data_unchecked(buf: &[u8]) -> JsonData<'_> { + unsafe { ::flatbuffers::root_unchecked::(buf) } +} +#[inline] +/// Assumes, without verification, that a buffer of bytes contains a size prefixed JsonData and returns it. +/// # Safety +/// Callers must trust the given bytes do indeed contain a valid size prefixed `JsonData`. +pub unsafe fn size_prefixed_root_as_json_data_unchecked(buf: &[u8]) -> JsonData<'_> { + unsafe { ::flatbuffers::size_prefixed_root_unchecked::(buf) } +} +pub const JSON_DATA_IDENTIFIER: &str = "json"; + +#[inline] +pub fn json_data_buffer_has_identifier(buf: &[u8]) -> bool { + ::flatbuffers::buffer_has_identifier(buf, JSON_DATA_IDENTIFIER, false) +} + +#[inline] +pub fn json_data_size_prefixed_buffer_has_identifier(buf: &[u8]) -> bool { + ::flatbuffers::buffer_has_identifier(buf, JSON_DATA_IDENTIFIER, true) +} + +#[inline] +pub fn finish_json_data_buffer<'a, 'b, A: ::flatbuffers::Allocator + 'a>( + fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + root: ::flatbuffers::WIPOffset>) { + fbb.finish(root, Some(JSON_DATA_IDENTIFIER)); +} + +#[inline] +pub fn finish_size_prefixed_json_data_buffer<'a, 'b, A: ::flatbuffers::Allocator + 'a>(fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, root: ::flatbuffers::WIPOffset>) { + fbb.finish_size_prefixed(root, Some(JSON_DATA_IDENTIFIER)); +} diff --git a/rust/src/flatbuffers_generated/mod.rs b/rust/src/flatbuffers_generated/mod.rs new file mode 100644 index 0000000..9aedece --- /dev/null +++ b/rust/src/flatbuffers_generated/mod.rs @@ -0,0 +1,36 @@ 
+#[path = "6s4t_run_stop.rs"] +pub mod run_stop_6s4t; +#[path = "ad00_area_detector_array.rs"] +pub mod area_detector_array_ad00; +#[path = "al00_alarm.rs"] +pub mod alarm_al00; +#[path = "answ_action_response.rs"] +pub mod action_response_answ; +#[path = "da00_dataarray.rs"] +pub mod dataarray_da00; +#[path = "df12_det_spec_map.rs"] +pub mod det_spec_map_df12; +#[path = "ep01_epics_connection.rs"] +pub mod epics_connection_ep01; +#[path = "ev44_events.rs"] +pub mod events_ev44; +#[path = "f144_logdata.rs"] +pub mod logdata_f144; +#[path = "fc00_forwarder_config.rs"] +pub mod forwarder_config_fc00; +#[path = "hs01_event_histogram.rs"] +pub mod event_histogram_hs01; +#[path = "json_json.rs"] +pub mod json_json; +#[path = "pl72_run_start.rs"] +pub mod run_start_pl72; +#[path = "pu00_pulse_metadata.rs"] +pub mod pulse_metadata_pu00; +#[path = "se00_data.rs"] +pub mod data_se00; +#[path = "un00_units.rs"] +pub mod units_un00; +#[path = "wrdn_finished_writing.rs"] +pub mod finished_writing_wrdn; +#[path = "x5f2_status.rs"] +pub mod status_x5f2; diff --git a/rust/src/flatbuffers_generated/pl72_run_start.rs b/rust/src/flatbuffers_generated/pl72_run_start.rs new file mode 100644 index 0000000..412df29 --- /dev/null +++ b/rust/src/flatbuffers_generated/pl72_run_start.rs @@ -0,0 +1,518 @@ +// automatically generated by the FlatBuffers compiler, do not modify +// @generated + +extern crate alloc; + +pub enum SpectraDetectorMappingOffset {} +#[derive(Copy, Clone, PartialEq)] + +pub struct SpectraDetectorMapping<'a> { + pub _tab: ::flatbuffers::Table<'a>, +} + +impl<'a> ::flatbuffers::Follow<'a> for SpectraDetectorMapping<'a> { + type Inner = SpectraDetectorMapping<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { _tab: unsafe { ::flatbuffers::Table::new(buf, loc) } } + } +} + +impl<'a> SpectraDetectorMapping<'a> { + pub const VT_SPECTRUM: ::flatbuffers::VOffsetT = 4; + pub const VT_DETECTOR_ID: ::flatbuffers::VOffsetT = 6; + pub const 
VT_N_SPECTRA: ::flatbuffers::VOffsetT = 8; + + #[inline] + pub unsafe fn init_from_table(table: ::flatbuffers::Table<'a>) -> Self { + SpectraDetectorMapping { _tab: table } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: ::flatbuffers::Allocator + 'bldr>( + _fbb: &'mut_bldr mut ::flatbuffers::FlatBufferBuilder<'bldr, A>, + args: &'args SpectraDetectorMappingArgs<'args> + ) -> ::flatbuffers::WIPOffset> { + let mut builder = SpectraDetectorMappingBuilder::new(_fbb); + builder.add_n_spectra(args.n_spectra); + if let Some(x) = args.detector_id { builder.add_detector_id(x); } + if let Some(x) = args.spectrum { builder.add_spectrum(x); } + builder.finish() + } + + + #[inline] + pub fn spectrum(&self) -> Option<::flatbuffers::Vector<'a, i32>> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'a, i32>>>(SpectraDetectorMapping::VT_SPECTRUM, None)} + } + #[inline] + pub fn detector_id(&self) -> Option<::flatbuffers::Vector<'a, i32>> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'a, i32>>>(SpectraDetectorMapping::VT_DETECTOR_ID, None)} + } + #[inline] + pub fn n_spectra(&self) -> i32 { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::(SpectraDetectorMapping::VT_N_SPECTRA, Some(0)).unwrap()} + } +} + +impl ::flatbuffers::Verifiable for SpectraDetectorMapping<'_> { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + v.visit_table(pos)? + .visit_field::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'_, i32>>>("spectrum", Self::VT_SPECTRUM, false)? 
+ .visit_field::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'_, i32>>>("detector_id", Self::VT_DETECTOR_ID, false)? + .visit_field::("n_spectra", Self::VT_N_SPECTRA, false)? + .finish(); + Ok(()) + } +} +pub struct SpectraDetectorMappingArgs<'a> { + pub spectrum: Option<::flatbuffers::WIPOffset<::flatbuffers::Vector<'a, i32>>>, + pub detector_id: Option<::flatbuffers::WIPOffset<::flatbuffers::Vector<'a, i32>>>, + pub n_spectra: i32, +} +impl<'a> Default for SpectraDetectorMappingArgs<'a> { + #[inline] + fn default() -> Self { + SpectraDetectorMappingArgs { + spectrum: None, + detector_id: None, + n_spectra: 0, + } + } +} + +pub struct SpectraDetectorMappingBuilder<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> { + fbb_: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + start_: ::flatbuffers::WIPOffset<::flatbuffers::TableUnfinishedWIPOffset>, +} +impl<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> SpectraDetectorMappingBuilder<'a, 'b, A> { + #[inline] + pub fn add_spectrum(&mut self, spectrum: ::flatbuffers::WIPOffset<::flatbuffers::Vector<'b , i32>>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(SpectraDetectorMapping::VT_SPECTRUM, spectrum); + } + #[inline] + pub fn add_detector_id(&mut self, detector_id: ::flatbuffers::WIPOffset<::flatbuffers::Vector<'b , i32>>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(SpectraDetectorMapping::VT_DETECTOR_ID, detector_id); + } + #[inline] + pub fn add_n_spectra(&mut self, n_spectra: i32) { + self.fbb_.push_slot::(SpectraDetectorMapping::VT_N_SPECTRA, n_spectra, 0); + } + #[inline] + pub fn new(_fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>) -> SpectraDetectorMappingBuilder<'a, 'b, A> { + let start = _fbb.start_table(); + SpectraDetectorMappingBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub fn finish(self) -> ::flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + ::flatbuffers::WIPOffset::new(o.value()) + } +} + +impl ::core::fmt::Debug for 
SpectraDetectorMapping<'_> { + fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + let mut ds = f.debug_struct("SpectraDetectorMapping"); + ds.field("spectrum", &self.spectrum()); + ds.field("detector_id", &self.detector_id()); + ds.field("n_spectra", &self.n_spectra()); + ds.finish() + } +} +pub enum RunStartOffset {} +#[derive(Copy, Clone, PartialEq)] + +pub struct RunStart<'a> { + pub _tab: ::flatbuffers::Table<'a>, +} + +impl<'a> ::flatbuffers::Follow<'a> for RunStart<'a> { + type Inner = RunStart<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { _tab: unsafe { ::flatbuffers::Table::new(buf, loc) } } + } +} + +impl<'a> RunStart<'a> { + pub const VT_START_TIME: ::flatbuffers::VOffsetT = 4; + pub const VT_STOP_TIME: ::flatbuffers::VOffsetT = 6; + pub const VT_RUN_NAME: ::flatbuffers::VOffsetT = 8; + pub const VT_INSTRUMENT_NAME: ::flatbuffers::VOffsetT = 10; + pub const VT_NEXUS_STRUCTURE: ::flatbuffers::VOffsetT = 12; + pub const VT_JOB_ID: ::flatbuffers::VOffsetT = 14; + pub const VT_BROKER: ::flatbuffers::VOffsetT = 16; + pub const VT_SERVICE_ID: ::flatbuffers::VOffsetT = 18; + pub const VT_FILENAME: ::flatbuffers::VOffsetT = 20; + pub const VT_N_PERIODS: ::flatbuffers::VOffsetT = 22; + pub const VT_DETECTOR_SPECTRUM_MAP: ::flatbuffers::VOffsetT = 24; + pub const VT_METADATA: ::flatbuffers::VOffsetT = 26; + pub const VT_CONTROL_TOPIC: ::flatbuffers::VOffsetT = 28; + + #[inline] + pub unsafe fn init_from_table(table: ::flatbuffers::Table<'a>) -> Self { + RunStart { _tab: table } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: ::flatbuffers::Allocator + 'bldr>( + _fbb: &'mut_bldr mut ::flatbuffers::FlatBufferBuilder<'bldr, A>, + args: &'args RunStartArgs<'args> + ) -> ::flatbuffers::WIPOffset> { + let mut builder = RunStartBuilder::new(_fbb); + builder.add_stop_time(args.stop_time); + builder.add_start_time(args.start_time); + if let Some(x) = 
args.control_topic { builder.add_control_topic(x); } + if let Some(x) = args.metadata { builder.add_metadata(x); } + if let Some(x) = args.detector_spectrum_map { builder.add_detector_spectrum_map(x); } + builder.add_n_periods(args.n_periods); + if let Some(x) = args.filename { builder.add_filename(x); } + if let Some(x) = args.service_id { builder.add_service_id(x); } + if let Some(x) = args.broker { builder.add_broker(x); } + if let Some(x) = args.job_id { builder.add_job_id(x); } + if let Some(x) = args.nexus_structure { builder.add_nexus_structure(x); } + if let Some(x) = args.instrument_name { builder.add_instrument_name(x); } + if let Some(x) = args.run_name { builder.add_run_name(x); } + builder.finish() + } + + + #[inline] + pub fn start_time(&self) -> u64 { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::(RunStart::VT_START_TIME, Some(0)).unwrap()} + } + #[inline] + pub fn stop_time(&self) -> u64 { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::(RunStart::VT_STOP_TIME, Some(0)).unwrap()} + } + #[inline] + pub fn run_name(&self) -> Option<&'a str> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<&str>>(RunStart::VT_RUN_NAME, None)} + } + #[inline] + pub fn instrument_name(&self) -> Option<&'a str> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<&str>>(RunStart::VT_INSTRUMENT_NAME, None)} + } + #[inline] + pub fn nexus_structure(&self) -> Option<&'a str> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<&str>>(RunStart::VT_NEXUS_STRUCTURE, None)} + 
} + #[inline] + pub fn job_id(&self) -> Option<&'a str> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<&str>>(RunStart::VT_JOB_ID, None)} + } + #[inline] + pub fn broker(&self) -> Option<&'a str> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<&str>>(RunStart::VT_BROKER, None)} + } + #[inline] + pub fn service_id(&self) -> Option<&'a str> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<&str>>(RunStart::VT_SERVICE_ID, None)} + } + #[inline] + pub fn filename(&self) -> Option<&'a str> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<&str>>(RunStart::VT_FILENAME, None)} + } + #[inline] + pub fn n_periods(&self) -> u32 { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::(RunStart::VT_N_PERIODS, Some(1)).unwrap()} + } + #[inline] + pub fn detector_spectrum_map(&self) -> Option> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset>(RunStart::VT_DETECTOR_SPECTRUM_MAP, None)} + } + #[inline] + pub fn metadata(&self) -> Option<&'a str> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<&str>>(RunStart::VT_METADATA, None)} + } + #[inline] + pub fn control_topic(&self) -> Option<&'a str> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { 
self._tab.get::<::flatbuffers::ForwardsUOffset<&str>>(RunStart::VT_CONTROL_TOPIC, None)} + } +} + +impl ::flatbuffers::Verifiable for RunStart<'_> { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + v.visit_table(pos)? + .visit_field::("start_time", Self::VT_START_TIME, false)? + .visit_field::("stop_time", Self::VT_STOP_TIME, false)? + .visit_field::<::flatbuffers::ForwardsUOffset<&str>>("run_name", Self::VT_RUN_NAME, false)? + .visit_field::<::flatbuffers::ForwardsUOffset<&str>>("instrument_name", Self::VT_INSTRUMENT_NAME, false)? + .visit_field::<::flatbuffers::ForwardsUOffset<&str>>("nexus_structure", Self::VT_NEXUS_STRUCTURE, false)? + .visit_field::<::flatbuffers::ForwardsUOffset<&str>>("job_id", Self::VT_JOB_ID, false)? + .visit_field::<::flatbuffers::ForwardsUOffset<&str>>("broker", Self::VT_BROKER, false)? + .visit_field::<::flatbuffers::ForwardsUOffset<&str>>("service_id", Self::VT_SERVICE_ID, false)? + .visit_field::<::flatbuffers::ForwardsUOffset<&str>>("filename", Self::VT_FILENAME, false)? + .visit_field::("n_periods", Self::VT_N_PERIODS, false)? + .visit_field::<::flatbuffers::ForwardsUOffset>("detector_spectrum_map", Self::VT_DETECTOR_SPECTRUM_MAP, false)? + .visit_field::<::flatbuffers::ForwardsUOffset<&str>>("metadata", Self::VT_METADATA, false)? + .visit_field::<::flatbuffers::ForwardsUOffset<&str>>("control_topic", Self::VT_CONTROL_TOPIC, false)? 
+ .finish(); + Ok(()) + } +} +pub struct RunStartArgs<'a> { + pub start_time: u64, + pub stop_time: u64, + pub run_name: Option<::flatbuffers::WIPOffset<&'a str>>, + pub instrument_name: Option<::flatbuffers::WIPOffset<&'a str>>, + pub nexus_structure: Option<::flatbuffers::WIPOffset<&'a str>>, + pub job_id: Option<::flatbuffers::WIPOffset<&'a str>>, + pub broker: Option<::flatbuffers::WIPOffset<&'a str>>, + pub service_id: Option<::flatbuffers::WIPOffset<&'a str>>, + pub filename: Option<::flatbuffers::WIPOffset<&'a str>>, + pub n_periods: u32, + pub detector_spectrum_map: Option<::flatbuffers::WIPOffset>>, + pub metadata: Option<::flatbuffers::WIPOffset<&'a str>>, + pub control_topic: Option<::flatbuffers::WIPOffset<&'a str>>, +} +impl<'a> Default for RunStartArgs<'a> { + #[inline] + fn default() -> Self { + RunStartArgs { + start_time: 0, + stop_time: 0, + run_name: None, + instrument_name: None, + nexus_structure: None, + job_id: None, + broker: None, + service_id: None, + filename: None, + n_periods: 1, + detector_spectrum_map: None, + metadata: None, + control_topic: None, + } + } +} + +pub struct RunStartBuilder<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> { + fbb_: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + start_: ::flatbuffers::WIPOffset<::flatbuffers::TableUnfinishedWIPOffset>, +} +impl<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> RunStartBuilder<'a, 'b, A> { + #[inline] + pub fn add_start_time(&mut self, start_time: u64) { + self.fbb_.push_slot::(RunStart::VT_START_TIME, start_time, 0); + } + #[inline] + pub fn add_stop_time(&mut self, stop_time: u64) { + self.fbb_.push_slot::(RunStart::VT_STOP_TIME, stop_time, 0); + } + #[inline] + pub fn add_run_name(&mut self, run_name: ::flatbuffers::WIPOffset<&'b str>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(RunStart::VT_RUN_NAME, run_name); + } + #[inline] + pub fn add_instrument_name(&mut self, instrument_name: ::flatbuffers::WIPOffset<&'b str>) { + 
self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(RunStart::VT_INSTRUMENT_NAME, instrument_name); + } + #[inline] + pub fn add_nexus_structure(&mut self, nexus_structure: ::flatbuffers::WIPOffset<&'b str>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(RunStart::VT_NEXUS_STRUCTURE, nexus_structure); + } + #[inline] + pub fn add_job_id(&mut self, job_id: ::flatbuffers::WIPOffset<&'b str>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(RunStart::VT_JOB_ID, job_id); + } + #[inline] + pub fn add_broker(&mut self, broker: ::flatbuffers::WIPOffset<&'b str>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(RunStart::VT_BROKER, broker); + } + #[inline] + pub fn add_service_id(&mut self, service_id: ::flatbuffers::WIPOffset<&'b str>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(RunStart::VT_SERVICE_ID, service_id); + } + #[inline] + pub fn add_filename(&mut self, filename: ::flatbuffers::WIPOffset<&'b str>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(RunStart::VT_FILENAME, filename); + } + #[inline] + pub fn add_n_periods(&mut self, n_periods: u32) { + self.fbb_.push_slot::(RunStart::VT_N_PERIODS, n_periods, 1); + } + #[inline] + pub fn add_detector_spectrum_map(&mut self, detector_spectrum_map: ::flatbuffers::WIPOffset>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset>(RunStart::VT_DETECTOR_SPECTRUM_MAP, detector_spectrum_map); + } + #[inline] + pub fn add_metadata(&mut self, metadata: ::flatbuffers::WIPOffset<&'b str>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(RunStart::VT_METADATA, metadata); + } + #[inline] + pub fn add_control_topic(&mut self, control_topic: ::flatbuffers::WIPOffset<&'b str>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(RunStart::VT_CONTROL_TOPIC, control_topic); + } + #[inline] + pub fn new(_fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>) -> RunStartBuilder<'a, 'b, A> { + let start = _fbb.start_table(); + 
RunStartBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub fn finish(self) -> ::flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + ::flatbuffers::WIPOffset::new(o.value()) + } +} + +impl ::core::fmt::Debug for RunStart<'_> { + fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + let mut ds = f.debug_struct("RunStart"); + ds.field("start_time", &self.start_time()); + ds.field("stop_time", &self.stop_time()); + ds.field("run_name", &self.run_name()); + ds.field("instrument_name", &self.instrument_name()); + ds.field("nexus_structure", &self.nexus_structure()); + ds.field("job_id", &self.job_id()); + ds.field("broker", &self.broker()); + ds.field("service_id", &self.service_id()); + ds.field("filename", &self.filename()); + ds.field("n_periods", &self.n_periods()); + ds.field("detector_spectrum_map", &self.detector_spectrum_map()); + ds.field("metadata", &self.metadata()); + ds.field("control_topic", &self.control_topic()); + ds.finish() + } +} +#[inline] +/// Verifies that a buffer of bytes contains a `RunStart` +/// and returns it. +/// Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `root_as_run_start_unchecked`. +pub fn root_as_run_start(buf: &[u8]) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::root::(buf) +} +#[inline] +/// Verifies that a buffer of bytes contains a size prefixed +/// `RunStart` and returns it. +/// Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `size_prefixed_root_as_run_start_unchecked`. +pub fn size_prefixed_root_as_run_start(buf: &[u8]) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::size_prefixed_root::(buf) +} +#[inline] +/// Verifies, with the given options, that a buffer of bytes +/// contains a `RunStart` and returns it. 
+/// Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `root_as_run_start_unchecked`. +pub fn root_as_run_start_with_opts<'b, 'o>( + opts: &'o ::flatbuffers::VerifierOptions, + buf: &'b [u8], +) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::root_with_opts::>(opts, buf) +} +#[inline] +/// Verifies, with the given verifier options, that a buffer of +/// bytes contains a size prefixed `RunStart` and returns +/// it. Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `root_as_run_start_unchecked`. +pub fn size_prefixed_root_as_run_start_with_opts<'b, 'o>( + opts: &'o ::flatbuffers::VerifierOptions, + buf: &'b [u8], +) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::size_prefixed_root_with_opts::>(opts, buf) +} +#[inline] +/// Assumes, without verification, that a buffer of bytes contains a RunStart and returns it. +/// # Safety +/// Callers must trust the given bytes do indeed contain a valid `RunStart`. +pub unsafe fn root_as_run_start_unchecked(buf: &[u8]) -> RunStart<'_> { + unsafe { ::flatbuffers::root_unchecked::(buf) } +} +#[inline] +/// Assumes, without verification, that a buffer of bytes contains a size prefixed RunStart and returns it. +/// # Safety +/// Callers must trust the given bytes do indeed contain a valid size prefixed `RunStart`. 
+pub unsafe fn size_prefixed_root_as_run_start_unchecked(buf: &[u8]) -> RunStart<'_> { + unsafe { ::flatbuffers::size_prefixed_root_unchecked::(buf) } +} +pub const RUN_START_IDENTIFIER: &str = "pl72"; + +#[inline] +pub fn run_start_buffer_has_identifier(buf: &[u8]) -> bool { + ::flatbuffers::buffer_has_identifier(buf, RUN_START_IDENTIFIER, false) +} + +#[inline] +pub fn run_start_size_prefixed_buffer_has_identifier(buf: &[u8]) -> bool { + ::flatbuffers::buffer_has_identifier(buf, RUN_START_IDENTIFIER, true) +} + +#[inline] +pub fn finish_run_start_buffer<'a, 'b, A: ::flatbuffers::Allocator + 'a>( + fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + root: ::flatbuffers::WIPOffset>) { + fbb.finish(root, Some(RUN_START_IDENTIFIER)); +} + +#[inline] +pub fn finish_size_prefixed_run_start_buffer<'a, 'b, A: ::flatbuffers::Allocator + 'a>(fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, root: ::flatbuffers::WIPOffset>) { + fbb.finish_size_prefixed(root, Some(RUN_START_IDENTIFIER)); +} diff --git a/rust/src/flatbuffers_generated/pu00_pulse_metadata.rs b/rust/src/flatbuffers_generated/pu00_pulse_metadata.rs new file mode 100644 index 0000000..81487a2 --- /dev/null +++ b/rust/src/flatbuffers_generated/pu00_pulse_metadata.rs @@ -0,0 +1,270 @@ +// automatically generated by the FlatBuffers compiler, do not modify +// @generated + +extern crate alloc; + +pub enum Pu00MessageOffset {} +#[derive(Copy, Clone, PartialEq)] + +pub struct Pu00Message<'a> { + pub _tab: ::flatbuffers::Table<'a>, +} + +impl<'a> ::flatbuffers::Follow<'a> for Pu00Message<'a> { + type Inner = Pu00Message<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { _tab: unsafe { ::flatbuffers::Table::new(buf, loc) } } + } +} + +impl<'a> Pu00Message<'a> { + pub const VT_SOURCE_NAME: ::flatbuffers::VOffsetT = 4; + pub const VT_MESSAGE_ID: ::flatbuffers::VOffsetT = 6; + pub const VT_REFERENCE_TIME: ::flatbuffers::VOffsetT = 8; + pub const VT_VETOS: ::flatbuffers::VOffsetT 
= 10;
  pub const VT_PERIOD_NUMBER: ::flatbuffers::VOffsetT = 12;
  pub const VT_PROTON_CHARGE: ::flatbuffers::VOffsetT = 14;

  #[inline]
  pub unsafe fn init_from_table(table: ::flatbuffers::Table<'a>) -> Self {
    Pu00Message { _tab: table }
  }
  #[allow(unused_mut)]
  pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: ::flatbuffers::Allocator + 'bldr>(
    _fbb: &'mut_bldr mut ::flatbuffers::FlatBufferBuilder<'bldr, A>,
    args: &'args Pu00MessageArgs<'args>
  ) -> ::flatbuffers::WIPOffset<Pu00Message<'bldr>> {
    let mut builder = Pu00MessageBuilder::new(_fbb);
    builder.add_reference_time(args.reference_time);
    builder.add_message_id(args.message_id);
    if let Some(x) = args.proton_charge { builder.add_proton_charge(x); }
    if let Some(x) = args.period_number { builder.add_period_number(x); }
    if let Some(x) = args.vetos { builder.add_vetos(x); }
    if let Some(x) = args.source_name { builder.add_source_name(x); }
    builder.finish()
  }


  #[inline]
  pub fn source_name(&self) -> &'a str {
    // Safety:
    // Created from valid Table for this object
    // which contains a valid value in this slot
    unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<&str>>(Pu00Message::VT_SOURCE_NAME, None).unwrap()}
  }
  #[inline]
  pub fn message_id(&self) -> i64 {
    // Safety:
    // Created from valid Table for this object
    // which contains a valid value in this slot
    unsafe { self._tab.get::<i64>(Pu00Message::VT_MESSAGE_ID, Some(0)).unwrap()}
  }
  #[inline]
  pub fn reference_time(&self) -> i64 {
    // Safety:
    // Created from valid Table for this object
    // which contains a valid value in this slot
    unsafe { self._tab.get::<i64>(Pu00Message::VT_REFERENCE_TIME, Some(0)).unwrap()}
  }
  #[inline]
  pub fn vetos(&self) -> Option<u32> {
    // Safety:
    // Created from valid Table for this object
    // which contains a valid value in this slot
    unsafe { self._tab.get::<u32>(Pu00Message::VT_VETOS, None)}
  }
  #[inline]
  pub fn period_number(&self) -> Option<u32> {
    // Safety:
    // Created from valid Table for this object
    // which contains a valid value in this slot
    unsafe { self._tab.get::<u32>(Pu00Message::VT_PERIOD_NUMBER, None)}
  }
  #[inline]
  pub fn proton_charge(&self) -> Option<f32> {
    // Safety:
    // Created from valid Table for this object
    // which contains a valid value in this slot
    unsafe { self._tab.get::<f32>(Pu00Message::VT_PROTON_CHARGE, None)}
  }
}

impl ::flatbuffers::Verifiable for Pu00Message<'_> {
  #[inline]
  fn run_verifier(
    v: &mut ::flatbuffers::Verifier, pos: usize
  ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> {
    v.visit_table(pos)?
     .visit_field::<::flatbuffers::ForwardsUOffset<&str>>("source_name", Self::VT_SOURCE_NAME, true)?
     .visit_field::<i64>("message_id", Self::VT_MESSAGE_ID, false)?
     .visit_field::<i64>("reference_time", Self::VT_REFERENCE_TIME, false)?
     .visit_field::<u32>("vetos", Self::VT_VETOS, false)?
     .visit_field::<u32>("period_number", Self::VT_PERIOD_NUMBER, false)?
     .visit_field::<f32>("proton_charge", Self::VT_PROTON_CHARGE, false)?
     .finish();
    Ok(())
  }
}
pub struct Pu00MessageArgs<'a> {
    pub source_name: Option<::flatbuffers::WIPOffset<&'a str>>,
    pub message_id: i64,
    pub reference_time: i64,
    pub vetos: Option<u32>,
    pub period_number: Option<u32>,
    pub proton_charge: Option<f32>,
}
impl<'a> Default for Pu00MessageArgs<'a> {
  #[inline]
  fn default() -> Self {
    Pu00MessageArgs {
      source_name: None, // required field
      message_id: 0,
      reference_time: 0,
      vetos: None,
      period_number: None,
      proton_charge: None,
    }
  }
}

pub struct Pu00MessageBuilder<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> {
  fbb_: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>,
  start_: ::flatbuffers::WIPOffset<::flatbuffers::TableUnfinishedWIPOffset>,
}
impl<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> Pu00MessageBuilder<'a, 'b, A> {
  #[inline]
  pub fn add_source_name(&mut self, source_name: ::flatbuffers::WIPOffset<&'b str>) {
    self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(Pu00Message::VT_SOURCE_NAME, source_name);
  }
  #[inline]
  pub fn add_message_id(&mut self, message_id: i64) {
    self.fbb_.push_slot::<i64>(Pu00Message::VT_MESSAGE_ID, message_id, 0);
  }
  #[inline]
  pub fn add_reference_time(&mut self, reference_time: i64) {
    self.fbb_.push_slot::<i64>(Pu00Message::VT_REFERENCE_TIME, reference_time, 0);
  }
  #[inline]
  pub fn add_vetos(&mut self, vetos: u32) {
    self.fbb_.push_slot_always::<u32>(Pu00Message::VT_VETOS, vetos);
  }
  #[inline]
  pub fn add_period_number(&mut self, period_number: u32) {
    self.fbb_.push_slot_always::<u32>(Pu00Message::VT_PERIOD_NUMBER, period_number);
  }
  #[inline]
  pub fn add_proton_charge(&mut self, proton_charge: f32) {
    self.fbb_.push_slot_always::<f32>(Pu00Message::VT_PROTON_CHARGE, proton_charge);
  }
  #[inline]
  pub fn new(_fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>) -> Pu00MessageBuilder<'a, 'b, A> {
    let start = _fbb.start_table();
    Pu00MessageBuilder {
      fbb_: _fbb,
      start_: start,
    }
  }
  #[inline]
  pub fn finish(self) -> ::flatbuffers::WIPOffset<Pu00Message<'a>> {
    let o = self.fbb_.end_table(self.start_);
    self.fbb_.required(o, Pu00Message::VT_SOURCE_NAME,"source_name");
    ::flatbuffers::WIPOffset::new(o.value())
  }
}

impl ::core::fmt::Debug for Pu00Message<'_> {
  fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result {
    let mut ds = f.debug_struct("Pu00Message");
    ds.field("source_name", &self.source_name());
    ds.field("message_id", &self.message_id());
    ds.field("reference_time", &self.reference_time());
    ds.field("vetos", &self.vetos());
    ds.field("period_number", &self.period_number());
    ds.field("proton_charge", &self.proton_charge());
    ds.finish()
  }
}
#[inline]
/// Verifies that a buffer of bytes contains a `Pu00Message`
/// and returns it.
/// Note that verification is still experimental and may not
/// catch every error, or be maximally performant. For the
/// previous, unchecked, behavior use
/// `root_as_pu_00_message_unchecked`.
pub fn root_as_pu_00_message(buf: &[u8]) -> Result<Pu00Message, ::flatbuffers::InvalidFlatbuffer> {
  ::flatbuffers::root::<Pu00Message>(buf)
}
#[inline]
/// Verifies that a buffer of bytes contains a size prefixed
/// `Pu00Message` and returns it.
/// Note that verification is still experimental and may not
/// catch every error, or be maximally performant. For the
/// previous, unchecked, behavior use
/// `size_prefixed_root_as_pu_00_message_unchecked`.
pub fn size_prefixed_root_as_pu_00_message(buf: &[u8]) -> Result<Pu00Message, ::flatbuffers::InvalidFlatbuffer> {
  ::flatbuffers::size_prefixed_root::<Pu00Message>(buf)
}
#[inline]
/// Verifies, with the given options, that a buffer of bytes
/// contains a `Pu00Message` and returns it.
/// Note that verification is still experimental and may not
/// catch every error, or be maximally performant. For the
/// previous, unchecked, behavior use
/// `root_as_pu_00_message_unchecked`.
pub fn root_as_pu_00_message_with_opts<'b, 'o>(
  opts: &'o ::flatbuffers::VerifierOptions,
  buf: &'b [u8],
) -> Result<Pu00Message<'b>, ::flatbuffers::InvalidFlatbuffer> {
  ::flatbuffers::root_with_opts::<Pu00Message<'b>>(opts, buf)
}
#[inline]
/// Verifies, with the given verifier options, that a buffer of
/// bytes contains a size prefixed `Pu00Message` and returns
/// it. Note that verification is still experimental and may not
/// catch every error, or be maximally performant. For the
/// previous, unchecked, behavior use
/// `root_as_pu_00_message_unchecked`.
pub fn size_prefixed_root_as_pu_00_message_with_opts<'b, 'o>(
  opts: &'o ::flatbuffers::VerifierOptions,
  buf: &'b [u8],
) -> Result<Pu00Message<'b>, ::flatbuffers::InvalidFlatbuffer> {
  ::flatbuffers::size_prefixed_root_with_opts::<Pu00Message<'b>>(opts, buf)
}
#[inline]
/// Assumes, without verification, that a buffer of bytes contains a Pu00Message and returns it.
/// # Safety
/// Callers must trust the given bytes do indeed contain a valid `Pu00Message`.
pub unsafe fn root_as_pu_00_message_unchecked(buf: &[u8]) -> Pu00Message<'_> {
  unsafe { ::flatbuffers::root_unchecked::<Pu00Message>(buf) }
}
#[inline]
/// Assumes, without verification, that a buffer of bytes contains a size prefixed Pu00Message and returns it.
/// # Safety
/// Callers must trust the given bytes do indeed contain a valid size prefixed `Pu00Message`.
pub unsafe fn size_prefixed_root_as_pu_00_message_unchecked(buf: &[u8]) -> Pu00Message<'_> {
  unsafe { ::flatbuffers::size_prefixed_root_unchecked::<Pu00Message>(buf) }
}
pub const PU_00_MESSAGE_IDENTIFIER: &str = "pu00";

#[inline]
pub fn pu_00_message_buffer_has_identifier(buf: &[u8]) -> bool {
  ::flatbuffers::buffer_has_identifier(buf, PU_00_MESSAGE_IDENTIFIER, false)
}

#[inline]
pub fn pu_00_message_size_prefixed_buffer_has_identifier(buf: &[u8]) -> bool {
  ::flatbuffers::buffer_has_identifier(buf, PU_00_MESSAGE_IDENTIFIER, true)
}

#[inline]
pub fn finish_pu_00_message_buffer<'a, 'b, A: ::flatbuffers::Allocator + 'a>(
    fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>,
    root: ::flatbuffers::WIPOffset<Pu00Message<'a>>) {
  fbb.finish(root, Some(PU_00_MESSAGE_IDENTIFIER));
}

#[inline]
pub fn finish_size_prefixed_pu_00_message_buffer<'a, 'b, A: ::flatbuffers::Allocator + 'a>(fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, root: ::flatbuffers::WIPOffset<Pu00Message<'a>>) {
  fbb.finish_size_prefixed(root, Some(PU_00_MESSAGE_IDENTIFIER));
}
diff --git a/rust/src/flatbuffers_generated/se00_data.rs b/rust/src/flatbuffers_generated/se00_data.rs
new file mode 100644
index 0000000..808b7f8
--- /dev/null
+++ b/rust/src/flatbuffers_generated/se00_data.rs
@@ -0,0 +1,1734 @@
// automatically generated by the FlatBuffers compiler, do not modify
// @generated

extern crate alloc;

#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")]
pub const ENUM_MIN_LOCATION: i8 = 0;
#[deprecated(since = "2.0.0", note = "Use associated constants instead.
This will no longer be generated in 2021.")]
pub const ENUM_MAX_LOCATION: i8 = 3;
#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")]
#[allow(non_camel_case_types)]
pub const ENUM_VALUES_LOCATION: [Location; 4] = [
  Location::Unknown,
  Location::Start,
  Location::Middle,
  Location::End,
];

#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
#[repr(transparent)]
pub struct Location(pub i8);
#[allow(non_upper_case_globals)]
impl Location {
  pub const Unknown: Self = Self(0);
  pub const Start: Self = Self(1);
  pub const Middle: Self = Self(2);
  pub const End: Self = Self(3);

  pub const ENUM_MIN: i8 = 0;
  pub const ENUM_MAX: i8 = 3;
  pub const ENUM_VALUES: &'static [Self] = &[
    Self::Unknown,
    Self::Start,
    Self::Middle,
    Self::End,
  ];
  /// Returns the variant's name or "" if unknown.
  pub fn variant_name(self) -> Option<&'static str> {
    match self {
      Self::Unknown => Some("Unknown"),
      Self::Start => Some("Start"),
      Self::Middle => Some("Middle"),
      Self::End => Some("End"),
      _ => None,
    }
  }
}
impl ::core::fmt::Debug for Location {
  fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
    if let Some(name) = self.variant_name() {
      f.write_str(name)
    } else {
      // NOTE(review): literal restored — the scrape left an empty string here.
      f.write_fmt(format_args!("<UNKNOWN {:?}>", self.0))
    }
  }
}
impl<'a> ::flatbuffers::Follow<'a> for Location {
  type Inner = Self;
  #[inline]
  unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
    let b = unsafe { ::flatbuffers::read_scalar_at::<i8>(buf, loc) };
    Self(b)
  }
}

impl ::flatbuffers::Push for Location {
    type Output = Location;
    #[inline]
    unsafe fn push(&self, dst: &mut [u8], _written_len: usize) {
      unsafe { ::flatbuffers::emplace_scalar::<i8>(dst, self.0) };
    }
}

impl ::flatbuffers::EndianScalar for Location {
  type Scalar = i8;
  #[inline]
  fn to_little_endian(self) -> i8 {
    self.0.to_le()
  }
  #[inline]
  #[allow(clippy::wrong_self_convention)]
  fn from_little_endian(v: i8) -> Self {
    let b = i8::from_le(v);
    Self(b)
  }
}

impl<'a> ::flatbuffers::Verifiable for Location {
  #[inline]
  fn run_verifier(
    v: &mut ::flatbuffers::Verifier, pos: usize
  ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> {
    i8::run_verifier(v, pos)
  }
}

impl ::flatbuffers::SimpleToVerifyInSlice for Location {}
#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")]
pub const ENUM_MIN_VALUE_UNION: u8 = 0;
#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")]
pub const ENUM_MAX_VALUE_UNION: u8 = 10;
#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")]
#[allow(non_camel_case_types)]
pub const ENUM_VALUES_VALUE_UNION: [ValueUnion; 11] = [
  ValueUnion::NONE,
  ValueUnion::Int8Array,
  ValueUnion::UInt8Array,
  ValueUnion::Int16Array,
  ValueUnion::UInt16Array,
  ValueUnion::Int32Array,
  ValueUnion::UInt32Array,
  ValueUnion::Int64Array,
  ValueUnion::UInt64Array,
  ValueUnion::DoubleArray,
  ValueUnion::FloatArray,
];

#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
#[repr(transparent)]
pub struct ValueUnion(pub u8);
#[allow(non_upper_case_globals)]
impl ValueUnion {
  pub const NONE: Self = Self(0);
  pub const Int8Array: Self = Self(1);
  pub const UInt8Array: Self = Self(2);
  pub const Int16Array: Self = Self(3);
  pub const UInt16Array: Self = Self(4);
  pub const Int32Array: Self = Self(5);
  pub const UInt32Array: Self = Self(6);
  pub const Int64Array: Self = Self(7);
  pub const UInt64Array: Self = Self(8);
  pub const DoubleArray: Self = Self(9);
  pub const FloatArray: Self = Self(10);

  pub const ENUM_MIN: u8 = 0;
  pub const ENUM_MAX: u8 = 10;
  pub const ENUM_VALUES: &'static [Self] = &[
    Self::NONE,
    Self::Int8Array,
    Self::UInt8Array,
    Self::Int16Array,
    Self::UInt16Array,
    Self::Int32Array,
    Self::UInt32Array,
    Self::Int64Array,
    Self::UInt64Array,
    Self::DoubleArray,
    Self::FloatArray,
  ];
  /// Returns the variant's name or "" if unknown.
  pub fn variant_name(self) -> Option<&'static str> {
    match self {
      Self::NONE => Some("NONE"),
      Self::Int8Array => Some("Int8Array"),
      Self::UInt8Array => Some("UInt8Array"),
      Self::Int16Array => Some("Int16Array"),
      Self::UInt16Array => Some("UInt16Array"),
      Self::Int32Array => Some("Int32Array"),
      Self::UInt32Array => Some("UInt32Array"),
      Self::Int64Array => Some("Int64Array"),
      Self::UInt64Array => Some("UInt64Array"),
      Self::DoubleArray => Some("DoubleArray"),
      Self::FloatArray => Some("FloatArray"),
      _ => None,
    }
  }
}
impl ::core::fmt::Debug for ValueUnion {
  fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
    if let Some(name) = self.variant_name() {
      f.write_str(name)
    } else {
      f.write_fmt(format_args!("<UNKNOWN {:?}>", self.0))
    }
  }
}
impl<'a> ::flatbuffers::Follow<'a> for ValueUnion {
  type Inner = Self;
  #[inline]
  unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
    let b = unsafe { ::flatbuffers::read_scalar_at::<u8>(buf, loc) };
    Self(b)
  }
}

impl ::flatbuffers::Push for ValueUnion {
    type Output = ValueUnion;
    #[inline]
    unsafe fn push(&self, dst: &mut [u8], _written_len: usize) {
      unsafe { ::flatbuffers::emplace_scalar::<u8>(dst, self.0) };
    }
}

impl ::flatbuffers::EndianScalar for ValueUnion {
  type Scalar = u8;
  #[inline]
  fn to_little_endian(self) -> u8 {
    self.0.to_le()
  }
  #[inline]
  #[allow(clippy::wrong_self_convention)]
  fn from_little_endian(v: u8) -> Self {
    let b = u8::from_le(v);
    Self(b)
  }
}

impl<'a> ::flatbuffers::Verifiable for ValueUnion {
  #[inline]
  fn run_verifier(
    v: &mut ::flatbuffers::Verifier, pos: usize
  ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> {
    u8::run_verifier(v, pos)
  }
}

impl ::flatbuffers::SimpleToVerifyInSlice
for ValueUnion {} +pub struct ValueUnionUnionTableOffset {} + +pub enum Int8ArrayOffset {} +#[derive(Copy, Clone, PartialEq)] + +pub struct Int8Array<'a> { + pub _tab: ::flatbuffers::Table<'a>, +} + +impl<'a> ::flatbuffers::Follow<'a> for Int8Array<'a> { + type Inner = Int8Array<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { _tab: unsafe { ::flatbuffers::Table::new(buf, loc) } } + } +} + +impl<'a> Int8Array<'a> { + pub const VT_VALUE: ::flatbuffers::VOffsetT = 4; + + #[inline] + pub unsafe fn init_from_table(table: ::flatbuffers::Table<'a>) -> Self { + Int8Array { _tab: table } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: ::flatbuffers::Allocator + 'bldr>( + _fbb: &'mut_bldr mut ::flatbuffers::FlatBufferBuilder<'bldr, A>, + args: &'args Int8ArrayArgs<'args> + ) -> ::flatbuffers::WIPOffset> { + let mut builder = Int8ArrayBuilder::new(_fbb); + if let Some(x) = args.value { builder.add_value(x); } + builder.finish() + } + + + #[inline] + pub fn value(&self) -> ::flatbuffers::Vector<'a, i8> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'a, i8>>>(Int8Array::VT_VALUE, None).unwrap()} + } +} + +impl ::flatbuffers::Verifiable for Int8Array<'_> { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + v.visit_table(pos)? + .visit_field::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'_, i8>>>("value", Self::VT_VALUE, true)? 
+ .finish(); + Ok(()) + } +} +pub struct Int8ArrayArgs<'a> { + pub value: Option<::flatbuffers::WIPOffset<::flatbuffers::Vector<'a, i8>>>, +} +impl<'a> Default for Int8ArrayArgs<'a> { + #[inline] + fn default() -> Self { + Int8ArrayArgs { + value: None, // required field + } + } +} + +pub struct Int8ArrayBuilder<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> { + fbb_: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + start_: ::flatbuffers::WIPOffset<::flatbuffers::TableUnfinishedWIPOffset>, +} +impl<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> Int8ArrayBuilder<'a, 'b, A> { + #[inline] + pub fn add_value(&mut self, value: ::flatbuffers::WIPOffset<::flatbuffers::Vector<'b , i8>>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(Int8Array::VT_VALUE, value); + } + #[inline] + pub fn new(_fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>) -> Int8ArrayBuilder<'a, 'b, A> { + let start = _fbb.start_table(); + Int8ArrayBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub fn finish(self) -> ::flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + self.fbb_.required(o, Int8Array::VT_VALUE,"value"); + ::flatbuffers::WIPOffset::new(o.value()) + } +} + +impl ::core::fmt::Debug for Int8Array<'_> { + fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + let mut ds = f.debug_struct("Int8Array"); + ds.field("value", &self.value()); + ds.finish() + } +} +pub enum UInt8ArrayOffset {} +#[derive(Copy, Clone, PartialEq)] + +pub struct UInt8Array<'a> { + pub _tab: ::flatbuffers::Table<'a>, +} + +impl<'a> ::flatbuffers::Follow<'a> for UInt8Array<'a> { + type Inner = UInt8Array<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { _tab: unsafe { ::flatbuffers::Table::new(buf, loc) } } + } +} + +impl<'a> UInt8Array<'a> { + pub const VT_VALUE: ::flatbuffers::VOffsetT = 4; + + #[inline] + pub unsafe fn init_from_table(table: ::flatbuffers::Table<'a>) -> Self { + UInt8Array { _tab: table } 
+ } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: ::flatbuffers::Allocator + 'bldr>( + _fbb: &'mut_bldr mut ::flatbuffers::FlatBufferBuilder<'bldr, A>, + args: &'args UInt8ArrayArgs<'args> + ) -> ::flatbuffers::WIPOffset> { + let mut builder = UInt8ArrayBuilder::new(_fbb); + if let Some(x) = args.value { builder.add_value(x); } + builder.finish() + } + + + #[inline] + pub fn value(&self) -> ::flatbuffers::Vector<'a, u8> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'a, u8>>>(UInt8Array::VT_VALUE, None).unwrap()} + } +} + +impl ::flatbuffers::Verifiable for UInt8Array<'_> { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + v.visit_table(pos)? + .visit_field::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'_, u8>>>("value", Self::VT_VALUE, true)? 
+ .finish(); + Ok(()) + } +} +pub struct UInt8ArrayArgs<'a> { + pub value: Option<::flatbuffers::WIPOffset<::flatbuffers::Vector<'a, u8>>>, +} +impl<'a> Default for UInt8ArrayArgs<'a> { + #[inline] + fn default() -> Self { + UInt8ArrayArgs { + value: None, // required field + } + } +} + +pub struct UInt8ArrayBuilder<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> { + fbb_: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + start_: ::flatbuffers::WIPOffset<::flatbuffers::TableUnfinishedWIPOffset>, +} +impl<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> UInt8ArrayBuilder<'a, 'b, A> { + #[inline] + pub fn add_value(&mut self, value: ::flatbuffers::WIPOffset<::flatbuffers::Vector<'b , u8>>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(UInt8Array::VT_VALUE, value); + } + #[inline] + pub fn new(_fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>) -> UInt8ArrayBuilder<'a, 'b, A> { + let start = _fbb.start_table(); + UInt8ArrayBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub fn finish(self) -> ::flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + self.fbb_.required(o, UInt8Array::VT_VALUE,"value"); + ::flatbuffers::WIPOffset::new(o.value()) + } +} + +impl ::core::fmt::Debug for UInt8Array<'_> { + fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + let mut ds = f.debug_struct("UInt8Array"); + ds.field("value", &self.value()); + ds.finish() + } +} +pub enum Int16ArrayOffset {} +#[derive(Copy, Clone, PartialEq)] + +pub struct Int16Array<'a> { + pub _tab: ::flatbuffers::Table<'a>, +} + +impl<'a> ::flatbuffers::Follow<'a> for Int16Array<'a> { + type Inner = Int16Array<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { _tab: unsafe { ::flatbuffers::Table::new(buf, loc) } } + } +} + +impl<'a> Int16Array<'a> { + pub const VT_VALUE: ::flatbuffers::VOffsetT = 4; + + #[inline] + pub unsafe fn init_from_table(table: ::flatbuffers::Table<'a>) -> Self { + Int16Array { 
_tab: table } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: ::flatbuffers::Allocator + 'bldr>( + _fbb: &'mut_bldr mut ::flatbuffers::FlatBufferBuilder<'bldr, A>, + args: &'args Int16ArrayArgs<'args> + ) -> ::flatbuffers::WIPOffset> { + let mut builder = Int16ArrayBuilder::new(_fbb); + if let Some(x) = args.value { builder.add_value(x); } + builder.finish() + } + + + #[inline] + pub fn value(&self) -> ::flatbuffers::Vector<'a, i16> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'a, i16>>>(Int16Array::VT_VALUE, None).unwrap()} + } +} + +impl ::flatbuffers::Verifiable for Int16Array<'_> { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + v.visit_table(pos)? + .visit_field::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'_, i16>>>("value", Self::VT_VALUE, true)? 
+ .finish(); + Ok(()) + } +} +pub struct Int16ArrayArgs<'a> { + pub value: Option<::flatbuffers::WIPOffset<::flatbuffers::Vector<'a, i16>>>, +} +impl<'a> Default for Int16ArrayArgs<'a> { + #[inline] + fn default() -> Self { + Int16ArrayArgs { + value: None, // required field + } + } +} + +pub struct Int16ArrayBuilder<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> { + fbb_: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + start_: ::flatbuffers::WIPOffset<::flatbuffers::TableUnfinishedWIPOffset>, +} +impl<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> Int16ArrayBuilder<'a, 'b, A> { + #[inline] + pub fn add_value(&mut self, value: ::flatbuffers::WIPOffset<::flatbuffers::Vector<'b , i16>>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(Int16Array::VT_VALUE, value); + } + #[inline] + pub fn new(_fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>) -> Int16ArrayBuilder<'a, 'b, A> { + let start = _fbb.start_table(); + Int16ArrayBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub fn finish(self) -> ::flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + self.fbb_.required(o, Int16Array::VT_VALUE,"value"); + ::flatbuffers::WIPOffset::new(o.value()) + } +} + +impl ::core::fmt::Debug for Int16Array<'_> { + fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + let mut ds = f.debug_struct("Int16Array"); + ds.field("value", &self.value()); + ds.finish() + } +} +pub enum UInt16ArrayOffset {} +#[derive(Copy, Clone, PartialEq)] + +pub struct UInt16Array<'a> { + pub _tab: ::flatbuffers::Table<'a>, +} + +impl<'a> ::flatbuffers::Follow<'a> for UInt16Array<'a> { + type Inner = UInt16Array<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { _tab: unsafe { ::flatbuffers::Table::new(buf, loc) } } + } +} + +impl<'a> UInt16Array<'a> { + pub const VT_VALUE: ::flatbuffers::VOffsetT = 4; + + #[inline] + pub unsafe fn init_from_table(table: ::flatbuffers::Table<'a>) -> Self { + 
UInt16Array { _tab: table } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: ::flatbuffers::Allocator + 'bldr>( + _fbb: &'mut_bldr mut ::flatbuffers::FlatBufferBuilder<'bldr, A>, + args: &'args UInt16ArrayArgs<'args> + ) -> ::flatbuffers::WIPOffset> { + let mut builder = UInt16ArrayBuilder::new(_fbb); + if let Some(x) = args.value { builder.add_value(x); } + builder.finish() + } + + + #[inline] + pub fn value(&self) -> ::flatbuffers::Vector<'a, u16> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'a, u16>>>(UInt16Array::VT_VALUE, None).unwrap()} + } +} + +impl ::flatbuffers::Verifiable for UInt16Array<'_> { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + v.visit_table(pos)? + .visit_field::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'_, u16>>>("value", Self::VT_VALUE, true)? 
+ .finish(); + Ok(()) + } +} +pub struct UInt16ArrayArgs<'a> { + pub value: Option<::flatbuffers::WIPOffset<::flatbuffers::Vector<'a, u16>>>, +} +impl<'a> Default for UInt16ArrayArgs<'a> { + #[inline] + fn default() -> Self { + UInt16ArrayArgs { + value: None, // required field + } + } +} + +pub struct UInt16ArrayBuilder<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> { + fbb_: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + start_: ::flatbuffers::WIPOffset<::flatbuffers::TableUnfinishedWIPOffset>, +} +impl<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> UInt16ArrayBuilder<'a, 'b, A> { + #[inline] + pub fn add_value(&mut self, value: ::flatbuffers::WIPOffset<::flatbuffers::Vector<'b , u16>>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(UInt16Array::VT_VALUE, value); + } + #[inline] + pub fn new(_fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>) -> UInt16ArrayBuilder<'a, 'b, A> { + let start = _fbb.start_table(); + UInt16ArrayBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub fn finish(self) -> ::flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + self.fbb_.required(o, UInt16Array::VT_VALUE,"value"); + ::flatbuffers::WIPOffset::new(o.value()) + } +} + +impl ::core::fmt::Debug for UInt16Array<'_> { + fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + let mut ds = f.debug_struct("UInt16Array"); + ds.field("value", &self.value()); + ds.finish() + } +} +pub enum Int32ArrayOffset {} +#[derive(Copy, Clone, PartialEq)] + +pub struct Int32Array<'a> { + pub _tab: ::flatbuffers::Table<'a>, +} + +impl<'a> ::flatbuffers::Follow<'a> for Int32Array<'a> { + type Inner = Int32Array<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { _tab: unsafe { ::flatbuffers::Table::new(buf, loc) } } + } +} + +impl<'a> Int32Array<'a> { + pub const VT_VALUE: ::flatbuffers::VOffsetT = 4; + + #[inline] + pub unsafe fn init_from_table(table: ::flatbuffers::Table<'a>) -> Self { + 
Int32Array { _tab: table } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: ::flatbuffers::Allocator + 'bldr>( + _fbb: &'mut_bldr mut ::flatbuffers::FlatBufferBuilder<'bldr, A>, + args: &'args Int32ArrayArgs<'args> + ) -> ::flatbuffers::WIPOffset> { + let mut builder = Int32ArrayBuilder::new(_fbb); + if let Some(x) = args.value { builder.add_value(x); } + builder.finish() + } + + + #[inline] + pub fn value(&self) -> ::flatbuffers::Vector<'a, i32> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'a, i32>>>(Int32Array::VT_VALUE, None).unwrap()} + } +} + +impl ::flatbuffers::Verifiable for Int32Array<'_> { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + v.visit_table(pos)? + .visit_field::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'_, i32>>>("value", Self::VT_VALUE, true)? 
+ .finish(); + Ok(()) + } +} +pub struct Int32ArrayArgs<'a> { + pub value: Option<::flatbuffers::WIPOffset<::flatbuffers::Vector<'a, i32>>>, +} +impl<'a> Default for Int32ArrayArgs<'a> { + #[inline] + fn default() -> Self { + Int32ArrayArgs { + value: None, // required field + } + } +} + +pub struct Int32ArrayBuilder<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> { + fbb_: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + start_: ::flatbuffers::WIPOffset<::flatbuffers::TableUnfinishedWIPOffset>, +} +impl<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> Int32ArrayBuilder<'a, 'b, A> { + #[inline] + pub fn add_value(&mut self, value: ::flatbuffers::WIPOffset<::flatbuffers::Vector<'b , i32>>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(Int32Array::VT_VALUE, value); + } + #[inline] + pub fn new(_fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>) -> Int32ArrayBuilder<'a, 'b, A> { + let start = _fbb.start_table(); + Int32ArrayBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub fn finish(self) -> ::flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + self.fbb_.required(o, Int32Array::VT_VALUE,"value"); + ::flatbuffers::WIPOffset::new(o.value()) + } +} + +impl ::core::fmt::Debug for Int32Array<'_> { + fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + let mut ds = f.debug_struct("Int32Array"); + ds.field("value", &self.value()); + ds.finish() + } +} +pub enum UInt32ArrayOffset {} +#[derive(Copy, Clone, PartialEq)] + +pub struct UInt32Array<'a> { + pub _tab: ::flatbuffers::Table<'a>, +} + +impl<'a> ::flatbuffers::Follow<'a> for UInt32Array<'a> { + type Inner = UInt32Array<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { _tab: unsafe { ::flatbuffers::Table::new(buf, loc) } } + } +} + +impl<'a> UInt32Array<'a> { + pub const VT_VALUE: ::flatbuffers::VOffsetT = 4; + + #[inline] + pub unsafe fn init_from_table(table: ::flatbuffers::Table<'a>) -> Self { + 
UInt32Array { _tab: table } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: ::flatbuffers::Allocator + 'bldr>( + _fbb: &'mut_bldr mut ::flatbuffers::FlatBufferBuilder<'bldr, A>, + args: &'args UInt32ArrayArgs<'args> + ) -> ::flatbuffers::WIPOffset> { + let mut builder = UInt32ArrayBuilder::new(_fbb); + if let Some(x) = args.value { builder.add_value(x); } + builder.finish() + } + + + #[inline] + pub fn value(&self) -> ::flatbuffers::Vector<'a, u32> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'a, u32>>>(UInt32Array::VT_VALUE, None).unwrap()} + } +} + +impl ::flatbuffers::Verifiable for UInt32Array<'_> { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + v.visit_table(pos)? + .visit_field::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'_, u32>>>("value", Self::VT_VALUE, true)? 
+ .finish(); + Ok(()) + } +} +pub struct UInt32ArrayArgs<'a> { + pub value: Option<::flatbuffers::WIPOffset<::flatbuffers::Vector<'a, u32>>>, +} +impl<'a> Default for UInt32ArrayArgs<'a> { + #[inline] + fn default() -> Self { + UInt32ArrayArgs { + value: None, // required field + } + } +} + +pub struct UInt32ArrayBuilder<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> { + fbb_: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + start_: ::flatbuffers::WIPOffset<::flatbuffers::TableUnfinishedWIPOffset>, +} +impl<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> UInt32ArrayBuilder<'a, 'b, A> { + #[inline] + pub fn add_value(&mut self, value: ::flatbuffers::WIPOffset<::flatbuffers::Vector<'b , u32>>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(UInt32Array::VT_VALUE, value); + } + #[inline] + pub fn new(_fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>) -> UInt32ArrayBuilder<'a, 'b, A> { + let start = _fbb.start_table(); + UInt32ArrayBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub fn finish(self) -> ::flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + self.fbb_.required(o, UInt32Array::VT_VALUE,"value"); + ::flatbuffers::WIPOffset::new(o.value()) + } +} + +impl ::core::fmt::Debug for UInt32Array<'_> { + fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + let mut ds = f.debug_struct("UInt32Array"); + ds.field("value", &self.value()); + ds.finish() + } +} +pub enum Int64ArrayOffset {} +#[derive(Copy, Clone, PartialEq)] + +pub struct Int64Array<'a> { + pub _tab: ::flatbuffers::Table<'a>, +} + +impl<'a> ::flatbuffers::Follow<'a> for Int64Array<'a> { + type Inner = Int64Array<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { _tab: unsafe { ::flatbuffers::Table::new(buf, loc) } } + } +} + +impl<'a> Int64Array<'a> { + pub const VT_VALUE: ::flatbuffers::VOffsetT = 4; + + #[inline] + pub unsafe fn init_from_table(table: ::flatbuffers::Table<'a>) -> Self { + 
Int64Array { _tab: table } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: ::flatbuffers::Allocator + 'bldr>( + _fbb: &'mut_bldr mut ::flatbuffers::FlatBufferBuilder<'bldr, A>, + args: &'args Int64ArrayArgs<'args> + ) -> ::flatbuffers::WIPOffset> { + let mut builder = Int64ArrayBuilder::new(_fbb); + if let Some(x) = args.value { builder.add_value(x); } + builder.finish() + } + + + #[inline] + pub fn value(&self) -> ::flatbuffers::Vector<'a, i64> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'a, i64>>>(Int64Array::VT_VALUE, None).unwrap()} + } +} + +impl ::flatbuffers::Verifiable for Int64Array<'_> { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + v.visit_table(pos)? + .visit_field::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'_, i64>>>("value", Self::VT_VALUE, true)? 
+ .finish(); + Ok(()) + } +} +pub struct Int64ArrayArgs<'a> { + pub value: Option<::flatbuffers::WIPOffset<::flatbuffers::Vector<'a, i64>>>, +} +impl<'a> Default for Int64ArrayArgs<'a> { + #[inline] + fn default() -> Self { + Int64ArrayArgs { + value: None, // required field + } + } +} + +pub struct Int64ArrayBuilder<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> { + fbb_: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + start_: ::flatbuffers::WIPOffset<::flatbuffers::TableUnfinishedWIPOffset>, +} +impl<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> Int64ArrayBuilder<'a, 'b, A> { + #[inline] + pub fn add_value(&mut self, value: ::flatbuffers::WIPOffset<::flatbuffers::Vector<'b , i64>>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(Int64Array::VT_VALUE, value); + } + #[inline] + pub fn new(_fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>) -> Int64ArrayBuilder<'a, 'b, A> { + let start = _fbb.start_table(); + Int64ArrayBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub fn finish(self) -> ::flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + self.fbb_.required(o, Int64Array::VT_VALUE,"value"); + ::flatbuffers::WIPOffset::new(o.value()) + } +} + +impl ::core::fmt::Debug for Int64Array<'_> { + fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + let mut ds = f.debug_struct("Int64Array"); + ds.field("value", &self.value()); + ds.finish() + } +} +pub enum UInt64ArrayOffset {} +#[derive(Copy, Clone, PartialEq)] + +pub struct UInt64Array<'a> { + pub _tab: ::flatbuffers::Table<'a>, +} + +impl<'a> ::flatbuffers::Follow<'a> for UInt64Array<'a> { + type Inner = UInt64Array<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { _tab: unsafe { ::flatbuffers::Table::new(buf, loc) } } + } +} + +impl<'a> UInt64Array<'a> { + pub const VT_VALUE: ::flatbuffers::VOffsetT = 4; + + #[inline] + pub unsafe fn init_from_table(table: ::flatbuffers::Table<'a>) -> Self { + 
UInt64Array { _tab: table } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: ::flatbuffers::Allocator + 'bldr>( + _fbb: &'mut_bldr mut ::flatbuffers::FlatBufferBuilder<'bldr, A>, + args: &'args UInt64ArrayArgs<'args> + ) -> ::flatbuffers::WIPOffset> { + let mut builder = UInt64ArrayBuilder::new(_fbb); + if let Some(x) = args.value { builder.add_value(x); } + builder.finish() + } + + + #[inline] + pub fn value(&self) -> ::flatbuffers::Vector<'a, u64> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'a, u64>>>(UInt64Array::VT_VALUE, None).unwrap()} + } +} + +impl ::flatbuffers::Verifiable for UInt64Array<'_> { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + v.visit_table(pos)? + .visit_field::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'_, u64>>>("value", Self::VT_VALUE, true)? 
+ .finish(); + Ok(()) + } +} +pub struct UInt64ArrayArgs<'a> { + pub value: Option<::flatbuffers::WIPOffset<::flatbuffers::Vector<'a, u64>>>, +} +impl<'a> Default for UInt64ArrayArgs<'a> { + #[inline] + fn default() -> Self { + UInt64ArrayArgs { + value: None, // required field + } + } +} + +pub struct UInt64ArrayBuilder<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> { + fbb_: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + start_: ::flatbuffers::WIPOffset<::flatbuffers::TableUnfinishedWIPOffset>, +} +impl<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> UInt64ArrayBuilder<'a, 'b, A> { + #[inline] + pub fn add_value(&mut self, value: ::flatbuffers::WIPOffset<::flatbuffers::Vector<'b , u64>>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(UInt64Array::VT_VALUE, value); + } + #[inline] + pub fn new(_fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>) -> UInt64ArrayBuilder<'a, 'b, A> { + let start = _fbb.start_table(); + UInt64ArrayBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub fn finish(self) -> ::flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + self.fbb_.required(o, UInt64Array::VT_VALUE,"value"); + ::flatbuffers::WIPOffset::new(o.value()) + } +} + +impl ::core::fmt::Debug for UInt64Array<'_> { + fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + let mut ds = f.debug_struct("UInt64Array"); + ds.field("value", &self.value()); + ds.finish() + } +} +pub enum DoubleArrayOffset {} +#[derive(Copy, Clone, PartialEq)] + +pub struct DoubleArray<'a> { + pub _tab: ::flatbuffers::Table<'a>, +} + +impl<'a> ::flatbuffers::Follow<'a> for DoubleArray<'a> { + type Inner = DoubleArray<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { _tab: unsafe { ::flatbuffers::Table::new(buf, loc) } } + } +} + +impl<'a> DoubleArray<'a> { + pub const VT_VALUE: ::flatbuffers::VOffsetT = 4; + + #[inline] + pub unsafe fn init_from_table(table: ::flatbuffers::Table<'a>) -> Self { 
+ DoubleArray { _tab: table } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: ::flatbuffers::Allocator + 'bldr>( + _fbb: &'mut_bldr mut ::flatbuffers::FlatBufferBuilder<'bldr, A>, + args: &'args DoubleArrayArgs<'args> + ) -> ::flatbuffers::WIPOffset> { + let mut builder = DoubleArrayBuilder::new(_fbb); + if let Some(x) = args.value { builder.add_value(x); } + builder.finish() + } + + + #[inline] + pub fn value(&self) -> ::flatbuffers::Vector<'a, f64> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'a, f64>>>(DoubleArray::VT_VALUE, None).unwrap()} + } +} + +impl ::flatbuffers::Verifiable for DoubleArray<'_> { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + v.visit_table(pos)? + .visit_field::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'_, f64>>>("value", Self::VT_VALUE, true)? 
+ .finish(); + Ok(()) + } +} +pub struct DoubleArrayArgs<'a> { + pub value: Option<::flatbuffers::WIPOffset<::flatbuffers::Vector<'a, f64>>>, +} +impl<'a> Default for DoubleArrayArgs<'a> { + #[inline] + fn default() -> Self { + DoubleArrayArgs { + value: None, // required field + } + } +} + +pub struct DoubleArrayBuilder<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> { + fbb_: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + start_: ::flatbuffers::WIPOffset<::flatbuffers::TableUnfinishedWIPOffset>, +} +impl<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> DoubleArrayBuilder<'a, 'b, A> { + #[inline] + pub fn add_value(&mut self, value: ::flatbuffers::WIPOffset<::flatbuffers::Vector<'b , f64>>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(DoubleArray::VT_VALUE, value); + } + #[inline] + pub fn new(_fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>) -> DoubleArrayBuilder<'a, 'b, A> { + let start = _fbb.start_table(); + DoubleArrayBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub fn finish(self) -> ::flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + self.fbb_.required(o, DoubleArray::VT_VALUE,"value"); + ::flatbuffers::WIPOffset::new(o.value()) + } +} + +impl ::core::fmt::Debug for DoubleArray<'_> { + fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + let mut ds = f.debug_struct("DoubleArray"); + ds.field("value", &self.value()); + ds.finish() + } +} +pub enum FloatArrayOffset {} +#[derive(Copy, Clone, PartialEq)] + +pub struct FloatArray<'a> { + pub _tab: ::flatbuffers::Table<'a>, +} + +impl<'a> ::flatbuffers::Follow<'a> for FloatArray<'a> { + type Inner = FloatArray<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { _tab: unsafe { ::flatbuffers::Table::new(buf, loc) } } + } +} + +impl<'a> FloatArray<'a> { + pub const VT_VALUE: ::flatbuffers::VOffsetT = 4; + + #[inline] + pub unsafe fn init_from_table(table: ::flatbuffers::Table<'a>) -> Self { + 
FloatArray { _tab: table } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: ::flatbuffers::Allocator + 'bldr>( + _fbb: &'mut_bldr mut ::flatbuffers::FlatBufferBuilder<'bldr, A>, + args: &'args FloatArrayArgs<'args> + ) -> ::flatbuffers::WIPOffset> { + let mut builder = FloatArrayBuilder::new(_fbb); + if let Some(x) = args.value { builder.add_value(x); } + builder.finish() + } + + + #[inline] + pub fn value(&self) -> ::flatbuffers::Vector<'a, f32> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'a, f32>>>(FloatArray::VT_VALUE, None).unwrap()} + } +} + +impl ::flatbuffers::Verifiable for FloatArray<'_> { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + v.visit_table(pos)? + .visit_field::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'_, f32>>>("value", Self::VT_VALUE, true)? 
+ .finish(); + Ok(()) + } +} +pub struct FloatArrayArgs<'a> { + pub value: Option<::flatbuffers::WIPOffset<::flatbuffers::Vector<'a, f32>>>, +} +impl<'a> Default for FloatArrayArgs<'a> { + #[inline] + fn default() -> Self { + FloatArrayArgs { + value: None, // required field + } + } +} + +pub struct FloatArrayBuilder<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> { + fbb_: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + start_: ::flatbuffers::WIPOffset<::flatbuffers::TableUnfinishedWIPOffset>, +} +impl<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> FloatArrayBuilder<'a, 'b, A> { + #[inline] + pub fn add_value(&mut self, value: ::flatbuffers::WIPOffset<::flatbuffers::Vector<'b , f32>>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(FloatArray::VT_VALUE, value); + } + #[inline] + pub fn new(_fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>) -> FloatArrayBuilder<'a, 'b, A> { + let start = _fbb.start_table(); + FloatArrayBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub fn finish(self) -> ::flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + self.fbb_.required(o, FloatArray::VT_VALUE,"value"); + ::flatbuffers::WIPOffset::new(o.value()) + } +} + +impl ::core::fmt::Debug for FloatArray<'_> { + fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + let mut ds = f.debug_struct("FloatArray"); + ds.field("value", &self.value()); + ds.finish() + } +} +pub enum se00_SampleEnvironmentDataOffset {} +#[derive(Copy, Clone, PartialEq)] + +pub struct se00_SampleEnvironmentData<'a> { + pub _tab: ::flatbuffers::Table<'a>, +} + +impl<'a> ::flatbuffers::Follow<'a> for se00_SampleEnvironmentData<'a> { + type Inner = se00_SampleEnvironmentData<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { _tab: unsafe { ::flatbuffers::Table::new(buf, loc) } } + } +} + +impl<'a> se00_SampleEnvironmentData<'a> { + pub const VT_NAME: ::flatbuffers::VOffsetT = 4; + pub const VT_CHANNEL: 
::flatbuffers::VOffsetT = 6; + pub const VT_PACKET_TIMESTAMP: ::flatbuffers::VOffsetT = 8; + pub const VT_TIME_DELTA: ::flatbuffers::VOffsetT = 10; + pub const VT_TIMESTAMP_LOCATION: ::flatbuffers::VOffsetT = 12; + pub const VT_VALUES_TYPE: ::flatbuffers::VOffsetT = 14; + pub const VT_VALUES: ::flatbuffers::VOffsetT = 16; + pub const VT_TIMESTAMPS: ::flatbuffers::VOffsetT = 18; + pub const VT_MESSAGE_COUNTER: ::flatbuffers::VOffsetT = 20; + + #[inline] + pub unsafe fn init_from_table(table: ::flatbuffers::Table<'a>) -> Self { + se00_SampleEnvironmentData { _tab: table } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: ::flatbuffers::Allocator + 'bldr>( + _fbb: &'mut_bldr mut ::flatbuffers::FlatBufferBuilder<'bldr, A>, + args: &'args se00_SampleEnvironmentDataArgs<'args> + ) -> ::flatbuffers::WIPOffset> { + let mut builder = se00_SampleEnvironmentDataBuilder::new(_fbb); + builder.add_message_counter(args.message_counter); + builder.add_time_delta(args.time_delta); + builder.add_packet_timestamp(args.packet_timestamp); + if let Some(x) = args.timestamps { builder.add_timestamps(x); } + if let Some(x) = args.values { builder.add_values(x); } + builder.add_channel(args.channel); + if let Some(x) = args.name { builder.add_name(x); } + builder.add_values_type(args.values_type); + builder.add_timestamp_location(args.timestamp_location); + builder.finish() + } + + + #[inline] + pub fn name(&self) -> &'a str { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<&str>>(se00_SampleEnvironmentData::VT_NAME, None).unwrap()} + } + #[inline] + pub fn channel(&self) -> i32 { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::(se00_SampleEnvironmentData::VT_CHANNEL, Some(0)).unwrap()} + } + #[inline] + pub fn packet_timestamp(&self) -> i64 { + // 
Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::(se00_SampleEnvironmentData::VT_PACKET_TIMESTAMP, Some(0)).unwrap()} + } + #[inline] + pub fn time_delta(&self) -> f64 { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::(se00_SampleEnvironmentData::VT_TIME_DELTA, Some(0.0)).unwrap()} + } + #[inline] + pub fn timestamp_location(&self) -> Location { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::(se00_SampleEnvironmentData::VT_TIMESTAMP_LOCATION, Some(Location::Unknown)).unwrap()} + } + #[inline] + pub fn values_type(&self) -> ValueUnion { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::(se00_SampleEnvironmentData::VT_VALUES_TYPE, Some(ValueUnion::NONE)).unwrap()} + } + #[inline] + pub fn values(&self) -> ::flatbuffers::Table<'a> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<::flatbuffers::Table<'a>>>(se00_SampleEnvironmentData::VT_VALUES, None).unwrap()} + } + #[inline] + pub fn timestamps(&self) -> Option<::flatbuffers::Vector<'a, i64>> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'a, i64>>>(se00_SampleEnvironmentData::VT_TIMESTAMPS, None)} + } + #[inline] + pub fn message_counter(&self) -> i64 { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::(se00_SampleEnvironmentData::VT_MESSAGE_COUNTER, Some(0)).unwrap()} + } + #[inline] + #[allow(non_snake_case)] + pub fn values_as_int_8_array(&self) -> Option> { 
+ if self.values_type() == ValueUnion::Int8Array { + let u = self.values(); + // Safety: + // Created from a valid Table for this object + // Which contains a valid union in this slot + Some(unsafe { Int8Array::init_from_table(u) }) + } else { + None + } + } + + #[inline] + #[allow(non_snake_case)] + pub fn values_as_uint_8_array(&self) -> Option> { + if self.values_type() == ValueUnion::UInt8Array { + let u = self.values(); + // Safety: + // Created from a valid Table for this object + // Which contains a valid union in this slot + Some(unsafe { UInt8Array::init_from_table(u) }) + } else { + None + } + } + + #[inline] + #[allow(non_snake_case)] + pub fn values_as_int_16_array(&self) -> Option> { + if self.values_type() == ValueUnion::Int16Array { + let u = self.values(); + // Safety: + // Created from a valid Table for this object + // Which contains a valid union in this slot + Some(unsafe { Int16Array::init_from_table(u) }) + } else { + None + } + } + + #[inline] + #[allow(non_snake_case)] + pub fn values_as_uint_16_array(&self) -> Option> { + if self.values_type() == ValueUnion::UInt16Array { + let u = self.values(); + // Safety: + // Created from a valid Table for this object + // Which contains a valid union in this slot + Some(unsafe { UInt16Array::init_from_table(u) }) + } else { + None + } + } + + #[inline] + #[allow(non_snake_case)] + pub fn values_as_int_32_array(&self) -> Option> { + if self.values_type() == ValueUnion::Int32Array { + let u = self.values(); + // Safety: + // Created from a valid Table for this object + // Which contains a valid union in this slot + Some(unsafe { Int32Array::init_from_table(u) }) + } else { + None + } + } + + #[inline] + #[allow(non_snake_case)] + pub fn values_as_uint_32_array(&self) -> Option> { + if self.values_type() == ValueUnion::UInt32Array { + let u = self.values(); + // Safety: + // Created from a valid Table for this object + // Which contains a valid union in this slot + Some(unsafe { 
UInt32Array::init_from_table(u) }) + } else { + None + } + } + + #[inline] + #[allow(non_snake_case)] + pub fn values_as_int_64_array(&self) -> Option> { + if self.values_type() == ValueUnion::Int64Array { + let u = self.values(); + // Safety: + // Created from a valid Table for this object + // Which contains a valid union in this slot + Some(unsafe { Int64Array::init_from_table(u) }) + } else { + None + } + } + + #[inline] + #[allow(non_snake_case)] + pub fn values_as_uint_64_array(&self) -> Option> { + if self.values_type() == ValueUnion::UInt64Array { + let u = self.values(); + // Safety: + // Created from a valid Table for this object + // Which contains a valid union in this slot + Some(unsafe { UInt64Array::init_from_table(u) }) + } else { + None + } + } + + #[inline] + #[allow(non_snake_case)] + pub fn values_as_double_array(&self) -> Option> { + if self.values_type() == ValueUnion::DoubleArray { + let u = self.values(); + // Safety: + // Created from a valid Table for this object + // Which contains a valid union in this slot + Some(unsafe { DoubleArray::init_from_table(u) }) + } else { + None + } + } + + #[inline] + #[allow(non_snake_case)] + pub fn values_as_float_array(&self) -> Option> { + if self.values_type() == ValueUnion::FloatArray { + let u = self.values(); + // Safety: + // Created from a valid Table for this object + // Which contains a valid union in this slot + Some(unsafe { FloatArray::init_from_table(u) }) + } else { + None + } + } + +} + +impl ::flatbuffers::Verifiable for se00_SampleEnvironmentData<'_> { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + v.visit_table(pos)? + .visit_field::<::flatbuffers::ForwardsUOffset<&str>>("name", Self::VT_NAME, true)? + .visit_field::("channel", Self::VT_CHANNEL, false)? + .visit_field::("packet_timestamp", Self::VT_PACKET_TIMESTAMP, false)? + .visit_field::("time_delta", Self::VT_TIME_DELTA, false)? 
+ .visit_field::("timestamp_location", Self::VT_TIMESTAMP_LOCATION, false)? + .visit_union::("values_type", Self::VT_VALUES_TYPE, "values", Self::VT_VALUES, true, |key, v, pos| { + match key { + ValueUnion::Int8Array => v.verify_union_variant::<::flatbuffers::ForwardsUOffset>("ValueUnion::Int8Array", pos), + ValueUnion::UInt8Array => v.verify_union_variant::<::flatbuffers::ForwardsUOffset>("ValueUnion::UInt8Array", pos), + ValueUnion::Int16Array => v.verify_union_variant::<::flatbuffers::ForwardsUOffset>("ValueUnion::Int16Array", pos), + ValueUnion::UInt16Array => v.verify_union_variant::<::flatbuffers::ForwardsUOffset>("ValueUnion::UInt16Array", pos), + ValueUnion::Int32Array => v.verify_union_variant::<::flatbuffers::ForwardsUOffset>("ValueUnion::Int32Array", pos), + ValueUnion::UInt32Array => v.verify_union_variant::<::flatbuffers::ForwardsUOffset>("ValueUnion::UInt32Array", pos), + ValueUnion::Int64Array => v.verify_union_variant::<::flatbuffers::ForwardsUOffset>("ValueUnion::Int64Array", pos), + ValueUnion::UInt64Array => v.verify_union_variant::<::flatbuffers::ForwardsUOffset>("ValueUnion::UInt64Array", pos), + ValueUnion::DoubleArray => v.verify_union_variant::<::flatbuffers::ForwardsUOffset>("ValueUnion::DoubleArray", pos), + ValueUnion::FloatArray => v.verify_union_variant::<::flatbuffers::ForwardsUOffset>("ValueUnion::FloatArray", pos), + _ => Ok(()), + } + })? + .visit_field::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'_, i64>>>("timestamps", Self::VT_TIMESTAMPS, false)? + .visit_field::("message_counter", Self::VT_MESSAGE_COUNTER, false)? 
+ .finish(); + Ok(()) + } +} +pub struct se00_SampleEnvironmentDataArgs<'a> { + pub name: Option<::flatbuffers::WIPOffset<&'a str>>, + pub channel: i32, + pub packet_timestamp: i64, + pub time_delta: f64, + pub timestamp_location: Location, + pub values_type: ValueUnion, + pub values: Option<::flatbuffers::WIPOffset<::flatbuffers::UnionWIPOffset>>, + pub timestamps: Option<::flatbuffers::WIPOffset<::flatbuffers::Vector<'a, i64>>>, + pub message_counter: i64, +} +impl<'a> Default for se00_SampleEnvironmentDataArgs<'a> { + #[inline] + fn default() -> Self { + se00_SampleEnvironmentDataArgs { + name: None, // required field + channel: 0, + packet_timestamp: 0, + time_delta: 0.0, + timestamp_location: Location::Unknown, + values_type: ValueUnion::NONE, + values: None, // required field + timestamps: None, + message_counter: 0, + } + } +} + +pub struct se00_SampleEnvironmentDataBuilder<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> { + fbb_: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + start_: ::flatbuffers::WIPOffset<::flatbuffers::TableUnfinishedWIPOffset>, +} +impl<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> se00_SampleEnvironmentDataBuilder<'a, 'b, A> { + #[inline] + pub fn add_name(&mut self, name: ::flatbuffers::WIPOffset<&'b str>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(se00_SampleEnvironmentData::VT_NAME, name); + } + #[inline] + pub fn add_channel(&mut self, channel: i32) { + self.fbb_.push_slot::(se00_SampleEnvironmentData::VT_CHANNEL, channel, 0); + } + #[inline] + pub fn add_packet_timestamp(&mut self, packet_timestamp: i64) { + self.fbb_.push_slot::(se00_SampleEnvironmentData::VT_PACKET_TIMESTAMP, packet_timestamp, 0); + } + #[inline] + pub fn add_time_delta(&mut self, time_delta: f64) { + self.fbb_.push_slot::(se00_SampleEnvironmentData::VT_TIME_DELTA, time_delta, 0.0); + } + #[inline] + pub fn add_timestamp_location(&mut self, timestamp_location: Location) { + 
self.fbb_.push_slot::(se00_SampleEnvironmentData::VT_TIMESTAMP_LOCATION, timestamp_location, Location::Unknown); + } + #[inline] + pub fn add_values_type(&mut self, values_type: ValueUnion) { + self.fbb_.push_slot::(se00_SampleEnvironmentData::VT_VALUES_TYPE, values_type, ValueUnion::NONE); + } + #[inline] + pub fn add_values(&mut self, values: ::flatbuffers::WIPOffset<::flatbuffers::UnionWIPOffset>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(se00_SampleEnvironmentData::VT_VALUES, values); + } + #[inline] + pub fn add_timestamps(&mut self, timestamps: ::flatbuffers::WIPOffset<::flatbuffers::Vector<'b , i64>>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(se00_SampleEnvironmentData::VT_TIMESTAMPS, timestamps); + } + #[inline] + pub fn add_message_counter(&mut self, message_counter: i64) { + self.fbb_.push_slot::(se00_SampleEnvironmentData::VT_MESSAGE_COUNTER, message_counter, 0); + } + #[inline] + pub fn new(_fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>) -> se00_SampleEnvironmentDataBuilder<'a, 'b, A> { + let start = _fbb.start_table(); + se00_SampleEnvironmentDataBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub fn finish(self) -> ::flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + self.fbb_.required(o, se00_SampleEnvironmentData::VT_NAME,"name"); + self.fbb_.required(o, se00_SampleEnvironmentData::VT_VALUES,"values"); + ::flatbuffers::WIPOffset::new(o.value()) + } +} + +impl ::core::fmt::Debug for se00_SampleEnvironmentData<'_> { + fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + let mut ds = f.debug_struct("se00_SampleEnvironmentData"); + ds.field("name", &self.name()); + ds.field("channel", &self.channel()); + ds.field("packet_timestamp", &self.packet_timestamp()); + ds.field("time_delta", &self.time_delta()); + ds.field("timestamp_location", &self.timestamp_location()); + ds.field("values_type", &self.values_type()); + match self.values_type() { + 
ValueUnion::Int8Array => { + if let Some(x) = self.values_as_int_8_array() { + ds.field("values", &x) + } else { + ds.field("values", &"InvalidFlatbuffer: Union discriminant does not match value.") + } + }, + ValueUnion::UInt8Array => { + if let Some(x) = self.values_as_uint_8_array() { + ds.field("values", &x) + } else { + ds.field("values", &"InvalidFlatbuffer: Union discriminant does not match value.") + } + }, + ValueUnion::Int16Array => { + if let Some(x) = self.values_as_int_16_array() { + ds.field("values", &x) + } else { + ds.field("values", &"InvalidFlatbuffer: Union discriminant does not match value.") + } + }, + ValueUnion::UInt16Array => { + if let Some(x) = self.values_as_uint_16_array() { + ds.field("values", &x) + } else { + ds.field("values", &"InvalidFlatbuffer: Union discriminant does not match value.") + } + }, + ValueUnion::Int32Array => { + if let Some(x) = self.values_as_int_32_array() { + ds.field("values", &x) + } else { + ds.field("values", &"InvalidFlatbuffer: Union discriminant does not match value.") + } + }, + ValueUnion::UInt32Array => { + if let Some(x) = self.values_as_uint_32_array() { + ds.field("values", &x) + } else { + ds.field("values", &"InvalidFlatbuffer: Union discriminant does not match value.") + } + }, + ValueUnion::Int64Array => { + if let Some(x) = self.values_as_int_64_array() { + ds.field("values", &x) + } else { + ds.field("values", &"InvalidFlatbuffer: Union discriminant does not match value.") + } + }, + ValueUnion::UInt64Array => { + if let Some(x) = self.values_as_uint_64_array() { + ds.field("values", &x) + } else { + ds.field("values", &"InvalidFlatbuffer: Union discriminant does not match value.") + } + }, + ValueUnion::DoubleArray => { + if let Some(x) = self.values_as_double_array() { + ds.field("values", &x) + } else { + ds.field("values", &"InvalidFlatbuffer: Union discriminant does not match value.") + } + }, + ValueUnion::FloatArray => { + if let Some(x) = self.values_as_float_array() { + 
ds.field("values", &x) + } else { + ds.field("values", &"InvalidFlatbuffer: Union discriminant does not match value.") + } + }, + _ => { + let x: Option<()> = None; + ds.field("values", &x) + }, + }; + ds.field("timestamps", &self.timestamps()); + ds.field("message_counter", &self.message_counter()); + ds.finish() + } +} +#[inline] +/// Verifies that a buffer of bytes contains a `se00_SampleEnvironmentData` +/// and returns it. +/// Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `root_as_se_00_sample_environment_data_unchecked`. +pub fn root_as_se_00_sample_environment_data(buf: &[u8]) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::root::(buf) +} +#[inline] +/// Verifies that a buffer of bytes contains a size prefixed +/// `se00_SampleEnvironmentData` and returns it. +/// Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `size_prefixed_root_as_se_00_sample_environment_data_unchecked`. +pub fn size_prefixed_root_as_se_00_sample_environment_data(buf: &[u8]) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::size_prefixed_root::(buf) +} +#[inline] +/// Verifies, with the given options, that a buffer of bytes +/// contains a `se00_SampleEnvironmentData` and returns it. +/// Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `root_as_se_00_sample_environment_data_unchecked`. 
+pub fn root_as_se_00_sample_environment_data_with_opts<'b, 'o>( + opts: &'o ::flatbuffers::VerifierOptions, + buf: &'b [u8], +) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::root_with_opts::>(opts, buf) +} +#[inline] +/// Verifies, with the given verifier options, that a buffer of +/// bytes contains a size prefixed `se00_SampleEnvironmentData` and returns +/// it. Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `root_as_se_00_sample_environment_data_unchecked`. +pub fn size_prefixed_root_as_se_00_sample_environment_data_with_opts<'b, 'o>( + opts: &'o ::flatbuffers::VerifierOptions, + buf: &'b [u8], +) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::size_prefixed_root_with_opts::>(opts, buf) +} +#[inline] +/// Assumes, without verification, that a buffer of bytes contains a se00_SampleEnvironmentData and returns it. +/// # Safety +/// Callers must trust the given bytes do indeed contain a valid `se00_SampleEnvironmentData`. +pub unsafe fn root_as_se_00_sample_environment_data_unchecked(buf: &[u8]) -> se00_SampleEnvironmentData<'_> { + unsafe { ::flatbuffers::root_unchecked::(buf) } +} +#[inline] +/// Assumes, without verification, that a buffer of bytes contains a size prefixed se00_SampleEnvironmentData and returns it. +/// # Safety +/// Callers must trust the given bytes do indeed contain a valid size prefixed `se00_SampleEnvironmentData`. 
+pub unsafe fn size_prefixed_root_as_se_00_sample_environment_data_unchecked(buf: &[u8]) -> se00_SampleEnvironmentData<'_> { + unsafe { ::flatbuffers::size_prefixed_root_unchecked::(buf) } +} +pub const SE_00_SAMPLE_ENVIRONMENT_DATA_IDENTIFIER: &str = "se00"; + +#[inline] +pub fn se_00_sample_environment_data_buffer_has_identifier(buf: &[u8]) -> bool { + ::flatbuffers::buffer_has_identifier(buf, SE_00_SAMPLE_ENVIRONMENT_DATA_IDENTIFIER, false) +} + +#[inline] +pub fn se_00_sample_environment_data_size_prefixed_buffer_has_identifier(buf: &[u8]) -> bool { + ::flatbuffers::buffer_has_identifier(buf, SE_00_SAMPLE_ENVIRONMENT_DATA_IDENTIFIER, true) +} + +#[inline] +pub fn finish_se_00_sample_environment_data_buffer<'a, 'b, A: ::flatbuffers::Allocator + 'a>( + fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + root: ::flatbuffers::WIPOffset>) { + fbb.finish(root, Some(SE_00_SAMPLE_ENVIRONMENT_DATA_IDENTIFIER)); +} + +#[inline] +pub fn finish_size_prefixed_se_00_sample_environment_data_buffer<'a, 'b, A: ::flatbuffers::Allocator + 'a>(fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, root: ::flatbuffers::WIPOffset>) { + fbb.finish_size_prefixed(root, Some(SE_00_SAMPLE_ENVIRONMENT_DATA_IDENTIFIER)); +} diff --git a/rust/src/flatbuffers_generated/un00_units.rs b/rust/src/flatbuffers_generated/un00_units.rs new file mode 100644 index 0000000..c5adb82 --- /dev/null +++ b/rust/src/flatbuffers_generated/un00_units.rs @@ -0,0 +1,219 @@ +// automatically generated by the FlatBuffers compiler, do not modify +// @generated + +extern crate alloc; + +pub enum UnitsOffset {} +#[derive(Copy, Clone, PartialEq)] + +pub struct Units<'a> { + pub _tab: ::flatbuffers::Table<'a>, +} + +impl<'a> ::flatbuffers::Follow<'a> for Units<'a> { + type Inner = Units<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { _tab: unsafe { ::flatbuffers::Table::new(buf, loc) } } + } +} + +impl<'a> Units<'a> { + pub const VT_SOURCE_NAME: ::flatbuffers::VOffsetT = 4; 
+ pub const VT_TIMESTAMP: ::flatbuffers::VOffsetT = 6; + pub const VT_UNITS: ::flatbuffers::VOffsetT = 8; + + #[inline] + pub unsafe fn init_from_table(table: ::flatbuffers::Table<'a>) -> Self { + Units { _tab: table } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: ::flatbuffers::Allocator + 'bldr>( + _fbb: &'mut_bldr mut ::flatbuffers::FlatBufferBuilder<'bldr, A>, + args: &'args UnitsArgs<'args> + ) -> ::flatbuffers::WIPOffset> { + let mut builder = UnitsBuilder::new(_fbb); + builder.add_timestamp(args.timestamp); + if let Some(x) = args.units { builder.add_units(x); } + if let Some(x) = args.source_name { builder.add_source_name(x); } + builder.finish() + } + + + #[inline] + pub fn source_name(&self) -> &'a str { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<&str>>(Units::VT_SOURCE_NAME, None).unwrap()} + } + #[inline] + pub fn timestamp(&self) -> i64 { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::(Units::VT_TIMESTAMP, Some(0)).unwrap()} + } + #[inline] + pub fn units(&self) -> Option<&'a str> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<&str>>(Units::VT_UNITS, None)} + } +} + +impl ::flatbuffers::Verifiable for Units<'_> { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + v.visit_table(pos)? + .visit_field::<::flatbuffers::ForwardsUOffset<&str>>("source_name", Self::VT_SOURCE_NAME, true)? + .visit_field::("timestamp", Self::VT_TIMESTAMP, false)? + .visit_field::<::flatbuffers::ForwardsUOffset<&str>>("units", Self::VT_UNITS, false)? 
+ .finish(); + Ok(()) + } +} +pub struct UnitsArgs<'a> { + pub source_name: Option<::flatbuffers::WIPOffset<&'a str>>, + pub timestamp: i64, + pub units: Option<::flatbuffers::WIPOffset<&'a str>>, +} +impl<'a> Default for UnitsArgs<'a> { + #[inline] + fn default() -> Self { + UnitsArgs { + source_name: None, // required field + timestamp: 0, + units: None, + } + } +} + +pub struct UnitsBuilder<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> { + fbb_: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + start_: ::flatbuffers::WIPOffset<::flatbuffers::TableUnfinishedWIPOffset>, +} +impl<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> UnitsBuilder<'a, 'b, A> { + #[inline] + pub fn add_source_name(&mut self, source_name: ::flatbuffers::WIPOffset<&'b str>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(Units::VT_SOURCE_NAME, source_name); + } + #[inline] + pub fn add_timestamp(&mut self, timestamp: i64) { + self.fbb_.push_slot::(Units::VT_TIMESTAMP, timestamp, 0); + } + #[inline] + pub fn add_units(&mut self, units: ::flatbuffers::WIPOffset<&'b str>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(Units::VT_UNITS, units); + } + #[inline] + pub fn new(_fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>) -> UnitsBuilder<'a, 'b, A> { + let start = _fbb.start_table(); + UnitsBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub fn finish(self) -> ::flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + self.fbb_.required(o, Units::VT_SOURCE_NAME,"source_name"); + ::flatbuffers::WIPOffset::new(o.value()) + } +} + +impl ::core::fmt::Debug for Units<'_> { + fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + let mut ds = f.debug_struct("Units"); + ds.field("source_name", &self.source_name()); + ds.field("timestamp", &self.timestamp()); + ds.field("units", &self.units()); + ds.finish() + } +} +#[inline] +/// Verifies that a buffer of bytes contains a `Units` +/// and returns it. 
+/// Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `root_as_units_unchecked`. +pub fn root_as_units(buf: &[u8]) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::root::(buf) +} +#[inline] +/// Verifies that a buffer of bytes contains a size prefixed +/// `Units` and returns it. +/// Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `size_prefixed_root_as_units_unchecked`. +pub fn size_prefixed_root_as_units(buf: &[u8]) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::size_prefixed_root::(buf) +} +#[inline] +/// Verifies, with the given options, that a buffer of bytes +/// contains a `Units` and returns it. +/// Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `root_as_units_unchecked`. +pub fn root_as_units_with_opts<'b, 'o>( + opts: &'o ::flatbuffers::VerifierOptions, + buf: &'b [u8], +) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::root_with_opts::>(opts, buf) +} +#[inline] +/// Verifies, with the given verifier options, that a buffer of +/// bytes contains a size prefixed `Units` and returns +/// it. Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `root_as_units_unchecked`. +pub fn size_prefixed_root_as_units_with_opts<'b, 'o>( + opts: &'o ::flatbuffers::VerifierOptions, + buf: &'b [u8], +) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::size_prefixed_root_with_opts::>(opts, buf) +} +#[inline] +/// Assumes, without verification, that a buffer of bytes contains a Units and returns it. 
+/// # Safety +/// Callers must trust the given bytes do indeed contain a valid `Units`. +pub unsafe fn root_as_units_unchecked(buf: &[u8]) -> Units<'_> { + unsafe { ::flatbuffers::root_unchecked::(buf) } +} +#[inline] +/// Assumes, without verification, that a buffer of bytes contains a size prefixed Units and returns it. +/// # Safety +/// Callers must trust the given bytes do indeed contain a valid size prefixed `Units`. +pub unsafe fn size_prefixed_root_as_units_unchecked(buf: &[u8]) -> Units<'_> { + unsafe { ::flatbuffers::size_prefixed_root_unchecked::(buf) } +} +pub const UNITS_IDENTIFIER: &str = "un00"; + +#[inline] +pub fn units_buffer_has_identifier(buf: &[u8]) -> bool { + ::flatbuffers::buffer_has_identifier(buf, UNITS_IDENTIFIER, false) +} + +#[inline] +pub fn units_size_prefixed_buffer_has_identifier(buf: &[u8]) -> bool { + ::flatbuffers::buffer_has_identifier(buf, UNITS_IDENTIFIER, true) +} + +#[inline] +pub fn finish_units_buffer<'a, 'b, A: ::flatbuffers::Allocator + 'a>( + fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + root: ::flatbuffers::WIPOffset>) { + fbb.finish(root, Some(UNITS_IDENTIFIER)); +} + +#[inline] +pub fn finish_size_prefixed_units_buffer<'a, 'b, A: ::flatbuffers::Allocator + 'a>(fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, root: ::flatbuffers::WIPOffset>) { + fbb.finish_size_prefixed(root, Some(UNITS_IDENTIFIER)); +} diff --git a/rust/src/flatbuffers_generated/wrdn_finished_writing.rs b/rust/src/flatbuffers_generated/wrdn_finished_writing.rs new file mode 100644 index 0000000..5a7dd17 --- /dev/null +++ b/rust/src/flatbuffers_generated/wrdn_finished_writing.rs @@ -0,0 +1,272 @@ +// automatically generated by the FlatBuffers compiler, do not modify +// @generated + +extern crate alloc; + +pub enum FinishedWritingOffset {} +#[derive(Copy, Clone, PartialEq)] + +pub struct FinishedWriting<'a> { + pub _tab: ::flatbuffers::Table<'a>, +} + +impl<'a> ::flatbuffers::Follow<'a> for FinishedWriting<'a> { + type Inner = 
FinishedWriting<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { _tab: unsafe { ::flatbuffers::Table::new(buf, loc) } } + } +} + +impl<'a> FinishedWriting<'a> { + pub const VT_SERVICE_ID: ::flatbuffers::VOffsetT = 4; + pub const VT_JOB_ID: ::flatbuffers::VOffsetT = 6; + pub const VT_ERROR_ENCOUNTERED: ::flatbuffers::VOffsetT = 8; + pub const VT_FILE_NAME: ::flatbuffers::VOffsetT = 10; + pub const VT_METADATA: ::flatbuffers::VOffsetT = 12; + pub const VT_MESSAGE: ::flatbuffers::VOffsetT = 14; + + #[inline] + pub unsafe fn init_from_table(table: ::flatbuffers::Table<'a>) -> Self { + FinishedWriting { _tab: table } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: ::flatbuffers::Allocator + 'bldr>( + _fbb: &'mut_bldr mut ::flatbuffers::FlatBufferBuilder<'bldr, A>, + args: &'args FinishedWritingArgs<'args> + ) -> ::flatbuffers::WIPOffset> { + let mut builder = FinishedWritingBuilder::new(_fbb); + if let Some(x) = args.message { builder.add_message(x); } + if let Some(x) = args.metadata { builder.add_metadata(x); } + if let Some(x) = args.file_name { builder.add_file_name(x); } + if let Some(x) = args.job_id { builder.add_job_id(x); } + if let Some(x) = args.service_id { builder.add_service_id(x); } + builder.add_error_encountered(args.error_encountered); + builder.finish() + } + + + #[inline] + pub fn service_id(&self) -> &'a str { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<&str>>(FinishedWriting::VT_SERVICE_ID, None).unwrap()} + } + #[inline] + pub fn job_id(&self) -> &'a str { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<&str>>(FinishedWriting::VT_JOB_ID, None).unwrap()} + } + #[inline] + pub fn error_encountered(&self) -> bool { + // Safety: + // 
Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::(FinishedWriting::VT_ERROR_ENCOUNTERED, Some(false)).unwrap()} + } + #[inline] + pub fn file_name(&self) -> &'a str { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<&str>>(FinishedWriting::VT_FILE_NAME, None).unwrap()} + } + #[inline] + pub fn metadata(&self) -> Option<&'a str> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<&str>>(FinishedWriting::VT_METADATA, None)} + } + #[inline] + pub fn message(&self) -> Option<&'a str> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<&str>>(FinishedWriting::VT_MESSAGE, None)} + } +} + +impl ::flatbuffers::Verifiable for FinishedWriting<'_> { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + v.visit_table(pos)? + .visit_field::<::flatbuffers::ForwardsUOffset<&str>>("service_id", Self::VT_SERVICE_ID, true)? + .visit_field::<::flatbuffers::ForwardsUOffset<&str>>("job_id", Self::VT_JOB_ID, true)? + .visit_field::("error_encountered", Self::VT_ERROR_ENCOUNTERED, false)? + .visit_field::<::flatbuffers::ForwardsUOffset<&str>>("file_name", Self::VT_FILE_NAME, true)? + .visit_field::<::flatbuffers::ForwardsUOffset<&str>>("metadata", Self::VT_METADATA, false)? + .visit_field::<::flatbuffers::ForwardsUOffset<&str>>("message", Self::VT_MESSAGE, false)? 
+ .finish(); + Ok(()) + } +} +pub struct FinishedWritingArgs<'a> { + pub service_id: Option<::flatbuffers::WIPOffset<&'a str>>, + pub job_id: Option<::flatbuffers::WIPOffset<&'a str>>, + pub error_encountered: bool, + pub file_name: Option<::flatbuffers::WIPOffset<&'a str>>, + pub metadata: Option<::flatbuffers::WIPOffset<&'a str>>, + pub message: Option<::flatbuffers::WIPOffset<&'a str>>, +} +impl<'a> Default for FinishedWritingArgs<'a> { + #[inline] + fn default() -> Self { + FinishedWritingArgs { + service_id: None, // required field + job_id: None, // required field + error_encountered: false, + file_name: None, // required field + metadata: None, + message: None, + } + } +} + +pub struct FinishedWritingBuilder<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> { + fbb_: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + start_: ::flatbuffers::WIPOffset<::flatbuffers::TableUnfinishedWIPOffset>, +} +impl<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> FinishedWritingBuilder<'a, 'b, A> { + #[inline] + pub fn add_service_id(&mut self, service_id: ::flatbuffers::WIPOffset<&'b str>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(FinishedWriting::VT_SERVICE_ID, service_id); + } + #[inline] + pub fn add_job_id(&mut self, job_id: ::flatbuffers::WIPOffset<&'b str>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(FinishedWriting::VT_JOB_ID, job_id); + } + #[inline] + pub fn add_error_encountered(&mut self, error_encountered: bool) { + self.fbb_.push_slot::(FinishedWriting::VT_ERROR_ENCOUNTERED, error_encountered, false); + } + #[inline] + pub fn add_file_name(&mut self, file_name: ::flatbuffers::WIPOffset<&'b str>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(FinishedWriting::VT_FILE_NAME, file_name); + } + #[inline] + pub fn add_metadata(&mut self, metadata: ::flatbuffers::WIPOffset<&'b str>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(FinishedWriting::VT_METADATA, metadata); + } + #[inline] + pub fn 
add_message(&mut self, message: ::flatbuffers::WIPOffset<&'b str>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(FinishedWriting::VT_MESSAGE, message); + } + #[inline] + pub fn new(_fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>) -> FinishedWritingBuilder<'a, 'b, A> { + let start = _fbb.start_table(); + FinishedWritingBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub fn finish(self) -> ::flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + self.fbb_.required(o, FinishedWriting::VT_SERVICE_ID,"service_id"); + self.fbb_.required(o, FinishedWriting::VT_JOB_ID,"job_id"); + self.fbb_.required(o, FinishedWriting::VT_FILE_NAME,"file_name"); + ::flatbuffers::WIPOffset::new(o.value()) + } +} + +impl ::core::fmt::Debug for FinishedWriting<'_> { + fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + let mut ds = f.debug_struct("FinishedWriting"); + ds.field("service_id", &self.service_id()); + ds.field("job_id", &self.job_id()); + ds.field("error_encountered", &self.error_encountered()); + ds.field("file_name", &self.file_name()); + ds.field("metadata", &self.metadata()); + ds.field("message", &self.message()); + ds.finish() + } +} +#[inline] +/// Verifies that a buffer of bytes contains a `FinishedWriting` +/// and returns it. +/// Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `root_as_finished_writing_unchecked`. +pub fn root_as_finished_writing(buf: &[u8]) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::root::(buf) +} +#[inline] +/// Verifies that a buffer of bytes contains a size prefixed +/// `FinishedWriting` and returns it. +/// Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `size_prefixed_root_as_finished_writing_unchecked`. 
+pub fn size_prefixed_root_as_finished_writing(buf: &[u8]) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::size_prefixed_root::(buf) +} +#[inline] +/// Verifies, with the given options, that a buffer of bytes +/// contains a `FinishedWriting` and returns it. +/// Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `root_as_finished_writing_unchecked`. +pub fn root_as_finished_writing_with_opts<'b, 'o>( + opts: &'o ::flatbuffers::VerifierOptions, + buf: &'b [u8], +) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::root_with_opts::>(opts, buf) +} +#[inline] +/// Verifies, with the given verifier options, that a buffer of +/// bytes contains a size prefixed `FinishedWriting` and returns +/// it. Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `root_as_finished_writing_unchecked`. +pub fn size_prefixed_root_as_finished_writing_with_opts<'b, 'o>( + opts: &'o ::flatbuffers::VerifierOptions, + buf: &'b [u8], +) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::size_prefixed_root_with_opts::>(opts, buf) +} +#[inline] +/// Assumes, without verification, that a buffer of bytes contains a FinishedWriting and returns it. +/// # Safety +/// Callers must trust the given bytes do indeed contain a valid `FinishedWriting`. +pub unsafe fn root_as_finished_writing_unchecked(buf: &[u8]) -> FinishedWriting<'_> { + unsafe { ::flatbuffers::root_unchecked::(buf) } +} +#[inline] +/// Assumes, without verification, that a buffer of bytes contains a size prefixed FinishedWriting and returns it. +/// # Safety +/// Callers must trust the given bytes do indeed contain a valid size prefixed `FinishedWriting`. 
+pub unsafe fn size_prefixed_root_as_finished_writing_unchecked(buf: &[u8]) -> FinishedWriting<'_> { + unsafe { ::flatbuffers::size_prefixed_root_unchecked::(buf) } +} +pub const FINISHED_WRITING_IDENTIFIER: &str = "wrdn"; + +#[inline] +pub fn finished_writing_buffer_has_identifier(buf: &[u8]) -> bool { + ::flatbuffers::buffer_has_identifier(buf, FINISHED_WRITING_IDENTIFIER, false) +} + +#[inline] +pub fn finished_writing_size_prefixed_buffer_has_identifier(buf: &[u8]) -> bool { + ::flatbuffers::buffer_has_identifier(buf, FINISHED_WRITING_IDENTIFIER, true) +} + +#[inline] +pub fn finish_finished_writing_buffer<'a, 'b, A: ::flatbuffers::Allocator + 'a>( + fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + root: ::flatbuffers::WIPOffset>) { + fbb.finish(root, Some(FINISHED_WRITING_IDENTIFIER)); +} + +#[inline] +pub fn finish_size_prefixed_finished_writing_buffer<'a, 'b, A: ::flatbuffers::Allocator + 'a>(fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, root: ::flatbuffers::WIPOffset>) { + fbb.finish_size_prefixed(root, Some(FINISHED_WRITING_IDENTIFIER)); +} diff --git a/rust/src/flatbuffers_generated/x5f2_status.rs b/rust/src/flatbuffers_generated/x5f2_status.rs new file mode 100644 index 0000000..03ec509 --- /dev/null +++ b/rust/src/flatbuffers_generated/x5f2_status.rs @@ -0,0 +1,286 @@ +// automatically generated by the FlatBuffers compiler, do not modify +// @generated + +extern crate alloc; + +pub enum StatusOffset {} +#[derive(Copy, Clone, PartialEq)] + +pub struct Status<'a> { + pub _tab: ::flatbuffers::Table<'a>, +} + +impl<'a> ::flatbuffers::Follow<'a> for Status<'a> { + type Inner = Status<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { _tab: unsafe { ::flatbuffers::Table::new(buf, loc) } } + } +} + +impl<'a> Status<'a> { + pub const VT_SOFTWARE_NAME: ::flatbuffers::VOffsetT = 4; + pub const VT_SOFTWARE_VERSION: ::flatbuffers::VOffsetT = 6; + pub const VT_SERVICE_ID: ::flatbuffers::VOffsetT = 8; + pub const 
VT_HOST_NAME: ::flatbuffers::VOffsetT = 10; + pub const VT_PROCESS_ID: ::flatbuffers::VOffsetT = 12; + pub const VT_UPDATE_INTERVAL: ::flatbuffers::VOffsetT = 14; + pub const VT_STATUS_JSON: ::flatbuffers::VOffsetT = 16; + + #[inline] + pub unsafe fn init_from_table(table: ::flatbuffers::Table<'a>) -> Self { + Status { _tab: table } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: ::flatbuffers::Allocator + 'bldr>( + _fbb: &'mut_bldr mut ::flatbuffers::FlatBufferBuilder<'bldr, A>, + args: &'args StatusArgs<'args> + ) -> ::flatbuffers::WIPOffset> { + let mut builder = StatusBuilder::new(_fbb); + if let Some(x) = args.status_json { builder.add_status_json(x); } + builder.add_update_interval(args.update_interval); + builder.add_process_id(args.process_id); + if let Some(x) = args.host_name { builder.add_host_name(x); } + if let Some(x) = args.service_id { builder.add_service_id(x); } + if let Some(x) = args.software_version { builder.add_software_version(x); } + if let Some(x) = args.software_name { builder.add_software_name(x); } + builder.finish() + } + + + #[inline] + pub fn software_name(&self) -> Option<&'a str> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<&str>>(Status::VT_SOFTWARE_NAME, None)} + } + #[inline] + pub fn software_version(&self) -> Option<&'a str> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<&str>>(Status::VT_SOFTWARE_VERSION, None)} + } + #[inline] + pub fn service_id(&self) -> Option<&'a str> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<&str>>(Status::VT_SERVICE_ID, None)} + } + #[inline] + pub fn host_name(&self) -> Option<&'a str> { + // Safety: + 
// Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<&str>>(Status::VT_HOST_NAME, None)} + } + #[inline] + pub fn process_id(&self) -> u32 { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::(Status::VT_PROCESS_ID, Some(0)).unwrap()} + } + #[inline] + pub fn update_interval(&self) -> u32 { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::(Status::VT_UPDATE_INTERVAL, Some(0)).unwrap()} + } + #[inline] + pub fn status_json(&self) -> Option<&'a str> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<&str>>(Status::VT_STATUS_JSON, None)} + } +} + +impl ::flatbuffers::Verifiable for Status<'_> { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + v.visit_table(pos)? + .visit_field::<::flatbuffers::ForwardsUOffset<&str>>("software_name", Self::VT_SOFTWARE_NAME, false)? + .visit_field::<::flatbuffers::ForwardsUOffset<&str>>("software_version", Self::VT_SOFTWARE_VERSION, false)? + .visit_field::<::flatbuffers::ForwardsUOffset<&str>>("service_id", Self::VT_SERVICE_ID, false)? + .visit_field::<::flatbuffers::ForwardsUOffset<&str>>("host_name", Self::VT_HOST_NAME, false)? + .visit_field::("process_id", Self::VT_PROCESS_ID, false)? + .visit_field::("update_interval", Self::VT_UPDATE_INTERVAL, false)? + .visit_field::<::flatbuffers::ForwardsUOffset<&str>>("status_json", Self::VT_STATUS_JSON, false)? 
+ .finish(); + Ok(()) + } +} +pub struct StatusArgs<'a> { + pub software_name: Option<::flatbuffers::WIPOffset<&'a str>>, + pub software_version: Option<::flatbuffers::WIPOffset<&'a str>>, + pub service_id: Option<::flatbuffers::WIPOffset<&'a str>>, + pub host_name: Option<::flatbuffers::WIPOffset<&'a str>>, + pub process_id: u32, + pub update_interval: u32, + pub status_json: Option<::flatbuffers::WIPOffset<&'a str>>, +} +impl<'a> Default for StatusArgs<'a> { + #[inline] + fn default() -> Self { + StatusArgs { + software_name: None, + software_version: None, + service_id: None, + host_name: None, + process_id: 0, + update_interval: 0, + status_json: None, + } + } +} + +pub struct StatusBuilder<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> { + fbb_: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + start_: ::flatbuffers::WIPOffset<::flatbuffers::TableUnfinishedWIPOffset>, +} +impl<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> StatusBuilder<'a, 'b, A> { + #[inline] + pub fn add_software_name(&mut self, software_name: ::flatbuffers::WIPOffset<&'b str>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(Status::VT_SOFTWARE_NAME, software_name); + } + #[inline] + pub fn add_software_version(&mut self, software_version: ::flatbuffers::WIPOffset<&'b str>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(Status::VT_SOFTWARE_VERSION, software_version); + } + #[inline] + pub fn add_service_id(&mut self, service_id: ::flatbuffers::WIPOffset<&'b str>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(Status::VT_SERVICE_ID, service_id); + } + #[inline] + pub fn add_host_name(&mut self, host_name: ::flatbuffers::WIPOffset<&'b str>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(Status::VT_HOST_NAME, host_name); + } + #[inline] + pub fn add_process_id(&mut self, process_id: u32) { + self.fbb_.push_slot::(Status::VT_PROCESS_ID, process_id, 0); + } + #[inline] + pub fn add_update_interval(&mut self, update_interval: u32) { + 
self.fbb_.push_slot::(Status::VT_UPDATE_INTERVAL, update_interval, 0); + } + #[inline] + pub fn add_status_json(&mut self, status_json: ::flatbuffers::WIPOffset<&'b str>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(Status::VT_STATUS_JSON, status_json); + } + #[inline] + pub fn new(_fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>) -> StatusBuilder<'a, 'b, A> { + let start = _fbb.start_table(); + StatusBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub fn finish(self) -> ::flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + ::flatbuffers::WIPOffset::new(o.value()) + } +} + +impl ::core::fmt::Debug for Status<'_> { + fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + let mut ds = f.debug_struct("Status"); + ds.field("software_name", &self.software_name()); + ds.field("software_version", &self.software_version()); + ds.field("service_id", &self.service_id()); + ds.field("host_name", &self.host_name()); + ds.field("process_id", &self.process_id()); + ds.field("update_interval", &self.update_interval()); + ds.field("status_json", &self.status_json()); + ds.finish() + } +} +#[inline] +/// Verifies that a buffer of bytes contains a `Status` +/// and returns it. +/// Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `root_as_status_unchecked`. +pub fn root_as_status(buf: &[u8]) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::root::(buf) +} +#[inline] +/// Verifies that a buffer of bytes contains a size prefixed +/// `Status` and returns it. +/// Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `size_prefixed_root_as_status_unchecked`. 
+pub fn size_prefixed_root_as_status(buf: &[u8]) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::size_prefixed_root::(buf) +} +#[inline] +/// Verifies, with the given options, that a buffer of bytes +/// contains a `Status` and returns it. +/// Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `root_as_status_unchecked`. +pub fn root_as_status_with_opts<'b, 'o>( + opts: &'o ::flatbuffers::VerifierOptions, + buf: &'b [u8], +) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::root_with_opts::>(opts, buf) +} +#[inline] +/// Verifies, with the given verifier options, that a buffer of +/// bytes contains a size prefixed `Status` and returns +/// it. Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `root_as_status_unchecked`. +pub fn size_prefixed_root_as_status_with_opts<'b, 'o>( + opts: &'o ::flatbuffers::VerifierOptions, + buf: &'b [u8], +) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::size_prefixed_root_with_opts::>(opts, buf) +} +#[inline] +/// Assumes, without verification, that a buffer of bytes contains a Status and returns it. +/// # Safety +/// Callers must trust the given bytes do indeed contain a valid `Status`. +pub unsafe fn root_as_status_unchecked(buf: &[u8]) -> Status<'_> { + unsafe { ::flatbuffers::root_unchecked::(buf) } +} +#[inline] +/// Assumes, without verification, that a buffer of bytes contains a size prefixed Status and returns it. +/// # Safety +/// Callers must trust the given bytes do indeed contain a valid size prefixed `Status`. 
+pub unsafe fn size_prefixed_root_as_status_unchecked(buf: &[u8]) -> Status<'_> { + unsafe { ::flatbuffers::size_prefixed_root_unchecked::(buf) } +} +pub const STATUS_IDENTIFIER: &str = "x5f2"; + +#[inline] +pub fn status_buffer_has_identifier(buf: &[u8]) -> bool { + ::flatbuffers::buffer_has_identifier(buf, STATUS_IDENTIFIER, false) +} + +#[inline] +pub fn status_size_prefixed_buffer_has_identifier(buf: &[u8]) -> bool { + ::flatbuffers::buffer_has_identifier(buf, STATUS_IDENTIFIER, true) +} + +#[inline] +pub fn finish_status_buffer<'a, 'b, A: ::flatbuffers::Allocator + 'a>( + fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + root: ::flatbuffers::WIPOffset>) { + fbb.finish(root, Some(STATUS_IDENTIFIER)); +} + +#[inline] +pub fn finish_size_prefixed_status_buffer<'a, 'b, A: ::flatbuffers::Allocator + 'a>(fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, root: ::flatbuffers::WIPOffset>) { + fbb.finish_size_prefixed(root, Some(STATUS_IDENTIFIER)); +} From 56f669d5744c85ecb4c280d5af18871680043d8b Mon Sep 17 00:00:00 2001 From: Tom Willemsen Date: Wed, 18 Mar 2026 18:13:33 +0000 Subject: [PATCH 356/363] Maybe CI setup --- .github/workflows/Lint-and-test.yml | 20 ++++--- .github/workflows/release.yml | 41 ++++++++++--- README.md | 8 ++- ...e_bindings.py => generate_rust_bindings.py | 6 -- rust/Cargo.lock | 25 +------- rust/Cargo.toml | 13 ++-- rust/build.rs | 59 ------------------- rust/src/flatbuffers_generated/mod.rs | 18 ++++++ 8 files changed, 77 insertions(+), 113 deletions(-) rename generate_bindings.py => generate_rust_bindings.py (93%) delete mode 100644 rust/build.rs diff --git a/.github/workflows/Lint-and-test.yml b/.github/workflows/Lint-and-test.yml index d329f10..ba4d87a 100644 --- a/.github/workflows/Lint-and-test.yml +++ b/.github/workflows/Lint-and-test.yml @@ -6,17 +6,11 @@ name: Python application on: workflow_call: push: - branches: [ "main" ] + branches: [ "master" ] pull_request: - branches: [ "main" ] + branches: [ "master" ] jobs: - 
call-workflow: - uses: ISISComputingGroup/reusable-workflows/.github/workflows/linters.yml@main - with: - compare-branch: origin/main - python-ver: '3.13' - runs-on: 'ubuntu-latest' tests: strategy: matrix: @@ -31,13 +25,21 @@ jobs: python-version: ${{ matrix.version }} - name: Install dependencies run: uv sync --all-extras --dev + working-directory: python + - name: Ruff format + run: uv run ruff format --check + working-directory: python + - name: Ruff check + run: uv run ruff check + working-directory: python - name: Test with pytest run: uv run pytest tests + working-directory: python results: if: ${{ always() }} runs-on: ubuntu-latest name: Final Results - needs: [tests, call-workflow] + needs: [tests] steps: - run: exit 1 # see https://stackoverflow.com/a/67532120/4907315 diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index da0fa7b..dfd2ebf 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -20,16 +20,41 @@ jobs: --user - name: Build a binary wheel and a source tarball run: python3 -m build + working-directory: python - name: Store the distribution packages uses: actions/upload-artifact@v6 with: name: python-package-distributions - path: dist/ + path: python/dist/ + rust-build: + if: github.ref_type == 'tag' + name: build distribution + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v6 + - name: install stable rust + run: rustup install stable + - name: Format check + run: cargo fmt check + - name: Clippy + run: cargo clippy -- --deny warnings + - name: Tests + run: cargo test + - name: set version + run: | + export VERSION=${{ github.ref_name }} + sed -i "s/0.0.0/$VERSION/g" Cargo.toml + - name: publish + run: cargo publish --dry-run --allow-dirty + env: + CARGO_REGISTRY_TOKEN: ${{ secrets.CARGO_REGISTRY_TOKEN }} + publish-to-pypi: name: >- Publish Python distribution to PyPI if: github.ref_type == 'tag' - needs: [py-build] + needs: [py-build, rust-build] # Don't publish anything until 
both python & rust builds pass... runs-on: ubuntu-latest environment: name: release @@ -41,9 +66,11 @@ jobs: uses: actions/download-artifact@v7 with: name: python-package-distributions - path: dist/ + path: python/dist/ - name: Publish distribution to PyPI uses: pypa/gh-action-pypi-publish@release/v1 + with: + packages-dir: python/ github-release: name: >- Sign the Python distribution with Sigstore @@ -60,13 +87,13 @@ jobs: uses: actions/download-artifact@v7 with: name: python-package-distributions - path: dist/ + path: python/dist/ - name: Sign the dists with Sigstore uses: sigstore/gh-action-sigstore-python@v3.2.0 with: inputs: >- - ./dist/*.tar.gz - ./dist/*.whl + ./python/dist/*.tar.gz + ./python/dist/*.whl - name: Create GitHub Release env: GITHUB_TOKEN: ${{ github.token }} @@ -83,5 +110,5 @@ jobs: # sigstore-produced signatures and certificates. run: >- gh release upload - '${{ github.ref_name }}' dist/** + '${{ github.ref_name }}' python/dist/** --repo '${{ github.repository }}' diff --git a/README.md b/README.md index 1f420e4..6f64417 100644 --- a/README.md +++ b/README.md @@ -32,13 +32,17 @@ FlatBuffers is the format chosen for the ISIS data streaming system, derived fro Check `ess-streaming-data-types` first; attempt not to diverge without reason. If a new schema is really needed, then add a new `.fbs` schema in the `schemas` directory. +Note: to generate code from `.fbs` schemas, you will need the `flatc` tool installed. It can be acquired from +https://github.com/google/flatbuffers/releases . + ### Adding Python bindings Python bindings have low-level code (autogenerated by `flatc`) in the `fbschemas` directory, but **also** manually-written convenience serializers and deserializers in the top-level of the python module. 
When adding or modifying a schema: -- The low-level code must be generated by calling `generate_code.py` +- The low-level code must be generated by manually calling `flatc --python schemas\some_schema.fbs` and adding + the resulting generated code to `python/src/streaming_data_types/fbschemas`. - Manually-written serializers & deserializers will need to be updated, and added to the lists in `__init__.py`. ### Rust bindings @@ -47,5 +51,5 @@ Rust bindings have low-level code in `flatbuffers_generated`, and a small high-l to deserialize any message. When adding or modifying a schema: -- The low-level code must be generated by calling `generate_code.py` +- The low-level code must be generated by calling `generate_rust_bindings.py` - The wrapper (defined in `lib.rs`) will need to be updated with the new schema. diff --git a/generate_bindings.py b/generate_rust_bindings.py similarity index 93% rename from generate_bindings.py rename to generate_rust_bindings.py index 35b98fc..800134b 100644 --- a/generate_bindings.py +++ b/generate_rust_bindings.py @@ -1,13 +1,8 @@ import os -import sys import shutil import subprocess -def generate_python_bindings(): - pass - - def to_rust_file_name(schema: str): name, ext = schema.split(".") return f"{name}.rs" @@ -50,7 +45,6 @@ def generate_rust_bindings(): def main(): generate_rust_bindings() - generate_python_bindings() if __name__ == "__main__": diff --git a/rust/Cargo.lock b/rust/Cargo.lock index 726725e..a9f9a21 100644 --- a/rust/Cargo.lock +++ b/rust/Cargo.lock @@ -2,12 +2,6 @@ # It is not intended for manual editing. 
version = 4 -[[package]] -name = "anyhow" -version = "1.0.102" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f202df86484c868dbad7eaa557ef785d5c66295e41b460ef922eca0723b842c" - [[package]] name = "bitflags" version = "2.11.0" @@ -24,30 +18,13 @@ dependencies = [ "rustc_version", ] -[[package]] -name = "flatc-rust" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "57e61227926ef5b237af48bee74394cc4a5a221ebd10c5147a98e612f207851d" -dependencies = [ - "log", -] - [[package]] name = "isis_streaming_data_types" -version = "0.1.0" +version = "0.0.0" dependencies = [ - "anyhow", "flatbuffers", - "flatc-rust", ] -[[package]] -name = "log" -version = "0.4.29" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5e5032e24019045c762d3c0f28f5b6b8bbf38563a65908389bf7978758920897" - [[package]] name = "rustc_version" version = "0.4.1" diff --git a/rust/Cargo.toml b/rust/Cargo.toml index 918d3b7..625cac1 100644 --- a/rust/Cargo.toml +++ b/rust/Cargo.toml @@ -1,11 +1,12 @@ [package] name = "isis_streaming_data_types" -version = "0.1.0" +version = "0.0.0" edition = "2024" - -[build-dependencies] -flatc-rust = "0.2.0" +description = "Streaming data types for the ISIS Neutron & Muon Source" +license-file = "../LICENSE" +documentation = "https://github.com/isisComputingGroup/streaming-data-types" +homepage = "https://github.com/isisComputingGroup/streaming-data-types" +repository = "https://github.com/isisComputingGroup/streaming-data-types" [dependencies] -flatbuffers = "*" -anyhow = "*" \ No newline at end of file +flatbuffers = "*" \ No newline at end of file diff --git a/rust/build.rs b/rust/build.rs deleted file mode 100644 index d5bdf0b..0000000 --- a/rust/build.rs +++ /dev/null @@ -1,59 +0,0 @@ -use std::fs; -use std::fs::{File, OpenOptions, create_dir_all}; -use std::io::Write; -use std::path::Path; - -fn mod_name_from_stem(name: &str) -> String { - format!("{}_{}", 
name.get(5..).unwrap(), name.get(0..4).unwrap()).to_owned() -} - -fn main() { - create_dir_all("src/flatbuffers_generated") - .expect("Unable to create src/flatbuffers_generated"); - File::create("src/flatbuffers_generated/mod.rs") - .expect("Failed to create src/flatbuffers_generated/mod.rs"); - - let mut mod_file = OpenOptions::new() - .append(true) - .open("src/flatbuffers_generated/mod.rs") - .expect("Could not open src/flatbuffers_generated/mod.rs"); - - fs::read_dir("../schemas") - .expect("Could not read schemas directory") - .filter_map(|e| e.ok()) - .for_each(|entry| { - println!( - "cargo:rerun-if-changed={}", - entry.path().as_path().to_str().unwrap() - ); - flatc_rust::run(flatc_rust::Args { - inputs: &[entry.path().as_path()], - out_dir: Path::new("src/flatbuffers_generated/"), - extra: &[ - "--include-prefix", - "flatbuffers_generated", - "--filename-suffix", - "", - "--gen-all", - ], - ..Default::default() - }) - .expect("cannot find flatc compiler"); - - let path = entry.path(); - let stem = path - .file_stem() - .expect("Can't get file stem") - .to_str() - .expect("Can't convert file stem to str"); - let rust_name = mod_name_from_stem(stem); - - writeln!( - mod_file, - "#[path = \"{stem}.rs\"] -pub mod {rust_name}; -" - ) - .expect("Could not write to src/flatbuffers_generated/mod.rs"); - }) -} diff --git a/rust/src/flatbuffers_generated/mod.rs b/rust/src/flatbuffers_generated/mod.rs index 9aedece..b664214 100644 --- a/rust/src/flatbuffers_generated/mod.rs +++ b/rust/src/flatbuffers_generated/mod.rs @@ -1,36 +1,54 @@ #[path = "6s4t_run_stop.rs"] pub mod run_stop_6s4t; + #[path = "ad00_area_detector_array.rs"] pub mod area_detector_array_ad00; + #[path = "al00_alarm.rs"] pub mod alarm_al00; + #[path = "answ_action_response.rs"] pub mod action_response_answ; + #[path = "da00_dataarray.rs"] pub mod dataarray_da00; + #[path = "df12_det_spec_map.rs"] pub mod det_spec_map_df12; + #[path = "ep01_epics_connection.rs"] pub mod epics_connection_ep01; + 
#[path = "ev44_events.rs"] pub mod events_ev44; + #[path = "f144_logdata.rs"] pub mod logdata_f144; + #[path = "fc00_forwarder_config.rs"] pub mod forwarder_config_fc00; + #[path = "hs01_event_histogram.rs"] pub mod event_histogram_hs01; + #[path = "json_json.rs"] pub mod json_json; + #[path = "pl72_run_start.rs"] pub mod run_start_pl72; + #[path = "pu00_pulse_metadata.rs"] pub mod pulse_metadata_pu00; + #[path = "se00_data.rs"] pub mod data_se00; + #[path = "un00_units.rs"] pub mod units_un00; + #[path = "wrdn_finished_writing.rs"] pub mod finished_writing_wrdn; + #[path = "x5f2_status.rs"] pub mod status_x5f2; + From 57716b84b6759e869692322713473faae879f4ba Mon Sep 17 00:00:00 2001 From: Tom Willemsen Date: Wed, 18 Mar 2026 18:16:11 +0000 Subject: [PATCH 357/363] Add ruff.toml --- python/ruff.toml | 70 ++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 70 insertions(+) create mode 100644 python/ruff.toml diff --git a/python/ruff.toml b/python/ruff.toml new file mode 100644 index 0000000..aaebedc --- /dev/null +++ b/python/ruff.toml @@ -0,0 +1,70 @@ +# Exclude a variety of commonly ignored directories. +exclude = [ + ".bzr", + ".direnv", + ".eggs", + ".git", + ".git-rewrite", + ".hg", + ".ipynb_checkpoints", + ".mypy_cache", + ".nox", + ".pants.d", + ".pyenv", + ".pytest_cache", + ".pytype", + ".ruff_cache", + ".svn", + ".tox", + ".venv", + ".vscode", + "__pypackages__", + "_build", + "buck-out", + "build", + "dist", + "node_modules", + "site-packages", + "venv", + "uk.ac.stfc.isis.ibex.opis", + "ReflectometryServer/test_modules/test_config/" +] + +# Set the maximum line length to 100. 
+line-length = 100 +indent-width = 4 + +[lint] +extend-select = [ + "N", # pep8-naming + # "D", # pydocstyle (can use this later but for now causes too many errors) + "I", # isort (for imports) + "E501", # Line too long ({width} > {limit}) + "E", + "F", + "ANN", +] +ignore = [ + "D406", # Section name should end with a newline ("{name}") + "D407", # Missing dashed underline after section ("{name}") + "N999", # Ignore this because the repo itself would need to be renamed +] +[lint.per-file-ignores] +"{**/tests/**,/tests/**,**/*tests.py,tests/**,*tests.py,*test.py,**/*test.py,common_tests/**,test_*.py}" = [ + "N802", + "D100", + "D101", + "D102", + "E501", + "ANN", +] + +[lint.pydocstyle] +# Use Google-style docstrings. +convention = "google" + +[format] +quote-style = "double" +indent-style = "space" +docstring-code-format = true +line-ending = "auto" \ No newline at end of file From 5e0ec649c2ba1840c7d46e3f30cb598be321939c Mon Sep 17 00:00:00 2001 From: Tom Willemsen Date: Wed, 18 Mar 2026 18:18:30 +0000 Subject: [PATCH 358/363] Remove ruff for now --- .github/workflows/Lint-and-test.yml | 6 --- python/ruff.toml | 70 ----------------------------- 2 files changed, 76 deletions(-) delete mode 100644 python/ruff.toml diff --git a/.github/workflows/Lint-and-test.yml b/.github/workflows/Lint-and-test.yml index ba4d87a..c9057b5 100644 --- a/.github/workflows/Lint-and-test.yml +++ b/.github/workflows/Lint-and-test.yml @@ -26,12 +26,6 @@ jobs: - name: Install dependencies run: uv sync --all-extras --dev working-directory: python - - name: Ruff format - run: uv run ruff format --check - working-directory: python - - name: Ruff check - run: uv run ruff check - working-directory: python - name: Test with pytest run: uv run pytest tests working-directory: python diff --git a/python/ruff.toml b/python/ruff.toml deleted file mode 100644 index aaebedc..0000000 --- a/python/ruff.toml +++ /dev/null @@ -1,70 +0,0 @@ -# Exclude a variety of commonly ignored directories. 
-exclude = [ - ".bzr", - ".direnv", - ".eggs", - ".git", - ".git-rewrite", - ".hg", - ".ipynb_checkpoints", - ".mypy_cache", - ".nox", - ".pants.d", - ".pyenv", - ".pytest_cache", - ".pytype", - ".ruff_cache", - ".svn", - ".tox", - ".venv", - ".vscode", - "__pypackages__", - "_build", - "buck-out", - "build", - "dist", - "node_modules", - "site-packages", - "venv", - "uk.ac.stfc.isis.ibex.opis", - "ReflectometryServer/test_modules/test_config/" -] - -# Set the maximum line length to 100. -line-length = 100 -indent-width = 4 - -[lint] -extend-select = [ - "N", # pep8-naming - # "D", # pydocstyle (can use this later but for now causes too many errors) - "I", # isort (for imports) - "E501", # Line too long ({width} > {limit}) - "E", - "F", - "ANN", -] -ignore = [ - "D406", # Section name should end with a newline ("{name}") - "D407", # Missing dashed underline after section ("{name}") - "N999", # Ignore this because the repo itself would need to be renamed -] -[lint.per-file-ignores] -"{**/tests/**,/tests/**,**/*tests.py,tests/**,*tests.py,*test.py,**/*test.py,common_tests/**,test_*.py}" = [ - "N802", - "D100", - "D101", - "D102", - "E501", - "ANN", -] - -[lint.pydocstyle] -# Use Google-style docstrings. 
-convention = "google" - -[format] -quote-style = "double" -indent-style = "space" -docstring-code-format = true -line-ending = "auto" \ No newline at end of file From 8440f1dfe99b20da12a69f85ce234ecd3dac2adc Mon Sep 17 00:00:00 2001 From: Tom Willemsen Date: Thu, 19 Mar 2026 10:26:04 +0000 Subject: [PATCH 359/363] Delete pytest.ini --- python/pytest.ini | 3 --- 1 file changed, 3 deletions(-) delete mode 100644 python/pytest.ini diff --git a/python/pytest.ini b/python/pytest.ini deleted file mode 100644 index dbdfd63..0000000 --- a/python/pytest.ini +++ /dev/null @@ -1,3 +0,0 @@ -[pytest] -junit_family=xunit2 -testpaths = ./tests From 8e8ffc3c605444aad6cfb4023a274e4b0bb01d1f Mon Sep 17 00:00:00 2001 From: Tom Willemsen Date: Thu, 19 Mar 2026 10:31:52 +0000 Subject: [PATCH 360/363] no coverage --- python/pyproject.toml | 1 - 1 file changed, 1 deletion(-) diff --git a/python/pyproject.toml b/python/pyproject.toml index 96f5a56..180dcdd 100644 --- a/python/pyproject.toml +++ b/python/pyproject.toml @@ -66,7 +66,6 @@ branch = true source = ["src"] [tool.coverage.report] -fail_under = 100 exclude_lines = [ "pragma: no cover", "if TYPE_CHECKING:", From 7e7a8d5c50df5396dccea64325f24434f3dfb009 Mon Sep 17 00:00:00 2001 From: Tom Willemsen Date: Thu, 19 Mar 2026 10:45:20 +0000 Subject: [PATCH 361/363] Add unit tests --- rust/src/lib.rs | 48 +++++++++++++++++++++++++++++++++++++++++++++--- 1 file changed, 45 insertions(+), 3 deletions(-) diff --git a/rust/src/lib.rs b/rust/src/lib.rs index 8197699..065212b 100644 --- a/rust/src/lib.rs +++ b/rust/src/lib.rs @@ -34,6 +34,7 @@ pub mod flatbuffers_generated; /// Enum containing all possible messages currently supported by /// `deserialize_message`. 
+#[derive(Debug, Clone, PartialEq)] pub enum DeserializedMessage<'a> { EventDataEv44(Event44Message<'a>), AreaDetectorAd00(ad00_ADArray<'a>), @@ -56,6 +57,7 @@ pub enum DeserializedMessage<'a> { /// Error raised from `deserialize_message` describing why a message /// cannot be deserialized +#[derive(Debug, Eq, PartialEq)] pub enum DeserializationError { UnsupportedSchema(String), InvalidFlatbuffer(InvalidFlatbuffer), @@ -116,11 +118,51 @@ pub fn deserialize_message(data: &[u8]) -> Result, Deser Some(b"da00") => Ok(DeserializedMessage::DataArrayDa00( root_as_da_00_data_array(data)?, )), - Some(b"un00") => Ok(DeserializedMessage::UnitsUn00( - root_as_units(data)?, - )), + Some(b"un00") => Ok(DeserializedMessage::UnitsUn00(root_as_units(data)?)), _ => Err(DeserializationError::UnsupportedSchema( "Unknown message type passed to deserialize".to_owned(), )), } } + +#[cfg(test)] +mod tests { + use super::*; + use crate::flatbuffers_generated::units_un00::{UnitsArgs, finish_units_buffer}; + use flatbuffers::FlatBufferBuilder; + + #[test] + fn test_deserialize_message() { + let mut fbb = FlatBufferBuilder::new(); + let un00_args = UnitsArgs { + source_name: Some(fbb.create_string("Hello")), + timestamp: 0, + units: Some(fbb.create_string("World")), + }; + let un00 = Units::create(&mut fbb, &un00_args); + finish_units_buffer(&mut fbb, un00); + + let deserialized = deserialize_message(fbb.finished_data()); + + match deserialized { + Ok(DeserializedMessage::UnitsUn00(msg)) => { + assert_eq!(msg.source_name(), "Hello"); + assert_eq!(msg.timestamp(), 0); + assert_eq!(msg.units(), Some("World")); + } + _ => panic!("Failed to deserialize message to correct type"), + } + } + + #[test] + fn test_fail_deserialize_message() { + let deserialized = deserialize_message(b"\0\0\0\0\0\0\0\0\0\0\0\0"); + + assert_eq!( + deserialized, + Err(DeserializationError::UnsupportedSchema( + "Unknown message type passed to deserialize".to_owned() + )) + ); + } +} From 
c81d91399baad110dff00f243ce124b8312569f4 Mon Sep 17 00:00:00 2001 From: Tom Willemsen Date: Thu, 19 Mar 2026 10:58:33 +0000 Subject: [PATCH 362/363] Add rust tests --- .github/workflows/Lint-and-test.yml | 28 ++++++++++++++++++++++++---- 1 file changed, 24 insertions(+), 4 deletions(-) diff --git a/.github/workflows/Lint-and-test.yml b/.github/workflows/Lint-and-test.yml index c9057b5..979bfe2 100644 --- a/.github/workflows/Lint-and-test.yml +++ b/.github/workflows/Lint-and-test.yml @@ -1,8 +1,6 @@ # This workflow will install Python dependencies, run tests and lint with a single version of Python # For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-python -name: Python application - on: workflow_call: push: @@ -11,7 +9,7 @@ on: branches: [ "master" ] jobs: - tests: + python-tests: strategy: matrix: version: ['3.12', '3.13', '3.14'] @@ -29,11 +27,33 @@ jobs: - name: Test with pytest run: uv run pytest tests working-directory: python + rust-tests: + strategy: + matrix: + os: ["ubuntu-latest", "windows-latest"] + runs-on: ${{ matrix.os }} + steps: + - uses: actions/checkout@v6 + - name: install stable rust + run: rustup install stable + working-directory: rust + - name: Format check + run: cargo fmt check + working-directory: rust + - name: Clippy + run: cargo clippy -- --deny warnings + working-directory: rust + - name: Tests + run: cargo test + working-directory: rust + - name: Package + run: cargo package + working-directory: rust results: if: ${{ always() }} runs-on: ubuntu-latest name: Final Results - needs: [tests] + needs: [python-tests, rust-tests] steps: - run: exit 1 # see https://stackoverflow.com/a/67532120/4907315 From deb25c184aadf65eb2c385589f2e61e6ed0e2ad5 Mon Sep 17 00:00:00 2001 From: Tom Willemsen Date: Thu, 19 Mar 2026 11:03:15 +0000 Subject: [PATCH 363/363] Correct CI --- .github/workflows/Lint-and-test.yml | 2 +- .github/workflows/release.yml | 19 ++++++++++++++++--- 2 files changed, 17 
insertions(+), 4 deletions(-) diff --git a/.github/workflows/Lint-and-test.yml b/.github/workflows/Lint-and-test.yml index 979bfe2..b2849e3 100644 --- a/.github/workflows/Lint-and-test.yml +++ b/.github/workflows/Lint-and-test.yml @@ -38,7 +38,7 @@ jobs: run: rustup install stable working-directory: rust - name: Format check - run: cargo fmt check + run: cargo fmt --check working-directory: rust - name: Clippy run: cargo clippy -- --deny warnings diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index dfd2ebf..5326564 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -1,4 +1,4 @@ -name: Publish Python distribution to PyPI +name: Publish on: push jobs: py-build: @@ -36,7 +36,7 @@ jobs: - name: install stable rust run: rustup install stable - name: Format check - run: cargo fmt check + run: cargo fmt --check - name: Clippy run: cargo clippy -- --deny warnings - name: Tests @@ -71,11 +71,24 @@ jobs: uses: pypa/gh-action-pypi-publish@release/v1 with: packages-dir: python/ + + publish-to-crates-io: + name: >- + Publish Rust distribution to crates.io + runs-on: ubuntu-latest + if: github.ref_type == 'tag' + needs: [py-build, rust-build] # Don't publish anything until both python & rust builds pass... + steps: + - name: publish + run: cargo publish --allow-dirty + env: + CARGO_REGISTRY_TOKEN: ${{ secrets.CARGO_REGISTRY_TOKEN }} + github-release: name: >- Sign the Python distribution with Sigstore and upload them to GitHub Release - needs: [py-build, publish-to-pypi] + needs: [py-build, publish-to-pypi, rust-build, publish-to-crates-io] runs-on: ubuntu-latest permissions: