diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md new file mode 100644 index 0000000..dd84ea7 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -0,0 +1,38 @@ +--- +name: Bug report +about: Create a report to help us improve +title: '' +labels: '' +assignees: '' + +--- + +**Describe the bug** +A clear and concise description of what the bug is. + +**To Reproduce** +Steps to reproduce the behavior: +1. Go to '...' +2. Click on '....' +3. Scroll down to '....' +4. See error + +**Expected behavior** +A clear and concise description of what you expected to happen. + +**Screenshots** +If applicable, add screenshots to help explain your problem. + +**Desktop (please complete the following information):** + - OS: [e.g. iOS] + - Browser [e.g. chrome, safari] + - Version [e.g. 22] + +**Smartphone (please complete the following information):** + - Device: [e.g. iPhone6] + - OS: [e.g. iOS8.1] + - Browser [e.g. stock browser, safari] + - Version [e.g. 22] + +**Additional context** +Add any other context about the problem here. diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md new file mode 100644 index 0000000..bbcbbe7 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/feature_request.md @@ -0,0 +1,20 @@ +--- +name: Feature request +about: Suggest an idea for this project +title: '' +labels: '' +assignees: '' + +--- + +**Is your feature request related to a problem? Please describe.** +A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] + +**Describe the solution you'd like** +A clear and concise description of what you want to happen. + +**Describe alternatives you've considered** +A clear and concise description of any alternative solutions or features you've considered. + +**Additional context** +Add any other context or screenshots about the feature request here. 
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 0000000..e43005e --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,96 @@ +name: CI +on: + push: + branches: + - main + pull_request: + types: [opened, synchronize, reopened] + + # Allows you to run this workflow manually from the Actions tab + workflow_dispatch: + +jobs: + build: + name: Build + runs-on: ubuntu-20.04 + env: + SONAR_SCANNER_VERSION: 4.6.1.2450 # Find the latest version in the "Windows" link on this page: + # https://docs.sonarqube.org/latest/analysis/scan/sonarscanner/ + SONAR_SERVER_URL: "https://sonarcloud.io" + BUILD_WRAPPER_OUT_DIR: build_wrapper_output_directory # Directory where build-wrapper output will be placed + steps: + - uses: actions/checkout@v2 + with: + fetch-depth: 0 # Shallow clones should be disabled for a better relevancy of analysis + - name: Set up JDK 11 + uses: actions/setup-java@v1 + with: + java-version: 11 + - name: Cache SonarQube packages + uses: actions/cache@v1 + with: + path: ~/.sonar/cache + key: ${{ runner.os }}-sonar + restore-keys: ${{ runner.os }}-sonar + - name: Download and set up sonar-scanner + env: + SONAR_SCANNER_DOWNLOAD_URL: https://binaries.sonarsource.com/Distribution/sonar-scanner-cli/sonar-scanner-cli-${{ env.SONAR_SCANNER_VERSION }}-linux.zip + run: | + mkdir -p $HOME/.sonar + curl -sSLo $HOME/.sonar/sonar-scanner.zip ${{ env.SONAR_SCANNER_DOWNLOAD_URL }} + unzip -o $HOME/.sonar/sonar-scanner.zip -d $HOME/.sonar/ + echo "$HOME/.sonar/sonar-scanner-${{ env.SONAR_SCANNER_VERSION }}-linux/bin" >> $GITHUB_PATH + - name: Download and set up build-wrapper + env: + BUILD_WRAPPER_DOWNLOAD_URL: ${{ env.SONAR_SERVER_URL }}/static/cpp/build-wrapper-linux-x86.zip + run: | + curl -sSLo $HOME/.sonar/build-wrapper-linux-x86.zip ${{ env.BUILD_WRAPPER_DOWNLOAD_URL }} + unzip -o $HOME/.sonar/build-wrapper-linux-x86.zip -d $HOME/.sonar/ + echo "$HOME/.sonar/build-wrapper-linux-x86" >> $GITHUB_PATH + - name: Download and install Fledge + env: + FLEDGE_REPO_URL: "https://github.com/fledge-iot/fledge/archive/refs/heads/develop.zip" + run: | + curl -sSLo fledge-pkg.zip ${{ env.FLEDGE_REPO_URL }} + unzip -o fledge-pkg.zip -d $HOME + mv $HOME/fledge-develop $HOME/fledge + cd $HOME/fledge + sudo apt-get update + sudo apt-get install libcurl4-openssl-dev + sudo $HOME/fledge/requirements.sh + sudo make install + sudo mkdir -p /usr/include/fledge/rapidjson/ + find $HOME/fledge/C/common/ -name '*.h' -exec sudo cp -prv '{}' '/usr/include/fledge/' ';' + find $HOME/fledge/C/plugins/ -name '*.h' -exec sudo cp -prv '{}' '/usr/include/fledge/' ';' + find $HOME/fledge/C/services/ -name '*.h' -exec sudo cp -prv '{}' '/usr/include/fledge/' ';' + find $HOME/fledge/C/tasks/ -name '*.h' -exec sudo cp -prv '{}' '/usr/include/fledge/' ';' + find $HOME/fledge/C/thirdparty/Simple-Web-Server/ -name '*.hpp' -exec sudo cp -prv '{}' '/usr/include/fledge/' ';' + sudo cp -prv $HOME/fledge/C/thirdparty/rapidjson/include/rapidjson/* /usr/include/fledge/rapidjson/ + sudo mkdir -p /usr/lib/fledge/ + sudo cp -prv /usr/local/fledge/lib/* /usr/lib/fledge/ + - name: Download and install Google Unit Test framework + run: | + sudo apt-get install libgtest-dev + cd /usr/src/gtest + sudo cmake CMakeLists.txt + sudo make + sudo apt-get install libgmock-dev + - name: Download and install gcovr + run: | + sudo apt-get install gcovr + - name: Run build-wrapper + run: | + chmod +x mkversion + export LD_LIBRARY_PATH=/usr/lib:/usr/local/lib + mkdir build + cmake -S . 
-B build -DCMAKE_BUILD_TYPE=Coverage + build-wrapper-linux-x86-64 --out-dir ${{ env.BUILD_WRAPPER_OUT_DIR }} cmake --build build/ --config Release + cd build + make + make hnz_pivot_filter_coverage_sonar + - name: Run sonar-scanner + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }} + run: | + sonar-scanner --define sonar.host.url="${{ env.SONAR_SERVER_URL }}" --define sonar.cfamily.build-wrapper-output="${{ env.BUILD_WRAPPER_OUT_DIR }}" --define sonar.organization="fledge-power" --define sonar.projectKey="fledge-power_fledgepower-filter-hnztopivot" --define sonar.inclusions="src/*,include/*" --define sonar.coverageReportPaths="build/hnz_pivot_filter_coverage_sonar-sonarqube.xml" diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml new file mode 100644 index 0000000..c66b568 --- /dev/null +++ b/.github/workflows/publish.yml @@ -0,0 +1,75 @@ +name: Publish + +# Controls when the workflow will run +on: + push: + tags: + - v*-rc* # publish only rc (release candidates), example: v1.0.0-rc0 + + # Allows you to run this workflow manually from the Actions tab + workflow_dispatch: + +env: + IMAGE_REGISTRY: ghcr.io + REGISTRY_USER: ${{ github.actor }} + REGISTRY_PASSWORD: ${{ secrets.GITHUB_TOKEN }} + FLEDGE_IMAGE_NAME: 'fledgepower/fledge_hnz' + FLEDGE_GUI_IMAGE_NAME: 'fledgepower/fledge-gui' + IMAGE_TAG: 'latest' + +jobs: + build-and-push-image: + runs-on: ubuntu-latest + permissions: + contents: read + packages: write + + steps: + - name: Checkout repository + uses: actions/checkout@v2 + + # Download and install Docker + - name: Download and install Docker + run: | + sudo apt-get update + sudo apt-get install \ + ca-certificates \ + curl \ + gnupg \ + lsb-release + curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo gpg --dearmor -o /usr/share/keyrings/docker-archive-keyring.gpg + echo \ + "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/docker-archive-keyring.gpg] https://download.docker.com/linux/ubuntu \ + $(lsb_release -cs) stable" | sudo tee /etc/apt/sources.list.d/docker.list > /dev/null + sudo apt-get update + sudo apt-get install docker-ce docker-ce-cli containerd.io + + # Download fledgepower deployment + - name: Download fledgepower deployment + env: + FP_DEPLOY_REPO: "https://github.com/fledge-power/fledgepower-deployment.git" + run: | + git clone ${{ env.FP_DEPLOY_REPO }} + + # Log in to the Container registry ghcr.io + - name: Log in to the Container registry + uses: docker/login-action@f054a8b539a109f9f41c372932f1ae047eff08c9 + with: + registry: ${{ env.IMAGE_REGISTRY }} + username: ${{ env.REGISTRY_USER }} + password: ${{ env.REGISTRY_PASSWORD }} + + # Build image + - name: Build images + run: | + cd ./fledgepower-deployment/sHNZ-n104-ubuntu2004/fledge/ + sudo -E docker build -f fledge.dockerfile -t ${{ env.IMAGE_REGISTRY }}/${{ github.repository_owner }}/${{ env.FLEDGE_IMAGE_NAME }}:${{ env.IMAGE_TAG }} . --label ${{ github.ref }} + cd ../fledge-gui + sudo -E docker build -f fledge-gui.dockerfile -t ${{ env.IMAGE_REGISTRY }}/${{ github.repository_owner }}/${{ env.FLEDGE_GUI_IMAGE_NAME }}:${{ env.IMAGE_TAG }} . 
--label ${{ github.ref }}
+
+      # Push to ghcr.io (Github Image Registry)
+      - name: Push images
+        run: |
+          sudo -E docker push ${{ env.IMAGE_REGISTRY }}/${{ github.repository_owner }}/${{ env.FLEDGE_IMAGE_NAME }}:${{ env.IMAGE_TAG }}
+          sudo -E docker push ${{ env.IMAGE_REGISTRY }}/${{ github.repository_owner }}/${{ env.FLEDGE_GUI_IMAGE_NAME }}:${{ env.IMAGE_TAG }}
+
diff --git a/.gitignore b/.gitignore
index 259148f..dc71ebb 100644
--- a/.gitignore
+++ b/.gitignore
@@ -30,3 +30,4 @@
 *.exe
 *.out
 *.app
+.vscode/
diff --git a/CMakeLists.txt b/CMakeLists.txt
new file mode 100644
index 0000000..4af9e52
--- /dev/null
+++ b/CMakeLists.txt
@@ -0,0 +1,117 @@
+cmake_minimum_required(VERSION 2.8)
+
+# Set the plugin name to build
+project(hnz_pivot_filter)
+
+# Supported options:
+# -DFLEDGE_INCLUDE
+# -DFLEDGE_LIB
+# -DFLEDGE_SRC
+# -DFLEDGE_INSTALL
+#
+# If no -D options are given and FLEDGE_ROOT environment variable is set
+# then Fledge libraries and header files are pulled from FLEDGE_ROOT path.
+
+list(APPEND CMAKE_MODULE_PATH "${PROJECT_SOURCE_DIR}/cmake")
+
+message(STATUS ${CMAKE_CXX_FLAGS})
+
+if (${CMAKE_BUILD_TYPE} STREQUAL Coverage)
+    message("Coverage is going to be generated")
+    enable_testing()
+    add_subdirectory(tests)
+    include(CodeCoverage)
+    append_coverage_compiler_flags()
+    set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -O3 --coverage")
+    set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -O3 --coverage")
+    set(GCOVR_ADDITIONAL_ARGS "--exclude-unreachable-branches" "--exclude-throw-branches" )
+
+    setup_target_for_coverage_gcovr_sonar(NAME "${PROJECT_NAME}_coverage_sonar"
+        EXECUTABLE RunTests
+        DEPENDENCIES RunTests
+        BASE_DIRECTORY "${PROJECT_SOURCE_DIR}"
+        EXCLUDE "tests/*"
+    )
+
+    setup_target_for_coverage_gcovr_html(NAME "${PROJECT_NAME}_coverage_html"
+        EXECUTABLE RunTests
+        DEPENDENCIES RunTests
+        BASE_DIRECTORY "${PROJECT_SOURCE_DIR}"
+        EXCLUDE "tests/*"
+    )
+else()
+    message("Build without Coverage")
+
+    set(CMAKE_CXX_FLAGS "-std=c++11 -O3")
+endif()
+
+set(CMAKE_CXX_FLAGS_DEBUG "-O0 -ggdb")
+
+# Generate the version header file
+set_source_files_properties(version.h PROPERTIES GENERATED TRUE)
+add_custom_command(
+    OUTPUT version.h
+    DEPENDS ${CMAKE_SOURCE_DIR}/VERSION
+    COMMAND ${CMAKE_SOURCE_DIR}/mkversion ${CMAKE_SOURCE_DIR}
+    COMMENT "Generating version header"
+    VERBATIM
+)
+include_directories(${CMAKE_BINARY_DIR})
+
+
+# Set plugin type (south, north, filter)
+set(PLUGIN_TYPE "filter")
+# Add here all needed Fledge libraries as list
+set(NEEDED_FLEDGE_LIBS common-lib filters-common-lib)
+
+# Find source files
+file(GLOB SOURCES src/*.cpp)
+
+# Find Fledge includes and libs by including the FindFledge.cmake file
+set(CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH} ${CMAKE_CURRENT_SOURCE_DIR})
+find_package(Fledge)
+# If errors occur: run make clean and remove the Makefile
+if (NOT FLEDGE_FOUND)
+    if (EXISTS "${CMAKE_BINARY_DIR}/Makefile")
+        execute_process(COMMAND make clean WORKING_DIRECTORY ${CMAKE_BINARY_DIR})
+        file(REMOVE "${CMAKE_BINARY_DIR}/Makefile")
+    endif()
+    # Stop the build process
+    message(FATAL_ERROR "Fledge plugin '${PROJECT_NAME}' build error.")
+endif()
+# On success, FLEDGE_INCLUDE_DIRS and FLEDGE_LIB_DIRS variables are set
+
+
+# Add ./include
+include_directories(include)
+# Add Fledge include dir(s)
+include_directories(${FLEDGE_INCLUDE_DIRS})
+
+# Add Fledge lib path
+link_directories(${FLEDGE_LIB_DIRS})
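+# Illustration only (not part of the build logic): typical configure commands
+# for this project, using the options documented at the top of this file; the
+# paths below are examples.
+#   FLEDGE_ROOT=$HOME/fledge cmake -S . -B build
+#   cmake -S . -B build -DFLEDGE_INCLUDE=/usr/include/fledge -DFLEDGE_LIB=/usr/lib/fledge
+#   cmake -S . -B build -DCMAKE_BUILD_TYPE=Coverage   # coverage build, as used by ci.yml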
+
+if (FLEDGE_SRC)
+    message(STATUS "Using third-party includes " ${FLEDGE_SRC}/C/thirdparty)
+    include_directories(${FLEDGE_SRC}/C/thirdparty/rapidjson/include)
+endif()
+
+
+# Create shared library
+add_library(${PROJECT_NAME} SHARED ${SOURCES} version.h)
+
+# Add Fledge library names
+target_link_libraries(${PROJECT_NAME} ${NEEDED_FLEDGE_LIBS})
+
+# Add additional libraries
+target_link_libraries(${PROJECT_NAME} -lpthread -ldl)
+
+# Set the build version
+set_target_properties(${PROJECT_NAME} PROPERTIES SOVERSION 1)
+
+set(FLEDGE_INSTALL "" CACHE INTERNAL "")
+# Install library
+if (FLEDGE_INSTALL)
+    message(STATUS "Installing ${PROJECT_NAME} in ${FLEDGE_INSTALL}/plugins/${PLUGIN_TYPE}/${PROJECT_NAME}")
+    install(TARGETS ${PROJECT_NAME} DESTINATION ${FLEDGE_INSTALL}/plugins/${PLUGIN_TYPE}/${PROJECT_NAME})
+endif()
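As an aside (illustrative, not part of the patch): given the VERSION file added by this change, the mkversion script invoked from the custom command above writes a version.h of the following form into the build directory, so sources can report the plugin version through the VERSION macro:

    /*
     * WARNING: This is an automatically generated file.
     * Do not edit this file.
     * To change the version edit the file VERSION
     */

    #define VERSION "1.0.0-rc1"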
diff --git a/FindFledge.cmake b/FindFledge.cmake
new file mode 100644
index 0000000..bb102ed
--- /dev/null
+++ b/FindFledge.cmake
@@ -0,0 +1,139 @@
+# This CMake file locates the Fledge header files and libraries
+#
+# The following variables are set:
+# FLEDGE_INCLUDE_DIRS - Path(s) to Fledge header files found
+# FLEDGE_LIB_DIRS - Path to Fledge shared libraries
+# FLEDGE_FOUND - Set on success
+#
+# In case of error use SEND_ERROR and return()
+#
+
+# Set defaults paths of installed Fledge SDK package
+set(FLEDGE_DEFAULT_INCLUDE_DIR "/usr/include/fledge" CACHE INTERNAL "")
+set(FLEDGE_DEFAULT_LIB_DIR "/usr/lib/fledge" CACHE INTERNAL "")
+
+# CMakeLists.txt options
+set(FLEDGE_SRC "" CACHE INTERNAL "")
+set(FLEDGE_INCLUDE "" CACHE INTERNAL "")
+set(FLEDGE_LIB "" CACHE INTERNAL "")
+
+# Return variables
+set(FLEDGE_INCLUDE_DIRS "" CACHE INTERNAL "")
+set(FLEDGE_LIB_DIRS "" CACHE INTERNAL "")
+set(FLEDGE_FOUND "" CACHE INTERNAL "")
+
+# No options set
+# If FLEDGE_ROOT env var is set, use it
+if (NOT FLEDGE_SRC AND NOT FLEDGE_INCLUDE AND NOT FLEDGE_LIB)
+    if (DEFINED ENV{FLEDGE_ROOT})
+        message(STATUS "No options set.\n" "
+Using found FLEDGE_ROOT $ENV{FLEDGE_ROOT}")
+        set(FLEDGE_SRC $ENV{FLEDGE_ROOT})
+    endif()
+endif()
+
+# -DFLEDGE_SRC=/some_path or FLEDGE_ROOT path
+# Set return variable FLEDGE_INCLUDE_DIRS
+if (FLEDGE_SRC)
+    unset(_INCLUDE_LIST CACHE)
+    file(GLOB_RECURSE _INCLUDE_COMMON "${FLEDGE_SRC}/C/common/*.h")
+    file(GLOB_RECURSE _INCLUDE_SERVICES "${FLEDGE_SRC}/C/services/common/*.h")
+    list(APPEND _INCLUDE_LIST ${_INCLUDE_COMMON} ${_INCLUDE_SERVICES})
+    foreach(_ITEM ${_INCLUDE_LIST})
+        get_filename_component(_ITEM_PATH ${_ITEM} DIRECTORY)
+        list(APPEND FLEDGE_INCLUDE_DIRS ${_ITEM_PATH})
+    endforeach()
+    unset(_INCLUDE_LIST CACHE)
+
+    list(REMOVE_DUPLICATES FLEDGE_INCLUDE_DIRS)
+
+    string (REPLACE ";" "\n
+" DISPLAY_PATHS "${FLEDGE_INCLUDE_DIRS}")
+    if (NOT DEFINED ENV{FLEDGE_ROOT})
+        message(STATUS "Using -DFLEDGE_SRC option for includes\n
+" "${DISPLAY_PATHS}")
+    else()
+        message(STATUS "Using FLEDGE_ROOT for includes\n
+" "${DISPLAY_PATHS}")
+    endif()
+
+    if (NOT FLEDGE_INCLUDE_DIRS)
+        message(SEND_ERROR "Needed Fledge header files not found in path ${FLEDGE_SRC}/C")
+        return()
+    endif()
+else()
+    # -DFLEDGE_INCLUDE=/some_path
+    if (NOT FLEDGE_INCLUDE)
+        set(FLEDGE_INCLUDE ${FLEDGE_DEFAULT_INCLUDE_DIR})
+        message(STATUS "Using Fledge dev package includes " ${FLEDGE_INCLUDE})
+    else()
+        message(STATUS "Using -DFLEDGE_INCLUDE option " ${FLEDGE_INCLUDE})
+    endif()
+    # Remove current value from cache
+    unset(_FIND_INCLUDES CACHE)
+    # Get up to date var from find_path
+    find_path(_FIND_INCLUDES NAMES plugin_api.h PATHS ${FLEDGE_INCLUDE})
+    if (_FIND_INCLUDES)
+        list(APPEND FLEDGE_INCLUDE_DIRS ${_FIND_INCLUDES})
+    endif()
+    # Remove current value from cache
+    unset(_FIND_INCLUDES CACHE)
+
+    if (NOT FLEDGE_INCLUDE_DIRS)
+        message(SEND_ERROR "Needed Fledge header files not found in path ${FLEDGE_INCLUDE}")
+        return()
+    endif()
+endif()
+
+#
+# Fledge Libraries
+#
+# Check -DFLEDGE_LIB=/some_path is valid
+# or use FLEDGE_SRC/cmake_build/C/lib
+# FLEDGE_SRC might have been set to FLEDGE_ROOT above
+#
+if (FLEDGE_SRC)
+    # Set return variable FLEDGE_LIB_DIRS
+    set(FLEDGE_LIB "${FLEDGE_SRC}/cmake_build/C/lib")
+
+    if (NOT DEFINED ENV{FLEDGE_ROOT})
+        message(STATUS "Using -DFLEDGE_SRC option for libs \n
+" "${FLEDGE_SRC}/cmake_build/C/lib")
+    else()
+        message(STATUS "Using FLEDGE_ROOT for libs \n
+" "${FLEDGE_SRC}/cmake_build/C/lib")
+    endif()
+
+    if (NOT EXISTS "${FLEDGE_SRC}/cmake_build")
+        message(SEND_ERROR "Fledge has not been built yet in ${FLEDGE_SRC}. Compile it first.")
+        return()
+    endif()
+
+    # Set return variable FLEDGE_LIB_DIRS
+    set(FLEDGE_LIB_DIRS "${FLEDGE_SRC}/cmake_build/C/lib")
+else()
+    if (NOT FLEDGE_LIB)
+        set(FLEDGE_LIB ${FLEDGE_DEFAULT_LIB_DIR})
+        message(STATUS "Using Fledge dev package libs " ${FLEDGE_LIB})
+    else()
+        message(STATUS "Using -DFLEDGE_LIB option " ${FLEDGE_LIB})
+    endif()
+    # Set return variable FLEDGE_LIB_DIRS
+    set(FLEDGE_LIB_DIRS ${FLEDGE_LIB})
+endif()
+
+# Check that the libraries listed in NEEDED_FLEDGE_LIBS exist in FLEDGE_LIB_DIRS
+# The NEEDED_FLEDGE_LIBS variable comes from CMakeLists.txt
+foreach(_LIB ${NEEDED_FLEDGE_LIBS})
+    # Remove current value from cache
+    unset(_FOUND_LIB CACHE)
+    # Get up to date var from find_library
+    find_library(_FOUND_LIB NAME ${_LIB} PATHS ${FLEDGE_LIB_DIRS})
+    if (_FOUND_LIB)
+        # Extract path from found library file
+        get_filename_component(_DIR_LIB ${_FOUND_LIB} DIRECTORY)
+    else()
+        message(SEND_ERROR "Needed Fledge library ${_LIB} not found in ${FLEDGE_LIB_DIRS}")
+        return()
+    endif()
+    # Remove current value from cache
+    unset(_FOUND_LIB CACHE)
+endforeach()
+
+# Set return variable FLEDGE_FOUND
+set(FLEDGE_FOUND "true")
diff --git a/README.md b/README.md
index a8a1d2e..4afc1b3 100644
--- a/README.md
+++ b/README.md
@@ -1,2 +1,2 @@
 # fledgepower-filter-hnztopivot
-A filter plugin which can be used to convert EDF HNZ data objects to FledgePower pivot model objects
+A filter plugin which can be used to convert HNZ objects to FledgePower pivot model objects
diff --git a/VERSION b/VERSION
new file mode 100644
index 0000000..867bf6b
--- /dev/null
+++ b/VERSION
@@ -0,0 +1 @@
+1.0.0-rc1
diff --git a/cmake/CodeCoverage.cmake b/cmake/CodeCoverage.cmake
new file mode 100644
index 0000000..711e32f
--- /dev/null
+++ b/cmake/CodeCoverage.cmake
@@ -0,0 +1,808 @@
+# Copyright (c) 2012 - 2017, Lars Bilke
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without modification,
+# are permitted provided that the following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this
+#    list of conditions and the following disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice,
+#    this list of conditions and the following disclaimer in the documentation
+#    and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors
+#    may be used to endorse or promote products derived from this software without
+#    specific prior written permission.
+# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR +# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON +# ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +# +# CHANGES: +# +# 2012-01-31, Lars Bilke +# - Enable Code Coverage +# +# 2013-09-17, Joakim Söderberg +# - Added support for Clang. +# - Some additional usage instructions. +# +# 2016-02-03, Lars Bilke +# - Refactored functions to use named parameters +# +# 2017-06-02, Lars Bilke +# - Merged with modified version from github.com/ufz/ogs +# +# 2019-05-06, Anatolii Kurotych +# - Remove unnecessary --coverage flag +# +# 2019-12-13, FeRD (Frank Dana) +# - Deprecate COVERAGE_LCOVR_EXCLUDES and COVERAGE_GCOVR_EXCLUDES lists in favor +# of tool-agnostic COVERAGE_EXCLUDES variable, or EXCLUDE setup arguments. +# - CMake 3.4+: All excludes can be specified relative to BASE_DIRECTORY +# - All setup functions: accept BASE_DIRECTORY, EXCLUDE list +# - Set lcov basedir with -b argument +# - Add automatic --demangle-cpp in lcovr, if 'c++filt' is available (can be +# overridden with NO_DEMANGLE option in setup_target_for_coverage_lcovr().) +# - Delete output dir, .info file on 'make clean' +# - Remove Python detection, since version mismatches will break gcovr +# - Minor cleanup (lowercase function names, update examples...) +# +# 2019-12-19, FeRD (Frank Dana) +# - Rename Lcov outputs, make filtered file canonical, fix cleanup for targets +# +# 2020-01-19, Bob Apthorpe +# - Added gfortran support +# +# 2020-02-17, FeRD (Frank Dana) +# - Make all add_custom_target()s VERBATIM to auto-escape wildcard characters +# in EXCLUDEs, and remove manual escaping from gcovr targets +# +# 2021-01-19, Robin Mueller +# - Add CODE_COVERAGE_VERBOSE option which will allow to print out commands which are run +# - Added the option for users to set the GCOVR_ADDITIONAL_ARGS variable to supply additional +# flags to the gcovr command +# +# 2020-05-04, Mihchael Davis +# - Add -fprofile-abs-path to make gcno files contain absolute paths +# - Fix BASE_DIRECTORY not working when defined +# - Change BYPRODUCT from folder to index.html to stop ninja from complaining about double defines +# +# 2021-05-10, Martin Stump +# - Check if the generator is multi-config before warning about non-Debug builds +# +# USAGE: +# +# 1. Copy this file into your cmake modules path. +# +# 2. Add the following line to your CMakeLists.txt (best inside an if-condition +# using a CMake option() to enable it just optionally): +# include(CodeCoverage) +# +# 3. Append necessary compiler flags for all supported source files: +# append_coverage_compiler_flags() +# Or for specific target: +# append_coverage_compiler_flags_to_target(YOUR_TARGET_NAME) +# +# 3.a (OPTIONAL) Set appropriate optimization flags, e.g. -O0, -O1 or -Og +# +# 4. 
If you need to exclude additional directories from the report, specify them +# using full paths in the COVERAGE_EXCLUDES variable before calling +# setup_target_for_coverage_*(). +# Example: +# set(COVERAGE_EXCLUDES +# '${PROJECT_SOURCE_DIR}/src/dir1/*' +# '/path/to/my/src/dir2/*') +# Or, use the EXCLUDE argument to setup_target_for_coverage_*(). +# Example: +# setup_target_for_coverage_lcov( +# NAME coverage +# EXECUTABLE testrunner +# EXCLUDE "${PROJECT_SOURCE_DIR}/src/dir1/*" "/path/to/my/src/dir2/*") +# +# 4.a NOTE: With CMake 3.4+, COVERAGE_EXCLUDES or EXCLUDE can also be set +# relative to the BASE_DIRECTORY (default: PROJECT_SOURCE_DIR) +# Example: +# set(COVERAGE_EXCLUDES "dir1/*") +# setup_target_for_coverage_gcovr_html( +# NAME coverage +# EXECUTABLE testrunner +# BASE_DIRECTORY "${PROJECT_SOURCE_DIR}/src" +# EXCLUDE "dir2/*") +# +# 5. Use the functions described below to create a custom make target which +# runs your test executable and produces a code coverage report. +# +# 6. Build a Debug build: +# cmake -DCMAKE_BUILD_TYPE=Debug .. +# make +# make my_coverage_target +# + +include(CMakeParseArguments) + +option(CODE_COVERAGE_VERBOSE "Verbose information" FALSE) + +# Check prereqs +find_program( GCOV_PATH gcov ) +find_program( LCOV_PATH NAMES lcov lcov.bat lcov.exe lcov.perl) +find_program( FASTCOV_PATH NAMES fastcov fastcov.py ) +find_program( GENHTML_PATH NAMES genhtml genhtml.perl genhtml.bat ) +find_program( GCOVR_PATH gcovr PATHS ${CMAKE_SOURCE_DIR}/scripts/test) +find_program( CPPFILT_PATH NAMES c++filt ) + +if(NOT GCOV_PATH) + message(FATAL_ERROR "gcov not found! Aborting...") +endif() # NOT GCOV_PATH + +get_property(LANGUAGES GLOBAL PROPERTY ENABLED_LANGUAGES) +list(GET LANGUAGES 0 LANG) + +if("${CMAKE_${LANG}_COMPILER_ID}" MATCHES "(Apple)?[Cc]lang") + if("${CMAKE_${LANG}_COMPILER_VERSION}" VERSION_LESS 3) + message(FATAL_ERROR "Clang version must be 3.0.0 or greater! Aborting...") + endif() +elseif(NOT CMAKE_COMPILER_IS_GNUCXX) + if("${CMAKE_Fortran_COMPILER_ID}" MATCHES "[Ff]lang") + # Do nothing; exit conditional without error if true + elseif("${CMAKE_Fortran_COMPILER_ID}" MATCHES "GNU") + # Do nothing; exit conditional without error if true + else() + message(FATAL_ERROR "Compiler is not GNU gcc! Aborting...") + endif() +endif() + +set(COVERAGE_COMPILER_FLAGS "-g -fprofile-arcs -ftest-coverage" + CACHE INTERNAL "") +if(CMAKE_CXX_COMPILER_ID MATCHES "(GNU|Clang)") + include(CheckCXXCompilerFlag) + check_cxx_compiler_flag(-fprofile-abs-path HAVE_fprofile_abs_path) + if(HAVE_fprofile_abs_path) + set(COVERAGE_COMPILER_FLAGS "${COVERAGE_COMPILER_FLAGS} -fprofile-abs-path") + endif() +endif() + +set(CMAKE_Fortran_FLAGS_COVERAGE + ${COVERAGE_COMPILER_FLAGS} + CACHE STRING "Flags used by the Fortran compiler during coverage builds." + FORCE ) +set(CMAKE_CXX_FLAGS_COVERAGE + ${COVERAGE_COMPILER_FLAGS} + CACHE STRING "Flags used by the C++ compiler during coverage builds." + FORCE ) +set(CMAKE_C_FLAGS_COVERAGE + ${COVERAGE_COMPILER_FLAGS} + CACHE STRING "Flags used by the C compiler during coverage builds." + FORCE ) +set(CMAKE_EXE_LINKER_FLAGS_COVERAGE + "" + CACHE STRING "Flags used for linking binaries during coverage builds." + FORCE ) +set(CMAKE_SHARED_LINKER_FLAGS_COVERAGE + "" + CACHE STRING "Flags used by the shared libraries linker during coverage builds." 
+ FORCE ) +mark_as_advanced( + CMAKE_Fortran_FLAGS_COVERAGE + CMAKE_CXX_FLAGS_COVERAGE + CMAKE_C_FLAGS_COVERAGE + CMAKE_EXE_LINKER_FLAGS_COVERAGE + CMAKE_SHARED_LINKER_FLAGS_COVERAGE ) + +get_property(GENERATOR_IS_MULTI_CONFIG GLOBAL PROPERTY GENERATOR_IS_MULTI_CONFIG) +if(NOT (CMAKE_BUILD_TYPE STREQUAL "Debug" OR GENERATOR_IS_MULTI_CONFIG)) + message(WARNING "Code coverage results with an optimised (non-Debug) build may be misleading") +endif() # NOT (CMAKE_BUILD_TYPE STREQUAL "Debug" OR GENERATOR_IS_MULTI_CONFIG) + +if(CMAKE_C_COMPILER_ID STREQUAL "GNU" OR CMAKE_Fortran_COMPILER_ID STREQUAL "GNU") + link_libraries(gcov) +endif() + +# Defines a target for running and collection code coverage information +# Builds dependencies, runs the given executable and outputs reports. +# NOTE! The executable should always have a ZERO as exit code otherwise +# the coverage generation will not complete. +# +# setup_target_for_coverage_lcov( +# NAME testrunner_coverage # New target name +# EXECUTABLE testrunner -j ${PROCESSOR_COUNT} # Executable in PROJECT_BINARY_DIR +# DEPENDENCIES testrunner # Dependencies to build first +# BASE_DIRECTORY "../" # Base directory for report +# # (defaults to PROJECT_SOURCE_DIR) +# EXCLUDE "src/dir1/*" "src/dir2/*" # Patterns to exclude (can be relative +# # to BASE_DIRECTORY, with CMake 3.4+) +# NO_DEMANGLE # Don't demangle C++ symbols +# # even if c++filt is found +# ) +function(setup_target_for_coverage_lcov) + + set(options NO_DEMANGLE) + set(oneValueArgs BASE_DIRECTORY NAME) + set(multiValueArgs EXCLUDE EXECUTABLE EXECUTABLE_ARGS DEPENDENCIES LCOV_ARGS GENHTML_ARGS) + cmake_parse_arguments(Coverage "${options}" "${oneValueArgs}" "${multiValueArgs}" ${ARGN}) + + if(NOT LCOV_PATH) + message(FATAL_ERROR "lcov not found! Aborting...") + endif() # NOT LCOV_PATH + + if(NOT GENHTML_PATH) + message(FATAL_ERROR "genhtml not found! Aborting...") + endif() # NOT GENHTML_PATH + + # Set base directory (as absolute path), or default to PROJECT_SOURCE_DIR + if(DEFINED Coverage_BASE_DIRECTORY) + get_filename_component(BASEDIR ${Coverage_BASE_DIRECTORY} ABSOLUTE) + else() + set(BASEDIR ${PROJECT_SOURCE_DIR}) + endif() + + # Collect excludes (CMake 3.4+: Also compute absolute paths) + set(LCOV_EXCLUDES "") + foreach(EXCLUDE ${Coverage_EXCLUDE} ${COVERAGE_EXCLUDES} ${COVERAGE_LCOV_EXCLUDES}) + if(CMAKE_VERSION VERSION_GREATER 3.4) + get_filename_component(EXCLUDE ${EXCLUDE} ABSOLUTE BASE_DIR ${BASEDIR}) + endif() + list(APPEND LCOV_EXCLUDES "${EXCLUDE}") + endforeach() + list(REMOVE_DUPLICATES LCOV_EXCLUDES) + + # Conditional arguments + if(CPPFILT_PATH AND NOT ${Coverage_NO_DEMANGLE}) + set(GENHTML_EXTRA_ARGS "--demangle-cpp") + endif() + + # Setting up commands which will be run to generate coverage data. + # Cleanup lcov + set(LCOV_CLEAN_CMD + ${LCOV_PATH} ${Coverage_LCOV_ARGS} --gcov-tool ${GCOV_PATH} -directory . + -b ${BASEDIR} --zerocounters + ) + # Create baseline to make sure untouched files show up in the report + set(LCOV_BASELINE_CMD + ${LCOV_PATH} ${Coverage_LCOV_ARGS} --gcov-tool ${GCOV_PATH} -c -i -d . -b + ${BASEDIR} -o ${Coverage_NAME}.base + ) + # Run tests + set(LCOV_EXEC_TESTS_CMD + ${Coverage_EXECUTABLE} ${Coverage_EXECUTABLE_ARGS} + ) + # Capturing lcov counters and generating report + set(LCOV_CAPTURE_CMD + ${LCOV_PATH} ${Coverage_LCOV_ARGS} --gcov-tool ${GCOV_PATH} --directory . 
-b + ${BASEDIR} --capture --output-file ${Coverage_NAME}.capture + ) + # add baseline counters + set(LCOV_BASELINE_COUNT_CMD + ${LCOV_PATH} ${Coverage_LCOV_ARGS} --gcov-tool ${GCOV_PATH} -a ${Coverage_NAME}.base + -a ${Coverage_NAME}.capture --output-file ${Coverage_NAME}.total + ) + # filter collected data to final coverage report + set(LCOV_FILTER_CMD + ${LCOV_PATH} ${Coverage_LCOV_ARGS} --gcov-tool ${GCOV_PATH} --remove + ${Coverage_NAME}.total ${LCOV_EXCLUDES} --output-file ${Coverage_NAME}.info + ) + # Generate HTML output + set(LCOV_GEN_HTML_CMD + ${GENHTML_PATH} ${GENHTML_EXTRA_ARGS} ${Coverage_GENHTML_ARGS} -o + ${Coverage_NAME} ${Coverage_NAME}.info + ) + + + if(CODE_COVERAGE_VERBOSE) + message(STATUS "Executed command report") + message(STATUS "Command to clean up lcov: ") + string(REPLACE ";" " " LCOV_CLEAN_CMD_SPACED "${LCOV_CLEAN_CMD}") + message(STATUS "${LCOV_CLEAN_CMD_SPACED}") + + message(STATUS "Command to create baseline: ") + string(REPLACE ";" " " LCOV_BASELINE_CMD_SPACED "${LCOV_BASELINE_CMD}") + message(STATUS "${LCOV_BASELINE_CMD_SPACED}") + + message(STATUS "Command to run the tests: ") + string(REPLACE ";" " " LCOV_EXEC_TESTS_CMD_SPACED "${LCOV_EXEC_TESTS_CMD}") + message(STATUS "${LCOV_EXEC_TESTS_CMD_SPACED}") + + message(STATUS "Command to capture counters and generate report: ") + string(REPLACE ";" " " LCOV_CAPTURE_CMD_SPACED "${LCOV_CAPTURE_CMD}") + message(STATUS "${LCOV_CAPTURE_CMD_SPACED}") + + message(STATUS "Command to add baseline counters: ") + string(REPLACE ";" " " LCOV_BASELINE_COUNT_CMD_SPACED "${LCOV_BASELINE_COUNT_CMD}") + message(STATUS "${LCOV_BASELINE_COUNT_CMD_SPACED}") + + message(STATUS "Command to filter collected data: ") + string(REPLACE ";" " " LCOV_FILTER_CMD_SPACED "${LCOV_FILTER_CMD}") + message(STATUS "${LCOV_FILTER_CMD_SPACED}") + + message(STATUS "Command to generate lcov HTML output: ") + string(REPLACE ";" " " LCOV_GEN_HTML_CMD_SPACED "${LCOV_GEN_HTML_CMD}") + message(STATUS "${LCOV_GEN_HTML_CMD_SPACED}") + endif() + + # Setup target + add_custom_target(${Coverage_NAME} + COMMAND ${LCOV_CLEAN_CMD} + COMMAND ${LCOV_BASELINE_CMD} + COMMAND ${LCOV_EXEC_TESTS_CMD} + COMMAND ${LCOV_CAPTURE_CMD} + COMMAND ${LCOV_BASELINE_COUNT_CMD} + COMMAND ${LCOV_FILTER_CMD} + COMMAND ${LCOV_GEN_HTML_CMD} + + # Set output files as GENERATED (will be removed on 'make clean') + BYPRODUCTS + ${Coverage_NAME}.base + ${Coverage_NAME}.capture + ${Coverage_NAME}.total + ${Coverage_NAME}.info + ${Coverage_NAME}/index.html + WORKING_DIRECTORY ${PROJECT_BINARY_DIR} + DEPENDS ${Coverage_DEPENDENCIES} + VERBATIM # Protect arguments to commands + COMMENT "Resetting code coverage counters to zero.\nProcessing code coverage counters and generating report." + ) + + # Show where to find the lcov info report + add_custom_command(TARGET ${Coverage_NAME} POST_BUILD + COMMAND ; + COMMENT "Lcov code coverage info report saved in ${Coverage_NAME}.info." + ) + + # Show info where to find the report + add_custom_command(TARGET ${Coverage_NAME} POST_BUILD + COMMAND ; + COMMENT "Open ./${Coverage_NAME}/index.html in your browser to view the coverage report." + ) + +endfunction() # setup_target_for_coverage_lcov + +# Defines a target for running and collection code coverage information +# Builds dependencies, runs the given executable and outputs reports. +# NOTE! The executable should always have a ZERO as exit code otherwise +# the coverage generation will not complete. 
+# +# setup_target_for_coverage_gcovr_xml( +# NAME ctest_coverage # New target name +# EXECUTABLE ctest -j ${PROCESSOR_COUNT} # Executable in PROJECT_BINARY_DIR +# DEPENDENCIES executable_target # Dependencies to build first +# BASE_DIRECTORY "../" # Base directory for report +# # (defaults to PROJECT_SOURCE_DIR) +# EXCLUDE "src/dir1/*" "src/dir2/*" # Patterns to exclude (can be relative +# # to BASE_DIRECTORY, with CMake 3.4+) +# ) +# The user can set the variable GCOVR_ADDITIONAL_ARGS to supply additional flags to the +# GCVOR command. +function(setup_target_for_coverage_gcovr_xml) + + set(options NONE) + set(oneValueArgs BASE_DIRECTORY NAME) + set(multiValueArgs EXCLUDE EXECUTABLE EXECUTABLE_ARGS DEPENDENCIES) + cmake_parse_arguments(Coverage "${options}" "${oneValueArgs}" "${multiValueArgs}" ${ARGN}) + + if(NOT GCOVR_PATH) + message(FATAL_ERROR "gcovr not found! Aborting...") + endif() # NOT GCOVR_PATH + + # Set base directory (as absolute path), or default to PROJECT_SOURCE_DIR + if(DEFINED Coverage_BASE_DIRECTORY) + get_filename_component(BASEDIR ${Coverage_BASE_DIRECTORY} ABSOLUTE) + else() + set(BASEDIR ${PROJECT_SOURCE_DIR}) + endif() + + # Collect excludes (CMake 3.4+: Also compute absolute paths) + set(GCOVR_EXCLUDES "") + foreach(EXCLUDE ${Coverage_EXCLUDE} ${COVERAGE_EXCLUDES} ${COVERAGE_GCOVR_EXCLUDES}) + if(CMAKE_VERSION VERSION_GREATER 3.4) + get_filename_component(EXCLUDE ${EXCLUDE} ABSOLUTE BASE_DIR ${BASEDIR}) + endif() + list(APPEND GCOVR_EXCLUDES "${EXCLUDE}") + endforeach() + list(REMOVE_DUPLICATES GCOVR_EXCLUDES) + + # Combine excludes to several -e arguments + set(GCOVR_EXCLUDE_ARGS "") + foreach(EXCLUDE ${GCOVR_EXCLUDES}) + list(APPEND GCOVR_EXCLUDE_ARGS "-e") + list(APPEND GCOVR_EXCLUDE_ARGS "${EXCLUDE}") + endforeach() + + # Set up commands which will be run to generate coverage data + # Run tests + set(GCOVR_XML_EXEC_TESTS_CMD + ${Coverage_EXECUTABLE} ${Coverage_EXECUTABLE_ARGS} + ) + # Running gcovr + set(GCOVR_XML_CMD + ${GCOVR_PATH} --xml -r ${BASEDIR} ${GCOVR_ADDITIONAL_ARGS} ${GCOVR_EXCLUDE_ARGS} + --object-directory=${PROJECT_BINARY_DIR} -o ${Coverage_NAME}.xml + ) + + if(CODE_COVERAGE_VERBOSE) + message(STATUS "Executed command report") + + message(STATUS "Command to run tests: ") + string(REPLACE ";" " " GCOVR_XML_EXEC_TESTS_CMD_SPACED "${GCOVR_XML_EXEC_TESTS_CMD}") + message(STATUS "${GCOVR_XML_EXEC_TESTS_CMD_SPACED}") + + message(STATUS "Command to generate gcovr XML coverage data: ") + string(REPLACE ";" " " GCOVR_XML_CMD_SPACED "${GCOVR_XML_CMD}") + message(STATUS "${GCOVR_XML_CMD_SPACED}") + endif() + + add_custom_target(${Coverage_NAME} + COMMAND ${GCOVR_XML_EXEC_TESTS_CMD} + COMMAND ${GCOVR_XML_CMD} + + BYPRODUCTS ${Coverage_NAME}.xml + WORKING_DIRECTORY ${PROJECT_BINARY_DIR} + DEPENDS ${Coverage_DEPENDENCIES} + VERBATIM # Protect arguments to commands + COMMENT "Running gcovr to produce Cobertura code coverage report." + ) + + # Show info where to find the report + add_custom_command(TARGET ${Coverage_NAME} POST_BUILD + COMMAND ; + COMMENT "Cobertura code coverage report saved in ${Coverage_NAME}.xml." + ) +endfunction() # setup_target_for_coverage_gcovr_xml + +# Defines a target for running and collection code coverage information +# Builds dependencies, runs the given executable and outputs reports. +# NOTE! The executable should always have a ZERO as exit code otherwise +# the coverage generation will not complete. 
+# +# setup_target_for_coverage_gcovr_sonar( +# NAME ctest_coverage # New target name +# EXECUTABLE ctest -j ${PROCESSOR_COUNT} # Executable in PROJECT_BINARY_DIR +# DEPENDENCIES executable_target # Dependencies to build first +# BASE_DIRECTORY "../" # Base directory for report +# # (defaults to PROJECT_SOURCE_DIR) +# EXCLUDE "src/dir1/*" "src/dir2/*" # Patterns to exclude (can be relative +# # to BASE_DIRECTORY, with CMake 3.4+) +# ) +# The user can set the variable GCOVR_ADDITIONAL_ARGS to supply additional flags to the +# GCVOR command. +function(setup_target_for_coverage_gcovr_sonar) + + set(options NONE) + set(oneValueArgs BASE_DIRECTORY NAME) + set(multiValueArgs EXCLUDE EXECUTABLE EXECUTABLE_ARGS DEPENDENCIES) + cmake_parse_arguments(Coverage "${options}" "${oneValueArgs}" "${multiValueArgs}" ${ARGN}) + + if(NOT GCOVR_PATH) + message(FATAL_ERROR "gcovr not found! Aborting...") + endif() # NOT GCOVR_PATH + + # Set base directory (as absolute path), or default to PROJECT_SOURCE_DIR + if(DEFINED Coverage_BASE_DIRECTORY) + get_filename_component(BASEDIR ${Coverage_BASE_DIRECTORY} ABSOLUTE) + else() + set(BASEDIR ${PROJECT_SOURCE_DIR}) + endif() + + # Collect excludes (CMake 3.4+: Also compute absolute paths) + set(GCOVR_EXCLUDES "") + foreach(EXCLUDE ${Coverage_EXCLUDE} ${COVERAGE_EXCLUDES} ${COVERAGE_GCOVR_EXCLUDES}) + if(CMAKE_VERSION VERSION_GREATER 3.4) + get_filename_component(EXCLUDE ${EXCLUDE} ABSOLUTE BASE_DIR ${BASEDIR}) + endif() + list(APPEND GCOVR_EXCLUDES "${EXCLUDE}") + endforeach() + list(REMOVE_DUPLICATES GCOVR_EXCLUDES) + + # Combine excludes to several -e arguments + set(GCOVR_EXCLUDE_ARGS "") + foreach(EXCLUDE ${GCOVR_EXCLUDES}) + list(APPEND GCOVR_EXCLUDE_ARGS "-e") + list(APPEND GCOVR_EXCLUDE_ARGS "${EXCLUDE}") + endforeach() + + # Set up commands which will be run to generate coverage data + # Run tests + set(GCOVR_XML_EXEC_TESTS_CMD + ${Coverage_EXECUTABLE} ${Coverage_EXECUTABLE_ARGS} + ) + # Running gcovr + set(GCOVR_XML_CMD + ${GCOVR_PATH} --sonarqube -r ${BASEDIR} ${GCOVR_ADDITIONAL_ARGS} ${GCOVR_EXCLUDE_ARGS} + --object-directory=${PROJECT_BINARY_DIR} -o ${Coverage_NAME}-sonarqube.xml + ) + + if(CODE_COVERAGE_VERBOSE) + message(STATUS "Executed command report") + + message(STATUS "Command to run tests: ") + string(REPLACE ";" " " GCOVR_XML_EXEC_TESTS_CMD_SPACED "${GCOVR_XML_EXEC_TESTS_CMD}") + message(STATUS "${GCOVR_XML_EXEC_TESTS_CMD_SPACED}") + + message(STATUS "Command to generate gcovr XML coverage data: ") + string(REPLACE ";" " " GCOVR_XML_CMD_SPACED "${GCOVR_XML_CMD}") + message(STATUS "${GCOVR_XML_CMD_SPACED}") + endif() + + add_custom_target(${Coverage_NAME} + COMMAND ${GCOVR_XML_EXEC_TESTS_CMD} + COMMAND ${GCOVR_XML_CMD} + + BYPRODUCTS ${Coverage_NAME}.xml + WORKING_DIRECTORY ${PROJECT_BINARY_DIR} + DEPENDS ${Coverage_DEPENDENCIES} + VERBATIM # Protect arguments to commands + COMMENT "Running gcovr to produce Cobertura code coverage report." + ) + + # Show info where to find the report + add_custom_command(TARGET ${Coverage_NAME} POST_BUILD + COMMAND ; + COMMENT "Cobertura code coverage report saved in ${Coverage_NAME}-sonarqube.xml." + ) +endfunction() # setup_target_for_coverage_gcovr_sonar + +# Defines a target for running and collection code coverage information +# Builds dependencies, runs the given executable and outputs reports. +# NOTE! The executable should always have a ZERO as exit code otherwise +# the coverage generation will not complete. 
+# +# setup_target_for_coverage_gcovr_html( +# NAME ctest_coverage # New target name +# EXECUTABLE ctest -j ${PROCESSOR_COUNT} # Executable in PROJECT_BINARY_DIR +# DEPENDENCIES executable_target # Dependencies to build first +# BASE_DIRECTORY "../" # Base directory for report +# # (defaults to PROJECT_SOURCE_DIR) +# EXCLUDE "src/dir1/*" "src/dir2/*" # Patterns to exclude (can be relative +# # to BASE_DIRECTORY, with CMake 3.4+) +# ) +# The user can set the variable GCOVR_ADDITIONAL_ARGS to supply additional flags to the +# GCVOR command. +function(setup_target_for_coverage_gcovr_html) + + set(options NONE) + set(oneValueArgs BASE_DIRECTORY NAME) + set(multiValueArgs EXCLUDE EXECUTABLE EXECUTABLE_ARGS DEPENDENCIES) + cmake_parse_arguments(Coverage "${options}" "${oneValueArgs}" "${multiValueArgs}" ${ARGN}) + + if(NOT GCOVR_PATH) + message(FATAL_ERROR "gcovr not found! Aborting...") + endif() # NOT GCOVR_PATH + + # Set base directory (as absolute path), or default to PROJECT_SOURCE_DIR + if(DEFINED Coverage_BASE_DIRECTORY) + get_filename_component(BASEDIR ${Coverage_BASE_DIRECTORY} ABSOLUTE) + else() + set(BASEDIR ${PROJECT_SOURCE_DIR}) + endif() + + # Collect excludes (CMake 3.4+: Also compute absolute paths) + set(GCOVR_EXCLUDES "") + foreach(EXCLUDE ${Coverage_EXCLUDE} ${COVERAGE_EXCLUDES} ${COVERAGE_GCOVR_EXCLUDES}) + if(CMAKE_VERSION VERSION_GREATER 3.4) + get_filename_component(EXCLUDE ${EXCLUDE} ABSOLUTE BASE_DIR ${BASEDIR}) + endif() + list(APPEND GCOVR_EXCLUDES "${EXCLUDE}") + endforeach() + list(REMOVE_DUPLICATES GCOVR_EXCLUDES) + + # Combine excludes to several -e arguments + set(GCOVR_EXCLUDE_ARGS "") + foreach(EXCLUDE ${GCOVR_EXCLUDES}) + list(APPEND GCOVR_EXCLUDE_ARGS "-e") + list(APPEND GCOVR_EXCLUDE_ARGS "${EXCLUDE}") + endforeach() + + # Set up commands which will be run to generate coverage data + # Run tests + set(GCOVR_HTML_EXEC_TESTS_CMD + ${Coverage_EXECUTABLE} ${Coverage_EXECUTABLE_ARGS} + ) + # Create folder + set(GCOVR_HTML_FOLDER_CMD + ${CMAKE_COMMAND} -E make_directory ${PROJECT_BINARY_DIR}/${Coverage_NAME} + ) + # Running gcovr + set(GCOVR_HTML_CMD + ${GCOVR_PATH} --html --html-details -r ${BASEDIR} ${GCOVR_ADDITIONAL_ARGS} + ${GCOVR_EXCLUDE_ARGS} --object-directory=${PROJECT_BINARY_DIR} + -o ${Coverage_NAME}/index.html + ) + + if(CODE_COVERAGE_VERBOSE) + message(STATUS "Executed command report") + + message(STATUS "Command to run tests: ") + string(REPLACE ";" " " GCOVR_HTML_EXEC_TESTS_CMD_SPACED "${GCOVR_HTML_EXEC_TESTS_CMD}") + message(STATUS "${GCOVR_HTML_EXEC_TESTS_CMD_SPACED}") + + message(STATUS "Command to create a folder: ") + string(REPLACE ";" " " GCOVR_HTML_FOLDER_CMD_SPACED "${GCOVR_HTML_FOLDER_CMD}") + message(STATUS "${GCOVR_HTML_FOLDER_CMD_SPACED}") + + message(STATUS "Command to generate gcovr HTML coverage data: ") + string(REPLACE ";" " " GCOVR_HTML_CMD_SPACED "${GCOVR_HTML_CMD}") + message(STATUS "${GCOVR_HTML_CMD_SPACED}") + endif() + + add_custom_target(${Coverage_NAME} + COMMAND ${GCOVR_HTML_EXEC_TESTS_CMD} + COMMAND ${GCOVR_HTML_FOLDER_CMD} + COMMAND ${GCOVR_HTML_CMD} + + BYPRODUCTS ${PROJECT_BINARY_DIR}/${Coverage_NAME}/index.html # report directory + WORKING_DIRECTORY ${PROJECT_BINARY_DIR} + DEPENDS ${Coverage_DEPENDENCIES} + VERBATIM # Protect arguments to commands + COMMENT "Running gcovr to produce HTML code coverage report." + ) + + # Show info where to find the report + add_custom_command(TARGET ${Coverage_NAME} POST_BUILD + COMMAND ; + COMMENT "Open ./${Coverage_NAME}/index.html in your browser to view the coverage report." 
+ ) + +endfunction() # setup_target_for_coverage_gcovr_html + +# Defines a target for running and collection code coverage information +# Builds dependencies, runs the given executable and outputs reports. +# NOTE! The executable should always have a ZERO as exit code otherwise +# the coverage generation will not complete. +# +# setup_target_for_coverage_fastcov( +# NAME testrunner_coverage # New target name +# EXECUTABLE testrunner -j ${PROCESSOR_COUNT} # Executable in PROJECT_BINARY_DIR +# DEPENDENCIES testrunner # Dependencies to build first +# BASE_DIRECTORY "../" # Base directory for report +# # (defaults to PROJECT_SOURCE_DIR) +# EXCLUDE "src/dir1/" "src/dir2/" # Patterns to exclude. +# NO_DEMANGLE # Don't demangle C++ symbols +# # even if c++filt is found +# SKIP_HTML # Don't create html report +# POST_CMD perl -i -pe s!${PROJECT_SOURCE_DIR}/!!g ctest_coverage.json # E.g. for stripping source dir from file paths +# ) +function(setup_target_for_coverage_fastcov) + + set(options NO_DEMANGLE SKIP_HTML) + set(oneValueArgs BASE_DIRECTORY NAME) + set(multiValueArgs EXCLUDE EXECUTABLE EXECUTABLE_ARGS DEPENDENCIES FASTCOV_ARGS GENHTML_ARGS POST_CMD) + cmake_parse_arguments(Coverage "${options}" "${oneValueArgs}" "${multiValueArgs}" ${ARGN}) + + if(NOT FASTCOV_PATH) + message(FATAL_ERROR "fastcov not found! Aborting...") + endif() + + if(NOT Coverage_SKIP_HTML AND NOT GENHTML_PATH) + message(FATAL_ERROR "genhtml not found! Aborting...") + endif() + + # Set base directory (as absolute path), or default to PROJECT_SOURCE_DIR + if(Coverage_BASE_DIRECTORY) + get_filename_component(BASEDIR ${Coverage_BASE_DIRECTORY} ABSOLUTE) + else() + set(BASEDIR ${PROJECT_SOURCE_DIR}) + endif() + + # Collect excludes (Patterns, not paths, for fastcov) + set(FASTCOV_EXCLUDES "") + foreach(EXCLUDE ${Coverage_EXCLUDE} ${COVERAGE_EXCLUDES} ${COVERAGE_FASTCOV_EXCLUDES}) + list(APPEND FASTCOV_EXCLUDES "${EXCLUDE}") + endforeach() + list(REMOVE_DUPLICATES FASTCOV_EXCLUDES) + + # Conditional arguments + if(CPPFILT_PATH AND NOT ${Coverage_NO_DEMANGLE}) + set(GENHTML_EXTRA_ARGS "--demangle-cpp") + endif() + + # Set up commands which will be run to generate coverage data + set(FASTCOV_EXEC_TESTS_CMD ${Coverage_EXECUTABLE} ${Coverage_EXECUTABLE_ARGS}) + + set(FASTCOV_CAPTURE_CMD ${FASTCOV_PATH} ${Coverage_FASTCOV_ARGS} --gcov ${GCOV_PATH} + --search-directory ${BASEDIR} + --process-gcno + --output ${Coverage_NAME}.json + --exclude ${FASTCOV_EXCLUDES} + --exclude ${FASTCOV_EXCLUDES} + ) + + set(FASTCOV_CONVERT_CMD ${FASTCOV_PATH} + -C ${Coverage_NAME}.json --lcov --output ${Coverage_NAME}.info + ) + + if(Coverage_SKIP_HTML) + set(FASTCOV_HTML_CMD ";") + else() + set(FASTCOV_HTML_CMD ${GENHTML_PATH} ${GENHTML_EXTRA_ARGS} ${Coverage_GENHTML_ARGS} + -o ${Coverage_NAME} ${Coverage_NAME}.info + ) + endif() + + set(FASTCOV_POST_CMD ";") + if(Coverage_POST_CMD) + set(FASTCOV_POST_CMD ${Coverage_POST_CMD}) + endif() + + if(CODE_COVERAGE_VERBOSE) + message(STATUS "Code coverage commands for target ${Coverage_NAME} (fastcov):") + + message(" Running tests:") + string(REPLACE ";" " " FASTCOV_EXEC_TESTS_CMD_SPACED "${FASTCOV_EXEC_TESTS_CMD}") + message(" ${FASTCOV_EXEC_TESTS_CMD_SPACED}") + + message(" Capturing fastcov counters and generating report:") + string(REPLACE ";" " " FASTCOV_CAPTURE_CMD_SPACED "${FASTCOV_CAPTURE_CMD}") + message(" ${FASTCOV_CAPTURE_CMD_SPACED}") + + message(" Converting fastcov .json to lcov .info:") + string(REPLACE ";" " " FASTCOV_CONVERT_CMD_SPACED "${FASTCOV_CONVERT_CMD}") + message(" 
${FASTCOV_CONVERT_CMD_SPACED}") + + if(NOT Coverage_SKIP_HTML) + message(" Generating HTML report: ") + string(REPLACE ";" " " FASTCOV_HTML_CMD_SPACED "${FASTCOV_HTML_CMD}") + message(" ${FASTCOV_HTML_CMD_SPACED}") + endif() + if(Coverage_POST_CMD) + message(" Running post command: ") + string(REPLACE ";" " " FASTCOV_POST_CMD_SPACED "${FASTCOV_POST_CMD}") + message(" ${FASTCOV_POST_CMD_SPACED}") + endif() + endif() + + # Setup target + add_custom_target(${Coverage_NAME} + + # Cleanup fastcov + COMMAND ${FASTCOV_PATH} ${Coverage_FASTCOV_ARGS} --gcov ${GCOV_PATH} + --search-directory ${BASEDIR} + --zerocounters + + COMMAND ${FASTCOV_EXEC_TESTS_CMD} + COMMAND ${FASTCOV_CAPTURE_CMD} + COMMAND ${FASTCOV_CONVERT_CMD} + COMMAND ${FASTCOV_HTML_CMD} + COMMAND ${FASTCOV_POST_CMD} + + # Set output files as GENERATED (will be removed on 'make clean') + BYPRODUCTS + ${Coverage_NAME}.info + ${Coverage_NAME}.json + ${Coverage_NAME}/index.html # report directory + + WORKING_DIRECTORY ${PROJECT_BINARY_DIR} + DEPENDS ${Coverage_DEPENDENCIES} + VERBATIM # Protect arguments to commands + COMMENT "Resetting code coverage counters to zero. Processing code coverage counters and generating report." + ) + + set(INFO_MSG "fastcov code coverage info report saved in ${Coverage_NAME}.info and ${Coverage_NAME}.json.") + if(NOT Coverage_SKIP_HTML) + string(APPEND INFO_MSG " Open ${PROJECT_BINARY_DIR}/${Coverage_NAME}/index.html in your browser to view the coverage report.") + endif() + # Show where to find the fastcov info report + add_custom_command(TARGET ${Coverage_NAME} POST_BUILD + COMMAND ${CMAKE_COMMAND} -E echo ${INFO_MSG} + ) + +endfunction() # setup_target_for_coverage_fastcov + +function(append_coverage_compiler_flags) + set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} ${COVERAGE_COMPILER_FLAGS}" PARENT_SCOPE) + set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${COVERAGE_COMPILER_FLAGS}" PARENT_SCOPE) + set(CMAKE_Fortran_FLAGS "${CMAKE_Fortran_FLAGS} ${COVERAGE_COMPILER_FLAGS}" PARENT_SCOPE) + message(STATUS "Appending code coverage compiler flags: ${COVERAGE_COMPILER_FLAGS}") +endfunction() # append_coverage_compiler_flags + +# Setup coverage for specific library +function(append_coverage_compiler_flags_to_target name) + target_compile_options(${name} + PRIVATE ${COVERAGE_COMPILER_FLAGS}) +endfunction() diff --git a/fledge.version b/fledge.version new file mode 100644 index 0000000..2d02bb7 --- /dev/null +++ b/fledge.version @@ -0,0 +1 @@ +fledge_version>=1.8 diff --git a/include/hnz_pivot_filter.hpp b/include/hnz_pivot_filter.hpp new file mode 100644 index 0000000..40825cd --- /dev/null +++ b/include/hnz_pivot_filter.hpp @@ -0,0 +1,122 @@ +/* + * FledgePower IEC 104 <-> pivot filter plugin. 
+ *
+ * Copyright (c) 2022, RTE (https://www.rte-france.com)
+ *
+ * Released under the Apache 2.0 Licence
+ *
+ * Author: Michael Zillgith (michael.zillgith at mz-automation.de)
+ *
+ */
+
+#ifndef _HNZ_PIVOT_FILTER_H
+#define _HNZ_PIVOT_FILTER_H
+
+#include <filter.h>
+#include <map>
+#include <memory>
+#include <mutex>
+#include <string>
+#include <vector>
+
+class Datapoint;
+class HNZPivotConfig;
+class HNZPivotDataPoint;
+
+class HNZPivotFilter : public FledgeFilter {
+
+public:
+    /*
+     * Struct used to store fields of a data object during processing
+     */
+    struct GenericDataObject {
+        std::string doType = "";
+        unsigned int doStation = 0;
+        unsigned int doAddress = 0;
+        Datapoint* doValue = nullptr;
+        unsigned int doValid = 0;
+        std::string doAn = "";
+        bool doCg = false;
+        bool doOutdated = false;
+        unsigned long doTs = 0;
+        bool doTsIv = false;
+        bool doTsC = false;
+        bool doTsS = false;
+    };
+
+    /**
+     * Constructor for the HNZPivotFilter.
+     *
+     * We call the constructor of the base class and handle the initial
+     * configuration of the filter.
+     *
+     * @param filterName The name of the filter
+     * @param filterConfig The configuration category for this filter
+     * @param outHandle The handle of the next filter in the chain
+     * @param output A function pointer to call to output data to the next filter
+     */
+    HNZPivotFilter(const std::string& filterName,
+        ConfigCategory& filterConfig,
+        OUTPUT_HANDLE* outHandle,
+        OUTPUT_STREAM output);
+
+    /**
+     * The actual filtering code
+     *
+     * @param readingSet The reading data to filter
+     */
+    void ingest(READINGSET* readingSet);
+
+    /**
+     * Reconfiguration entry point to the filter.
+     *
+     * This method runs holding the configMutex to prevent
+     * ingest from using a configuration that may be destroyed
+     * by this call.
+     *
+     * Pass the configuration to the base FilterPlugin class and
+     * then call the private method to handle the filter specific
+     * configuration.
+     *
+     * @param newConfig The JSON of the new configuration
+     */
+    void reconfigure(const std::string& newConfig);
+
+private:
+    void readConfig(const ConfigCategory& config);
+
+    Datapoint* addElement(Datapoint* dp, const std::string& elementPath);
+
+    template <typename T>
+    Datapoint* addElementWithValue(Datapoint* dp, const std::string& elementPath, const T value);
+
+    Datapoint* createDp(const std::string& name);
+
+    template <typename T>
+    Datapoint* createDpWithValue(const std::string& name, const T value);
+
+    void convertDatapoint(const std::string& assetName, Datapoint* dp, std::vector<Datapoint*>& convertedDatapoints);
+
+    template <typename T>
+    static void readAttribute(std::map<std::string, bool>& attributeFound, Datapoint* dp,
+        const std::string& targetName, T& out);
+    static void readAttribute(std::map<std::string, bool>& attributeFound, Datapoint* dp,
+        const std::string& targetName, Datapoint*& out);
+    static void readAttribute(std::map<std::string, bool>& attributeFound, Datapoint* dp,
+        const std::string& targetName, std::string& out);
+    Datapoint* convertDatapointToPivot(const std::string& assetName, Datapoint* sourceDp);
+    Datapoint* convertTSToPivot(const std::string& assetName, std::map<std::string, bool>& attributeFound,
+        const GenericDataObject& dataObject, std::shared_ptr<HNZPivotDataPoint> exchangeConfig);
+    Datapoint* convertTMToPivot(const std::string& assetName, std::map<std::string, bool>& attributeFound,
+        const GenericDataObject& dataObject, std::shared_ptr<HNZPivotDataPoint> exchangeConfig);
+    Datapoint* convertTCACKToPivot(const std::string& assetName, std::map<std::string, bool>& attributeFound,
+        const GenericDataObject& dataObject, std::shared_ptr<HNZPivotDataPoint> exchangeConfig);
+    Datapoint* convertTVCACKToPivot(const std::string& assetName, std::map<std::string, bool>& attributeFound,
+        const GenericDataObject& dataObject, std::shared_ptr<HNZPivotDataPoint> exchangeConfig);
+
+    Datapoint* convertDatapointToHNZ(const std::string& assetName, Datapoint* sourceDp);
+
+    std::shared_ptr<HNZPivotConfig> m_filterConfig;
+    std::recursive_mutex m_configMutex;
+};
+
+
+#endif /* _HNZ_PIVOT_FILTER_H */
\ No newline at end of file
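For orientation (an inferred sketch, not part of the patch): the GenericDataObject fields above mirror the attributes of the incoming HNZ readings. Judging from the do* field names and the "do_value" log message in src/hnz_pivot_filter.cpp, a TS reading entering ingest() would carry a data object shaped roughly like this (attribute spelling and values are assumptions):

    data_object: {
        do_type: "TS", do_station: 12, do_addr: 511, do_value: 1,
        do_valid: 0, do_cg: false, do_outdated: false,
        do_ts: 1668759955000, do_ts_iv: false, do_ts_c: false, do_ts_s: false
    }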
diff --git a/include/hnz_pivot_filter_config.hpp b/include/hnz_pivot_filter_config.hpp
new file mode 100644
index 0000000..cb1ac23
--- /dev/null
+++ b/include/hnz_pivot_filter_config.hpp
@@ -0,0 +1,88 @@
+/*
+ * FledgePower HNZ <-> pivot filter plugin.
+ *
+ * Copyright (c) 2022, RTE (https://www.rte-france.com)
+ *
+ * Released under the Apache 2.0 Licence
+ *
+ * Author: Michael Zillgith (michael.zillgith at mz-automation.de)
+ *
+ */
+
+#ifndef PIVOT_HNZ_CONFIG_H
+#define PIVOT_HNZ_CONFIG_H
+
+#include <map>
+#include <memory>
+#include <string>
+#include <rapidjson/document.h>
+
+#define FILTER_NAME "hnz_pivot_filter"
+
+constexpr char JSON_NAME[] = "name";
+constexpr char JSON_VERSION[] = "version";
+constexpr char JSON_EXCHANGED_DATA_NAME[] = "exchanged_data";
+constexpr char DATAPOINTS[] = "datapoints";
+constexpr char LABEL[] = "label";
+constexpr char PIVOT_ID[] = "pivot_id";
+constexpr char PIVOT_TYPE[] = "pivot_type";
+constexpr char PROTOCOLS[] = "protocols";
+constexpr char HNZ_NAME[] = "hnzip";
+constexpr char MESSAGE_CODE[] = "typeid";
+constexpr char MESSAGE_ADDRESS[] = "address";
+
+class HNZPivotDataPoint
+{
+public:
+    HNZPivotDataPoint(const std::string& label, const std::string& pivotId, const std::string& pivotType, const std::string& typeIdStr, unsigned int address);
+
+    const std::string& getLabel() const {return m_label;}
+    const std::string& getPivotId() const {return m_pivotId;}
+    const std::string& getPivotType() const {return m_pivotType;}
+    const std::string& getTypeId() const {return m_typeIdStr;}
+    unsigned int getAddress() const {return m_address;}
+
+private:
+    std::string m_label;
+    std::string m_pivotId;
+    std::string m_pivotType;
+
+    std::string m_typeIdStr;
+    unsigned int m_address;
+};
+
+class HNZPivotConfig
+{
+public:
+    HNZPivotConfig() = default;
+
+    void importExchangeConfig(const std::string& exchangeConfig);
+
+    const std::map<std::string, std::shared_ptr<HNZPivotDataPoint>>& getExchangeDefinitions() const {return m_exchangeDefinitions;}
+    std::string findPivotId(const std::string& typeIdStr, unsigned int address) const;
+    static const std::string& getPluginName();
+    bool isComplete() const {return m_exchange_data_is_complete;}
+
+private:
+    static bool m_check_string(const rapidjson::Value &json, const char *key);
+    static bool m_check_array(const rapidjson::Value &json, const char *key);
+    static bool m_check_object(const rapidjson::Value &json, const char *key);
+
+    static bool m_retrieve(const rapidjson::Value &json, const char *key, unsigned int *target);
+    static bool m_retrieve(const rapidjson::Value &json, const char *key, unsigned int *target, unsigned int def);
+    static bool m_retrieve(const rapidjson::Value &json, const char *key, std::string *target);
+    static bool m_retrieve(const rapidjson::Value &json, const char *key, std::string *target, const std::string& def);
+    static bool m_retrieve(const rapidjson::Value &json, const char *key, long long int *target, long long int def);
+
+    std::string m_getLookupHash(const std::string& typeIdStr, unsigned int address) const;
+
+    bool m_exchange_data_is_complete = false;
+
+    /* list of exchange data points -> the pivotId is the key */
+    std::map<std::string, std::shared_ptr<HNZPivotDataPoint>> m_exchangeDefinitions;
+    /* Map used to find the pivotId from the combination of typeid and address
+       -> "typeid-address" is the key */
+    std::map<std::string, std::string> m_pivotIdLookup;
+};
+
+#endif /* PIVOT_HNZ_CONFIG_H */
\ No newline at end of file
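A minimal sketch of the exchanged_data configuration that these constants parse (the key names come from the constants above; the concrete values are invented for illustration):

    {
        "exchanged_data": {
            "name": "SAMPLE",
            "version": "1.0",
            "datapoints": [
                {
                    "label": "TS1",
                    "pivot_id": "ID114562",
                    "pivot_type": "SpsTyp",
                    "protocols": [
                        { "name": "hnzip", "typeid": "TS", "address": 511 }
                    ]
                }
            ]
        }
    }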
diff --git a/include/hnz_pivot_object.hpp b/include/hnz_pivot_object.hpp
new file mode 100644
index 0000000..81c13f8
--- /dev/null
+++ b/include/hnz_pivot_object.hpp
@@ -0,0 +1,204 @@
+/*
+ * FledgePower HNZ <-> pivot filter plugin.
+ *
+ * Copyright (c) 2022, RTE (https://www.rte-france.com)
+ *
+ * Released under the Apache 2.0 Licence
+ *
+ * Author: Michael Zillgith (michael.zillgith at mz-automation.de)
+ *
+ */
+
+#ifndef _HNZ_PIVOT_OBJECT_H
+#define _HNZ_PIVOT_OBJECT_H
+
+#include <cstdint>
+#include <exception>
+#include <memory>
+#include <string>
+#include <utility>
+
+class Datapoint;
+class HNZPivotDataPoint;
+
+class PivotObjectException : public std::exception //NOSONAR
+{
+ public:
+    explicit PivotObjectException(const std::string& context):
+        m_context(context) {}
+
+    const std::string& getContext(void) const {return m_context;}
+
+ private:
+    const std::string m_context;
+};
+
+class PivotTimestamp
+{
+public:
+    explicit PivotTimestamp(Datapoint* timestampData);
+    PivotTimestamp(PivotTimestamp& other) = delete;
+    PivotTimestamp& operator=(const PivotTimestamp& other) = delete;
+
+    int SecondSinceEpoch() const {return m_secondSinceEpoch;}
+    int FractionOfSecond() const {return m_fractionOfSecond;}
+
+    bool ClockFailure() const {return m_clockFailure;}
+    bool LeapSecondKnown() const {return m_leapSecondKnown;}
+    bool ClockNotSynchronized() const {return m_clockNotSynchronized;}
+    int TimeAccuracy() const {return m_timeAccuracy;}
+
+    /**
+     * Convert secondSinceEpoch and fractionOfSecond to a timestamp
+     * @param secondSinceEpoch : interval in seconds continuously counted from the epoch 1970-01-01 00:00:00 UTC
+     * @param fractionOfSecond : represents the fraction of the current second when the value of the TimeStamp has been determined
+     * @return timestamp (ms)
+     */
+    static long toTimestamp(long secondSinceEpoch, long fractionOfSecond);
+
+    /**
+     * Convert a timestamp (ms) into a pair of secondSinceEpoch and fractionOfSecond
+     * @param timestamp : timestamp (ms)
+     * @return pair of secondSinceEpoch and fractionOfSecond
+     */
+    static std::pair<long, long> fromTimestamp(long timestamp);
+
+    /**
+     * Get the current timestamp in milliseconds
+     * @return timestamp in ms
+     */
+    static uint64_t getCurrentTimestampMs();
+
+private:
+
+    void handleTimeQuality(Datapoint* timeQuality);
+
+    int m_secondSinceEpoch;
+    int m_fractionOfSecond;
+
+    int m_timeAccuracy;
+    bool m_clockFailure;
+    bool m_leapSecondKnown;
+    bool m_clockNotSynchronized;
+};
+
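+/*
+ * Usage sketch (hypothetical values): the two encodings round-trip through
+ * the static helpers above, up to the resolution of fractionOfSecond:
+ *
+ *   auto parts = PivotTimestamp::fromTimestamp(1668759955123); // ms since epoch
+ *   long ms = PivotTimestamp::toTimestamp(parts.first, parts.second);
+ *   // ms is 1668759955123 again (1668759955 s plus a ~123 ms fraction)
+ */
+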
Failure() const {return m_failure;}
+    bool Inconsistent() const {return m_inconsistent;}
+    bool OldData() const {return m_oldData;}
+    bool Oscillatory() const {return m_oscillatory;}
+    bool OutOfRange() const {return m_outOfRange;}
+    bool Overflow() const {return m_overflow;}
+
+    bool OperatorBlocked() const {return m_operatorBlocked;}
+    bool Test() const {return m_test;}
+
+    bool IsTimestampSubstituted() const {return m_timestampSubstituted;}
+    bool IsTimestampInvalid() const {return m_timestampInvalid;}
+
+private:
+
+    Datapoint* getCdc(Datapoint* dp);
+    bool readBool(Datapoint* dp, const std::string& name, bool& out) const;
+    void handleGTIX();
+    void handleCdc(Datapoint* cdc);
+    void handleDetailQuality(Datapoint* detailQuality);
+    void handleQuality(Datapoint* q);
+
+    Datapoint* m_dp;
+    Datapoint* m_ln;
+    Datapoint* m_cdc;
+    PivotClass m_pivotClass;
+    PivotCdc m_pivotCdc;
+
+    std::string m_comingFrom;
+    std::string m_identifier;
+    int m_cause = 0;
+    bool m_isConfirmation = false;
+
+    Validity m_validity = Validity::GOOD;
+    bool m_badReference = false;
+    bool m_failure = false;
+    bool m_inconsistent = false;
+    bool m_inacurate = false;
+    bool m_oldData = false;
+    bool m_oscillatory = false;
+    bool m_outOfRange = false;
+    bool m_overflow = false;
+    Source m_source = Source::PROCESS;
+    bool m_operatorBlocked = false;
+    bool m_test = false;
+
+    std::shared_ptr<PivotTimestamp> m_timestamp;
+
+    bool m_timestampSubstituted = false;
+    bool m_timestampInvalid = false;
+
+    long intVal = 0;
+};
+
+#endif /* _HNZ_PIVOT_OBJECT_H */
\ No newline at end of file
diff --git a/include/hnz_pivot_utility.hpp b/include/hnz_pivot_utility.hpp
new file mode 100644
index 0000000..9c817a2
--- /dev/null
+++ b/include/hnz_pivot_utility.hpp
@@ -0,0 +1,84 @@
+/*
+ * FledgePower HNZ <-> pivot filter utility functions.
+ *
+ * Copyright (c) 2022, RTE (https://www.rte-france.com)
+ *
+ * Released under the Apache 2.0 Licence
+ *
+ * Author: Michael Zillgith (michael.zillgith at mz-automation.de)
+ *
+ */
+
+#ifndef _HNZ_PIVOT_UTILITY_H
+#define _HNZ_PIVOT_UTILITY_H
+
+#include <string>
+#include <vector>
+#include <logger.h>
+
+namespace PivotUtility {
+    /**
+     * Join a list of strings into a single string with the given separator
+     * @param list : List of strings to join
+     * @param sep : Separator to put between each list element
+     * @return String containing the concatenation of all strings in the list with the separator in between
+     */
+    std::string join(const std::vector<std::string> &list, const std::string &sep = ", ");
+    /**
+     * Split a string into a list of strings with the given separator
+     * @param str : String to split
+     * @param sep : Separator used to split the string
+     * @return List of strings extracted from the initial string
+     */
+    std::vector<std::string> split(const std::string& str, char sep);
+
+    /*
+     * Log helper functions that log both in the Fledge syslog file and in stdout for unit tests
+     */
+    template<typename... Args>
+    void log_debug(const std::string& format, Args&&... args) {
+    #ifdef UNIT_TEST
+        printf(std::string(format).append("\n").c_str(), std::forward<Args>(args)...);
+        fflush(stdout);
+    #endif
+        Logger::getLogger()->debug(format.c_str(), std::forward<Args>(args)...);
+    }
+
+    template<typename... Args>
+    void log_info(const std::string& format, Args&&... args) {
+    #ifdef UNIT_TEST
+        printf(std::string(format).append("\n").c_str(), std::forward<Args>(args)...);
+        fflush(stdout);
+    #endif
+        Logger::getLogger()->info(format.c_str(), std::forward<Args>(args)...);
+    }
+
+    template<typename... Args>
+    void log_warn(const std::string& format, Args&&... args) {
+    #ifdef UNIT_TEST
+        printf(std::string(format).append("\n").c_str(), std::forward<Args>(args)...);
+        fflush(stdout);
+    #endif
+        Logger::getLogger()->warn(format.c_str(), std::forward<Args>(args)...);
+    }
+
+    template<typename... Args>
+    void log_error(const std::string& format, Args&&... args) {
+    #ifdef UNIT_TEST
+        printf(std::string(format).append("\n").c_str(), std::forward<Args>(args)...);
+        fflush(stdout);
+    #endif
+        Logger::getLogger()->error(format.c_str(), std::forward<Args>(args)...);
+    }
+
+    template<typename... Args>
+    void log_fatal(const std::string& format, Args&&... args) {
+    #ifdef UNIT_TEST
+        printf(std::string(format).append("\n").c_str(), std::forward<Args>(args)...);
+        fflush(stdout);
+    #endif
+        Logger::getLogger()->fatal(format.c_str(), std::forward<Args>(args)...);
+    }
+}
+
+#endif /* _HNZ_PIVOT_UTILITY_H */
\ No newline at end of file
diff --git a/mkversion b/mkversion
new file mode 100644
index 0000000..6f71d5b
--- /dev/null
+++ b/mkversion
@@ -0,0 +1,11 @@
+#!/bin/sh
+cat > version.h << END_WARNING
+
+/*
+ * WARNING: This is an automatically generated file.
+ * Do not edit this file.
+ * To change the version edit the file VERSION
+ */
+
+END_WARNING
+/bin/echo '#define VERSION "'`cat $1/VERSION`'"' >> version.h
diff --git a/src/hnz_pivot_filter.cpp b/src/hnz_pivot_filter.cpp
new file mode 100644
index 0000000..76ca934
--- /dev/null
+++ b/src/hnz_pivot_filter.cpp
@@ -0,0 +1,535 @@
+/*
+ * FledgePower HNZ <-> pivot filter plugin.
+ *
+ * Copyright (c) 2022, RTE (https://www.rte-france.com)
+ *
+ * Released under the Apache 2.0 Licence
+ *
+ * Author: Michael Zillgith (michael.zillgith at mz-automation.de)
+ *
+ */
+
+#include
+
+#include "hnz_pivot_filter.hpp"
+#include "hnz_pivot_object.hpp"
+#include "hnz_pivot_filter_config.hpp"
+#include "hnz_pivot_utility.hpp"
+
+
+HNZPivotFilter::HNZPivotFilter(const std::string& filterName, ConfigCategory& filterConfig,
+            OUTPUT_HANDLE *outHandle, OUTPUT_STREAM output):
+    FledgeFilter(filterName, filterConfig, outHandle, output),
+    m_filterConfig(std::make_shared<HNZPivotConfig>())
+{
+    (void)filterName; /* ignore parameter */
+    readConfig(filterConfig);
+}
+
+static bool checkLabelMatch(const std::string& incomingLabel, std::shared_ptr<HNZPivotDataPoint> exchangeConfig)
+{
+    return incomingLabel == exchangeConfig->getLabel();
+}
+
+static bool checkPivotTypeMatch(const std::string& incomingType, std::shared_ptr<HNZPivotDataPoint> exchangeConfig)
+{
+    const std::string& pivotType = exchangeConfig->getPivotType();
+    if (incomingType == "TS") {
+        return (pivotType == "SpsTyp") || (pivotType == "DpsTyp");
+    }
+    else if (incomingType == "TM") {
+        return pivotType == "MvTyp";
+    }
+    else if ((incomingType == "TC") || (incomingType == "TVC")) {
+        return (pivotType == "SpcTyp") || (pivotType == "DpcTyp") || (pivotType == "IncTyp");
+    }
+    return false;
+}
+
+static bool checkValueRange(const std::string& beforeLog, long value, long min, long max, const std::string& type)
+{
+    if (value < min || value > max) {
+        PivotUtility::log_warn("%s do_value out of range [%ld..%ld] for %s: %ld", beforeLog.c_str(), min, max, type.c_str(), value);
+        return false;
+    }
+    return true;
+}
+
+static void appendTimestamp(PivotObject& pivot, bool hasDoTs, unsigned long doTs, bool doTsIv, bool doTsS)
+{
+    if (hasDoTs) {
+        pivot.addTimestamp(doTs, doTsS);
+        pivot.addTmOrg(false);
+        pivot.addTmValidity(doTsIv);
+    }
+    // For any message that does not have a timestamp in the protocol, add one artificially
+    else {
+        doTs = PivotTimestamp::getCurrentTimestampMs();
+        pivot.addTimestamp(doTs, false);
+        pivot.addTmOrg(true);
+    }
+}
+
+template <typename T>
+void
HNZPivotFilter::readAttribute(std::map<std::string, bool>& attributeFound, Datapoint* dp,
+    const std::string& targetName, T& out) {
+    const auto& name = dp->getName();
+    if (name != targetName) {
+        return;
+    }
+    if (attributeFound[name]) {
+        return;
+    }
+
+    if (dp->getData().getType() == DatapointValue::T_INTEGER) {
+        out = static_cast<T>(dp->getData().toInt());
+        attributeFound[name] = true;
+    }
+}
+
+void HNZPivotFilter::readAttribute(std::map<std::string, bool>& attributeFound, Datapoint* dp,
+    const std::string& targetName, Datapoint*& out) {
+    const auto& name = dp->getName();
+    if (name != targetName) {
+        return;
+    }
+    if (attributeFound[name]) {
+        return;
+    }
+
+    out = dp;
+    attributeFound[name] = true;
+}
+
+void HNZPivotFilter::readAttribute(std::map<std::string, bool>& attributeFound, Datapoint* dp,
+    const std::string& targetName, std::string& out) {
+    const auto& name = dp->getName();
+    if (name != targetName) {
+        return;
+    }
+    if (attributeFound[name]) {
+        return;
+    }
+
+    if (dp->getData().getType() == DatapointValue::T_STRING) {
+        out = dp->getData().toStringValue();
+        attributeFound[name] = true;
+    }
+}
+
+Datapoint* HNZPivotFilter::convertDatapointToPivot(const std::string& assetName, Datapoint* sourceDp)
+{
+    Datapoint* convertedDatapoint = nullptr;
+    std::string beforeLog = HNZPivotConfig::getPluginName() + " - " + assetName + " - HNZPivotFilter::convertDatapointToPivot -";
+
+    DatapointValue& dpv = sourceDp->getData();
+
+    if (dpv.getType() != DatapointValue::T_DP_DICT)
+        return nullptr;
+
+    const std::vector<Datapoint*>* datapoints = dpv.getDpVec();
+    std::map<std::string, bool> attributeFound = {
+        {"do_type", false},
+        {"do_station", false},
+        {"do_addr", false},
+        {"do_value", false},
+        {"do_valid", false},
+        {"do_an", false},
+        {"do_cg", false},
+        {"do_outdated", false},
+        {"do_ts", false},
+        {"do_ts_iv", false},
+        {"do_ts_c", false},
+        {"do_ts_s", false},
+    };
+
+    GenericDataObject dataObject;
+
+    for (Datapoint* dp : *datapoints)
+    {
+        readAttribute(attributeFound, dp, "do_type", dataObject.doType);
+        readAttribute(attributeFound, dp, "do_station", dataObject.doStation);
+        readAttribute(attributeFound, dp, "do_addr", dataObject.doAddress);
+        readAttribute(attributeFound, dp, "do_value", dataObject.doValue);
+        readAttribute(attributeFound, dp, "do_valid", dataObject.doValid);
+        readAttribute(attributeFound, dp, "do_an", dataObject.doAn);
+        readAttribute(attributeFound, dp, "do_cg", dataObject.doCg);
+        readAttribute(attributeFound, dp, "do_outdated", dataObject.doOutdated);
+        readAttribute(attributeFound, dp, "do_ts", dataObject.doTs);
+        readAttribute(attributeFound, dp, "do_ts_iv", dataObject.doTsIv);
+        readAttribute(attributeFound, dp, "do_ts_c", dataObject.doTsC);
+        readAttribute(attributeFound, dp, "do_ts_s", dataObject.doTsS);
+    }
+
+    // Get exchangeConfig from message type and address
+    if (!attributeFound["do_type"]) {
+        PivotUtility::log_error("%s Missing do_type", beforeLog.c_str());
+        return nullptr;
+    }
+    if (!attributeFound["do_addr"]) {
+        PivotUtility::log_error("%s Missing do_addr", beforeLog.c_str());
+        return nullptr;
+    }
+    const std::string& pivotId = m_filterConfig->findPivotId(dataObject.doType, dataObject.doAddress);
+    if (pivotId.empty()) {
+        PivotUtility::log_error("%s No pivot ID configured for typeid %s and address %u",
+            beforeLog.c_str(), dataObject.doType.c_str(), dataObject.doAddress);
+        return nullptr;
+    }
+    auto exchangeData = m_filterConfig->getExchangeDefinitions();
+    if (exchangeData.count(pivotId) == 0) {
+        PivotUtility::log_error("%s Unknown pivot ID: %s", beforeLog.c_str(), pivotId.c_str());
+        return nullptr;
+    }
+
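+    // Illustration with the sample exchanged_data config from the unit tests:
+    // a reading with do_type "TS" and do_addr 511 yields the lookup key
+    // "TS-511" (see HNZPivotConfig::m_getLookupHash), which resolves to
+    // pivot ID "ID114561" and its HNZPivotDataPoint definition.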
auto exchangeConfig = exchangeData[pivotId]; + if (!checkLabelMatch(assetName, exchangeConfig)) { + PivotUtility::log_warn("%s Input label (%s) does not match configured label (%s) for pivot ID: %s", + beforeLog.c_str(), assetName.c_str(), exchangeConfig->getLabel().c_str(), pivotId.c_str()); + } + + //NOTE: when doValue is missing for a TS or TM, we are converting a quality reading + + if (dataObject.doType == "TS") { + convertedDatapoint = convertTSToPivot(assetName, attributeFound, dataObject, exchangeConfig); + } + else if (dataObject.doType == "TM") { + convertedDatapoint = convertTMToPivot(assetName, attributeFound, dataObject, exchangeConfig); + } + else if (dataObject.doType == "TC") // Acknowledgment of a TC + { + convertedDatapoint = convertTCACKToPivot(assetName, attributeFound, dataObject, exchangeConfig); + } + else if (dataObject.doType == "TVC") // Acknowledgment of a TVC + { + convertedDatapoint = convertTVCACKToPivot(assetName, attributeFound, dataObject, exchangeConfig); + } + else { + PivotUtility::log_error("%s Unknown do_type: %s", beforeLog.c_str(), dataObject.doType.c_str()); + return nullptr; + } + + return convertedDatapoint; +} + +Datapoint* HNZPivotFilter::convertTSToPivot(const std::string& assetName, std::map& attributeFound, + const GenericDataObject& dataObject, std::shared_ptr exchangeConfig) +{ + std::string beforeLog = HNZPivotConfig::getPluginName() + " - " + assetName + " - HNZPivotFilter::convertTSToPivot -"; + + // Message structure checks + if (!checkPivotTypeMatch(dataObject.doType, exchangeConfig)) { + PivotUtility::log_warn("%s Invalid pivot type (%s) for data object type (%s)", + beforeLog.c_str(), exchangeConfig->getPivotType().c_str(), dataObject.doType.c_str()); + } + if (!attributeFound["do_valid"]) { + PivotUtility::log_warn("%s Missing attribute do_valid in TS", beforeLog.c_str()); + } + if (!attributeFound["do_cg"]) { + PivotUtility::log_warn("%s Missing attribute do_cg in TS", beforeLog.c_str()); + } + else if (!dataObject.doCg) { + if (!attributeFound["do_ts"]) { + PivotUtility::log_warn("%s Missing attribute do_ts in TS CE", beforeLog.c_str()); + } + if (!attributeFound["do_ts_iv"]) { + PivotUtility::log_warn("%s Missing attribute do_ts_iv in TS CE", beforeLog.c_str()); + } + if (!attributeFound["do_ts_c"]) { + PivotUtility::log_warn("%s Missing attribute do_ts_c in TS CE", beforeLog.c_str()); + } + if (!attributeFound["do_ts_s"]) { + PivotUtility::log_warn("%s Missing attribute do_ts_s in TS CE", beforeLog.c_str()); + } + } + if (!attributeFound["do_outdated"]) { + PivotUtility::log_warn("%s Missing attribute do_outdated in TS", beforeLog.c_str()); + } + else if (!dataObject.doOutdated && !attributeFound["do_value"]) { + PivotUtility::log_warn("%s Missing attribute do_value in TS", beforeLog.c_str()); + } + // Pivot conversion + const std::string& pivotType = exchangeConfig->getPivotType(); + PivotObject pivot("GTIS", pivotType); + pivot.setIdentifier(exchangeConfig->getPivotId()); + pivot.setCause(dataObject.doCg ? 
20 : 3); + + if (attributeFound["do_value"]) { + bool spsValue = false; + if (dataObject.doValue->getData().getType() == DatapointValue::T_INTEGER) { + // Value range check + long value = dataObject.doValue->getData().toInt(); + checkValueRange(beforeLog, value, 0, 1, "TS"); + spsValue = static_cast(value); + } + // Fill TS Double field from TS Simple infos + if (pivotType == "DpsTyp") { + pivot.setStValStr(spsValue?"on":"off"); + } + else { + pivot.setStVal(spsValue); + } + } + + pivot.addQuality(dataObject.doValid, dataObject.doOutdated, dataObject.doTsC, dataObject.doTsS); + + appendTimestamp(pivot, attributeFound["do_ts"], dataObject.doTs, dataObject.doTsIv, dataObject.doTsS); + + return pivot.toDatapoint(); +} + + +Datapoint* HNZPivotFilter::convertTMToPivot(const std::string& assetName, std::map& attributeFound, + const GenericDataObject& dataObject, std::shared_ptr exchangeConfig) +{ + std::string beforeLog = HNZPivotConfig::getPluginName() + " - " + assetName + " - HNZPivotFilter::convertTMToPivot -"; + + // Message structure checks + if (!checkPivotTypeMatch(dataObject.doType, exchangeConfig)) { + PivotUtility::log_warn("%s Invalid pivot type (%s) for data object type (%s)", + beforeLog.c_str(), exchangeConfig->getPivotType().c_str(), dataObject.doType.c_str()); + } + if (!attributeFound["do_valid"]) { + PivotUtility::log_warn("%s Missing attribute do_valid in TM", beforeLog.c_str()); + } + if (!attributeFound["do_an"]) { + PivotUtility::log_warn("%s Missing attribute do_an in TM", beforeLog.c_str()); + } + if (!attributeFound["do_outdated"]) { + PivotUtility::log_warn("%s Missing attribute do_outdated in TM", beforeLog.c_str()); + } + else if (!dataObject.doOutdated && !attributeFound["do_value"]) { + PivotUtility::log_warn("%s Missing attribute do_value in TM", beforeLog.c_str()); + } + // Pivot conversion + PivotObject pivot("GTIM", exchangeConfig->getPivotType()); + pivot.setIdentifier(exchangeConfig->getPivotId()); + pivot.setCause(1); + + if (attributeFound["do_value"] && (dataObject.doValue->getData().getType() == DatapointValue::T_INTEGER)) { + // Value range check + long value = dataObject.doValue->getData().toInt(); + if (attributeFound["do_an"]) { + if (dataObject.doAn == "TMA") { + checkValueRange(beforeLog, value, -127, 127, dataObject.doAn); + } + else if (dataObject.doAn == "TM8") { + checkValueRange(beforeLog, value, 0, 255, dataObject.doAn); + } + else if (dataObject.doAn == "TM16") { + checkValueRange(beforeLog, value, -32768, 32767, dataObject.doAn); + } + else { + PivotUtility::log_warn("%s Unknown do_an: %s", beforeLog.c_str(), dataObject.doAn.c_str()); + } + } + pivot.setMagI(static_cast(value)); + } + + pivot.addQuality(dataObject.doValid, dataObject.doOutdated, false, false); + + appendTimestamp(pivot, false, 0, false, false); + + return pivot.toDatapoint(); +} + +Datapoint* HNZPivotFilter::convertTCACKToPivot(const std::string& assetName, std::map& attributeFound, + const GenericDataObject& dataObject, std::shared_ptr exchangeConfig) +{ + std::string beforeLog = HNZPivotConfig::getPluginName() + " - " + assetName + " - HNZPivotFilter::convertTCACKToPivot -"; + + // Message structure checks + const std::string& pivotType = exchangeConfig->getPivotType(); + if (!checkPivotTypeMatch(dataObject.doType, exchangeConfig)) { + PivotUtility::log_warn("%s Invalid pivot type (%s) for data object type (%s)", + beforeLog.c_str(), pivotType.c_str(), dataObject.doType.c_str()); + } + if (!attributeFound["do_valid"]) { + PivotUtility::log_warn("%s Missing attribute 
do_valid in TC ACK", beforeLog.c_str()); + } + // Pivot conversion + + PivotObject pivot("GTIC", pivotType); + pivot.setIdentifier(exchangeConfig->getPivotId()); + pivot.setCause(7); + + pivot.addQuality(dataObject.doValid, false, false, false); + + appendTimestamp(pivot, false, 0, false, false); + + return pivot.toDatapoint(); +} + +Datapoint* HNZPivotFilter::convertTVCACKToPivot(const std::string& assetName, std::map& attributeFound, + const GenericDataObject& dataObject, std::shared_ptr exchangeConfig) +{ + std::string beforeLog = HNZPivotConfig::getPluginName() + " - " + assetName + " - HNZPivotFilter::convertTVCACKToPivot -"; + + // Message structure checks + if (!checkPivotTypeMatch(dataObject.doType, exchangeConfig)) { + PivotUtility::log_warn("%s Invalid pivot type (%s) for data object type (%s)", + beforeLog.c_str(), exchangeConfig->getPivotType().c_str(), dataObject.doType.c_str()); + } + if (!attributeFound["do_valid"]) { + PivotUtility::log_warn("%s Missing attribute do_valid in TVC ACK", beforeLog.c_str()); + } + // Pivot conversion + PivotObject pivot("GTIC", exchangeConfig->getPivotType()); + pivot.setIdentifier(exchangeConfig->getPivotId()); + pivot.setCause(7); + + pivot.addQuality(dataObject.doValid, false, false, false); + + appendTimestamp(pivot, false, 0, false, false); + + return pivot.toDatapoint(); +} + +Datapoint* HNZPivotFilter::convertDatapointToHNZ(const std::string& assetName, Datapoint* sourceDp) +{ + Datapoint* convertedDatapoint = nullptr; + std::string beforeLog = HNZPivotConfig::getPluginName() + " - " + assetName + " - HNZPivotFilter::convertDatapointToHNZ -"; + + try { + PivotObject pivotObject(sourceDp); + auto exchangeData = m_filterConfig->getExchangeDefinitions(); + const std::string& pivotId = pivotObject.getIdentifier(); + if (exchangeData.count(pivotId) == 0) { + std::vector pivotIds; + for(auto const& kvp: exchangeData) { + pivotIds.push_back(kvp.first); + } + PivotUtility::log_error("%s Unknown pivot ID: %s (available: %s)", + beforeLog.c_str(), pivotId.c_str(), PivotUtility::join(pivotIds).c_str()); + return nullptr; + } + auto exchangeConfig = exchangeData[pivotId]; + if (!checkLabelMatch(assetName, exchangeConfig)) { + PivotUtility::log_warn("%s Input label (%s) does not match configured label (%s) for pivot ID: %s", + beforeLog.c_str(), assetName.c_str(), exchangeConfig->getLabel().c_str(), pivotId.c_str()); + } + convertedDatapoint = pivotObject.toHnzCommandObject(exchangeConfig); + } + catch (PivotObjectException& e) + { + PivotUtility::log_error("%s Failed to convert pivot object: %s", beforeLog.c_str(), e.getContext().c_str()); + } + + return convertedDatapoint; +} + +void HNZPivotFilter::convertDatapoint(const std::string& assetName, Datapoint* dp, std::vector& convertedDatapoints) { + std::string beforeLog = HNZPivotConfig::getPluginName() + " - HNZPivotFilter::processDatapoint -"; + if (dp->getName() == "data_object") { + Datapoint* convertedDp = convertDatapointToPivot(assetName, dp); + + if (convertedDp) { + convertedDatapoints.push_back(convertedDp); + } + else { + PivotUtility::log_error("%s Failed to convert data_object", beforeLog.c_str()); + } + } + else if (dp->getName() == "PIVOT") { + Datapoint* convertedDp = convertDatapointToHNZ(assetName, dp); + + if (convertedDp) { + convertedDatapoints.push_back(convertedDp); + } + else { + PivotUtility::log_error("%s Failed to convert PIVOT object", beforeLog.c_str()); + } + } + else if (dp->getName() == "south_event") { + PivotUtility::log_debug("%s Forwarding south_event unchanged", 
beforeLog.c_str());
+        convertedDatapoints.push_back(new Datapoint(dp->getName(), dp->getData()));
+    }
+    else {
+        PivotUtility::log_debug("%s Unknown reading type: %s, message removed", beforeLog.c_str(), dp->getName().c_str());
+    }
+}
+
+void HNZPivotFilter::ingest(READINGSET* readingSet)
+{
+    std::lock_guard<std::mutex> guard(m_configMutex);
+    std::string beforeLog = HNZPivotConfig::getPluginName() + " - HNZPivotFilter::ingest -";
+    if (!isEnabled()) {
+        return;
+    }
+    if (!readingSet) {
+        PivotUtility::log_error("%s No reading set provided", beforeLog.c_str());
+        return;
+    }
+    /* apply transformation */
+    std::vector<Reading*>* readings = readingSet->getAllReadingsPtr();
+
+    PivotUtility::log_info("%s %lu readings", beforeLog.c_str(), readings->size());
+
+    auto readIt = readings->begin();
+
+    while(readIt != readings->end())
+    {
+        Reading* reading = *readIt;
+
+        std::string assetName = reading->getAssetName();
+        beforeLog = HNZPivotConfig::getPluginName() + " - " + assetName + " - HNZPivotFilter::ingest -";
+
+        const std::vector<Datapoint*>& datapoints = reading->getReadingData();
+
+        std::vector<Datapoint*> convertedDatapoints;
+
+        PivotUtility::log_debug("%s original Reading: %s", beforeLog.c_str(), reading->toJSON().c_str());
+
+        for (Datapoint* dp : datapoints) {
+            convertDatapoint(assetName, dp, convertedDatapoints);
+        }
+
+        reading->removeAllDatapoints();
+
+        for (Datapoint* convertedDatapoint : convertedDatapoints) {
+            reading->addDatapoint(convertedDatapoint);
+        }
+
+        PivotUtility::log_debug("%s converted Reading: %s", beforeLog.c_str(), reading->toJSON().c_str());
+
+        if (reading->getReadingData().empty()) {
+            readIt = readings->erase(readIt);
+        }
+        else {
+            ++readIt;
+        }
+    }
+
+    if (!readings->empty())
+    {
+        if (m_func) {
+            PivotUtility::log_debug("%s Send %lu converted readings", beforeLog.c_str(), readings->size());
+
+            m_func(m_data, readingSet);
+        }
+        else {
+            PivotUtility::log_error("%s No function to call, discard %lu converted readings", beforeLog.c_str(), readings->size());
+        }
+    }
+}
+
+void HNZPivotFilter::reconfigure(const std::string& newConfig) {
+    std::lock_guard<std::mutex> guard(m_configMutex);
+    std::string beforeLog = HNZPivotConfig::getPluginName() + " - HNZPivotFilter::reconfigure -";
+    PivotUtility::log_debug("%s reconfigure called", beforeLog.c_str());
+    setConfig(newConfig);
+
+    ConfigCategory config("hnzpivot", newConfig);
+    readConfig(config);
+}
+
+void HNZPivotFilter::readConfig(const ConfigCategory& config) {
+    // Do not lock m_configMutex here: reconfigure() already holds it and
+    // std::mutex is not recursive, so locking again would deadlock. The
+    // constructor is the only other caller and runs before any concurrency.
+    std::string beforeLog = HNZPivotConfig::getPluginName() + " - HNZPivotFilter::readConfig -";
+    if (config.itemExists("exchanged_data")) {
+        const std::string exchangedData = config.getValue("exchanged_data");
+        m_filterConfig->importExchangeConfig(exchangedData);
+    }
+    else {
+        PivotUtility::log_error("%s Missing exchanged_data configuration", beforeLog.c_str());
+    }
+}
diff --git a/src/hnz_pivot_filter_config.cpp b/src/hnz_pivot_filter_config.cpp
new file mode 100644
index 0000000..1a4fec1
--- /dev/null
+++ b/src/hnz_pivot_filter_config.cpp
@@ -0,0 +1,212 @@
+/*
+ * FledgePower HNZ <-> pivot filter plugin.
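+ *
+ * Parses the "exchanged_data" configuration. For illustration, a minimal
+ * datapoint entry (taken from the plugin's default config) looks like:
+ *   { "label": "TS1", "pivot_id": "ID114562", "pivot_type": "SpsTyp",
+ *     "protocols": [ { "name": "hnzip", "address": "511", "typeid": "TS" } ] }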
+ *
+ * Copyright (c) 2022, RTE (https://www.rte-france.com)
+ *
+ * Released under the Apache 2.0 Licence
+ *
+ * Author: Michael Zillgith (michael.zillgith at mz-automation.de)
+ *
+ */
+
+#include <rapidjson/error/en.h>
+
+#include "hnz_pivot_utility.hpp"
+#include "hnz_pivot_filter_config.hpp"
+
+
+HNZPivotDataPoint::HNZPivotDataPoint(const std::string& label, const std::string& pivotId, const std::string& pivotType,
+                                     const std::string& typeIdStr, unsigned int address):
+    m_label(label), m_pivotId(pivotId), m_pivotType(pivotType), m_typeIdStr(typeIdStr), m_address(address)
+{}
+
+void HNZPivotConfig::importExchangeConfig(const std::string& exchangeConfig)
+{
+    m_exchange_data_is_complete = false;
+    bool is_complete = true;
+
+    rapidjson::Document document;
+    if (document.Parse(exchangeConfig.c_str()).HasParseError()) {
+        PivotUtility::log_fatal("Parsing error in exchanged_data json, offset " +
+                                std::to_string(static_cast<unsigned>(document.GetErrorOffset())) +
+                                " " +
+                                GetParseError_En(document.GetParseError()));
+        return;
+    }
+    if (!document.IsObject()) return;
+
+    if (!m_check_object(document, JSON_EXCHANGED_DATA_NAME)) return;
+
+    const rapidjson::Value &info = document[JSON_EXCHANGED_DATA_NAME];
+
+    is_complete &= m_check_string(info, JSON_NAME);
+    is_complete &= m_check_string(info, JSON_VERSION);
+
+    if (!m_check_array(info, DATAPOINTS)) return;
+
+    for (const rapidjson::Value &msg : info[DATAPOINTS].GetArray()) {
+        if (!msg.IsObject()) return;
+
+        std::string label;
+        std::string pivotId;
+        std::string pivotType;
+
+        is_complete &= m_retrieve(msg, LABEL, &label);
+        is_complete &= m_retrieve(msg, PIVOT_ID, &pivotId);
+        is_complete &= m_retrieve(msg, PIVOT_TYPE, &pivotType);
+
+        if (!m_check_array(msg, PROTOCOLS)) continue;
+
+        for (const rapidjson::Value &protocol : msg[PROTOCOLS].GetArray()) {
+            if (!protocol.IsObject()) return;
+
+            std::string protocol_name;
+
+            is_complete &= m_retrieve(protocol, JSON_NAME, &protocol_name);
+
+            if (protocol_name != HNZ_NAME) continue;
+
+            std::string address;
+            std::string msg_code;
+
+            is_complete &= m_retrieve(protocol, MESSAGE_ADDRESS, &address);
+            is_complete &= m_retrieve(protocol, MESSAGE_CODE, &msg_code);
+
+            unsigned long tmp = 0;
+            try {
+                tmp = std::stoul(address);
+            }
+            catch (const std::exception&) {
+                // std::stoul throws on non-numeric or oversized address strings
+                is_complete = false;
+                PivotUtility::log_error("Error with the field %s, the value is not a valid address: %s", MESSAGE_ADDRESS, address.c_str());
+                continue;
+            }
+            unsigned int msg_address = 0;
+            // Check if number is in range for unsigned int
+            if (tmp > static_cast<unsigned int>(-1)) {
+                is_complete = false;
+                PivotUtility::log_error("Error with the field %s, the value is out of range for unsigned integer: %lu", MESSAGE_ADDRESS, tmp);
+            } else {
+                msg_address = static_cast<unsigned int>(tmp);
+            }
+            auto newDp = std::make_shared<HNZPivotDataPoint>(label, pivotId, pivotType, msg_code, msg_address);
+            m_exchangeDefinitions[pivotId] = newDp;
+            m_pivotIdLookup[m_getLookupHash(msg_code, msg_address)] = pivotId;
+        }
+    }
+
+    m_exchange_data_is_complete = is_complete;
+}
+
+bool HNZPivotConfig::m_check_string(const rapidjson::Value &json, const char *key) {
+    if (!json.HasMember(key) || !json[key].IsString()) {
+        std::string s = key;
+        PivotUtility::log_error(
+            "Error with the field " + s +
+            ", the value does not exist or is not a std::string.");
+        return false;
+    }
+    return true;
+}
+
+bool HNZPivotConfig::m_check_array(const rapidjson::Value &json, const char *key) {
+    if (!json.HasMember(key) || !json[key].IsArray()) {
+        std::string s = key;
+        PivotUtility::log_error("The array " + s +
+                                " is required but not found.");
+        return false;
+    }
+    return true;
+}
+
+bool HNZPivotConfig::m_check_object(const rapidjson::Value &json, const char *key) {
+    if (!json.HasMember(key) || !json[key].IsObject()) {
+        std::string s = key;
PivotUtility::log_error("The object " + s + + " is required but not found."); + return false; + } + return true; +} + +bool HNZPivotConfig::m_retrieve(const rapidjson::Value &json, const char *key, + unsigned int *target) { + if (!json.HasMember(key) || !json[key].IsUint()) { + std::string s = key; + PivotUtility::log_error( + "Error with the field " + s + + ", the value does not exist or is not an unsigned integer."); + return false; + } + *target = json[key].GetUint(); + return true; +} + +bool HNZPivotConfig::m_retrieve(const rapidjson::Value &json, const char *key, + unsigned int *target, unsigned int def) { + if (!json.HasMember(key)) { + *target = def; + } else { + if (!json[key].IsUint()) { + std::string s = key; + PivotUtility::log_error("Error with the field " + s + + ", the value is not an unsigned integer."); + return false; + } + *target = json[key].GetUint(); + } + return true; +} + +bool HNZPivotConfig::m_retrieve(const rapidjson::Value &json, const char *key, std::string *target) { + if (!json.HasMember(key) || !json[key].IsString()) { + std::string s = key; + PivotUtility::log_error( + "Error with the field " + s + + ", the value does not exist or is not a std::string."); + return false; + } + *target = json[key].GetString(); + return true; +} + +bool HNZPivotConfig::m_retrieve(const rapidjson::Value &json, const char *key, std::string *target, + const std::string& def) { + if (!json.HasMember(key)) { + *target = def; + } else { + if (!json[key].IsString()) { + std::string s = key; + PivotUtility::log_error("Error with the field " + s + + ", the value is not a std::string."); + return false; + } + *target = json[key].GetString(); + } + return true; +} + +bool HNZPivotConfig::m_retrieve(const rapidjson::Value &json, const char *key, + long long int *target, long long int def) { + if (!json.HasMember(key)) { + *target = def; + } else { + if (!json[key].IsInt64()) { + std::string s = key; + PivotUtility::log_error("Error with the field " + s + + ", the value is not a long long integer."); + return false; + } + *target = json[key].GetInt64(); + } + return true; +} + +std::string HNZPivotConfig::findPivotId(const std::string& typeIdStr, unsigned int address) const { + const std::string& lookupHash = m_getLookupHash(typeIdStr, address); + if (m_pivotIdLookup.count(lookupHash) == 0){ + return ""; + } + return m_pivotIdLookup.at(lookupHash); +} + +const std::string& HNZPivotConfig::getPluginName() { + static std::string pluginName(FILTER_NAME); + return pluginName; +} + +std::string HNZPivotConfig::m_getLookupHash(const std::string& typeIdStr, unsigned int address) const { + return typeIdStr + "-" + std::to_string(address); +} \ No newline at end of file diff --git a/src/hnz_pivot_object.cpp b/src/hnz_pivot_object.cpp new file mode 100644 index 0000000..18cfa7d --- /dev/null +++ b/src/hnz_pivot_object.cpp @@ -0,0 +1,642 @@ +/* + * FledgePower HNZ <-> pivot filter plugin. 
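+ *
+ * PivotTimestamp splits a millisecond timestamp into SecondSinceEpoch and
+ * FractionOfSecond, where FractionOfSecond counts units of 1/2^24 second
+ * (the 16777216 factor used in toTimestamp/fromTimestamp below). Worked
+ * example: a 500 ms remainder gives 500 * 16777 + (500 * 216) / 1000
+ * = 8388608 = 2^23, i.e. exactly half a second.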
+ * + * Copyright (c) 2022, RTE (https://www.rte-france.com) + * + * Released under the Apache 2.0 Licence + * + * Author: Michael Zillgith (michael.zillgith at mz-automation.de) + * + */ + +#include +#include +#include + +#include "hnz_pivot_utility.hpp" +#include "hnz_pivot_object.hpp" +#include "hnz_pivot_filter_config.hpp" + + +static Datapoint* createDp(const std::string& name) +{ + auto datapoints = new std::vector; + + DatapointValue dpv(datapoints, true); + + auto dp = new Datapoint(name, dpv); + + return dp; +} + +template +static Datapoint* createDpWithValue(const std::string& name, const T value) +{ + DatapointValue dpv(value); + + auto dp = new Datapoint(name, dpv); + + return dp; +} + +static Datapoint* addElement(Datapoint* dp, const std::string& name) +{ + DatapointValue& dpv = dp->getData(); + + std::vector* subDatapoints = dpv.getDpVec(); + + Datapoint* element = createDp(name); + + if (element) { + subDatapoints->push_back(element); + } + + return element; +} + +template +static Datapoint* addElementWithValue(Datapoint* dp, const std::string& name, const T value) +{ + DatapointValue& dpv = dp->getData(); + + std::vector* subDatapoints = dpv.getDpVec(); + + Datapoint* element = createDpWithValue(name, value); + + if (element) { + subDatapoints->push_back(element); + } + + return element; +} + +static Datapoint* getChild(Datapoint* dp, const std::string& name) +{ + Datapoint* childDp = nullptr; + + DatapointValue& dpv = dp->getData(); + + if (dpv.getType() == DatapointValue::T_DP_DICT) { + const std::vector* datapoints = dpv.getDpVec(); + + for (Datapoint* child : *datapoints) { + if (child->getName() == name) { + childDp = child; + break; + } + } + } + + return childDp; +} + +static std::string getValueStr(Datapoint* dp) +{ + const DatapointValue& dpv = dp->getData(); + + if (dpv.getType() == DatapointValue::T_STRING) { + return dpv.toStringValue(); + } + else { + throw PivotObjectException("datapoint " + dp->getName() + " has not a string value"); + } +} + +static std::string getChildValueStr(Datapoint* dp, const std::string& name) +{ + Datapoint* childDp = getChild(dp, name); + + if (childDp) { + return getValueStr(childDp); + } + else { + throw PivotObjectException("No such child: " + name); + } +} + +static long getValueLong(Datapoint* dp) +{ + const DatapointValue& dpv = dp->getData(); + + if (dpv.getType() == DatapointValue::T_INTEGER) { + return dpv.toInt(); + } + else { + throw PivotObjectException("datapoint " + dp->getName() + " has not an int value"); + } +} + +static long getChildValueLong(Datapoint* dp, const std::string& name) +{ + Datapoint* childDp = getChild(dp, name); + + if (childDp) { + return getValueLong(childDp); + } + else { + throw PivotObjectException("No such child: " + name); + } +} + +static int getValueInt(Datapoint* dp) +{ + return static_cast(getValueLong(dp)); +} + +static int getChildValueInt(Datapoint* dp, const std::string& name) +{ + return static_cast(getChildValueLong(dp, name)); +} + +void PivotTimestamp::handleTimeQuality(Datapoint* timeQuality) +{ + DatapointValue& dpv = timeQuality->getData(); + if (dpv.getType() != DatapointValue::T_DP_DICT) { + return; + } + + const std::vector* datapoints = dpv.getDpVec(); + for (Datapoint* child : *datapoints) { + if (child->getName() == "clockFailure") { + if (getValueInt(child) > 0) + m_clockFailure = true; + else + m_clockFailure = false; + } + else if (child->getName() == "clockNotSynchronized") { + if (getValueInt(child) > 0) + m_clockNotSynchronized = true; + else + 
m_clockNotSynchronized = false; + } + else if (child->getName() == "leapSecondKnown") { + if (getValueInt(child) > 0) + m_leapSecondKnown = true; + else + m_leapSecondKnown = false; + } + else if (child->getName() == "timeAccuracy") { + m_timeAccuracy = getValueInt(child); + } + } +} + +PivotTimestamp::PivotTimestamp(Datapoint* timestampData) +{ + DatapointValue& dpv = timestampData->getData(); + if (dpv.getType() != DatapointValue::T_DP_DICT) { + return; + + } + + const std::vector* datapoints = dpv.getDpVec(); + for (Datapoint* child : *datapoints) + { + if (child->getName() == "SecondSinceEpoch") { + m_secondSinceEpoch = getValueInt(child); + } + else if (child->getName() == "FractionOfSecond") { + m_fractionOfSecond = getValueInt(child); + } + else if (child->getName() == "TimeQuality") { + handleTimeQuality(child); + } + } +} + +long PivotTimestamp::toTimestamp(long secondSinceEpoch, long fractionOfSecond) { + long timestamp = 0; + auto msPart = static_cast(round(static_cast(fractionOfSecond * 1000) / 16777216.0)); + timestamp = (secondSinceEpoch * 1000L) + msPart; + return timestamp; +} + +std::pair PivotTimestamp::fromTimestamp(long timestamp) { + long remainder = (timestamp % 1000L); + long fractionOfSecond = remainder * 16777 + ((remainder * 216) / 1000); + return std::make_pair(timestamp / 1000L, fractionOfSecond); +} + +uint64_t PivotTimestamp::getCurrentTimestampMs() +{ + return std::chrono::duration_cast(std::chrono::system_clock::now().time_since_epoch()).count(); +} + +Datapoint* PivotObject::getCdc(Datapoint* dp) +{ + Datapoint* cdcDp = nullptr; + std::vector unknownChildrenNames; + + DatapointValue& dpv = dp->getData(); + if (dpv.getType() != DatapointValue::T_DP_DICT) { + throw PivotObjectException("CDC type missing"); + } + + const std::vector* datapoints = dpv.getDpVec(); + for (Datapoint* child : *datapoints) { + if (child->getName() == "SpsTyp") { + cdcDp = child; + m_pivotCdc = PivotCdc::SPS; + } + else if (child->getName() == "MvTyp") { + cdcDp = child; + m_pivotCdc = PivotCdc::MV; + } + else if (child->getName() == "DpsTyp") { + cdcDp = child; + m_pivotCdc = PivotCdc::DPS; + } + else if (child->getName() == "SpcTyp") { + cdcDp = child; + m_pivotCdc = PivotCdc::SPC; + } + else if (child->getName() == "DpcTyp") { + cdcDp = child; + m_pivotCdc = PivotCdc::DPC; + } + else if (child->getName() == "IncTyp") { + cdcDp = child; + m_pivotCdc = PivotCdc::INC; + } + else { + unknownChildrenNames.push_back(child->getName()); + } + if (cdcDp != nullptr) { + break; + } + } + if(!unknownChildrenNames.empty()) { + throw PivotObjectException("CDC type unknown: " + PivotUtility::join(unknownChildrenNames)); + } + + return cdcDp; +} + +bool PivotObject::readBool(Datapoint* dp, const std::string& name, bool& out) const +{ + if (dp->getName() == name) { + if (getValueInt(dp) > 0) + out = true; + else + out = false; + return true; + } + return false; +} + +void PivotObject::handleDetailQuality(Datapoint* detailQuality) +{ + DatapointValue& dpv = detailQuality->getData(); + if (dpv.getType() != DatapointValue::T_DP_DICT) { + return; + } + + const std::vector* datapoints = dpv.getDpVec(); + for (Datapoint* child : *datapoints) { + if(readBool(child, "badReference", m_badReference)) continue; + if(readBool(child, "failure", m_failure)) continue; + if(readBool(child, "inconsistent", m_inconsistent)) continue; + if(readBool(child, "inacurate", m_inacurate)) continue; + if(readBool(child, "oldData", m_oldData)) continue; + if(readBool(child, "oscillatory", m_oscillatory)) continue; + 
if(readBool(child, "outOfRange", m_outOfRange)) continue; + if(readBool(child, "overflow", m_overflow)) continue; + } +} + +void PivotObject::handleQuality(Datapoint* q) +{ + DatapointValue& dpv = q->getData(); + if (dpv.getType() != DatapointValue::T_DP_DICT) { + return; + } + + const std::vector* datapoints = dpv.getDpVec(); + for (Datapoint* child : *datapoints) { + if (child->getName() == "Validity") { + std::string validityStr = getValueStr(child); + if (validityStr == "good") { + continue; + } + + if (validityStr == "invalid") { + m_validity = Validity::INVALID; + } + else if (validityStr == "questionable") { + m_validity = Validity::QUESTIONABLE; + } + else if (validityStr == "reserved") { + m_validity = Validity::RESERVED; + } + else { + throw PivotObjectException("Validity has invalid value: " + validityStr); + } + } + else if (child->getName() == "Source") { + std::string sourceStr = getValueStr(child); + if (sourceStr == "process") { + continue; + } + + if (sourceStr == "substituted") { + m_source = Source::SUBSTITUTED; + } + else { + throw PivotObjectException("Source has invalid value: " + sourceStr); + } + } + else if (child->getName() == "DetailQuality") { + handleDetailQuality(child); + } + else if (readBool(child, "operatorBlocked", m_operatorBlocked)) { + continue; + } + else if (readBool(child, "test", m_test)) { + continue; + } + } +} + +void PivotObject::handleCdc(Datapoint* cdc) { + if (cdc == nullptr) { + throw PivotObjectException("CDC element not found"); + } + + Datapoint* q = getChild(cdc, "q"); + + if (q) { + handleQuality(q); + } + + Datapoint* t = getChild(cdc, "t"); + + if (t) { + m_timestamp = std::make_shared(t); + } + + if (m_pivotCdc == PivotCdc::SPS) { + throw PivotObjectException("Pivot to HNZ not implemented for type SpsTyp"); + } + else if (m_pivotCdc == PivotCdc::DPS) { + throw PivotObjectException("Pivot to HNZ not implemented for type DpsTyp"); + } + else if (m_pivotCdc == PivotCdc::MV) { + throw PivotObjectException("Pivot to HNZ not implemented for type MvTyp"); + } + else if (m_pivotCdc == PivotCdc::SPC) { + Datapoint* ctlVal = getChild(cdc, "ctlVal"); + + if (ctlVal) { + if (getValueInt(ctlVal) > 0) { + intVal = 1; + } + else { + intVal = 0; + } + } + } + else if (m_pivotCdc == PivotCdc::DPC) { + Datapoint* ctlVal = getChild(cdc, "ctlVal"); + + if (ctlVal) { + std::string ctlValStr = getValueStr(ctlVal); + if (ctlValStr == "off") { + intVal = 0; + } + else if (ctlValStr == "on") { + intVal = 1; + } + else { + throw PivotObjectException("invalid DpcTyp value : " + ctlValStr); + } + } + } + else if (m_pivotCdc == PivotCdc::INC) { + Datapoint* ctlVal = getChild(cdc, "ctlVal"); + + if (ctlVal) { + intVal = getValueLong(ctlVal); + } + } +} + +void PivotObject::handleGTIX() { + m_identifier = getChildValueStr(m_ln, "Identifier"); + + if (getChild(m_ln, "ComingFrom")) { + m_comingFrom = getChildValueStr(m_ln, "ComingFrom"); + } + + Datapoint* cause = getChild(m_ln, "Cause"); + + if (cause) { + m_cause = getChildValueInt(cause, "stVal"); + } + + Datapoint* confirmation = getChild(m_ln, "Cause"); + + if (confirmation) { + int confirmationVal = getChildValueInt(confirmation, "stVal"); + + if (confirmationVal > 0) { + m_isConfirmation = true; + } + } + + Datapoint* tmOrg = getChild(m_ln, "TmOrg"); + + if (tmOrg) { + std::string tmOrgValue = getChildValueStr(tmOrg, "stVal"); + + if (tmOrgValue == "substituted") { + m_timestampSubstituted = true; + } + else { + m_timestampSubstituted = false; + } + } + + Datapoint* tmValidity = getChild(m_ln, "TmValidity"); 
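+    // TmValidity.stVal == "invalid" flags the source timestamp as invalid;
+    // any other value leaves m_timestampInvalid false (handled below).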
+ + if (tmValidity) { + std::string tmValidityValue = getChildValueStr(tmValidity, "stVal"); + + if (tmValidityValue == "invalid") { + m_timestampInvalid = true; + } + else { + m_timestampInvalid = false; + } + } + + Datapoint* cdc = getCdc(m_ln); + handleCdc(cdc); +} + +PivotObject::PivotObject(Datapoint* pivotData) { + if (pivotData->getName() != "PIVOT") { + throw PivotObjectException("No pivot object"); + } + + m_dp = pivotData; + m_ln = nullptr; + std::vector unknownChildrenNames; + + DatapointValue& dpv = pivotData->getData(); + if (dpv.getType() != DatapointValue::T_DP_DICT) { + throw PivotObjectException("pivot object not found"); + } + + const std::vector* datapoints = dpv.getDpVec(); + for (Datapoint* child : *datapoints) { + if (child->getName() == "GTIS") { + m_pivotClass = PivotClass::GTIS; + m_ln = child; + } + else if (child->getName() == "GTIM") { + m_pivotClass = PivotClass::GTIM; + m_ln = child; + } + else if (child->getName() == "GTIC") { + m_pivotClass = PivotClass::GTIC; + m_ln = child; + } + else { + unknownChildrenNames.push_back(child->getName()); + } + + if (m_ln != nullptr) { + break; + } + } + + if (m_ln == nullptr) { + throw PivotObjectException("pivot object type not supported: " + PivotUtility::join(unknownChildrenNames)); + } + + handleGTIX(); +} + +PivotObject::PivotObject(const std::string& pivotLN, const std::string& valueType) +{ + m_dp = createDp("PIVOT"); + + m_ln = addElement(m_dp, pivotLN); + + addElementWithValue(m_ln, "ComingFrom", "hnzip"); + + m_cdc = addElement(m_ln, valueType); +} + +void PivotObject::setIdentifier(const std::string& identifier) +{ + addElementWithValue(m_ln, "Identifier", identifier); +} + +void PivotObject::setCause(int cause) +{ + Datapoint* causeDp = addElement(m_ln, "Cause"); + + addElementWithValue(causeDp, "stVal", static_cast(cause)); +} + +void PivotObject::setStVal(bool value) +{ + addElementWithValue(m_cdc, "stVal", static_cast(value ? 1 : 0)); +} + +void PivotObject::setStValStr(const std::string& value) +{ + addElementWithValue(m_cdc, "stVal", value); +} + +void PivotObject::setMagF(float value) +{ + Datapoint* mag = addElement(m_cdc, "mag"); + + addElementWithValue(mag, "f", value); +} + +void PivotObject::setMagI(int value) +{ + Datapoint* mag = addElement(m_cdc, "mag"); + + addElementWithValue(mag, "i", static_cast(value)); +} + +void PivotObject::setConfirmation(bool value) +{ + Datapoint* confirmation = addElement(m_ln, "Confirmation"); + + if (confirmation) { + addElementWithValue(confirmation, "stVal", static_cast(value ? 
1 : 0)); + } +} + +void PivotObject::addQuality(unsigned int doValid, bool doOutdated, bool doTsC, bool doTsS) +{ + Datapoint* q = addElement(m_cdc, "q"); + // doValid of 1 means "invalid" + if (doValid == 1) { + addElementWithValue(q, "Validity", "invalid"); + } + else if (doOutdated || doTsC || doTsS) { + addElementWithValue(q, "Validity", "questionable"); + } + else { + addElementWithValue(q, "Validity", "good"); + } + + if (doTsC || doOutdated) { + Datapoint* detailQuality = addElement(q, "DetailQuality"); + + addElementWithValue(detailQuality, "oldData", 1L); + } +} + +void PivotObject::addTmOrg(bool substituted) +{ + Datapoint* tmOrg = addElement(m_ln, "TmOrg"); + + if (substituted) + addElementWithValue(tmOrg, "stVal", "substituted"); + else + addElementWithValue(tmOrg, "stVal", "genuine"); +} + +void PivotObject::addTmValidity(bool invalid) +{ + Datapoint* tmValidity = addElement(m_ln, "TmValidity"); + + if (invalid) + addElementWithValue(tmValidity, "stVal", "invalid"); + else + addElementWithValue(tmValidity, "stVal", "good"); +} + +void PivotObject::addTimestamp(unsigned long doTs, bool doTsS) +{ + Datapoint* t = addElement(m_cdc, "t"); + + auto timePair = PivotTimestamp::fromTimestamp(static_cast(doTs)); + addElementWithValue(t, "SecondSinceEpoch", timePair.first); + addElementWithValue(t, "FractionOfSecond", timePair.second); + + if (doTsS) { + Datapoint* timeQuality = addElement(t, "TimeQuality"); + addElementWithValue(timeQuality, "clockNotSynchronized", 1L); + } +} + +Datapoint* PivotObject::toHnzCommandObject(std::shared_ptr exchangeConfig) +{ + Datapoint* commandObject = createDp("command_object"); + + if (commandObject) { + addElementWithValue(commandObject, "co_type", exchangeConfig->getTypeId()); + addElementWithValue(commandObject, "co_addr", static_cast(exchangeConfig->getAddress())); + addElementWithValue(commandObject, "co_value", intVal); + } + + return commandObject; +} \ No newline at end of file diff --git a/src/hnz_pivot_utility.cpp b/src/hnz_pivot_utility.cpp new file mode 100644 index 0000000..38005be --- /dev/null +++ b/src/hnz_pivot_utility.cpp @@ -0,0 +1,38 @@ +/* + * FledgePower HNZ <-> pivot filter utility functions. + * + * Copyright (c) 2022, RTE (https://www.rte-france.com) + * + * Released under the Apache 2.0 Licence + * + * Author: Michael Zillgith (michael.zillgith at mz-automation.de) + * + */ + +#include +#include "hnz_pivot_utility.hpp" + +std::string PivotUtility::join(const std::vector &list, const std::string &sep /*= ", "*/) +{ + std::string ret; + for(const auto &str : list) { + if(!ret.empty()) { + ret += sep; + } + ret += str; + } + return ret; +} + +std::vector PivotUtility::split(const std::string& str, char sep) { + std::stringstream ss(str); + std::string item; + std::vector elems; + while (std::getline(ss, item, sep)) { + elems.push_back(std::move(item)); + } + if (elems.empty()) { + elems.push_back(str); + } + return elems; +} diff --git a/src/plugin.cpp b/src/plugin.cpp new file mode 100644 index 0000000..eb41419 --- /dev/null +++ b/src/plugin.cpp @@ -0,0 +1,179 @@ +/* + * FledgePower HNZ <-> pivot filter plugin. 
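+ *
+ * Entry points of the filter: readings named "data_object" are converted
+ * to "PIVOT" objects, and readings named "PIVOT" are converted back to HNZ
+ * "command_object" datapoints (see HNZPivotFilter::convertDatapoint).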
+ * + * Copyright (c) 2022, RTE (https://www.rte-france.com) + * + * Released under the Apache 2.0 Licence + * + * Author: Michael Zillgith (michael.zillgith at mz-automation.de) + * + */ + +#include +#include +#include + +#include "hnz_pivot_utility.hpp" +#include "hnz_pivot_filter.hpp" +#include "hnz_pivot_filter_config.hpp" + +extern "C" { + +/** + * Plugin specific default configuration + */ +static const char *default_config = QUOTE({ + "plugin" : { + "description" : "HNZ to pivot filter plugin", + "type" : "string", + "default" : FILTER_NAME, + "readonly" : "true" + }, + "enable": { + "description": "A switch that can be used to enable or disable execution of the filter.", + "displayName": "Enabled", + "type": "boolean", + "default": "true" + }, + "exchanged_data": { + "description" : "exchanged data list", + "type" : "JSON", + "displayName" : "Exchanged data list", + "order" : "1", + "default" : QUOTE({ + "exchanged_data" : { + "name" : "SAMPLE", + "version" : "1.0", + "datapoints" : [ + { + "label" : "TS1", + "pivot_id" : "ID114562", + "pivot_type" : "SpsTyp", + "protocols" : [ + { + "name" : "iec104", + "address" : "45-672", + "typeid" : "M_SP_TB_1" + }, + { + "name" : "tase2", + "address" : "S_114562", + "typeid" : "Data_StateQTimeTagExtended" + }, + { + "name" : "hnzip", + "address" : "511", + "typeid" : "TS" + } + ] + }, + { + "label" : "TM1", + "pivot_id" : "ID99876", + "pivot_type" : "MvTyp", + "protocols" : [ + { + "name" : "iec104", + "address" : "45-984", + "typeid" : "M_ME_NA_1" + }, + { + "name" : "tase2", + "address" : "S_114563", + "typeid" : "Data_RealQ" + }, + { + "name" : "hnzip", + "address" : "512", + "typeid" : "TM" + } + ] + } + ] + } + }) + } +}); + + +/** + * The C API plugin information structure + */ +static PLUGIN_INFORMATION pluginInfo = { + FILTER_NAME, // Name + VERSION, // Version + 0, // Flags + PLUGIN_TYPE_FILTER, // Type + "1.0.0", // Interface version + default_config // Configuration +}; + +/** + * Return the information about this plugin + */ +PLUGIN_INFORMATION *plugin_info() +{ + return &pluginInfo; +} + +/** + * Initialise the plugin with configuration. + * + * This function is called to get the plugin handle. 
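+ *
+ * @param config The plugin configuration category (when null, the
+ *               plugin's default configuration is used instead)
+ * @param outHandle Handle passed to the output stream callback
+ * @param output Output stream used to forward processed readings
+ * @return The plugin handle (an HNZPivotFilter instance)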
+ */ +PLUGIN_HANDLE plugin_init(ConfigCategory* config, + OUTPUT_HANDLE *outHandle, + OUTPUT_STREAM output) +{ + std::string beforeLog = HNZPivotConfig::getPluginName() + " -"; + PivotUtility::log_info("%s Initializing filter", beforeLog.c_str()); + + if (config == nullptr) { + PivotUtility::log_warn("%s No config provided for filter, using default config", beforeLog.c_str()); + auto info = plugin_info(); + config = new ConfigCategory("hnztopivot", info->config); + config->setItemsValueFromDefault(); + } + + auto pivotFilter = new HNZPivotFilter(FILTER_NAME, + *config, outHandle, output); + + return static_cast(pivotFilter); +} + +/** + * Ingest a set of readings into the plugin for processing + * + * @param handle The plugin handle returned from plugin_init + * @param readingSet The readings to process + */ +void plugin_ingest(PLUGIN_HANDLE handle, + READINGSET *readingSet) +{ + auto pivotFilter = static_cast(handle); + pivotFilter->ingest(readingSet); +} + +/** + * Plugin reconfiguration method + * + * @param handle The plugin handle + * @param newConfig The updated configuration + */ +void plugin_reconfigure(PLUGIN_HANDLE handle, const std::string& newConfig) +{ + auto pivotFilter = static_cast(handle); + pivotFilter->reconfigure(newConfig); +} + +/** + * Call the shutdown method in the plugin + */ +void plugin_shutdown(PLUGIN_HANDLE handle) +{ + auto pivotFilter = static_cast(handle); + delete pivotFilter; +} + +// End of extern "C" +}; diff --git a/tests/CMakeLists.txt b/tests/CMakeLists.txt new file mode 100644 index 0000000..24c5f89 --- /dev/null +++ b/tests/CMakeLists.txt @@ -0,0 +1,97 @@ +cmake_minimum_required(VERSION 2.8) + +project(RunTests) + +# Supported options: +# -DFLEDGE_INCLUDE +# -DFLEDGE_LIB +# -DFLEDGE_SRC +# -DFLEDGE_INSTALL +# +# If no -D options are given and FLEDGE_ROOT environment variable is set +# then Fledge libraries and header files are pulled from FLEDGE_ROOT path. + +set(CMAKE_CXX_FLAGS "-std=c++11 -O3") + +# Generation version header file +set_source_files_properties(version.h PROPERTIES GENERATED TRUE) + +if (${CMAKE_BUILD_TYPE} STREQUAL Coverage) + add_custom_command( + OUTPUT version.h + DEPENDS ${CMAKE_SOURCE_DIR}/VERSION + COMMAND ${CMAKE_SOURCE_DIR}/mkversion ${CMAKE_SOURCE_DIR} + COMMENT "Generating version header" + VERBATIM + ) +else() + add_custom_command( + OUTPUT version.h + DEPENDS ${CMAKE_SOURCE_DIR}/../VERSION + COMMAND ${CMAKE_SOURCE_DIR}/../mkversion ${CMAKE_SOURCE_DIR}/.. + COMMENT "Generating version header" + VERBATIM + ) +endif() + +include_directories(${CMAKE_BINARY_DIR}) + +# Set plugin type (south, north, filter) +set(PLUGIN_TYPE "filter") + +# Add here all needed Fledge libraries as list +set(NEEDED_FLEDGE_LIBS common-lib services-common-lib filters-common-lib) + +set(BOOST_COMPONENTS system thread) + +find_package(Boost 1.53.0 COMPONENTS ${BOOST_COMPONENTS} REQUIRED) +include_directories(SYSTEM ${Boost_INCLUDE_DIR}) + +# Find source files +file(GLOB SOURCES ../src/*.cpp) +file(GLOB unittests "*.cpp") + +# Find Fledge includes and libs, by including FindFledge.cmak file +set(CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH} ${CMAKE_CURRENT_SOURCE_DIR}/..) 
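+# FindFledge.cmake (from the parent directory added above) resolves the Fledge
+# paths from the -D options listed at the top of this file, or from the
+# FLEDGE_ROOT environment variable when no -D options are given.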
+find_package(Fledge) +# If errors: make clean and remove Makefile +if (NOT FLEDGE_FOUND) + if (EXISTS "${CMAKE_BINARY_DIR}/Makefile") + execute_process(COMMAND make clean WORKING_DIRECTORY ${CMAKE_BINARY_DIR}) + file(REMOVE "${CMAKE_BINARY_DIR}/Makefile") + endif() + # Stop the build process + message(FATAL_ERROR "Fledge plugin '${PROJECT_NAME}' build error.") +endif() +# On success, FLEDGE_INCLUDE_DIRS and FLEDGE_LIB_DIRS variables are set + +# Locate GTest +find_package(GTest REQUIRED) +include_directories(${GTEST_INCLUDE_DIRS}) + +# Add ../include +include_directories(../include) +include_directories(/usr/local/include/lib60870) +# Add Fledge include dir(s) +include_directories(${FLEDGE_INCLUDE_DIRS}) + +# Add Fledge lib path +link_directories(${FLEDGE_LIB_DIRS}) + +# Link runTests with what we want to test and the GTest and pthread library +add_executable(RunTests ${unittests} ${SOURCES} version.h) + +set(FLEDGE_INSTALL "" CACHE INTERNAL "") +# Install library +if (FLEDGE_INSTALL) + message(STATUS "Installing ${PROJECT_NAME} in ${FLEDGE_INSTALL}/plugins/${PLUGIN_TYPE}/${PROJECT_NAME}") + install(TARGETS ${PROJECT_NAME} DESTINATION ${FLEDGE_INSTALL}/plugins/${PLUGIN_TYPE}/${PROJECT_NAME}) +endif() + +target_link_libraries(${PROJECT_NAME} ${GTEST_LIBRARIES} pthread) +target_link_libraries(${PROJECT_NAME} ${NEEDED_FLEDGE_LIBS}) +target_link_libraries(${PROJECT_NAME} ${Boost_LIBRARIES}) + +target_link_libraries(${PROJECT_NAME} -lpthread -ldl) + +target_compile_definitions(${PROJECT_NAME} PRIVATE UNIT_TEST) diff --git a/tests/main.cpp b/tests/main.cpp new file mode 100644 index 0000000..af5aa21 --- /dev/null +++ b/tests/main.cpp @@ -0,0 +1,14 @@ +#include + +using namespace std; + +int main(int argc, char **argv) { + testing::InitGoogleTest(&argc, argv); + + testing::GTEST_FLAG(repeat) = 2; + testing::GTEST_FLAG(shuffle) = true; + testing::GTEST_FLAG(death_test_style) = "threadsafe"; + + return RUN_ALL_TESTS(); +} + diff --git a/tests/test_pivot_filter_config.cpp b/tests/test_pivot_filter_config.cpp new file mode 100644 index 0000000..a9f73dd --- /dev/null +++ b/tests/test_pivot_filter_config.cpp @@ -0,0 +1,137 @@ +#include +#include + +#include "hnz_pivot_filter_config.hpp" + +TEST(PivotHNZPluginConfig, PivotConfigValid) +{ + HNZPivotConfig testConfig; + testConfig.importExchangeConfig(QUOTE({ + "exchanged_data" : { + "name" : "SAMPLE", + "version" : "1.0", + "datapoints" : [ + { + "label" : "TS1", + "pivot_id" : "ID114562", + "pivot_type" : "SpsTyp", + "protocols" : [ + { + "name" : "iec104", + "address" : "45-672", + "typeid" : "M_SP_TB_1" + }, + { + "name" : "tase2", + "address" : "S_114562", + "typeid" : "Data_StateQTimeTagExtended" + }, + { + "name" : "hnzip", + "address" : "511", + "typeid" : "TS" + } + ] + }, + { + "label" : "TM1", + "pivot_id" : "ID99876", + "pivot_type" : "MvTyp", + "protocols" : [ + { + "name" : "iec104", + "address" : "45-984", + "typeid" : "M_ME_NA_1" + }, + { + "name" : "tase2", + "address" : "S_114563", + "typeid" : "Data_RealQ" + }, + { + "name" : "hnzip", + "address" : "512", + "typeid" : "TM" + } + ] + } + ] + } + })); + ASSERT_TRUE(testConfig.isComplete()); +} + +TEST(PivotHNZPluginConfig, PivotConfigInvalidJson) +{ + HNZPivotConfig testConfig; + testConfig.importExchangeConfig("invalid json config"); + ASSERT_FALSE(testConfig.isComplete()); +} + +TEST(PivotHNZPluginConfig, PivotConfigMissingRootObject) +{ + HNZPivotConfig testConfig; + testConfig.importExchangeConfig("{}"); + ASSERT_FALSE(testConfig.isComplete()); +} + +TEST(PivotHNZPluginConfig, 
PivotConfigMissingFields) +{ + HNZPivotConfig testConfig; + testConfig.importExchangeConfig(QUOTE({ + "exchanged_data" : {} + })); + ASSERT_FALSE(testConfig.isComplete()); +} + +TEST(PivotHNZPluginConfig, PivotConfigInvalidFields) +{ + HNZPivotConfig testConfig; + testConfig.importExchangeConfig(QUOTE({ + "exchanged_data" : { + "name" : 42, + "version" : false, + "datapoints" : [ + { + "label" : 42, + "pivot_id" : 42, + "pivot_type" : 42, + "protocols" : [ + { + "name" : 42, + "address" : "aaa", + "typeid" : 42 + } + ] + } + ] + } + })); + ASSERT_FALSE(testConfig.isComplete()); +} + +TEST(PivotHNZPluginConfig, PivotConfigMsgAddressOutOfRange) +{ + HNZPivotConfig testConfig; + testConfig.importExchangeConfig(QUOTE({ + "exchanged_data" : { + "name" : "SAMPLE", + "version" : "1.0", + "datapoints" : [ + { + "label" : "TS1", + "pivot_id" : "ID114562", + "pivot_type" : "SpsTyp", + "protocols" : [ + { + "name" : "hnzip", + "address" : "9999999999", + "typeid" : "TS" + } + ] + } + ] + } + })); + ASSERT_FALSE(testConfig.isComplete()); +} \ No newline at end of file diff --git a/tests/test_pivot_utility.cpp b/tests/test_pivot_utility.cpp new file mode 100644 index 0000000..8d66ea9 --- /dev/null +++ b/tests/test_pivot_utility.cpp @@ -0,0 +1,39 @@ +#include + +#include "hnz_pivot_utility.hpp" + +TEST(PivotHNZPluginUtility, Join) +{ + ASSERT_STREQ(PivotUtility::join({}).c_str(), ""); + ASSERT_STREQ(PivotUtility::join({"TEST"}).c_str(), "TEST"); + ASSERT_STREQ(PivotUtility::join({"TEST", "TOAST", "TASTE"}).c_str(), "TEST, TOAST, TASTE"); + ASSERT_STREQ(PivotUtility::join({"TEST", "", "TORTOISE"}).c_str(), "TEST, , TORTOISE"); + ASSERT_STREQ(PivotUtility::join({}, "-").c_str(), ""); + ASSERT_STREQ(PivotUtility::join({"TEST"}, "-").c_str(), "TEST"); + ASSERT_STREQ(PivotUtility::join({"TEST", "TOAST", "TASTE"}, "-").c_str(), "TEST-TOAST-TASTE"); + ASSERT_STREQ(PivotUtility::join({"TEST", "", "TORTOISE"}, "-").c_str(), "TEST--TORTOISE"); + ASSERT_STREQ(PivotUtility::join({}, "").c_str(), ""); + ASSERT_STREQ(PivotUtility::join({"TEST"}, "").c_str(), "TEST"); + ASSERT_STREQ(PivotUtility::join({"TEST", "TOAST", "TASTE"}, "").c_str(), "TESTTOASTTASTE"); + ASSERT_STREQ(PivotUtility::join({"TEST", "", "TORTOISE"}, "").c_str(), "TESTTORTOISE"); +} + +TEST(PivotHNZPluginUtility, Split) +{ + ASSERT_EQ(PivotUtility::split("", '-'), std::vector{""}); + ASSERT_EQ(PivotUtility::split("TEST", '-'), std::vector{"TEST"}); + std::vector out1{"TEST", "TOAST", "TASTE"}; + ASSERT_EQ(PivotUtility::split("TEST-TOAST-TASTE", '-'), out1); + std::vector out2{"TEST", "", "TORTOISE"}; + ASSERT_EQ(PivotUtility::split("TEST--TORTOISE", '-'), out2); +} + +TEST(PivotHNZPluginUtility, Logs) +{ + std::string text("This message is at level %s"); + ASSERT_NO_THROW(PivotUtility::log_debug(text.c_str(), "debug")); + ASSERT_NO_THROW(PivotUtility::log_info(text.c_str(), "info")); + ASSERT_NO_THROW(PivotUtility::log_warn(text.c_str(), "warning")); + ASSERT_NO_THROW(PivotUtility::log_error(text.c_str(), "error")); + ASSERT_NO_THROW(PivotUtility::log_fatal(text.c_str(), "fatal")); +} \ No newline at end of file diff --git a/tests/test_plugin_info.cpp b/tests/test_plugin_info.cpp new file mode 100644 index 0000000..3dc675f --- /dev/null +++ b/tests/test_plugin_info.cpp @@ -0,0 +1,33 @@ +#include +#include +#include + +#include "version.h" + +using namespace rapidjson; + +extern "C" { + PLUGIN_INFORMATION *plugin_info(); +}; + +TEST(PivotHNZPluginInfo, PluginInfo) +{ + PLUGIN_INFORMATION *info = plugin_info(); + ASSERT_STREQ(info->name, 
"hnz_pivot_filter"); + ASSERT_STREQ(info->version, VERSION); + ASSERT_EQ(info->options, 0); + ASSERT_STREQ(info->type, PLUGIN_TYPE_FILTER); + ASSERT_STREQ(info->interface, "1.0.0"); +} + +TEST(PivotHNZPluginInfo, PluginInfoConfigParse) +{ + PLUGIN_INFORMATION *info = plugin_info(); + Document doc; + doc.Parse(info->config); + ASSERT_EQ(doc.HasParseError(), false); + ASSERT_EQ(doc.IsObject(), true); + ASSERT_EQ(doc.HasMember("plugin"), true); + ASSERT_EQ(doc.HasMember("enable"), true); + ASSERT_EQ(doc.HasMember("exchanged_data"), true); +} diff --git a/tests/test_plugin_ingest.cpp b/tests/test_plugin_ingest.cpp new file mode 100644 index 0000000..74ffa06 --- /dev/null +++ b/tests/test_plugin_ingest.cpp @@ -0,0 +1,2076 @@ +#include +#include +#include +#include +#include +#include +#include "hnz_pivot_filter.hpp" +#include "hnz_pivot_utility.hpp" +#include "hnz_pivot_object.hpp" + +using namespace rapidjson; + +extern "C" { + PLUGIN_INFORMATION *plugin_info(); + + PLUGIN_HANDLE plugin_init(ConfigCategory* config, + OUTPUT_HANDLE *outHandle, + OUTPUT_STREAM output); + + void plugin_shutdown(PLUGIN_HANDLE handle); + void plugin_reconfigure(PLUGIN_HANDLE *handle, const std::string& newConfig); + void plugin_ingest(PLUGIN_HANDLE handle, READINGSET *readingSet); +}; + +static const std::string test_config = QUOTE({ + "enable" :{ + "value": "true" + }, + "exchanged_data": { + "value" : { + "exchanged_data": { + "name" : "SAMPLE", + "version" : "1.0", + "datapoints" : [ + { + "label" : "TS1", + "pivot_id" : "ID114561", + "pivot_type" : "SpsTyp", + "protocols" : [ + { + "name" : "hnzip", + "address" : "511", + "typeid" : "TS" + } + ] + }, + { + "label" : "TS2", + "pivot_id" : "ID114562", + "pivot_type" : "DpsTyp", + "protocols" : [ + { + "name" : "hnzip", + "address" : "522", + "typeid" : "TS" + } + ] + }, + { + "label" : "TS3", + "pivot_id" : "ID114567", + "pivot_type" : "SpsTyp", + "protocols" : [ + { + "name" : "hnzip", + "address" : "577", + "typeid" : "TS" + } + ] + }, + { + "label" : "TM1", + "pivot_id" : "ID111111", + "pivot_type" : "MvTyp", + "protocols" : [ + { + "name" : "hnzip", + "address" : "20", + "typeid" : "TM" + } + ] + }, + { + "label" : "TM2", + "pivot_id" : "ID111222", + "pivot_type" : "MvTyp", + "protocols" : [ + { + "name" : "hnzip", + "address" : "21", + "typeid" : "TM" + } + ] + }, + { + "label" : "TM3", + "pivot_id" : "ID111333", + "pivot_type" : "MvTyp", + "protocols" : [ + { + "name" : "hnzip", + "address" : "22", + "typeid" : "TM" + } + ] + }, + { + "label" : "TM4", + "pivot_id" : "ID111444", + "pivot_type" : "MvTyp", + "protocols" : [ + { + "name" : "hnzip", + "address" : "23", + "typeid" : "TM" + } + ] + }, + { + "label" : "TC1", + "pivot_id" : "ID222222", + "pivot_type" : "SpcTyp", + "protocols" : [ + { + "name": "hnzip", + "address" : "142", + "typeid" : "TC" + } + ] + }, + { + "label" : "TC2", + "pivot_id" : "ID333333", + "pivot_type" : "DpcTyp", + "protocols" : [ + { + "name": "hnzip", + "address" : "143", + "typeid" : "TC" + } + ] + }, + { + "label" : "TVC1", + "pivot_id" : "ID444444", + "pivot_type" : "IncTyp", + "protocols" : [ + { + "name": "hnzip", + "address" : "31", + "typeid" : "TVC" + } + ] + } + ] + } + } + } +}); + +static int outputHandlerCalled = 0; +static std::shared_ptr lastReading = nullptr; +// Dummy object used to be able to call parseJson() freely +static DatapointValue dummyValue(""); +static Datapoint dummyDataPoint({}, dummyValue); + +static const std::vector allCommandAttributeNames = { + "co_type", "co_addr", "co_value" +}; +static const 
std::vector allPivotAttributeNames = { + // TS messages + "GTIS.ComingFrom", "GTIS.Identifier", "GTIS.Cause.stVal", "GTIS.TmValidity.stVal", "GTIS.TmOrg.stVal", + "GTIS.SpsTyp.stVal", "GTIS.SpsTyp.q.Validity", "GTIS.SpsTyp.q.DetailQuality.oldData", + "GTIS.SpsTyp.t.SecondSinceEpoch", "GTIS.SpsTyp.t.FractionOfSecond", "GTIS.SpsTyp.t.TimeQuality.clockNotSynchronized", + "GTIS.DpsTyp.stVal", "GTIS.DpsTyp.q.Validity", "GTIS.DpsTyp.q.DetailQuality.oldData", + "GTIS.DpsTyp.t.SecondSinceEpoch", "GTIS.DpsTyp.t.FractionOfSecond", "GTIS.DpsTyp.t.TimeQuality.clockNotSynchronized", + // TM messages + "GTIM.ComingFrom", "GTIM.Identifier", "GTIM.Cause.stVal", "GTIM.TmValidity.stVal", "GTIM.TmOrg.stVal", + "GTIM.MvTyp.mag.i", "GTIM.MvTyp.q.Validity", "GTIM.MvTyp.q.DetailQuality.oldData", + "GTIM.MvTyp.t.SecondSinceEpoch", "GTIM.MvTyp.t.FractionOfSecond", "GTIM.MvTyp.t.TimeQuality.clockNotSynchronized", + // TC/TVC messages + "GTIC.ComingFrom", "GTIC.Identifier", "GTIC.Cause.stVal", "GTIC.TmValidity.stVal", "GTIC.TmOrg.stVal", + "GTIC.SpcTyp.stVal", "GTIC.SpcTyp.ctlVal", "GTIC.SpcTyp.q.Validity", "GTIC.SpcTyp.q.DetailQuality.oldData", + "GTIC.SpcTyp.t.SecondSinceEpoch", "GTIC.SpcTyp.t.FractionOfSecond", "GTIC.SpcTyp.t.TimeQuality.clockNotSynchronized", + "GTIC.DpcTyp.stVal", "GTIC.DpcTyp.ctlVal", "GTIC.DpcTyp.q.Validity", "GTIC.DpcTyp.q.DetailQuality.oldData", + "GTIC.DpcTyp.t.SecondSinceEpoch", "GTIC.DpcTyp.t.FractionOfSecond", "GTIC.DpcTyp.t.TimeQuality.clockNotSynchronized", + "GTIC.IncTyp.stVal", "GTIC.IncTyp.ctlVal", "GTIC.IncTyp.q.Validity", "GTIC.IncTyp.q.DetailQuality.oldData", + "GTIC.IncTyp.t.SecondSinceEpoch", "GTIC.IncTyp.t.FractionOfSecond", "GTIC.IncTyp.t.TimeQuality.clockNotSynchronized", +}; +static const std::vector allSouthEventAttributeNames = { + "connx_status", "gi_status" +}; + +static void createReadingSet(ReadingSet*& outReadingSet, const std::string& assetName, const std::vector& jsons) +{ + std::vector *allPoints = new std::vector(); + for(const std::string& json: jsons) { + // Create Reading + std::vector *p = nullptr; + ASSERT_NO_THROW(p = dummyDataPoint.parseJson(json)); + ASSERT_NE(p, nullptr); + allPoints->insert(std::end(*allPoints), std::begin(*p), std::end(*p)); + delete p; + } + Reading *reading = new Reading(assetName, *allPoints); + std::vector *readings = new std::vector(); + readings->push_back(reading); + // Create ReadingSet + outReadingSet = new ReadingSet(readings); +} + +static void createReadingSet(ReadingSet*& outReadingSet, const std::string& assetName, const std::string& json) +{ + createReadingSet(outReadingSet, assetName, std::vector{json}); +} + +static bool hasChild(Datapoint& dp, const std::string& childLabel) { + DatapointValue& dpv = dp.getData(); + + const std::vector* dps = dpv.getDpVec(); + + for (auto sdp : *dps) { + if (sdp->getName() == childLabel) { + return true; + } + } + + return false; +} + +static Datapoint* getChild(Datapoint& dp, const std::string& childLabel) { + DatapointValue& dpv = dp.getData(); + + const std::vector* dps = dpv.getDpVec(); + + for (Datapoint* childDp : *dps) { + if (childDp->getName() == childLabel) { + return childDp; + } + } + + return nullptr; +} + +template +static T callOnLastPathElement(Datapoint& dp, const std::string& childPath, std::function func) { + if (childPath.find(".") != std::string::npos) { + // Check if the first element in the path is a child of current datapoint + std::vector splittedPath = PivotUtility::split(childPath, '.'); + const std::string& topNode(splittedPath[0]); + Datapoint* child = 
+        if (child == nullptr) {
+            return static_cast<T>(0);
+        }
+        // If it is, call this function recursively on the datapoint found with the rest of the path
+        splittedPath.erase(splittedPath.begin());
+        const std::string& remainingPath(PivotUtility::join(splittedPath, "."));
+        return callOnLastPathElement(*child, remainingPath, func);
+    }
+    else {
+        // If the last element of the path is reached, call the function on it
+        return func(dp, childPath);
+    }
+}
+
+static int64_t getIntValue(const Datapoint& dp) {
+    return dp.getData().toInt();
+}
+
+static std::string getStrValue(const Datapoint& dp) {
+    return dp.getData().toStringValue();
+}
+
+static bool hasObject(const Reading& reading, const std::string& label) {
+    std::vector<Datapoint*> dataPoints = reading.getReadingData();
+
+    for (Datapoint* dp : dataPoints) {
+        if (dp->getName() == label) {
+            return true;
+        }
+    }
+
+    return false;
+}
+
+static Datapoint* getObject(const Reading& reading, const std::string& label) {
+    std::vector<Datapoint*> dataPoints = reading.getReadingData();
+
+    for (Datapoint* dp : dataPoints) {
+        if (dp->getName() == label) {
+            return dp;
+        }
+    }
+
+    return nullptr;
+}
+
+static void testOutputStream(OUTPUT_HANDLE * handle, READINGSET* readingSet)
+{
+    const std::vector<Reading*>& readings = readingSet->getAllReadings();
+
+    for (Reading* reading : readings) {
+        printf("output: Reading: %s\n", reading->getAssetName().c_str());
+
+        const std::vector<Datapoint*>& datapoints = reading->getReadingData();
+
+        for (Datapoint* dp : datapoints) {
+            printf("output: datapoint: %s -> %s\n", dp->getName().c_str(), dp->getData().toString().c_str());
+        }
+
+        lastReading = std::make_shared<Reading>(*reading);
+    }
+
+    *(READINGSET **)handle = readingSet;
+    outputHandlerCalled++;
+}
+
+struct ReadingInfo {
+    std::string type;
+    std::string value;
+};
+static void validateReading(std::shared_ptr<Reading> currentReading, const std::string& assetName, const std::string& rootObjectName,
+                            const std::vector<std::string>& allAttributeNames, const std::map<std::string, ReadingInfo>& attributes) {
+    ASSERT_NE(nullptr, currentReading.get()) << assetName << ": Invalid reading";
+    ASSERT_EQ(assetName, currentReading->getAssetName());
+    // Validate data_object structure received
+    ASSERT_TRUE(hasObject(*currentReading, rootObjectName)) << assetName << ": " << rootObjectName << " not found";
+    Datapoint* data_object = getObject(*currentReading, rootObjectName);
+    ASSERT_NE(nullptr, data_object) << assetName << ": " << rootObjectName << " is null";
+    // Validate existence of the required keys and non-existence of the others
+    for (const auto &kvp : attributes) {
+        const std::string& name(kvp.first);
+        ASSERT_TRUE(std::find(allAttributeNames.begin(), allAttributeNames.end(), name) != allAttributeNames.end())
+            << assetName << ": Attribute not listed in full list: " << name;
+    }
+    for (const std::string& name : allAttributeNames) {
+        bool attributeIsExpected = static_cast<bool>(attributes.count(name));
+        std::function<bool(Datapoint&, const std::string&)> hasChildFn(&hasChild);
+        ASSERT_EQ(callOnLastPathElement(*data_object, name, hasChildFn),
+                  attributeIsExpected) << assetName << ": Attribute " << (attributeIsExpected ? "not found: " : "should not exist: ") << name;
+    }
+    // Validate value and type of each key
+    for (auto const& kvp : attributes) {
+        const std::string& name = kvp.first;
+        const std::string& type = kvp.second.type;
+        const std::string& expectedValue = kvp.second.value;
+        std::function<Datapoint*(Datapoint&, const std::string&)> getChildFn(&getChild);
+        if (type == std::string("string")) {
+            ASSERT_EQ(expectedValue, getStrValue(*callOnLastPathElement(*data_object, name, getChildFn))) << assetName << ": Unexpected value for attribute " << name;
+        }
+        else if (type == std::string("int64_t")) {
+            ASSERT_EQ(std::stoll(expectedValue), getIntValue(*callOnLastPathElement(*data_object, name, getChildFn))) << assetName << ": Unexpected value for attribute " << name;
+        }
+        else if (type == std::string("int64_t_range")) {
+            auto splitted = PivotUtility::split(expectedValue, ';');
+            ASSERT_EQ(splitted.size(), 2);
+            const std::string& expectedRangeMin = splitted.front();
+            const std::string& expectedRangeMax = splitted.back();
+            int64_t value = getIntValue(*callOnLastPathElement(*data_object, name, getChildFn));
+            ASSERT_GE(value, std::stoll(expectedRangeMin)) << assetName << ": Value lower than min for attribute " << name;
+            ASSERT_LE(value, std::stoll(expectedRangeMax)) << assetName << ": Value higher than max for attribute " << name;
+        }
+        else {
+            FAIL() << assetName << ": Unknown type: " << type;
+        }
+    }
+}
+
+TEST(PivotHNZPluginIngestRaw, IngestWithNoCallbackDefined)
+{
+    PLUGIN_HANDLE handle = nullptr;
+    ASSERT_NO_THROW(handle = plugin_init(nullptr, nullptr, nullptr));
+    ASSERT_TRUE(handle != nullptr);
+    HNZPivotFilter* filter = static_cast<HNZPivotFilter*>(handle);
+
+    ASSERT_NO_THROW(plugin_reconfigure(static_cast<PLUGIN_HANDLE*>(handle), test_config));
+    ASSERT_EQ(filter->isEnabled(), true);
+
+    std::string jsonMessageTSCE = QUOTE({
+        "data_object":{
+            "do_type":"TS",
+            "do_station":12,
+            "do_addr":511,
+            "do_value":1,
+            "do_valid":0,
+            "do_cg":0,
+            "do_outdated":0,
+            "do_ts": 1685019425432,
+            "do_ts_iv":0,
+            "do_ts_c":0,
+            "do_ts_s":0
+        }
+    });
+    ReadingSet* readingSet = nullptr;
+    createReadingSet(readingSet, "TS1", jsonMessageTSCE);
+    if(HasFatalFailure()) return;
+    ASSERT_NE(readingSet, nullptr);
+
+    outputHandlerCalled = 0;
+    ASSERT_NO_THROW(plugin_ingest(filter, static_cast<READINGSET*>(readingSet)));
+    ASSERT_EQ(outputHandlerCalled, 0);
+
+    ASSERT_NO_THROW(plugin_shutdown(reinterpret_cast<PLUGIN_HANDLE>(filter)));
+}
+
+class PivotHNZPluginIngest : public testing::Test
+{
+protected:
+    HNZPivotFilter *filter = nullptr;  // Filter object under test
+    ReadingSet *resultReading;
+
+    // SetUp runs before every test, so each variable is reinitialized
+    void SetUp() override
+    {
+        PLUGIN_HANDLE handle = nullptr;
+        ASSERT_NO_THROW(handle = plugin_init(nullptr, &resultReading, testOutputStream));
+        filter = static_cast<HNZPivotFilter*>(handle);
+
+        ASSERT_NO_THROW(plugin_reconfigure(static_cast<PLUGIN_HANDLE*>(handle), test_config));
+        ASSERT_EQ(filter->isEnabled(), true);
+
+        outputHandlerCalled = 0;
+        lastReading = nullptr;
+    }
+
+    // TearDown runs after every test, so each variable is destroyed again
+    void TearDown() override
+    {
+        if (filter) {
+            ASSERT_NO_THROW(plugin_shutdown(reinterpret_cast<PLUGIN_HANDLE>(filter)));
+        }
+    }
+};
+
+TEST_F(PivotHNZPluginIngest, IngestOnEmptyReadingSet)
+{
+    ASSERT_NO_THROW(plugin_ingest(filter, nullptr));
+    ASSERT_EQ(outputHandlerCalled, 0);
+}
+
+TEST_F(PivotHNZPluginIngest, IngestOnPluginDisabled)
+{
+    static std::string reconfigure = QUOTE({
+        "enable": {
+            "value": "false"
+        }
+    });
+
+    ASSERT_NO_THROW(plugin_reconfigure(reinterpret_cast<PLUGIN_HANDLE*>(filter), reconfigure));
ASSERT_EQ(filter->isEnabled(), false); + + std::string jsonMessageTSCE = QUOTE({ + "data_object":{ + "do_type":"TS", + "do_station":12, + "do_addr":511, + "do_value":1, + "do_valid":0, + "do_cg":0, + "do_outdated":0, + "do_ts": 1685019425432, + "do_ts_iv":0, + "do_ts_c":0, + "do_ts_s":0 + } + }); + ReadingSet* readingSet = nullptr; + createReadingSet(readingSet, "TS1", jsonMessageTSCE); + if(HasFatalFailure()) return; + ASSERT_NE(readingSet, nullptr); + + ASSERT_NO_THROW(plugin_ingest(filter, static_cast(readingSet))); + ASSERT_EQ(outputHandlerCalled, 0); +} + +TEST_F(PivotHNZPluginIngest, OneReadingMultipleDatapoints) +{ + std::string jsonMessagePivot1 = QUOTE({ + "PIVOT": { + "GTIC": { + "SpcTyp": { + "q": { + "Source": "process", + "Validity": "good" + }, + "t": { + "FractionOfSecond": 9529458, + "SecondSinceEpoch": 1669714183 + }, + "ctlVal": 1 + }, + "Identifier": "ID222222", + "TmOrg": { + "stVal": "genuine" + } + } + } + }); + std::string jsonMessagePivot2 = QUOTE({ + "PIVOT": { + "GTIC": { + "SpcTyp": { + "q": { + "Source": "process", + "Validity": "good" + }, + "t": { + "FractionOfSecond": 9529458, + "SecondSinceEpoch": 1669714184 + }, + "ctlVal": 0 + }, + "Identifier": "ID222222", + "TmOrg": { + "stVal": "genuine" + } + } + } + }); + std::string jsonMessagePivot3 = QUOTE({ + "PIVOT": { + "GTIC": { + "SpcTyp": { + "q": { + "Source": "process", + "Validity": "good" + }, + "t": { + "FractionOfSecond": 9529458, + "SecondSinceEpoch": 1669714185 + }, + "ctlVal": 0 + }, + "Identifier": "ID222222", + "TmOrg": { + "stVal": "genuine" + } + } + } + }); + ReadingSet* readingSet = nullptr; + createReadingSet(readingSet, "TC1", {jsonMessagePivot1, jsonMessagePivot2, jsonMessagePivot3}); + if(HasFatalFailure()) return; + ASSERT_NE(readingSet, nullptr); + + ASSERT_NO_THROW(plugin_ingest(filter, static_cast(readingSet))); + ASSERT_EQ(outputHandlerCalled, 1); + const std::vector& results = resultReading->getAllReadings(); + ASSERT_EQ(results.size(), 1); + const std::vector& datapoints = results[0]->getReadingData(); + int dataPointsFound = datapoints.size(); + ASSERT_EQ(dataPointsFound, 3); +} + +TEST_F(PivotHNZPluginIngest, TSCEToPivot) +{ + std::string jsonMessageTSCE = QUOTE({ + "data_object":{ + "do_type":"TS", + "do_station":12, + "do_addr":511, + "do_value":1, + "do_valid":0, + "do_cg":0, + "do_outdated":0, + "do_ts": 1685019425432, + "do_ts_iv":0, + "do_ts_c":0, + "do_ts_s":0 + } + }); + ReadingSet* readingSet = nullptr; + createReadingSet(readingSet, "TS1", jsonMessageTSCE); + if(HasFatalFailure()) return; + ASSERT_NE(readingSet, nullptr); + + ASSERT_NO_THROW(plugin_ingest(filter, static_cast(readingSet))); + ASSERT_EQ(outputHandlerCalled, 1); + auto pivotTimestampPair = PivotTimestamp::fromTimestamp(1685019425432); + validateReading(lastReading, "TS1", "PIVOT", allPivotAttributeNames, { + {"GTIS.ComingFrom", {"string", "hnzip"}}, + {"GTIS.Identifier", {"string", "ID114561"}}, + {"GTIS.Cause.stVal", {"int64_t", "3"}}, + {"GTIS.TmOrg.stVal", {"string", "genuine"}}, + {"GTIS.SpsTyp.stVal", {"int64_t", "1"}}, + {"GTIS.SpsTyp.q.Validity", {"string", "good"}}, + {"GTIS.SpsTyp.t.SecondSinceEpoch", {"int64_t", std::to_string(pivotTimestampPair.first)}}, + {"GTIS.SpsTyp.t.FractionOfSecond", {"int64_t", std::to_string(pivotTimestampPair.second)}}, + {"GTIS.TmValidity.stVal", {"string", "good"}}, + }); + if(HasFatalFailure()) return; +} + +TEST_F(PivotHNZPluginIngest, TSCGToPivot) +{ + std::string jsonMessageTSCG = QUOTE({ + "data_object":{ + "do_type":"TS", + "do_station":12, + "do_addr":511, + 
"do_value":1, + "do_valid":0, + "do_cg":1, + "do_outdated":0 + } + }); + ReadingSet* readingSet = nullptr; + createReadingSet(readingSet, "TS1", jsonMessageTSCG); + if(HasFatalFailure()) return; + ASSERT_NE(readingSet, nullptr); + + auto pivotTimestampPair = PivotTimestamp::fromTimestamp(PivotTimestamp::getCurrentTimestampMs()); + long sec = pivotTimestampPair.first; + ASSERT_NO_THROW(plugin_ingest(filter, static_cast(readingSet))); + ASSERT_EQ(outputHandlerCalled, 1); + validateReading(lastReading, "TS1", "PIVOT", allPivotAttributeNames, { + {"GTIS.ComingFrom", {"string", "hnzip"}}, + {"GTIS.Identifier", {"string", "ID114561"}}, + {"GTIS.Cause.stVal", {"int64_t", "20"}}, + {"GTIS.TmOrg.stVal", {"string", "substituted"}}, + {"GTIS.SpsTyp.stVal", {"int64_t", "1"}}, + {"GTIS.SpsTyp.q.Validity", {"string", "good"}}, + // NB: Time was added by hnztopivot plugin + {"GTIS.SpsTyp.t.SecondSinceEpoch", {"int64_t_range", std::to_string(sec) + ";" + std::to_string(sec+1)}}, + {"GTIS.SpsTyp.t.FractionOfSecond", {"int64_t_range", "0;99999999"}}, + }); + if(HasFatalFailure()) return; +} + +TEST_F(PivotHNZPluginIngest, TSToPivotDouble) +{ + std::string jsonMessageTSCEDouble = QUOTE({ + "data_object":{ + "do_type":"TS", + "do_station":12, + "do_addr":522, + "do_value":1, + "do_valid":0, + "do_cg":0, + "do_outdated":0, + "do_ts": 1685019425432, + "do_ts_iv":0, + "do_ts_c":0, + "do_ts_s":0 + } + }); + ReadingSet* readingSet = nullptr; + createReadingSet(readingSet, "TS2", jsonMessageTSCEDouble); + if(HasFatalFailure()) return; + ASSERT_NE(readingSet, nullptr); + + ASSERT_NO_THROW(plugin_ingest(filter, static_cast(readingSet))); + ASSERT_EQ(outputHandlerCalled, 1); + auto pivotTimestampPair = PivotTimestamp::fromTimestamp(1685019425432); + validateReading(lastReading, "TS2", "PIVOT", allPivotAttributeNames, { + {"GTIS.ComingFrom", {"string", "hnzip"}}, + {"GTIS.Identifier", {"string", "ID114562"}}, + {"GTIS.Cause.stVal", {"int64_t", "3"}}, + {"GTIS.TmOrg.stVal", {"string", "genuine"}}, + {"GTIS.DpsTyp.stVal", {"string", "on"}}, + {"GTIS.DpsTyp.q.Validity", {"string", "good"}}, + {"GTIS.DpsTyp.t.SecondSinceEpoch", {"int64_t", std::to_string(pivotTimestampPair.first)}}, + {"GTIS.DpsTyp.t.FractionOfSecond", {"int64_t", std::to_string(pivotTimestampPair.second)}}, + {"GTIS.TmValidity.stVal", {"string", "good"}}, + }); + if(HasFatalFailure()) return; +} + +TEST_F(PivotHNZPluginIngest, TSQualityToPivot) +{ + printf("Testing TSCE with do_valid=1\n"); + std::string jsonMessageTSCEInvalid = QUOTE({ + "data_object":{ + "do_type":"TS", + "do_station":12, + "do_addr":577, + "do_value":1, + "do_valid":1, + "do_cg":0, + "do_outdated":0, + "do_ts": 1685019425432, + "do_ts_iv":0, + "do_ts_c":0, + "do_ts_s":0 + } + }); + ReadingSet* readingSet = nullptr; + createReadingSet(readingSet, "TS3", jsonMessageTSCEInvalid); + if(HasFatalFailure()) return; + ASSERT_NE(readingSet, nullptr); + + ASSERT_NO_THROW(plugin_ingest(filter, static_cast(readingSet))); + ASSERT_EQ(outputHandlerCalled, 1); + auto pivotTimestampPair = PivotTimestamp::fromTimestamp(1685019425432); + validateReading(lastReading, "TS3", "PIVOT", allPivotAttributeNames, { + {"GTIS.ComingFrom", {"string", "hnzip"}}, + {"GTIS.Identifier", {"string", "ID114567"}}, + {"GTIS.Cause.stVal", {"int64_t", "3"}}, + {"GTIS.TmOrg.stVal", {"string", "genuine"}}, + {"GTIS.SpsTyp.stVal", {"int64_t", "1"}}, + {"GTIS.SpsTyp.q.Validity", {"string", "invalid"}}, + {"GTIS.SpsTyp.t.SecondSinceEpoch", {"int64_t", std::to_string(pivotTimestampPair.first)}}, + 
{"GTIS.SpsTyp.t.FractionOfSecond", {"int64_t", std::to_string(pivotTimestampPair.second)}}, + {"GTIS.TmValidity.stVal", {"string", "good"}}, + }); + if(HasFatalFailure()) return; + + printf("Testing TSCE with do_outdated=1\n"); + std::string jsonMessageTSCEOutdated = QUOTE({ + "data_object":{ + "do_type":"TS", + "do_station":12, + "do_addr":577, + "do_value":1, + "do_valid":0, + "do_cg":0, + "do_outdated":1, + "do_ts": 1685019425432, + "do_ts_iv":0, + "do_ts_c":0, + "do_ts_s":0 + } + }); + createReadingSet(readingSet, "TS3", jsonMessageTSCEOutdated); + if(HasFatalFailure()) return; + ASSERT_NE(readingSet, nullptr); + + outputHandlerCalled = 0; + ASSERT_NO_THROW(plugin_ingest(filter, static_cast(readingSet))); + ASSERT_EQ(outputHandlerCalled, 1); + validateReading(lastReading, "TS3", "PIVOT", allPivotAttributeNames, { + {"GTIS.ComingFrom", {"string", "hnzip"}}, + {"GTIS.Identifier", {"string", "ID114567"}}, + {"GTIS.Cause.stVal", {"int64_t", "3"}}, + {"GTIS.TmOrg.stVal", {"string", "genuine"}}, + {"GTIS.SpsTyp.stVal", {"int64_t", "1"}}, + {"GTIS.SpsTyp.q.Validity", {"string", "questionable"}}, + {"GTIS.SpsTyp.q.DetailQuality.oldData", {"int64_t", "1"}}, + {"GTIS.SpsTyp.t.SecondSinceEpoch", {"int64_t", std::to_string(pivotTimestampPair.first)}}, + {"GTIS.SpsTyp.t.FractionOfSecond", {"int64_t", std::to_string(pivotTimestampPair.second)}}, + {"GTIS.TmValidity.stVal", {"string", "good"}}, + }); + if(HasFatalFailure()) return; + + printf("Testing TSCE with do_ts_iv=1\n"); + std::string jsonMessageTSCETimestampInvalid = QUOTE({ + "data_object":{ + "do_type":"TS", + "do_station":12, + "do_addr":577, + "do_value":1, + "do_valid":0, + "do_cg":0, + "do_outdated":0, + "do_ts": 1685019425432, + "do_ts_iv":1, + "do_ts_c":0, + "do_ts_s":0 + } + }); + createReadingSet(readingSet, "TS3", jsonMessageTSCETimestampInvalid); + if(HasFatalFailure()) return; + ASSERT_NE(readingSet, nullptr); + + outputHandlerCalled = 0; + ASSERT_NO_THROW(plugin_ingest(filter, static_cast(readingSet))); + ASSERT_EQ(outputHandlerCalled, 1); + validateReading(lastReading, "TS3", "PIVOT", allPivotAttributeNames, { + {"GTIS.ComingFrom", {"string", "hnzip"}}, + {"GTIS.Identifier", {"string", "ID114567"}}, + {"GTIS.Cause.stVal", {"int64_t", "3"}}, + {"GTIS.TmOrg.stVal", {"string", "genuine"}}, + {"GTIS.SpsTyp.stVal", {"int64_t", "1"}}, + {"GTIS.SpsTyp.q.Validity", {"string", "good"}}, + {"GTIS.SpsTyp.t.SecondSinceEpoch", {"int64_t", std::to_string(pivotTimestampPair.first)}}, + {"GTIS.SpsTyp.t.FractionOfSecond", {"int64_t", std::to_string(pivotTimestampPair.second)}}, + {"GTIS.TmValidity.stVal", {"string", "invalid"}}, + }); + if(HasFatalFailure()) return; + + printf("Testing TSCE with do_ts_c=1\n"); + std::string jsonMessageTSCEChronoLoss = QUOTE({ + "data_object":{ + "do_type":"TS", + "do_station":12, + "do_addr":577, + "do_value":1, + "do_valid":0, + "do_cg":0, + "do_outdated":0, + "do_ts": 1685019425432, + "do_ts_iv":0, + "do_ts_c":1, + "do_ts_s":0 + } + }); + createReadingSet(readingSet, "TS3", jsonMessageTSCEChronoLoss); + if(HasFatalFailure()) return; + ASSERT_NE(readingSet, nullptr); + + outputHandlerCalled = 0; + ASSERT_NO_THROW(plugin_ingest(filter, static_cast(readingSet))); + ASSERT_EQ(outputHandlerCalled, 1); + validateReading(lastReading, "TS3", "PIVOT", allPivotAttributeNames, { + {"GTIS.ComingFrom", {"string", "hnzip"}}, + {"GTIS.Identifier", {"string", "ID114567"}}, + {"GTIS.Cause.stVal", {"int64_t", "3"}}, + {"GTIS.TmOrg.stVal", {"string", "genuine"}}, + {"GTIS.SpsTyp.stVal", {"int64_t", "1"}}, + 
{"GTIS.SpsTyp.q.Validity", {"string", "questionable"}}, + {"GTIS.SpsTyp.q.DetailQuality.oldData", {"int64_t", "1"}}, + {"GTIS.SpsTyp.t.SecondSinceEpoch", {"int64_t", std::to_string(pivotTimestampPair.first)}}, + {"GTIS.SpsTyp.t.FractionOfSecond", {"int64_t", std::to_string(pivotTimestampPair.second)}}, + {"GTIS.TmValidity.stVal", {"string", "good"}}, + }); + if(HasFatalFailure()) return; + + printf("Testing TSCE with do_ts_s=1\n"); + std::string jsonMessageTSCENotSynchro = QUOTE({ + "data_object":{ + "do_type":"TS", + "do_station":12, + "do_addr":577, + "do_value":1, + "do_valid":0, + "do_cg":0, + "do_outdated":0, + "do_ts": 1685019425432, + "do_ts_iv":0, + "do_ts_c":0, + "do_ts_s":1 + } + }); + createReadingSet(readingSet, "TS3", jsonMessageTSCENotSynchro); + if(HasFatalFailure()) return; + ASSERT_NE(readingSet, nullptr); + + outputHandlerCalled = 0; + ASSERT_NO_THROW(plugin_ingest(filter, static_cast(readingSet))); + ASSERT_EQ(outputHandlerCalled, 1); + validateReading(lastReading, "TS3", "PIVOT", allPivotAttributeNames, { + {"GTIS.ComingFrom", {"string", "hnzip"}}, + {"GTIS.Identifier", {"string", "ID114567"}}, + {"GTIS.Cause.stVal", {"int64_t", "3"}}, + {"GTIS.TmOrg.stVal", {"string", "genuine"}}, + {"GTIS.SpsTyp.stVal", {"int64_t", "1"}}, + {"GTIS.SpsTyp.q.Validity", {"string", "questionable"}}, + {"GTIS.SpsTyp.t.SecondSinceEpoch", {"int64_t", std::to_string(pivotTimestampPair.first)}}, + {"GTIS.SpsTyp.t.FractionOfSecond", {"int64_t", std::to_string(pivotTimestampPair.second)}}, + {"GTIS.SpsTyp.t.TimeQuality.clockNotSynchronized", {"int64_t", "1"}}, + {"GTIS.TmValidity.stVal", {"string", "good"}}, + }); + if(HasFatalFailure()) return; + + + printf("Testing TSCE with all error flags\n"); + std::string jsonMessageTSCEAllFail = QUOTE({ + "data_object":{ + "do_type":"TS", + "do_station":12, + "do_addr":577, + "do_value":1, + "do_valid":1, + "do_cg":0, + "do_outdated":1, + "do_ts": 1685019425432, + "do_ts_iv":1, + "do_ts_c":1, + "do_ts_s":1 + } + }); + createReadingSet(readingSet, "TS3", jsonMessageTSCEAllFail); + if(HasFatalFailure()) return; + ASSERT_NE(readingSet, nullptr); + + printf("Testing TS quality update\n"); + std::string jsonMessageTSQualUpdate = QUOTE({ + "data_object":{ + "do_type":"TS", + "do_station":12, + "do_addr":577, + "do_valid":0, + "do_cg":0, + "do_outdated":1, + "do_ts": 1685019425432, + "do_ts_iv":0, + "do_ts_c":0, + "do_ts_s":0 + } + }); + createReadingSet(readingSet, "TS3", jsonMessageTSQualUpdate); + if(HasFatalFailure()) return; + ASSERT_NE(readingSet, nullptr); + + outputHandlerCalled = 0; + ASSERT_NO_THROW(plugin_ingest(filter, static_cast(readingSet))); + ASSERT_EQ(outputHandlerCalled, 1); + validateReading(lastReading, "TS3", "PIVOT", allPivotAttributeNames, { + {"GTIS.ComingFrom", {"string", "hnzip"}}, + {"GTIS.Identifier", {"string", "ID114567"}}, + {"GTIS.Cause.stVal", {"int64_t", "3"}}, + {"GTIS.TmOrg.stVal", {"string", "genuine"}}, + {"GTIS.SpsTyp.q.Validity", {"string", "questionable"}}, + {"GTIS.SpsTyp.q.DetailQuality.oldData", {"int64_t", "1"}}, + {"GTIS.SpsTyp.t.SecondSinceEpoch", {"int64_t", std::to_string(pivotTimestampPair.first)}}, + {"GTIS.SpsTyp.t.FractionOfSecond", {"int64_t", std::to_string(pivotTimestampPair.second)}}, + {"GTIS.TmValidity.stVal", {"string", "good"}}, + }); + if(HasFatalFailure()) return; + + printf("Testing TSCG with do_valid=1\n"); + std::string jsonMessageTSCGInvalid = QUOTE({ + "data_object":{ + "do_type":"TS", + "do_station":12, + "do_addr":577, + "do_value":1, + "do_valid":1, + "do_cg":1, + "do_outdated":0 + } + }); + 
createReadingSet(readingSet, "TS3", jsonMessageTSCGInvalid); + if(HasFatalFailure()) return; + ASSERT_NE(readingSet, nullptr); + + outputHandlerCalled = 0; + pivotTimestampPair = PivotTimestamp::fromTimestamp(PivotTimestamp::getCurrentTimestampMs()); + long sec = pivotTimestampPair.first; + ASSERT_NO_THROW(plugin_ingest(filter, static_cast(readingSet))); + ASSERT_EQ(outputHandlerCalled, 1); + validateReading(lastReading, "TS3", "PIVOT", allPivotAttributeNames, { + {"GTIS.ComingFrom", {"string", "hnzip"}}, + {"GTIS.Identifier", {"string", "ID114567"}}, + {"GTIS.Cause.stVal", {"int64_t", "20"}}, + {"GTIS.TmOrg.stVal", {"string", "substituted"}}, + {"GTIS.SpsTyp.stVal", {"int64_t", "1"}}, + {"GTIS.SpsTyp.q.Validity", {"string", "invalid"}}, + // NB: Time was added by hnztopivot plugin + {"GTIS.SpsTyp.t.SecondSinceEpoch", {"int64_t_range", std::to_string(sec) + ";" + std::to_string(sec+1)}}, + {"GTIS.SpsTyp.t.FractionOfSecond", {"int64_t_range", "0;99999999"}}, + }); + if(HasFatalFailure()) return; + + printf("Testing TSCG with do_outdated=1\n"); + std::string jsonMessageTSCGOutdated = QUOTE({ + "data_object":{ + "do_type":"TS", + "do_station":12, + "do_addr":577, + "do_value":1, + "do_valid":0, + "do_cg":1, + "do_outdated":1 + } + }); + createReadingSet(readingSet, "TS3", jsonMessageTSCGOutdated); + if(HasFatalFailure()) return; + ASSERT_NE(readingSet, nullptr); + + outputHandlerCalled = 0; + pivotTimestampPair = PivotTimestamp::fromTimestamp(PivotTimestamp::getCurrentTimestampMs()); + sec = pivotTimestampPair.first; + ASSERT_NO_THROW(plugin_ingest(filter, static_cast(readingSet))); + ASSERT_EQ(outputHandlerCalled, 1); + validateReading(lastReading, "TS3", "PIVOT", allPivotAttributeNames, { + {"GTIS.ComingFrom", {"string", "hnzip"}}, + {"GTIS.Identifier", {"string", "ID114567"}}, + {"GTIS.Cause.stVal", {"int64_t", "20"}}, + {"GTIS.TmOrg.stVal", {"string", "substituted"}}, + {"GTIS.SpsTyp.stVal", {"int64_t", "1"}}, + {"GTIS.SpsTyp.q.Validity", {"string", "questionable"}}, + {"GTIS.SpsTyp.q.DetailQuality.oldData", {"int64_t", "1"}}, + // NB: Time was added by hnztopivot plugin + {"GTIS.SpsTyp.t.SecondSinceEpoch", {"int64_t_range", std::to_string(sec) + ";" + std::to_string(sec+1)}}, + {"GTIS.SpsTyp.t.FractionOfSecond", {"int64_t_range", "0;99999999"}}, + }); + if(HasFatalFailure()) return; + + printf("Testing TSCG with all error flags\n"); + std::string jsonMessageTSCGAllFail = QUOTE({ + "data_object":{ + "do_type":"TS", + "do_station":12, + "do_addr":577, + "do_value":1, + "do_valid":1, + "do_cg":1, + "do_outdated":1 + } + }); + createReadingSet(readingSet, "TS3", jsonMessageTSCGAllFail); + if(HasFatalFailure()) return; + ASSERT_NE(readingSet, nullptr); + + outputHandlerCalled = 0; + pivotTimestampPair = PivotTimestamp::fromTimestamp(PivotTimestamp::getCurrentTimestampMs()); + sec = pivotTimestampPair.first; + ASSERT_NO_THROW(plugin_ingest(filter, static_cast(readingSet))); + ASSERT_EQ(outputHandlerCalled, 1); + validateReading(lastReading, "TS3", "PIVOT", allPivotAttributeNames, { + {"GTIS.ComingFrom", {"string", "hnzip"}}, + {"GTIS.Identifier", {"string", "ID114567"}}, + {"GTIS.Cause.stVal", {"int64_t", "20"}}, + {"GTIS.TmOrg.stVal", {"string", "substituted"}}, + {"GTIS.SpsTyp.stVal", {"int64_t", "1"}}, + {"GTIS.SpsTyp.q.Validity", {"string", "invalid"}}, + {"GTIS.SpsTyp.q.DetailQuality.oldData", {"int64_t", "1"}}, + // NB: Time was added by hnztopivot plugin + {"GTIS.SpsTyp.t.SecondSinceEpoch", {"int64_t_range", std::to_string(sec) + ";" + std::to_string(sec+1)}}, + 
{"GTIS.SpsTyp.t.FractionOfSecond", {"int64_t_range", "0;99999999"}}, + }); + if(HasFatalFailure()) return; +} + +TEST_F(PivotHNZPluginIngest, TMAToPivot) +{ + std::string jsonMessageTMA = QUOTE({ + "data_object":{ + "do_type":"TM", + "do_station":12, + "do_addr":20, + "do_value":-42, + "do_valid":0, + "do_an":"TMA", + "do_outdated":0 + } + }); + ReadingSet* readingSet = nullptr; + createReadingSet(readingSet, "TM1", jsonMessageTMA); + if(HasFatalFailure()) return; + ASSERT_NE(readingSet, nullptr); + + auto pivotTimestampPair = PivotTimestamp::fromTimestamp(PivotTimestamp::getCurrentTimestampMs()); + long sec = pivotTimestampPair.first; + ASSERT_NO_THROW(plugin_ingest(filter, static_cast(readingSet))); + ASSERT_EQ(outputHandlerCalled, 1); + validateReading(lastReading, "TM1", "PIVOT", allPivotAttributeNames, { + {"GTIM.ComingFrom", {"string", "hnzip"}}, + {"GTIM.Identifier", {"string", "ID111111"}}, + {"GTIM.Cause.stVal", {"int64_t", "1"}}, + {"GTIM.TmOrg.stVal", {"string", "substituted"}}, + {"GTIM.MvTyp.mag.i", {"int64_t", "-42"}}, + {"GTIM.MvTyp.q.Validity", {"string", "good"}}, + // NB: Time was added by hnztopivot plugin + {"GTIM.MvTyp.t.SecondSinceEpoch", {"int64_t_range", std::to_string(sec) + ";" + std::to_string(sec+1)}}, + {"GTIM.MvTyp.t.FractionOfSecond", {"int64_t_range", "0;99999999"}}, + }); + if(HasFatalFailure()) return; +} + +TEST_F(PivotHNZPluginIngest, TM8ToPivot) +{ + std::string jsonMessageTMA = QUOTE({ + "data_object":{ + "do_type":"TM", + "do_station":12, + "do_addr":20, + "do_value":142, + "do_valid":0, + "do_an":"TM8", + "do_outdated":0 + } + }); + ReadingSet* readingSet = nullptr; + createReadingSet(readingSet, "TM1", jsonMessageTMA); + if(HasFatalFailure()) return; + ASSERT_NE(readingSet, nullptr); + + auto pivotTimestampPair = PivotTimestamp::fromTimestamp(PivotTimestamp::getCurrentTimestampMs()); + long sec = pivotTimestampPair.first; + ASSERT_NO_THROW(plugin_ingest(filter, static_cast(readingSet))); + ASSERT_EQ(outputHandlerCalled, 1); + validateReading(lastReading, "TM1", "PIVOT", allPivotAttributeNames, { + {"GTIM.ComingFrom", {"string", "hnzip"}}, + {"GTIM.Identifier", {"string", "ID111111"}}, + {"GTIM.Cause.stVal", {"int64_t", "1"}}, + {"GTIM.TmOrg.stVal", {"string", "substituted"}}, + {"GTIM.MvTyp.mag.i", {"int64_t", "142"}}, + {"GTIM.MvTyp.q.Validity", {"string", "good"}}, + // NB: Time was added by hnztopivot plugin + {"GTIM.MvTyp.t.SecondSinceEpoch", {"int64_t_range", std::to_string(sec) + ";" + std::to_string(sec+1)}}, + {"GTIM.MvTyp.t.FractionOfSecond", {"int64_t_range", "0;99999999"}}, + }); + if(HasFatalFailure()) return; +} + +TEST_F(PivotHNZPluginIngest, TM16ToPivot) +{ + std::string jsonMessageTMA = QUOTE({ + "data_object":{ + "do_type":"TM", + "do_station":12, + "do_addr":20, + "do_value":-142, + "do_valid":0, + "do_an":"TM16", + "do_outdated":0 + } + }); + ReadingSet* readingSet = nullptr; + createReadingSet(readingSet, "TM1", jsonMessageTMA); + if(HasFatalFailure()) return; + ASSERT_NE(readingSet, nullptr); + + auto pivotTimestampPair = PivotTimestamp::fromTimestamp(PivotTimestamp::getCurrentTimestampMs()); + long sec = pivotTimestampPair.first; + ASSERT_NO_THROW(plugin_ingest(filter, static_cast(readingSet))); + ASSERT_EQ(outputHandlerCalled, 1); + validateReading(lastReading, "TM1", "PIVOT", allPivotAttributeNames, { + {"GTIM.ComingFrom", {"string", "hnzip"}}, + {"GTIM.Identifier", {"string", "ID111111"}}, + {"GTIM.Cause.stVal", {"int64_t", "1"}}, + {"GTIM.TmOrg.stVal", {"string", "substituted"}}, + {"GTIM.MvTyp.mag.i", {"int64_t", "-142"}}, 
+ {"GTIM.MvTyp.q.Validity", {"string", "good"}}, + // NB: Time was added by hnztopivot plugin + {"GTIM.MvTyp.t.SecondSinceEpoch", {"int64_t_range", std::to_string(sec) + ";" + std::to_string(sec+1)}}, + {"GTIM.MvTyp.t.FractionOfSecond", {"int64_t_range", "0;99999999"}}, + }); + if(HasFatalFailure()) return; +} + +TEST_F(PivotHNZPluginIngest, TMQualityToPivot) +{ + printf("Testing TM with do_valid=1\n"); + std::string jsonMessageTMInvalid = QUOTE({ + "data_object":{ + "do_type":"TM", + "do_station":12, + "do_addr":20, + "do_value":-42, + "do_valid":1, + "do_an":"TMA", + "do_outdated":0 + } + }); + ReadingSet* readingSet = nullptr; + createReadingSet(readingSet, "TM1", jsonMessageTMInvalid); + if(HasFatalFailure()) return; + ASSERT_NE(readingSet, nullptr); + + auto pivotTimestampPair = PivotTimestamp::fromTimestamp(PivotTimestamp::getCurrentTimestampMs()); + long sec = pivotTimestampPair.first; + ASSERT_NO_THROW(plugin_ingest(filter, static_cast(readingSet))); + ASSERT_EQ(outputHandlerCalled, 1); + validateReading(lastReading, "TM1", "PIVOT", allPivotAttributeNames, { + {"GTIM.ComingFrom", {"string", "hnzip"}}, + {"GTIM.Identifier", {"string", "ID111111"}}, + {"GTIM.Cause.stVal", {"int64_t", "1"}}, + {"GTIM.TmOrg.stVal", {"string", "substituted"}}, + {"GTIM.MvTyp.mag.i", {"int64_t", "-42"}}, + {"GTIM.MvTyp.q.Validity", {"string", "invalid"}}, + // NB: Time was added by hnztopivot plugin + {"GTIM.MvTyp.t.SecondSinceEpoch", {"int64_t_range", std::to_string(sec) + ";" + std::to_string(sec+1)}}, + {"GTIM.MvTyp.t.FractionOfSecond", {"int64_t_range", "0;99999999"}}, + }); + if(HasFatalFailure()) return; + + printf("Testing TM with do_outdated=1\n"); + std::string jsonMessageTMOutdated = QUOTE({ + "data_object":{ + "do_type":"TM", + "do_station":12, + "do_addr":20, + "do_value":-42, + "do_valid":0, + "do_an":"TMA", + "do_outdated":1 + } + }); + createReadingSet(readingSet, "TM1", jsonMessageTMOutdated); + if(HasFatalFailure()) return; + ASSERT_NE(readingSet, nullptr); + + pivotTimestampPair = PivotTimestamp::fromTimestamp(PivotTimestamp::getCurrentTimestampMs()); + sec = pivotTimestampPair.first; + outputHandlerCalled = 0; + ASSERT_NO_THROW(plugin_ingest(filter, static_cast(readingSet))); + ASSERT_EQ(outputHandlerCalled, 1); + validateReading(lastReading, "TM1", "PIVOT", allPivotAttributeNames, { + {"GTIM.ComingFrom", {"string", "hnzip"}}, + {"GTIM.Identifier", {"string", "ID111111"}}, + {"GTIM.Cause.stVal", {"int64_t", "1"}}, + {"GTIM.TmOrg.stVal", {"string", "substituted"}}, + {"GTIM.MvTyp.mag.i", {"int64_t", "-42"}}, + {"GTIM.MvTyp.q.Validity", {"string", "questionable"}}, + {"GTIM.MvTyp.q.DetailQuality.oldData", {"int64_t", "1"}}, + // NB: Time was added by hnztopivot plugin + {"GTIM.MvTyp.t.SecondSinceEpoch", {"int64_t_range", std::to_string(sec) + ";" + std::to_string(sec+1)}}, + {"GTIM.MvTyp.t.FractionOfSecond", {"int64_t_range", "0;99999999"}}, + }); + if(HasFatalFailure()) return; + + printf("Testing TM with all error flags\n"); + std::string jsonMessageTMAllFail = QUOTE({ + "data_object":{ + "do_type":"TM", + "do_station":12, + "do_addr":20, + "do_value":-42, + "do_valid":1, + "do_an":"TMA", + "do_outdated":1 + } + }); + createReadingSet(readingSet, "TM1", jsonMessageTMAllFail); + if(HasFatalFailure()) return; + ASSERT_NE(readingSet, nullptr); + + pivotTimestampPair = PivotTimestamp::fromTimestamp(PivotTimestamp::getCurrentTimestampMs()); + sec = pivotTimestampPair.first; + outputHandlerCalled = 0; + ASSERT_NO_THROW(plugin_ingest(filter, static_cast(readingSet))); + 
ASSERT_EQ(outputHandlerCalled, 1); + validateReading(lastReading, "TM1", "PIVOT", allPivotAttributeNames, { + {"GTIM.ComingFrom", {"string", "hnzip"}}, + {"GTIM.Identifier", {"string", "ID111111"}}, + {"GTIM.Cause.stVal", {"int64_t", "1"}}, + {"GTIM.TmOrg.stVal", {"string", "substituted"}}, + {"GTIM.MvTyp.mag.i", {"int64_t", "-42"}}, + {"GTIM.MvTyp.q.Validity", {"string", "invalid"}}, + {"GTIM.MvTyp.q.DetailQuality.oldData", {"int64_t", "1"}}, + // NB: Time was added by hnztopivot plugin + {"GTIM.MvTyp.t.SecondSinceEpoch", {"int64_t_range", std::to_string(sec) + ";" + std::to_string(sec+1)}}, + {"GTIM.MvTyp.t.FractionOfSecond", {"int64_t_range", "0;99999999"}}, + }); + if(HasFatalFailure()) return; + + printf("Testing TM quality update\n"); + std::string jsonMessageTMQualUpdate = QUOTE({ + "data_object":{ + "do_type":"TM", + "do_station":12, + "do_addr":20, + "do_valid":0, + "do_an":"TMA", + "do_outdated":1 + } + }); + createReadingSet(readingSet, "TM1", jsonMessageTMQualUpdate); + if(HasFatalFailure()) return; + ASSERT_NE(readingSet, nullptr); + + pivotTimestampPair = PivotTimestamp::fromTimestamp(PivotTimestamp::getCurrentTimestampMs()); + sec = pivotTimestampPair.first; + outputHandlerCalled = 0; + ASSERT_NO_THROW(plugin_ingest(filter, static_cast(readingSet))); + ASSERT_EQ(outputHandlerCalled, 1); + validateReading(lastReading, "TM1", "PIVOT", allPivotAttributeNames, { + {"GTIM.ComingFrom", {"string", "hnzip"}}, + {"GTIM.Identifier", {"string", "ID111111"}}, + {"GTIM.Cause.stVal", {"int64_t", "1"}}, + {"GTIM.TmOrg.stVal", {"string", "substituted"}}, + {"GTIM.MvTyp.q.Validity", {"string", "questionable"}}, + {"GTIM.MvTyp.q.DetailQuality.oldData", {"int64_t", "1"}}, + // NB: Time was added by hnztopivot plugin + {"GTIM.MvTyp.t.SecondSinceEpoch", {"int64_t_range", std::to_string(sec) + ";" + std::to_string(sec+1)}}, + {"GTIM.MvTyp.t.FractionOfSecond", {"int64_t_range", "0;99999999"}}, + }); + if(HasFatalFailure()) return; +} + +TEST_F(PivotHNZPluginIngest, TCAckToPivot) +{ + std::string jsonMessageTCAck = QUOTE({ + "data_object":{ + "do_type":"TC", + "do_station":12, + "do_addr":142, + "do_value":0, + "do_valid":0 + } + }); + ReadingSet* readingSet = nullptr; + createReadingSet(readingSet, "TC1", jsonMessageTCAck); + if(HasFatalFailure()) return; + ASSERT_NE(readingSet, nullptr); + + auto pivotTimestampPair = PivotTimestamp::fromTimestamp(PivotTimestamp::getCurrentTimestampMs()); + long sec = pivotTimestampPair.first; + ASSERT_NO_THROW(plugin_ingest(filter, static_cast(readingSet))); + ASSERT_EQ(outputHandlerCalled, 1); + validateReading(lastReading, "TC1", "PIVOT", allPivotAttributeNames, { + {"GTIC.ComingFrom", {"string", "hnzip"}}, + {"GTIC.Identifier", {"string", "ID222222"}}, + {"GTIC.Cause.stVal", {"int64_t", "7"}}, + {"GTIC.TmOrg.stVal", {"string", "substituted"}}, + {"GTIC.SpcTyp.q.Validity", {"string", "good"}}, + // NB: Time was added by hnztopivot plugin + {"GTIC.SpcTyp.t.SecondSinceEpoch", {"int64_t_range", std::to_string(sec) + ";" + std::to_string(sec+1)}}, + {"GTIC.SpcTyp.t.FractionOfSecond", {"int64_t_range", "0;99999999"}}, + }); + if(HasFatalFailure()) return; +} + +TEST_F(PivotHNZPluginIngest, TCAckToPivotDouble) +{ + std::string jsonMessageTCAck = QUOTE({ + "data_object":{ + "do_type":"TC", + "do_station":12, + "do_addr":143, + "do_value":0, + "do_valid":0 + } + }); + ReadingSet* readingSet = nullptr; + createReadingSet(readingSet, "TC2", jsonMessageTCAck); + if(HasFatalFailure()) return; + ASSERT_NE(readingSet, nullptr); + + auto pivotTimestampPair = 
PivotTimestamp::fromTimestamp(PivotTimestamp::getCurrentTimestampMs()); + long sec = pivotTimestampPair.first; + ASSERT_NO_THROW(plugin_ingest(filter, static_cast(readingSet))); + ASSERT_EQ(outputHandlerCalled, 1); + validateReading(lastReading, "TC2", "PIVOT", allPivotAttributeNames, { + {"GTIC.ComingFrom", {"string", "hnzip"}}, + {"GTIC.Identifier", {"string", "ID333333"}}, + {"GTIC.Cause.stVal", {"int64_t", "7"}}, + {"GTIC.TmOrg.stVal", {"string", "substituted"}}, + {"GTIC.DpcTyp.q.Validity", {"string", "good"}}, + // NB: Time was added by hnztopivot plugin + {"GTIC.DpcTyp.t.SecondSinceEpoch", {"int64_t_range", std::to_string(sec) + ";" + std::to_string(sec+1)}}, + {"GTIC.DpcTyp.t.FractionOfSecond", {"int64_t_range", "0;99999999"}}, + }); + if(HasFatalFailure()) return; +} + +TEST_F(PivotHNZPluginIngest, TVCAckToPivot) +{ + std::string jsonMessageTVCAck = QUOTE({ + "data_object":{ + "do_type":"TVC", + "do_station":12, + "do_addr":31, + "do_value":0, + "do_valid":0 + } + }); + ReadingSet* readingSet = nullptr; + createReadingSet(readingSet, "TVC1", jsonMessageTVCAck); + if(HasFatalFailure()) return; + ASSERT_NE(readingSet, nullptr); + + auto pivotTimestampPair = PivotTimestamp::fromTimestamp(PivotTimestamp::getCurrentTimestampMs()); + long sec = pivotTimestampPair.first; + ASSERT_NO_THROW(plugin_ingest(filter, static_cast(readingSet))); + ASSERT_EQ(outputHandlerCalled, 1); + validateReading(lastReading, "TVC1", "PIVOT", allPivotAttributeNames, { + {"GTIC.ComingFrom", {"string", "hnzip"}}, + {"GTIC.Identifier", {"string", "ID444444"}}, + {"GTIC.Cause.stVal", {"int64_t", "7"}}, + {"GTIC.TmOrg.stVal", {"string", "substituted"}}, + {"GTIC.IncTyp.q.Validity", {"string", "good"}}, + // NB: Time was added by hnztopivot plugin + {"GTIC.IncTyp.t.SecondSinceEpoch", {"int64_t_range", std::to_string(sec) + ";" + std::to_string(sec+1)}}, + {"GTIC.IncTyp.t.FractionOfSecond", {"int64_t_range", "0;99999999"}}, + }); + if(HasFatalFailure()) return; +} + +TEST_F(PivotHNZPluginIngest, PivotToTC) +{ + std::string jsonMessagePivotTC = QUOTE({ + "PIVOT": { + "GTIC": { + "SpcTyp": { + "q": { + "Source": "process", + "Validity": "good" + }, + "t": { + "FractionOfSecond": 9529458, + "SecondSinceEpoch": 1669714185 + }, + "ctlVal": 1 + }, + "Identifier": "ID222222", + "TmOrg": { + "stVal": "substituted" + } + } + } + }); + ReadingSet* readingSet = nullptr; + createReadingSet(readingSet, "TC1", jsonMessagePivotTC); + if(HasFatalFailure()) return; + ASSERT_NE(readingSet, nullptr); + + ASSERT_NO_THROW(plugin_ingest(filter, static_cast(readingSet))); + ASSERT_EQ(outputHandlerCalled, 1); + validateReading(lastReading, "TC1", "command_object", allCommandAttributeNames, { + {"co_type", {"string", "TC"}}, + {"co_addr", {"int64_t", "142"}}, + {"co_value", {"int64_t", "1"}}, + }); + if(HasFatalFailure()) return; +} + +TEST_F(PivotHNZPluginIngest, PivotDoubleToTC) +{ + std::string jsonMessagePivotTCDouble = QUOTE({ + "PIVOT": { + "GTIC": { + "DpcTyp": { + "q": { + "Source": "process", + "Validity": "good" + }, + "t": { + "FractionOfSecond": 9529458, + "SecondSinceEpoch": 1669714185 + }, + "ctlVal": "off" + }, + "Identifier": "ID333333", + "TmOrg": { + "stVal": "substituted" + } + } + } + }); + ReadingSet* readingSet = nullptr; + createReadingSet(readingSet, "TC2", jsonMessagePivotTCDouble); + if(HasFatalFailure()) return; + ASSERT_NE(readingSet, nullptr); + + ASSERT_NO_THROW(plugin_ingest(filter, static_cast(readingSet))); + ASSERT_EQ(outputHandlerCalled, 1); + validateReading(lastReading, "TC2", "command_object", 
allCommandAttributeNames, { + {"co_type", {"string", "TC"}}, + {"co_addr", {"int64_t", "143"}}, + {"co_value", {"int64_t", "0"}}, + }); + if(HasFatalFailure()) return; +} + +TEST_F(PivotHNZPluginIngest, PivotToTVC) +{ + std::string jsonMessagePivotTVC = QUOTE({ + "PIVOT": { + "GTIC": { + "IncTyp": { + "q": { + "Source": "process", + "Validity": "good" + }, + "t": { + "FractionOfSecond": 9529458, + "SecondSinceEpoch": 1669714185 + }, + "ctlVal": 42 + }, + "Identifier": "ID444444", + "TmOrg": { + "stVal": "substituted" + } + } + } + }); + ReadingSet* readingSet = nullptr; + createReadingSet(readingSet, "TVC1", jsonMessagePivotTVC); + if(HasFatalFailure()) return; + ASSERT_NE(readingSet, nullptr); + + ASSERT_NO_THROW(plugin_ingest(filter, static_cast(readingSet))); + ASSERT_EQ(outputHandlerCalled, 1); + validateReading(lastReading, "TVC1", "command_object", allCommandAttributeNames, { + {"co_type", {"string", "TVC"}}, + {"co_addr", {"int64_t", "31"}}, + {"co_value", {"int64_t", "42"}}, + }); + if(HasFatalFailure()) return; +} + +TEST_F(PivotHNZPluginIngest, SouthEvent) +{ + std::string jsonMessageSouthEvent = QUOTE({ + "south_event":{ + "connx_status": "not connected", + "gi_status": "idle" + } + }); + ReadingSet* readingSet = nullptr; + createReadingSet(readingSet, "CONNECTION-1", jsonMessageSouthEvent); + if(HasFatalFailure()) return; + ASSERT_NE(readingSet, nullptr); + + ASSERT_NO_THROW(plugin_ingest(filter, static_cast(readingSet))); + ASSERT_EQ(outputHandlerCalled, 1); + validateReading(lastReading, "CONNECTION-1", "south_event", allSouthEventAttributeNames, { + {"connx_status", {"string", "not connected"}}, + {"gi_status", {"string", "idle"}}, + }); + if(HasFatalFailure()) return; +} + +TEST_F(PivotHNZPluginIngest, InvalidMessages) +{ + printf("Testing message with invalid root object\n"); + std::string jsonMessageInvalidRoot = QUOTE({ + "invalid_message":{ + "val": 42 + } + }); + ReadingSet* readingSet = nullptr; + createReadingSet(readingSet, "INVALID", jsonMessageInvalidRoot); + if(HasFatalFailure()) return; + ASSERT_NE(readingSet, nullptr); + + ASSERT_NO_THROW(plugin_ingest(filter, static_cast(readingSet))); + ASSERT_EQ(outputHandlerCalled, 0); + + printf("Testing data_object message with missing do_type\n"); + std::string jsonMessageMissingType = QUOTE({ + "data_object":{ + "do_station":12, + "do_addr":31, + "do_value":0, + "do_valid":0 + } + }); + createReadingSet(readingSet, "TVC1", jsonMessageMissingType); + if(HasFatalFailure()) return; + ASSERT_NE(readingSet, nullptr); + + ASSERT_NO_THROW(plugin_ingest(filter, static_cast(readingSet))); + ASSERT_EQ(outputHandlerCalled, 0); + + printf("Testing data_object message with missing do_addr\n"); + std::string jsonMessageMissingAddress = QUOTE({ + "data_object":{ + "do_type":"TVC", + "do_station":12, + "do_value":0, + "do_valid":0 + } + }); + createReadingSet(readingSet, "TVC1", jsonMessageMissingAddress); + if(HasFatalFailure()) return; + ASSERT_NE(readingSet, nullptr); + + ASSERT_NO_THROW(plugin_ingest(filter, static_cast(readingSet))); + ASSERT_EQ(outputHandlerCalled, 0); + + printf("Testing data_object message with missing pivot id for given type and address\n"); + std::string jsonMessageUnknownPivotId = QUOTE({ + "data_object":{ + "do_type":"TVC", + "do_station":12, + "do_addr":1, + "do_value":0, + "do_valid":0 + } + }); + createReadingSet(readingSet, "TVC1", jsonMessageUnknownPivotId); + if(HasFatalFailure()) return; + ASSERT_NE(readingSet, nullptr); + + ASSERT_NO_THROW(plugin_ingest(filter, static_cast(readingSet))); + 
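+    // No pivot_id matches type TVC at address 1 in the configuration, so the reading must be dropped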
ASSERT_EQ(outputHandlerCalled, 0); + + printf("Testing PIVOT message with missing/invalid root type\n"); + std::string jsonMessageInvalidPivotRootType = QUOTE({ + "PIVOT": { + "GTIX": { + "SpcTyp": { + "q": { + "Source": "process", + "Validity": "good" + }, + "t": { + "FractionOfSecond": 9529458, + "SecondSinceEpoch": 1669714185 + }, + "ctlVal": 1 + }, + "Identifier": "ID222222", + "TmOrg": { + "stVal": "substituted" + } + } + } + }); + createReadingSet(readingSet, "TC1", jsonMessageInvalidPivotRootType); + if(HasFatalFailure()) return; + ASSERT_NE(readingSet, nullptr); + + ASSERT_NO_THROW(plugin_ingest(filter, static_cast(readingSet))); + ASSERT_EQ(outputHandlerCalled, 0); + + printf("Testing PIVOT message with missing pivot type\n"); + std::string jsonMessageMissingPivotType = QUOTE({ + "PIVOT": { + "GTIC": { + "Identifier": "ID222222", + "TmOrg": { + "stVal": "substituted" + } + } + } + }); + createReadingSet(readingSet, "TC1", jsonMessageMissingPivotType); + if(HasFatalFailure()) return; + ASSERT_NE(readingSet, nullptr); + + ASSERT_NO_THROW(plugin_ingest(filter, static_cast(readingSet))); + ASSERT_EQ(outputHandlerCalled, 0); + + printf("Testing PIVOT message with invalid pivot type\n"); + std::string jsonMessageInvalidPivotType = QUOTE({ + "PIVOT": { + "GTIC": { + "SpsTyp": { + "q": { + "Source": "process", + "Validity": "good" + }, + "t": { + "FractionOfSecond": 9529458, + "SecondSinceEpoch": 1669714185 + }, + "ctlVal": 1 + }, + "Identifier": "ID222222", + "TmOrg": { + "stVal": "substituted" + } + } + } + }); + createReadingSet(readingSet, "TC1", jsonMessageInvalidPivotType); + if(HasFatalFailure()) return; + ASSERT_NE(readingSet, nullptr); + + ASSERT_NO_THROW(plugin_ingest(filter, static_cast(readingSet))); + ASSERT_EQ(outputHandlerCalled, 0); + + printf("Testing PIVOT message with missing Identifier\n"); + std::string jsonMessageMissingId = QUOTE({ + "PIVOT": { + "GTIC": { + "SpcTyp": { + "q": { + "Source": "process", + "Validity": "good" + }, + "t": { + "FractionOfSecond": 9529458, + "SecondSinceEpoch": 1669714185 + }, + "ctlVal": 1 + }, + "TmOrg": { + "stVal": "substituted" + } + } + } + }); + createReadingSet(readingSet, "TC1", jsonMessageMissingId); + if(HasFatalFailure()) return; + ASSERT_NE(readingSet, nullptr); + + ASSERT_NO_THROW(plugin_ingest(filter, static_cast(readingSet))); + ASSERT_EQ(outputHandlerCalled, 0); + + printf("Testing PIVOT message with unknown Identifier\n"); + std::string jsonMessageUnknownId = QUOTE({ + "PIVOT": { + "GTIC": { + "SpcTyp": { + "q": { + "Source": "process", + "Validity": "good" + }, + "t": { + "FractionOfSecond": 9529458, + "SecondSinceEpoch": 1669714185 + }, + "ctlVal": 1 + }, + "Identifier": "ID000000", + "TmOrg": { + "stVal": "substituted" + } + } + } + }); + createReadingSet(readingSet, "TC1", jsonMessageUnknownId); + if(HasFatalFailure()) return; + ASSERT_NE(readingSet, nullptr); + + ASSERT_NO_THROW(plugin_ingest(filter, static_cast(readingSet))); + ASSERT_EQ(outputHandlerCalled, 0); + + // Cases below only generates a warning and still process the reading + + printf("Testing data_object label mismatch\n"); + std::string jsonMessageLabelMismatch = QUOTE({ + "data_object":{ + "do_type":"TVC", + "do_station":12, + "do_addr":31, + "do_value":0, + "do_valid":0 + } + }); + createReadingSet(readingSet, "TVC42", jsonMessageLabelMismatch); + if(HasFatalFailure()) return; + ASSERT_NE(readingSet, nullptr); + + auto pivotTimestampPair = PivotTimestamp::fromTimestamp(PivotTimestamp::getCurrentTimestampMs()); + long sec = pivotTimestampPair.first; + 
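+    // The asset label "TVC42" does not match the configured "TVC1": this only logs a warning, conversion proceeds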
outputHandlerCalled = 0; + ASSERT_NO_THROW(plugin_ingest(filter, static_cast(readingSet))); + ASSERT_EQ(outputHandlerCalled, 1); + validateReading(lastReading, "TVC42", "PIVOT", allPivotAttributeNames, { + {"GTIC.ComingFrom", {"string", "hnzip"}}, + {"GTIC.Identifier", {"string", "ID444444"}}, + {"GTIC.Cause.stVal", {"int64_t", "7"}}, + {"GTIC.TmOrg.stVal", {"string", "substituted"}}, + {"GTIC.IncTyp.q.Validity", {"string", "good"}}, + // NB: Time was added by hnztopivot plugin + {"GTIC.IncTyp.t.SecondSinceEpoch", {"int64_t_range", std::to_string(sec) + ";" + std::to_string(sec+1)}}, + {"GTIC.IncTyp.t.FractionOfSecond", {"int64_t_range", "0;99999999"}}, + }); + if(HasFatalFailure()) return; + + printf("Testing TS with missing attributes\n"); + std::string jsonMessageTSInvalid = QUOTE({ + "data_object":{ + "do_type":"TS", + "do_addr":577 + } + }); + createReadingSet(readingSet, "TS3", jsonMessageTSInvalid); + if(HasFatalFailure()) return; + ASSERT_NE(readingSet, nullptr); + + pivotTimestampPair = PivotTimestamp::fromTimestamp(PivotTimestamp::getCurrentTimestampMs()); + sec = pivotTimestampPair.first; + outputHandlerCalled = 0; + ASSERT_NO_THROW(plugin_ingest(filter, static_cast(readingSet))); + ASSERT_EQ(outputHandlerCalled, 1); + validateReading(lastReading, "TS3", "PIVOT", allPivotAttributeNames, { + {"GTIS.ComingFrom", {"string", "hnzip"}}, + {"GTIS.Identifier", {"string", "ID114567"}}, + {"GTIS.Cause.stVal", {"int64_t", "3"}}, + {"GTIS.TmOrg.stVal", {"string", "substituted"}}, + {"GTIS.SpsTyp.q.Validity", {"string", "good"}}, + // NB: Time was added by hnztopivot plugin + {"GTIS.SpsTyp.t.SecondSinceEpoch", {"int64_t_range", std::to_string(sec) + ";" + std::to_string(sec+1)}}, + {"GTIS.SpsTyp.t.FractionOfSecond", {"int64_t_range", "0;99999999"}}, + }); + if(HasFatalFailure()) return; + + printf("Testing TSCE with missing CE attributes\n"); + std::string jsonMessageTSCEMissingAttributes = QUOTE({ + "data_object":{ + "do_type":"TS", + "do_addr":577, + "do_cg":0 + } + }); + createReadingSet(readingSet, "TS3", jsonMessageTSCEMissingAttributes); + if(HasFatalFailure()) return; + ASSERT_NE(readingSet, nullptr); + + pivotTimestampPair = PivotTimestamp::fromTimestamp(PivotTimestamp::getCurrentTimestampMs()); + sec = pivotTimestampPair.first; + outputHandlerCalled = 0; + ASSERT_NO_THROW(plugin_ingest(filter, static_cast(readingSet))); + ASSERT_EQ(outputHandlerCalled, 1); + validateReading(lastReading, "TS3", "PIVOT", allPivotAttributeNames, { + {"GTIS.ComingFrom", {"string", "hnzip"}}, + {"GTIS.Identifier", {"string", "ID114567"}}, + {"GTIS.Cause.stVal", {"int64_t", "3"}}, + {"GTIS.TmOrg.stVal", {"string", "substituted"}}, + {"GTIS.SpsTyp.q.Validity", {"string", "good"}}, + // NB: Time was added by hnztopivot plugin + {"GTIS.SpsTyp.t.SecondSinceEpoch", {"int64_t_range", std::to_string(sec) + ";" + std::to_string(sec+1)}}, + {"GTIS.SpsTyp.t.FractionOfSecond", {"int64_t_range", "0;99999999"}}, + }); + if(HasFatalFailure()) return; + + printf("Testing TS with value out of range\n"); + std::string jsonMessageTSValueOutOfRange = QUOTE({ + "data_object":{ + "do_type":"TS", + "do_station":12, + "do_addr":577, + "do_value":-1, + "do_valid":0, + "do_cg":1, + "do_outdated":0 + } + }); + createReadingSet(readingSet, "TS3", jsonMessageTSValueOutOfRange); + if(HasFatalFailure()) return; + ASSERT_NE(readingSet, nullptr); + + pivotTimestampPair = PivotTimestamp::fromTimestamp(PivotTimestamp::getCurrentTimestampMs()); + sec = pivotTimestampPair.first; + outputHandlerCalled = 0; + 
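+    // The out-of-range TS value (-1) still produces a reading; stVal is expected to come out as 1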
ASSERT_NO_THROW(plugin_ingest(filter, static_cast(readingSet))); + ASSERT_EQ(outputHandlerCalled, 1); + validateReading(lastReading, "TS3", "PIVOT", allPivotAttributeNames, { + {"GTIS.ComingFrom", {"string", "hnzip"}}, + {"GTIS.Identifier", {"string", "ID114567"}}, + {"GTIS.Cause.stVal", {"int64_t", "20"}}, + {"GTIS.TmOrg.stVal", {"string", "substituted"}}, + {"GTIS.SpsTyp.stVal", {"int64_t", "1"}}, + {"GTIS.SpsTyp.q.Validity", {"string", "good"}}, + // NB: Time was added by hnztopivot plugin + {"GTIS.SpsTyp.t.SecondSinceEpoch", {"int64_t_range", std::to_string(sec) + ";" + std::to_string(sec+1)}}, + {"GTIS.SpsTyp.t.FractionOfSecond", {"int64_t_range", "0;99999999"}}, + }); + if(HasFatalFailure()) return; + + printf("Testing TM with missing attributes\n"); + std::string jsonMessageTMAMissingAttributes = QUOTE({ + "data_object":{ + "do_type":"TM", + "do_addr":20 + } + }); + createReadingSet(readingSet, "TM1", jsonMessageTMAMissingAttributes); + if(HasFatalFailure()) return; + ASSERT_NE(readingSet, nullptr); + + pivotTimestampPair = PivotTimestamp::fromTimestamp(PivotTimestamp::getCurrentTimestampMs()); + sec = pivotTimestampPair.first; + outputHandlerCalled = 0; + ASSERT_NO_THROW(plugin_ingest(filter, static_cast(readingSet))); + ASSERT_EQ(outputHandlerCalled, 1); + validateReading(lastReading, "TM1", "PIVOT", allPivotAttributeNames, { + {"GTIM.ComingFrom", {"string", "hnzip"}}, + {"GTIM.Identifier", {"string", "ID111111"}}, + {"GTIM.Cause.stVal", {"int64_t", "1"}}, + {"GTIM.TmOrg.stVal", {"string", "substituted"}}, + {"GTIM.MvTyp.q.Validity", {"string", "good"}}, + // NB: Time was added by hnztopivot plugin + {"GTIM.MvTyp.t.SecondSinceEpoch", {"int64_t_range", std::to_string(sec) + ";" + std::to_string(sec+1)}}, + {"GTIM.MvTyp.t.FractionOfSecond", {"int64_t_range", "0;99999999"}}, + }); + if(HasFatalFailure()) return; + + printf("Testing TM with invalid do_an\n"); + std::string jsonMessageTMInvalidType = QUOTE({ + "data_object":{ + "do_type":"TM", + "do_station":12, + "do_addr":20, + "do_value":999999, + "do_valid":0, + "do_an":"TMT", + "do_outdated":0 + } + }); + createReadingSet(readingSet, "TM1", jsonMessageTMInvalidType); + if(HasFatalFailure()) return; + ASSERT_NE(readingSet, nullptr); + + pivotTimestampPair = PivotTimestamp::fromTimestamp(PivotTimestamp::getCurrentTimestampMs()); + sec = pivotTimestampPair.first; + outputHandlerCalled = 0; + ASSERT_NO_THROW(plugin_ingest(filter, static_cast(readingSet))); + ASSERT_EQ(outputHandlerCalled, 1); + validateReading(lastReading, "TM1", "PIVOT", allPivotAttributeNames, { + {"GTIM.ComingFrom", {"string", "hnzip"}}, + {"GTIM.Identifier", {"string", "ID111111"}}, + {"GTIM.Cause.stVal", {"int64_t", "1"}}, + {"GTIM.TmOrg.stVal", {"string", "substituted"}}, + {"GTIM.MvTyp.mag.i", {"int64_t", "999999"}}, + {"GTIM.MvTyp.q.Validity", {"string", "good"}}, + // NB: Time was added by hnztopivot plugin + {"GTIM.MvTyp.t.SecondSinceEpoch", {"int64_t_range", std::to_string(sec) + ";" + std::to_string(sec+1)}}, + {"GTIM.MvTyp.t.FractionOfSecond", {"int64_t_range", "0;99999999"}}, + }); + if(HasFatalFailure()) return; + + printf("Testing TMA with value out of range\n"); + std::string jsonMessageTMAValueOutOfRange = QUOTE({ + "data_object":{ + "do_type":"TM", + "do_station":12, + "do_addr":20, + "do_value":142, + "do_valid":0, + "do_an":"TMA", + "do_outdated":0 + } + }); + createReadingSet(readingSet, "TM1", jsonMessageTMAValueOutOfRange); + if(HasFatalFailure()) return; + ASSERT_NE(readingSet, nullptr); + + pivotTimestampPair = 
+    pivotTimestampPair = PivotTimestamp::fromTimestamp(PivotTimestamp::getCurrentTimestampMs());
+    sec = pivotTimestampPair.first;
+    outputHandlerCalled = 0;
+    ASSERT_NO_THROW(plugin_ingest(filter, static_cast<READINGSET*>(readingSet)));
+    ASSERT_EQ(outputHandlerCalled, 1);
+    validateReading(lastReading, "TM1", "PIVOT", allPivotAttributeNames, {
+        {"GTIM.ComingFrom", {"string", "hnzip"}},
+        {"GTIM.Identifier", {"string", "ID111111"}},
+        {"GTIM.Cause.stVal", {"int64_t", "1"}},
+        {"GTIM.TmOrg.stVal", {"string", "substituted"}},
+        {"GTIM.MvTyp.mag.i", {"int64_t", "142"}},
+        {"GTIM.MvTyp.q.Validity", {"string", "good"}},
+        // NB: Time was added by hnztopivot plugin
+        {"GTIM.MvTyp.t.SecondSinceEpoch", {"int64_t_range", std::to_string(sec) + ";" + std::to_string(sec+1)}},
+        {"GTIM.MvTyp.t.FractionOfSecond", {"int64_t_range", "0;99999999"}},
+    });
+    if(HasFatalFailure()) return;
+
+    printf("Testing TM8 with value out of range\n");
+    std::string jsonMessageTM8ValueOutOfRange = QUOTE({
+        "data_object":{
+            "do_type":"TM",
+            "do_station":12,
+            "do_addr":20,
+            "do_value":-1,
+            "do_valid":0,
+            "do_an":"TMA",
+            "do_outdated":0
+        }
+    });
+    createReadingSet(readingSet, "TM1", jsonMessageTM8ValueOutOfRange);
+    if(HasFatalFailure()) return;
+    ASSERT_NE(readingSet, nullptr);
+
+    pivotTimestampPair = PivotTimestamp::fromTimestamp(PivotTimestamp::getCurrentTimestampMs());
+    sec = pivotTimestampPair.first;
+    outputHandlerCalled = 0;
+    ASSERT_NO_THROW(plugin_ingest(filter, static_cast<READINGSET*>(readingSet)));
+    ASSERT_EQ(outputHandlerCalled, 1);
+    validateReading(lastReading, "TM1", "PIVOT", allPivotAttributeNames, {
+        {"GTIM.ComingFrom", {"string", "hnzip"}},
+        {"GTIM.Identifier", {"string", "ID111111"}},
+        {"GTIM.Cause.stVal", {"int64_t", "1"}},
+        {"GTIM.TmOrg.stVal", {"string", "substituted"}},
+        {"GTIM.MvTyp.mag.i", {"int64_t", "-1"}},
+        {"GTIM.MvTyp.q.Validity", {"string", "good"}},
+        // NB: Time was added by hnztopivot plugin
+        {"GTIM.MvTyp.t.SecondSinceEpoch", {"int64_t_range", std::to_string(sec) + ";" + std::to_string(sec+1)}},
+        {"GTIM.MvTyp.t.FractionOfSecond", {"int64_t_range", "0;99999999"}},
+    });
+    if(HasFatalFailure()) return;
+
+    printf("Testing TM16 with value out of range\n");
+    std::string jsonMessageTM16ValueOutOfRange = QUOTE({
+        "data_object":{
+            "do_type":"TM",
+            "do_station":12,
+            "do_addr":20,
+            "do_value":999999,
+            "do_valid":0,
+            "do_an":"TMA",
+            "do_outdated":0
+        }
+    });
+    createReadingSet(readingSet, "TM1", jsonMessageTM16ValueOutOfRange);
+    if(HasFatalFailure()) return;
+    ASSERT_NE(readingSet, nullptr);
+
+    pivotTimestampPair = PivotTimestamp::fromTimestamp(PivotTimestamp::getCurrentTimestampMs());
+    sec = pivotTimestampPair.first;
+    outputHandlerCalled = 0;
+    ASSERT_NO_THROW(plugin_ingest(filter, static_cast<READINGSET*>(readingSet)));
+    ASSERT_EQ(outputHandlerCalled, 1);
+    validateReading(lastReading, "TM1", "PIVOT", allPivotAttributeNames, {
+        {"GTIM.ComingFrom", {"string", "hnzip"}},
+        {"GTIM.Identifier", {"string", "ID111111"}},
+        {"GTIM.Cause.stVal", {"int64_t", "1"}},
+        {"GTIM.TmOrg.stVal", {"string", "substituted"}},
+        {"GTIM.MvTyp.mag.i", {"int64_t", "999999"}},
+        {"GTIM.MvTyp.q.Validity", {"string", "good"}},
+        // NB: Time was added by hnztopivot plugin
+        {"GTIM.MvTyp.t.SecondSinceEpoch", {"int64_t_range", std::to_string(sec) + ";" + std::to_string(sec+1)}},
+        {"GTIM.MvTyp.t.FractionOfSecond", {"int64_t_range", "0;99999999"}},
+    });
+    if(HasFatalFailure()) return;
+
+    printf("Testing TC ACK with missing attributes\n");
+    std::string jsonMessageTCAck = QUOTE({
+        "data_object":{
+            "do_type":"TC",
+            "do_addr":142
+        }
+    });
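+    // A TC acknowledgement carrying only its type and address should still be converted;
+    // the assertions below check the defaulted cause, quality and plugin-added timestamp.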
+    createReadingSet(readingSet, "TC1", jsonMessageTCAck);
+    if(HasFatalFailure()) return;
+    ASSERT_NE(readingSet, nullptr);
+
+    pivotTimestampPair = PivotTimestamp::fromTimestamp(PivotTimestamp::getCurrentTimestampMs());
+    sec = pivotTimestampPair.first;
+    outputHandlerCalled = 0;
+    ASSERT_NO_THROW(plugin_ingest(filter, static_cast<READINGSET*>(readingSet)));
+    ASSERT_EQ(outputHandlerCalled, 1);
+    validateReading(lastReading, "TC1", "PIVOT", allPivotAttributeNames, {
+        {"GTIC.ComingFrom", {"string", "hnzip"}},
+        {"GTIC.Identifier", {"string", "ID222222"}},
+        {"GTIC.Cause.stVal", {"int64_t", "7"}},
+        {"GTIC.TmOrg.stVal", {"string", "substituted"}},
+        {"GTIC.SpcTyp.q.Validity", {"string", "good"}},
+        // NB: Time was added by hnztopivot plugin
+        {"GTIC.SpcTyp.t.SecondSinceEpoch", {"int64_t_range", std::to_string(sec) + ";" + std::to_string(sec+1)}},
+        {"GTIC.SpcTyp.t.FractionOfSecond", {"int64_t_range", "0;99999999"}},
+    });
+    if(HasFatalFailure()) return;
+
+    printf("Testing TVC ACK with missing attributes\n");
+    std::string jsonMessageTVCAck = QUOTE({
+        "data_object":{
+            "do_type":"TVC",
+            "do_addr":31
+        }
+    });
+    createReadingSet(readingSet, "TVC1", jsonMessageTVCAck);
+    if(HasFatalFailure()) return;
+    ASSERT_NE(readingSet, nullptr);
+
+    pivotTimestampPair = PivotTimestamp::fromTimestamp(PivotTimestamp::getCurrentTimestampMs());
+    sec = pivotTimestampPair.first;
+    outputHandlerCalled = 0;
+    ASSERT_NO_THROW(plugin_ingest(filter, static_cast<READINGSET*>(readingSet)));
+    ASSERT_EQ(outputHandlerCalled, 1);
+    validateReading(lastReading, "TVC1", "PIVOT", allPivotAttributeNames, {
+        {"GTIC.ComingFrom", {"string", "hnzip"}},
+        {"GTIC.Identifier", {"string", "ID444444"}},
+        {"GTIC.Cause.stVal", {"int64_t", "7"}},
+        {"GTIC.TmOrg.stVal", {"string", "substituted"}},
+        {"GTIC.IncTyp.q.Validity", {"string", "good"}},
+        // NB: Time was added by hnztopivot plugin
+        {"GTIC.IncTyp.t.SecondSinceEpoch", {"int64_t_range", std::to_string(sec) + ";" + std::to_string(sec+1)}},
+        {"GTIC.IncTyp.t.FractionOfSecond", {"int64_t_range", "0;99999999"}},
+    });
+    if(HasFatalFailure()) return;
+
+    printf("Testing PIVOT label mismatch\n");
+    std::string jsonMessagePivotLabelMismatch = QUOTE({
+        "PIVOT": {
+            "GTIC": {
+                "IncTyp": {
+                    "q": {
+                        "Source": "process",
+                        "Validity": "good"
+                    },
+                    "t": {
+                        "FractionOfSecond": 9529458,
+                        "SecondSinceEpoch": 1669714185
+                    },
+                    "ctlVal": 42
+                },
+                "Identifier": "ID444444",
+                "TmOrg": {
+                    "stVal": "substituted"
+                }
+            }
+        }
+    });
+    createReadingSet(readingSet, "TVC42", jsonMessagePivotLabelMismatch);
+    if(HasFatalFailure()) return;
+    ASSERT_NE(readingSet, nullptr);
+
+    outputHandlerCalled = 0;
+    ASSERT_NO_THROW(plugin_ingest(filter, static_cast<READINGSET*>(readingSet)));
+    ASSERT_EQ(outputHandlerCalled, 1);
+    validateReading(lastReading, "TVC42", "command_object", allCommandAttributeNames, {
+        {"co_type", {"string", "TVC"}},
+        {"co_addr", {"int64_t", "31"}},
+        {"co_value", {"int64_t", "42"}},
+    });
+    if(HasFatalFailure()) return;
+}
\ No newline at end of file
diff --git a/tests/test_plugin_init_shutdown.cpp b/tests/test_plugin_init_shutdown.cpp
new file mode 100644
index 0000000..de4ccc8
--- /dev/null
+++ b/tests/test_plugin_init_shutdown.cpp
@@ -0,0 +1,42 @@
+#include <gtest/gtest.h>
+
+#include "hnz_pivot_filter.hpp"
+
+extern "C" {
+    PLUGIN_INFORMATION *plugin_info();
+    PLUGIN_HANDLE plugin_init(ConfigCategory *config,
+                              OUTPUT_HANDLE *outHandle,
+                              OUTPUT_STREAM output);
+
+    void plugin_shutdown(PLUGIN_HANDLE *handle);
+};
+
+TEST(PivotHNZPluginInitShutdown, PluginInitNoConfig)
+{
+    PLUGIN_HANDLE handle = nullptr;
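+    // With no configuration at all, init is expected to fall back to defaults:
+    // it must not throw and must return an enabled filter, as asserted below.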
+    ASSERT_NO_THROW(handle = plugin_init(nullptr, nullptr, nullptr));
+    ASSERT_TRUE(handle != nullptr);
+
+    HNZPivotFilter* filter = static_cast<HNZPivotFilter*>(handle);
+    ASSERT_EQ(filter->isEnabled(), true);
+
+    ASSERT_NO_THROW(plugin_shutdown(static_cast<PLUGIN_HANDLE*>(handle)));
+}
+
+TEST(PivotHNZPluginInitShutdown, PluginShutdown)
+{
+    PLUGIN_INFORMATION *info = plugin_info();
+    ConfigCategory *config = new ConfigCategory("hnztopivot", info->config);
+
+    ASSERT_NE(config, nullptr);
+    config->setItemsValueFromDefault();
+
+    PLUGIN_HANDLE handle = nullptr;
+    ASSERT_NO_THROW(handle = plugin_init(config, nullptr, nullptr));
+    ASSERT_TRUE(handle != nullptr);
+
+    HNZPivotFilter* filter = static_cast<HNZPivotFilter*>(handle);
+    ASSERT_EQ(filter->isEnabled(), true);
+
+    ASSERT_NO_THROW(plugin_shutdown(static_cast<PLUGIN_HANDLE*>(handle)));
+}
\ No newline at end of file
diff --git a/tests/test_plugin_reconfigure.cpp b/tests/test_plugin_reconfigure.cpp
new file mode 100644
index 0000000..c5eaec9
--- /dev/null
+++ b/tests/test_plugin_reconfigure.cpp
@@ -0,0 +1,39 @@
+#include <gtest/gtest.h>
+
+#include "hnz_pivot_filter.hpp"
+
+
+static std::string reconfigure = QUOTE({
+    "enable": {
+        "value": "false"
+    }
+});
+
+extern "C" {
+    PLUGIN_INFORMATION *plugin_info();
+    PLUGIN_HANDLE plugin_init(ConfigCategory *config,
+                              OUTPUT_HANDLE *outHandle,
+                              OUTPUT_STREAM output);
+
+    void plugin_reconfigure(PLUGIN_HANDLE *handle, const std::string& newConfig);
+    void plugin_shutdown(PLUGIN_HANDLE *handle);
+};
+
+TEST(PivotHNZPluginReconfigure, Reconfigure)
+{
+    PLUGIN_INFORMATION *info = plugin_info();
+    ConfigCategory *config = new ConfigCategory("hnztopivot", info->config);
+
+    ASSERT_NE(config, nullptr);
+    config->setItemsValueFromDefault();
+    config->setValue("enable", "true");
+
+    PLUGIN_HANDLE handle = nullptr;
+    ASSERT_NO_THROW(handle = plugin_init(config, nullptr, nullptr));
+    HNZPivotFilter* filter = static_cast<HNZPivotFilter*>(handle);
+
+    ASSERT_NO_THROW(plugin_reconfigure(static_cast<PLUGIN_HANDLE*>(handle), reconfigure));
+    ASSERT_EQ(filter->isEnabled(), false);
+
+    ASSERT_NO_THROW(plugin_shutdown(static_cast<PLUGIN_HANDLE*>(handle)));
+}
\ No newline at end of file