2021-06-03 03:34:29 -04:00
|
|
|
/* JSON serialization for Kafka schema/payloads.
|
|
|
|
*
|
2022-03-15 09:18:01 -04:00
|
|
|
* Author: Steffen Vogel <post@steffenvogel.de>
|
2022-03-15 09:28:57 -04:00
|
|
|
* SPDX-FileCopyrightText: 2014-2023 Institute for Automation of Complex Power Systems, RWTH Aachen University
|
2022-07-04 18:20:03 +02:00
|
|
|
* SPDX-License-Identifier: Apache-2.0
|
2021-06-03 03:34:29 -04:00
|
|
|
*/
|
|
|
|
|
2021-05-10 00:12:30 +02:00
|
|
|
#include <villas/exceptions.hpp>
|
|
|
|
#include <villas/formats/json_kafka.hpp>
|
2021-08-10 10:12:48 -04:00
|
|
|
#include <villas/timing.hpp>
|
2021-05-10 00:12:30 +02:00
|
|
|
#include <villas/utils.hpp>
|
|
|
|
|
|
|
|
using namespace villas;
|
|
|
|
using namespace villas::node;
|
2021-06-03 03:34:29 -04:00
|
|
|
|
2021-05-10 00:12:30 +02:00
|
|
|
const char *JsonKafkaFormat::villasToKafkaType(enum SignalType vt) {
|
2021-06-03 03:34:29 -04:00
|
|
|
switch (vt) {
|
|
|
|
case SignalType::FLOAT:
|
|
|
|
return "double";
|
|
|
|
|
|
|
|
case SignalType::INTEGER:
|
|
|
|
return "int64";
|
|
|
|
|
|
|
|
case SignalType::BOOLEAN:
|
2021-06-03 03:34:55 -04:00
|
|
|
return "boolean";
|
2021-06-03 03:34:29 -04:00
|
|
|
|
|
|
|
default:
|
2021-06-03 03:34:55 -04:00
|
|
|
case SignalType::COMPLEX:
|
2021-06-03 03:34:29 -04:00
|
|
|
case SignalType::INVALID:
|
|
|
|
return "unknown";
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2021-08-10 10:12:48 -04:00
|
|
|
// Pack a single sample into a Kafka Connect style JSON message with a
// "schema" describing the fields and a "payload" carrying the values.
//
// Returns 0 on success, -1 if the final message object could not be built.
int JsonKafkaFormat::packSample(json_t **json_smp, const struct Sample *smp) {
  json_t *json_payload, *json_fields, *json_field, *json_value;

  json_fields = json_array();
  json_payload = json_object();

  // Include sample timestamp
  if (smp->flags & (int)SampleFlags::HAS_TS_ORIGIN) {
    json_field = json_pack("{ s: s, s: b, s: s }", "type", "int64", "optional",
                           false, "field", "timestamp");

    // Kafka Connect timestamps are expressed in milliseconds since the epoch.
    uint64_t ts_origin_ms =
        smp->ts.origin.tv_sec * 1e3 + smp->ts.origin.tv_nsec / 1e6;

    json_array_append_new(json_fields, json_field);
    json_object_set_new(json_payload, "timestamp", json_integer(ts_origin_ms));
  }

  // Include sample sequence no
  if (smp->flags & (int)SampleFlags::HAS_SEQUENCE) {
    json_field = json_pack("{ s: s, s: b, s: s }", "type", "int64", "optional",
                           false, "field", "sequence");

    json_array_append_new(json_fields, json_field);
    json_object_set_new(json_payload, "sequence", json_integer(smp->sequence));
  }

  // Include sample data
  for (size_t i = 0; i < MIN(smp->length, smp->signals->size()); i++) {
    const auto sig = smp->signals->getByIndex(i);
    const auto *data = &smp->data[i];

    // json_pack() consumes its variadic arguments via va_arg, so the signal
    // name must be passed as a C string (passing a std::string through a
    // C varargs list is undefined behavior).
    json_field =
        json_pack("{ s: s, s: b, s: s }", "type", villasToKafkaType(sig->type),
                  "optional", false, "field", sig->name.c_str());

    json_value = data->toJson(sig->type);

    json_array_append_new(json_fields, json_field);
    json_object_set_new(json_payload, sig->name.c_str(), json_value);
  }

  json_object_set_new(json_schema, "fields", json_fields);

  // json_pack()'s "o" format steals a reference; take an extra one so
  // json_schema stays valid for subsequently packed samples.
  json_incref(json_schema);
  *json_smp = json_pack("{ s: o, s: o }", "schema", json_schema, "payload",
                        json_payload);
  if (*json_smp == nullptr)
    return -1;

  return 0;
}
|
|
|
|
|
2021-08-10 10:12:48 -04:00
|
|
|
// Unpack a Kafka Connect style JSON message: read timestamp, sequence
// number and per-signal values from the "payload" object into `smp`.
//
// Returns 0 on success, -1 if the message carries no "payload" object.
int JsonKafkaFormat::unpackSample(json_t *json_smp, struct Sample *smp) {
  json_t *json_payload = json_object_get(json_smp, "payload");
  if (!json_payload)
    return -1;

  smp->length = 0;
  smp->flags = 0;
  smp->signals = signals;

  // Unpack timestamp
  json_t *json_val = json_object_get(json_payload, "timestamp");
  if (json_val) {
    // Payload timestamps are milliseconds since the epoch.
    uint64_t ts_origin_ms = json_integer_value(json_val);
    smp->ts.origin = time_from_double(ts_origin_ms / 1e3);

    smp->flags |= (int)SampleFlags::HAS_TS_ORIGIN;
  }

  // Unpack sequence no
  json_val = json_object_get(json_payload, "sequence");
  if (json_val) {
    smp->sequence = json_integer_value(json_val);

    smp->flags |= (int)SampleFlags::HAS_SEQUENCE;
  }

  // Unpack signal data. Missing payload entries are skipped, so smp->length
  // ends up as the number of signals actually present in the message.
  // NOTE(review): assumes smp->data can hold signals->size() entries —
  // confirm callers allocate samples sized to the signal list.
  for (size_t i = 0; i < signals->size(); i++) {
    auto sig = signals->getByIndex(i);

    json_val = json_object_get(json_payload, sig->name.c_str());
    if (!json_val)
      continue;

    smp->data[i].parseJson(sig->type, json_val);
    smp->length++;
  }

  if (smp->length > 0)
    smp->flags |= (int)SampleFlags::HAS_DATA;

  return 0;
}
|
|
|
|
|
2021-05-10 00:12:30 +02:00
|
|
|
// Parse the format configuration: an optional "schema" dictionary may
// replace the default Kafka Connect schema set up by the constructor.
// Throws ConfigError on malformed configuration.
void JsonKafkaFormat::parse(json_t *json) {
  json_error_t err;
  json_t *new_schema = nullptr;

  int ret = json_unpack_ex(json, &err, 0, "{ s?: o }", "schema", &new_schema);
  if (ret)
    throw ConfigError(json, err, "node-config-format-json-kafka",
                      "Failed to parse format configuration");

  if (new_schema) {
    if (!json_is_object(new_schema))
      throw ConfigError(json, "node-config-format-json-kafka-schema",
                        "Kafka schema must be configured as a dictionary");

    // Drop the previously held schema before adopting the new one.
    // NOTE(review): json_unpack_ex's "o" yields a borrowed reference;
    // storing it without json_incref() presumes the config object outlives
    // this format instance — confirm against caller lifetime.
    if (json_schema)
      json_decref(json_schema);

    json_schema = new_schema;
  }

  JsonFormat::parse(json);
}
|
|
|
|
|
2021-05-10 00:12:30 +02:00
|
|
|
// Construct the format with the default Kafka Connect schema envelope;
// parse() may later replace it via the "schema" setting.
JsonKafkaFormat::JsonKafkaFormat(int fl) : JsonFormat(fl) {
  json_schema = json_pack("{ s: s, s: s }", "type", "struct", "name",
                          "villas-node.Value");
}
|
|
|
|
|
2023-08-31 11:25:01 +02:00
|
|
|
// Register format
|
2021-05-10 00:12:30 +02:00
|
|
|
static char n[] = "json.kafka";
|
|
|
|
static char d[] = "JSON Kafka schema/payload messages";
|
|
|
|
static FormatPlugin<JsonKafkaFormat, n, d,
|
|
|
|
(int)SampleFlags::HAS_TS_ORIGIN |
|
|
|
|
(int)SampleFlags::HAS_SEQUENCE |
|
|
|
|
(int)SampleFlags::HAS_DATA>
|
|
|
|
p;
|