Diffstat (limited to 'apps')
-rw-r--r--  apps/SoilMoistureIo.cpp    | 157
-rw-r--r--  apps/SoilMoistureIo.h      | 141
-rw-r--r--  apps/sample-convert.cpp    |   8
-rw-r--r--  apps/sample-timestamp.cpp  |  20
-rw-r--r--  apps/sm-serial-read.cpp    |   8
5 files changed, 170 insertions(+), 164 deletions(-)
diff --git a/apps/SoilMoistureIo.cpp b/apps/SoilMoistureIo.cpp
index b8a8b64..ad8a3bb 100644
--- a/apps/SoilMoistureIo.cpp
+++ b/apps/SoilMoistureIo.cpp
@@ -15,40 +15,26 @@ void VectorSampleOutputStream::write(SampleRecord sample) {
samples.emplace_back(sample);
}
-vector<KeyDictionary::index_t> KeyDictionary::findIndexes(vector<SampleKey> keys) {
- vector<KeyDictionary::index_t> indexes;
-
- for (auto &key: keys) {
- auto index = indexOf(key);
- indexes.push_back(index);
- }
-
- return move(indexes);
-}
-
-CsvSampleOutputStream::CsvSampleOutputStream(KeyDictionary &dict, unique_ptr<ostream> stream) :
- dict(dict), stream(move(stream)), headerWritten(false) {
+CsvSampleOutputStream::CsvSampleOutputStream(unique_ptr<ostream> stream, KeyDictionary &dict)
+ : stream(move(stream)), headerWritten(false), dict(dict) {
}
-CsvSampleOutputStream::CsvSampleOutputStream(KeyDictionary &dict, unique_ptr<ostream> stream, vector<SampleKey> fieldKeys)
- :
- dict(dict), stream(move(stream)), headerWritten(false), fields(dict.findIndexes(fieldKeys)) {
-}
-
-void CsvSampleOutputStream::write(SampleRecord values) {
+void CsvSampleOutputStream::write(SampleRecord sample) {
// Skip empty records
- if (values.empty()) {
+ if (sample.empty()) {
return;
}
- if (fields.empty()) {
- KeyDictionary::index_t index = 0;
- auto ptr = values.begin();
- while (ptr != values.end()) {
+ // Build the dict with the keys from the first sample.
+ if (dict.empty()) {
+ SampleKeyIndex index = 0;
+ auto ptr = sample.begin();
+ while (ptr != sample.end()) {
auto o = *ptr;
if (o) {
- fields.push_back(index);
+ auto name = sample.dict.at(index)->name;
+ dict.indexOf(name);
}
ptr++;
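The comment above describes the new approach: the per-stream `fields` list is gone, and the CSV writer derives its columns from the shared KeyDictionary, filling it in from the first non-empty sample. A minimal sketch of that behaviour, assuming the declarations from SoilMoistureIo.h further down in this diff and an in-memory sink (key names are illustrative, not from this commit):

    #include "SoilMoistureIo.h"
    #include <memory>
    #include <sstream>
    using namespace trygvis::soil_moisture;

    void csv_sketch() {
        KeyDictionary dict;
        CsvSampleOutputStream out(std::make_unique<std::ostringstream>(), dict);

        SampleRecord sample(dict);
        sample.set(dict.indexOf("timestamp"), "1418226154");
        sample.set(dict.indexOf("sensor"), "3");
        out.write(sample);   // first non-empty write emits the header from the dictionary's keys
    }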
@@ -63,14 +49,15 @@ void CsvSampleOutputStream::write(SampleRecord values) {
auto &s = *stream.get();
- auto i = fields.begin();
- while (i != fields.end()) {
- if (i != fields.begin()) {
+ auto it = dict.begin();
+ while (it != dict.end()) {
+ if (it != dict.begin()) {
s << ",";
}
- auto index = *i++;
- auto o = values.at(index);
+ auto key = *it++;
+ auto sampleKey = sample.dict.indexOf(key->name);
+ auto o = sample.at(sampleKey);
if (o) {
s << o.get();
@@ -83,13 +70,13 @@ void CsvSampleOutputStream::write(SampleRecord values) {
void CsvSampleOutputStream::writeHeader() {
auto &s = *stream.get();
- auto i = fields.begin();
- while (i != fields.end()) {
- s << dict.nameOf(*i);
+ auto i = dict.begin();
+ while (i != dict.end()) {
+ s << (*i)->name;
i++;
- if (i != fields.end()) {
+ if (i != dict.end()) {
s << ",";
}
}
@@ -97,48 +84,45 @@ void CsvSampleOutputStream::writeHeader() {
s << endl;
}
-JsonSampleOutputStream::JsonSampleOutputStream(KeyDictionary &dict, unique_ptr<ostream> stream) :
- dict(dict), stream(move(stream)), filterFields(false) {
+JsonSampleOutputStream::JsonSampleOutputStream(unique_ptr<ostream> stream, KeyDictionary &dict) :
+ dict(dict), stream(move(stream)) {
}
-JsonSampleOutputStream::JsonSampleOutputStream(KeyDictionary &dict, unique_ptr<ostream> stream, vector<SampleKey> fields)
- :
- dict(dict), stream(move(stream)), fields(dict.findIndexes(fields)), filterFields(true) {
-}
-
-void JsonSampleOutputStream::write(SampleRecord values) {
- throw sample_exception("deimplemented");
+void JsonSampleOutputStream::write(SampleRecord sample) {
+ // Skip empty records
+ if (sample.empty()) {
+ return;
+ }
json doc({});
-// if (filterFields) {
-// for (auto &f: fields) {
-// auto value = values.find(f);
-//
-// if (value != values.end()) {
-// doc[f] = value->second;
-// }
-// }
-// } else {
-// for (auto &v: values) {
-// doc[v.first] = v.second;
-// }
-// }
+ if (!dict.empty()) {
+ for (auto &key: dict) {
+ auto sampleKey = sample.dict.indexOf(key->name);
- *stream.get() << doc << endl;
-}
+ auto value = sample.at(sampleKey);
+
+ if (value) {
+ doc[key->name] = value.get();
+ }
+ }
+ } else {
+ for (auto &sampleKey: sample.dict) {
+ auto o = sample.at(sampleKey);
+
+ if (o) {
+ // Make sure that the key is registered in the dictionary
+ dict.indexOf(sampleKey->name);
+ doc[sampleKey->name] = o.get();
+ }
+ }
+ }
-SqlSampleOutputStream::SqlSampleOutputStream(KeyDictionary &dict, unique_ptr<ostream> stream, string table_name) :
- dict(dict), stream(move(stream)), table_name(table_name), filter_fields(false) {
+ *stream.get() << doc << endl;
}
-SqlSampleOutputStream::SqlSampleOutputStream(KeyDictionary &dict, unique_ptr<ostream> stream, string table_name, vector<SampleKey> fields)
- :
- dict(dict),
- stream(move(stream)),
- table_name(table_name),
- fields(dict.findIndexes(fields)),
- filter_fields(true) {
+SqlSampleOutputStream::SqlSampleOutputStream(unique_ptr<ostream> stream, KeyDictionary &dict, string table_name) :
+ dict(dict), stream(move(stream)), table_name(table_name) {
}
void SqlSampleOutputStream::write(SampleRecord values) {
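Above, JsonSampleOutputStream::write follows the same pattern as the CSV writer: with a non-empty dictionary only the dictionary's keys are serialized, and with an empty one every present value is emitted and its key registered. In effect a pre-seeded output dictionary takes over the job of the removed `fields` argument. A sketch under that reading, with illustrative key names and an in-memory sink:

    #include "SoilMoistureIo.h"
    #include <memory>
    #include <sstream>
    using namespace trygvis::soil_moisture;

    void json_sketch() {
        KeyDictionary parser_dict, output_dict;
        output_dict.indexOf("timestamp");   // only this key will be serialized
        JsonSampleOutputStream json(std::make_unique<std::ostringstream>(), output_dict);

        SampleRecord sample(parser_dict);
        sample.set(parser_dict.indexOf("timestamp"), "1418226154");
        sample.set(parser_dict.indexOf("sensor"), "3");
        json.write(sample);                 // emits {"timestamp":"1418226154"} and drops "sensor"
    }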
@@ -229,15 +213,15 @@ void CsvSampleParser::process_line(shared_ptr<vector<uint8_t>> packet) {
SampleRecord sample(dict);
while (regex_search(start, end, what, e, flags)) {
- auto key = static_cast<string>(what[1]);
+ auto name = static_cast<string>(what[1]);
auto value = static_cast<string>(what[2]);
start = what[0].second;
map<string, string> values;
- values[key] = value;
+ values[name] = value;
- auto index = dict.indexOf(key);
- sample.set(index, value);
+ auto key = dict.indexOf(name);
+ sample.set(key, value);
flags |= boost::match_prev_avail;
flags |= boost::match_not_bob;
@@ -246,8 +230,8 @@ void CsvSampleParser::process_line(shared_ptr<vector<uint8_t>> packet) {
output->write(sample);
}
-AutoSampleParser::AutoSampleParser(KeyDictionary &dict, shared_ptr<SampleOutputStream> output) :
- SampleStreamParser(sample_format_type::AUTO), csvParser(new CsvSampleParser(dict, output)) {
+AutoSampleParser::AutoSampleParser(shared_ptr<SampleOutputStream> output, KeyDictionary &dict) :
+ SampleStreamParser(sample_format_type::AUTO), csvParser(new CsvSampleParser(output, dict)) {
// Directly select the parser now until we have more than one parser
parser = std::move(csvParser);
type_ = sample_format_type::CSV;
@@ -279,38 +263,25 @@ unique_ptr<SampleStreamParser> open_sample_input_stream(
shared_ptr<SampleOutputStream> output,
sample_format_type type) {
if (type == sample_format_type::CSV) {
- return make_unique<CsvSampleParser>(dict, output);
+ return make_unique<CsvSampleParser>(output, dict);
} else if (type == sample_format_type::AUTO) {
- return make_unique<AutoSampleParser>(dict, output);
+ return make_unique<AutoSampleParser>(output, dict);
} else {
throw sample_exception("Unsupported format type: " + to_string(type));
}
}
unique_ptr<SampleOutputStream> open_sample_output_stream(
- KeyDictionary &dict,
- sample_format_type type,
unique_ptr<ostream> output,
- o<vector<SampleKey>> fields) {
+ KeyDictionary &dict,
+ sample_format_type type) {
if (type == sample_format_type::CSV) {
- if (fields) {
- return make_unique<CsvSampleOutputStream>(dict, move(output), fields.get());
- } else {
- return make_unique<CsvSampleOutputStream>(dict, move(output));
- }
+ return make_unique<CsvSampleOutputStream>(move(output), dict);
} else if (type == sample_format_type::JSON) {
- if (fields) {
- return make_unique<JsonSampleOutputStream>(dict, move(output), fields.get());
- } else {
- return make_unique<JsonSampleOutputStream>(dict, move(output));
- }
+ return make_unique<JsonSampleOutputStream>(move(output), dict);
// } else if (type == sample_format_type::SQL) {
-// if (fields) {
-// return make_unique<SqlSampleOutputStream>(dict, move(output), table_name, fields.get());
-// } else {
-// return make_unique<SqlSampleOutputStream>(dict, move(output), table_name);
-// }
+// return make_unique<SqlSampleOutputStream>(dict, move(output), table_name);
} else {
throw sample_exception("Unsupported format type: " + to_string(type));
}
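That closes the changes to SoilMoistureIo.cpp. With the optional `fields` parameter gone, the output-stream factory is reduced to (sink, dictionary, format). A sketch of the new call shape; the file name is illustrative:

    #include "SoilMoistureIo.h"
    #include <fstream>
    #include <memory>
    using namespace trygvis::soil_moisture;

    void factory_sketch() {
        KeyDictionary dict;
        auto out = open_sample_output_stream(
                std::make_unique<std::ofstream>("samples.json"),   // sink comes first now
                dict,                                              // shared key dictionary
                sample_format_type::JSON);   // CSV and JSON are wired up; SQL is still commented out
    }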
diff --git a/apps/SoilMoistureIo.h b/apps/SoilMoistureIo.h
index b8f0b52..473c098 100644
--- a/apps/SoilMoistureIo.h
+++ b/apps/SoilMoistureIo.h
@@ -11,6 +11,7 @@
#include <boost/lexical_cast.hpp>
#include <functional>
+// TODO: rename to trygvis::sample
namespace trygvis {
namespace soil_moisture {
@@ -43,10 +44,9 @@ unique_ptr<SampleStreamParser> open_sample_input_stream(
sample_format_type type = sample_format_type::AUTO);
unique_ptr<SampleOutputStream> open_sample_output_stream(
- KeyDictionary &dict,
- sample_format_type type,
unique_ptr<ostream> output,
- o<vector<SampleKey>> fields = o<vector<SampleKey>>());
+ KeyDictionary &dict,
+ sample_format_type type);
class sample_exception : public runtime_error {
public:
@@ -54,61 +54,102 @@ public:
}
};
+class KeyDictionary;
+
+using SampleKeyVector = vector<SampleKey *>;
+using SampleKeyIndex = SampleKeyVector::size_type;
+
struct SampleKey {
- // TODO: only the dictionary should be able to create keys
- SampleKey(string &name) : name(name) {
+private:
+ SampleKey(const SampleKey& that) = delete;
+ SampleKey(SampleKeyIndex index, const string &name) : index(index), name(name) {
if (name.length() == 0) {
throw sample_exception("Bad sample key.");
}
}
+public:
+ friend class KeyDictionary;
+
inline
bool operator==(const SampleKey &that) const {
return name == that.name;
}
- string name;
+ const SampleKeyIndex index;
+ const string name;
};
class KeyDictionary {
public:
- typedef vector<SampleKey> v;
- typedef v::size_type index_t;
-
KeyDictionary() {
}
- index_t indexOf(const SampleKey key) {
- index_t i = 0;
- for (auto ptr = keys.begin(); ptr != keys.end(); ptr++, i++) {
- if (*ptr == key) {
- return i;
+ ~KeyDictionary() {
+ std::for_each(keys.begin(), keys.end(), std::default_delete<SampleKey>());
+ }
+ KeyDictionary(KeyDictionary& that) = delete;
+
+ SampleKey *indexOf(const string key) {
+ SampleKeyIndex i = 0;
+ for (auto ptr = keys.cbegin(); ptr != keys.cend(); ptr++, i++) {
+ if ((*ptr)->name == key) {
+ return *ptr;
}
}
- keys.push_back(key);
+ i = keys.size();
+ auto sample_key = new SampleKey(i, key);
+ keys.push_back(sample_key);
+
+ return sample_key;
+ }
+
+ SampleKey *at(SampleKeyIndex i) const {
+ if (i >= keys.size()) {
+ throw sample_exception("Out of bounds");
+ }
+
+ return keys.at(i);
+ }
+
+ vector<SampleKey *> findIndexes(SampleKeyVector &keys) {
+ vector<SampleKey *> indexes;
+
+ for (auto &key: keys) {
+ auto index = indexOf(key->name);
+ indexes.push_back(index);
+ }
- return keys.size() - 1;
+ return move(indexes);
}
- vector<index_t> findIndexes(v keys);
+ inline
+ SampleKeyVector::const_iterator end() const {
+ return keys.cend();
+ }
inline
- v::const_iterator begin() {
- return keys.begin();
+ SampleKeyVector::const_iterator begin() const {
+ return keys.cbegin();
}
+// string nameOf(SampleKeyIndex index) {
+// return keys.at(index).name;
+// }
+
inline
- v::const_iterator end() {
- return keys.end();
+ SampleKeyVector::size_type size() const {
+ return keys.size();
}
- string nameOf(index_t index) {
- return keys.at(index).name;
+ inline
+ bool empty() const {
+ return keys.empty();
}
private:
- v keys;
+ SampleKeyVector keys;
};
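The dictionary now owns its keys (deleting them in the destructor) and hands out stable SampleKey pointers: looking the same name up twice returns the same object, and the index records insertion order. A short sketch of that contract:

    #include "SoilMoistureIo.h"
    #include <cassert>
    using namespace trygvis::soil_moisture;

    void dictionary_sketch() {
        KeyDictionary dict;
        SampleKey *a = dict.indexOf("temperature");   // created on first use
        SampleKey *b = dict.indexOf("temperature");   // later lookups return the same pointer
        assert(a == b);
        assert(a->index == 0 && dict.at(0) == a);
        assert(dict.size() == 1);
    }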
class SampleRecord {
@@ -137,7 +178,8 @@ public:
return values.empty();
}
- o<string> at(size_t index) {
+ o<string> at(const SampleKey *key) const {
+ SampleKeyIndex index = key->index;
if (index >= values.size()) {
return o<string>();
}
@@ -145,15 +187,15 @@ public:
return values.at(index);
}
- void set(const KeyDictionary::index_t index, const std::string &value) {
- values.resize(max(values.size(), index + 1));
+ void set(const SampleKey *key, const std::string &value) {
+ values.resize(max(values.size(), key->index + 1));
- values[index] = o<string>(value);
+ values[key->index] = o<string>(value);
}
template<class A>
- const o<A> lexical_at(KeyDictionary::index_t index) {
- auto value = at(index);
+ const o<A> lexical_at(const SampleKey *key) {
+ auto value = at(key);
if (!value) {
return o<A>();
@@ -163,7 +205,7 @@ public:
}
string to_string() {
- KeyDictionary::index_t i = 0;
+ SampleKeyIndex i = 0;
string s;
for (auto ptr = values.begin(); ptr != values.end(); ptr++, i++) {
auto o = *ptr;
@@ -174,13 +216,13 @@ public:
auto value = o.get();
- s += dict.nameOf(i) + " = " + value + ", ";
+ s += dict.at(i)->name + " = " + value + ", ";
}
return s;
}
-private:
KeyDictionary &dict;
+private:
vec values;
};
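SampleRecord is now addressed by SampleKey pointers instead of raw indexes: `at` returns an empty optional for keys the record never stored, and `lexical_at` layers boost::lexical_cast on top of `at`. A sketch, assuming `o<>` is the optional wrapper used throughout this header and with illustrative key names:

    #include "SoilMoistureIo.h"
    using namespace trygvis::soil_moisture;

    void record_sketch() {
        KeyDictionary dict;
        SampleRecord rec(dict);

        const SampleKey *moisture = dict.indexOf("soil_moisture");
        rec.set(moisture, "412");

        o<string> raw     = rec.at(moisture);                    // present: set above
        o<long>   parsed  = rec.lexical_at<long>(moisture);      // 412 via lexical_cast
        o<string> missing = rec.at(dict.indexOf("battery"));     // never set: empty optional
    }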
@@ -200,49 +242,42 @@ public:
class CsvSampleOutputStream : public SampleOutputStream {
public:
- CsvSampleOutputStream(KeyDictionary &dict, unique_ptr<ostream> stream);
-
- CsvSampleOutputStream(KeyDictionary &dict, unique_ptr<ostream> stream, vector<SampleKey> fields);
+ CsvSampleOutputStream(unique_ptr<ostream> stream, KeyDictionary &dict);
- void write(SampleRecord values);
+ void write(SampleRecord sample);
+ const KeyDictionary &getDict() {
+ return dict;
+ }
+
private:
void writeHeader();
KeyDictionary &dict;
unique_ptr<ostream> stream;
bool headerWritten;
- vector<KeyDictionary::index_t> fields;
};
class JsonSampleOutputStream : public SampleOutputStream {
public:
- JsonSampleOutputStream(KeyDictionary &dict, unique_ptr<ostream> stream);
-
- JsonSampleOutputStream(KeyDictionary &dict, unique_ptr<ostream> stream, vector<SampleKey> fields);
+ JsonSampleOutputStream(unique_ptr<ostream> stream, KeyDictionary &dict);
- void write(SampleRecord values);
+ void write(SampleRecord sample) override;
private:
KeyDictionary &dict;
unique_ptr<ostream> stream;
- bool filterFields;
- vector<KeyDictionary::index_t> fields;
};
class SqlSampleOutputStream : public SampleOutputStream {
public:
- SqlSampleOutputStream(KeyDictionary &dict, unique_ptr<ostream> stream, string table_name);
-
- SqlSampleOutputStream(KeyDictionary &dict, unique_ptr<ostream> stream, string table_name, vector<SampleKey> fields);
+ SqlSampleOutputStream(unique_ptr<ostream> stream, KeyDictionary &dict, string table_name);
- void write(SampleRecord values);
+ void write(SampleRecord sample) override;
private:
KeyDictionary &dict;
unique_ptr<ostream> stream;
- bool filter_fields;
- vector<KeyDictionary::index_t> fields;
const string table_name;
};
@@ -264,8 +299,8 @@ protected:
class CsvSampleParser : public SampleStreamParser {
public:
- CsvSampleParser(KeyDictionary &dict, shared_ptr<SampleOutputStream> output) :
- SampleStreamParser(sample_format_type::CSV), dict(dict), output(output),
+ CsvSampleParser(shared_ptr<SampleOutputStream> output, KeyDictionary &dict) :
+ SampleStreamParser(sample_format_type::CSV), output(output), dict(dict),
line(make_shared<vector<uint8_t>>()) {
}
@@ -282,7 +317,7 @@ private:
class AutoSampleParser : public SampleStreamParser {
public:
- AutoSampleParser(KeyDictionary &dict, shared_ptr<SampleOutputStream> output);
+ AutoSampleParser(shared_ptr<SampleOutputStream> output, KeyDictionary &dict);
private:
unique_ptr<SampleStreamParser> parser;
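The parsers get the same argument reordering as the writers: the output sink now comes before the dictionary. A sketch of wiring a CSV parser into the in-memory buffer stream:

    #include "SoilMoistureIo.h"
    #include <memory>
    using namespace trygvis::soil_moisture;

    void parser_sketch() {
        KeyDictionary dict;
        auto buffer = std::make_shared<VectorSampleOutputStream>();
        auto parser = std::make_shared<CsvSampleParser>(buffer, dict);
        // parsed SampleRecords accumulate in buffer->samples
    }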
diff --git a/apps/sample-convert.cpp b/apps/sample-convert.cpp
index b3e5c02..5e87a15 100644
--- a/apps/sample-convert.cpp
+++ b/apps/sample-convert.cpp
@@ -61,22 +61,22 @@ public:
}
if (output_format == "plain") {
- output = make_shared<CsvSampleOutputStream>(dict, move(outputStream));
+ output = make_shared<CsvSampleOutputStream>(move(outputStream), dict);
} else if (output_format == "json") {
- output = make_shared<JsonSampleOutputStream>(dict, move(outputStream));
+ output = make_shared<JsonSampleOutputStream>(move(outputStream), dict);
} else if (output_format == "sql") {
if (table_name.size() == 0) {
cerr << "Missing option: table-name" << endl;
return EXIT_FAILURE;
}
- output = make_shared<SqlSampleOutputStream>(dict, move(outputStream), table_name);
+ output = make_shared<SqlSampleOutputStream>(move(outputStream), dict, table_name);
} else {
cerr << "Unsupported output format: " << output_format << endl;
return EXIT_FAILURE;
}
- auto input = make_shared<CsvSampleParser>(dict, output);
+ auto input = make_shared<CsvSampleParser>(output, dict);
char data[100];
while (!inputStream->eof()) {
diff --git a/apps/sample-timestamp.cpp b/apps/sample-timestamp.cpp
index 6ac2f86..dd9ab3c 100644
--- a/apps/sample-timestamp.cpp
+++ b/apps/sample-timestamp.cpp
@@ -14,12 +14,12 @@ namespace po = boost::program_options;
class TimestampFixingSampleOutputStream : public SampleOutputStream {
public:
- TimestampFixingSampleOutputStream(KeyDictionary dict, string timestamp_name, string now_name, time_t start_time, shared_ptr<SampleOutputStream> output) :
- timestamp_index(dict.indexOf(timestamp_name)), now_index(dict.indexOf(now_name)), start_time_(start_time), output_(output) {
+ TimestampFixingSampleOutputStream(shared_ptr<SampleOutputStream> output, KeyDictionary &dict, string timestamp_name, string now_name, time_t start_time) :
+ timestamp_key(dict.indexOf(timestamp_name)), now_key(dict.indexOf(now_name)), start_time_(start_time), output_(output) {
}
virtual void write(SampleRecord sample) override {
- o<long> relative_time_o = sample.lexical_at<long>(now_index);
+ o<long> relative_time_o = sample.lexical_at<long>(now_key);
if (!relative_time_o) {
return;
@@ -28,13 +28,13 @@ public:
long relative_time = relative_time_o.get();
string new_value = std::to_string(start_time_ + relative_time);
- sample.set(timestamp_index, new_value);
+ sample.set(timestamp_key, new_value);
output_->write(sample);
};
private:
- KeyDictionary::index_t now_index, timestamp_index;
+ const SampleKey* now_key, *timestamp_key;
time_t start_time_;
shared_ptr<SampleOutputStream> output_;
};
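The decorator above now resolves its keys once, in the constructor, and keeps SampleKey pointers. Constructing it follows the same sink-first convention; a sketch inside sample-timestamp.cpp, with an illustrative "now" column name and start time:

    void wire_timestamp_fix(std::shared_ptr<SampleOutputStream> downstream,
                            KeyDictionary &dict, time_t start_time) {
        auto fixed = std::make_shared<TimestampFixingSampleOutputStream>(
                downstream, dict, "timestamp", "now", start_time);
        // every record written through `fixed` gets: timestamp = start_time + now
    }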
@@ -43,7 +43,7 @@ class sample_timestamp : public app {
private:
string input_file, timestamp_name, now_name;
- KeyDictionary::index_t now_index;
+ SampleKey* now_key;
public:
sample_timestamp() : input_file("") {
@@ -78,7 +78,7 @@ public:
KeyDictionary dict;
- now_index = dict.indexOf(now_name);
+ now_key = dict.indexOf(now_name);
auto sample_buffer = make_shared<VectorSampleOutputStream>();
unique_ptr<SampleStreamParser> parser = open_sample_input_stream(dict, sample_buffer);
@@ -101,7 +101,7 @@ public:
SampleRecord sample = *--sample_buffer->samples.end();
- o<string> s = sample.at(now_index);
+ o<string> s = sample.at(now_key);
if (!s) {
cerr << "Missing key '" + now_name + "'." << endl;
cerr << "keys: " << sample.to_string() << endl;
@@ -129,8 +129,8 @@ public:
return EXIT_FAILURE;
}
- auto output_stream = open_sample_output_stream(dict, parser->type(), unique_ptr<ostream>(&cout));
- auto p = make_shared<TimestampFixingSampleOutputStream>(dict, "timestamp", now_name, start_time, move(output_stream));
+ auto output_stream = open_sample_output_stream(unique_ptr<ostream>(&cout), dict, parser->type());
+ auto p = make_shared<TimestampFixingSampleOutputStream>(move(output_stream), dict, "timestamp", now_name, start_time);
parser = open_sample_input_stream(dict, p, parser->type());
int recordCount = 0;
diff --git a/apps/sm-serial-read.cpp b/apps/sm-serial-read.cpp
index c7fb695..c52e7f9 100644
--- a/apps/sm-serial-read.cpp
+++ b/apps/sm-serial-read.cpp
@@ -126,17 +126,17 @@ public:
unique_ptr<ostream> outputStream = unique_ptr<ostream>(&cout);
if (format == Format::JSON) {
- output = make_shared<JsonSampleOutputStream>(dict, std::move(outputStream));
+ output = make_shared<JsonSampleOutputStream>(std::move(outputStream), dict);
} else if (format == Format::SQL) {
- output = make_shared<SqlSampleOutputStream>(dict, std::move(outputStream), "raw");
+ output = make_shared<SqlSampleOutputStream>(std::move(outputStream), dict, "raw");
} else if (format == Format::PLAIN) {
- output = make_shared<CsvSampleOutputStream>(dict, std::move(outputStream));
+ output = make_shared<CsvSampleOutputStream>(std::move(outputStream), dict);
} else {
cerr << "Unsupported format: " << boost::lexical_cast<string>(format) << endl;
return EXIT_FAILURE;
}
- shared_ptr<CsvSampleParser> input = make_shared<CsvSampleParser>(dict, output);
+ shared_ptr<CsvSampleParser> input = make_shared<CsvSampleParser>(output, dict);
port_handler(port_name, port, input).run();