diff --git a/C/common/config_category.cpp b/C/common/config_category.cpp index 17e73a5c20..5e6469ebf4 100644 --- a/C/common/config_category.cpp +++ b/C/common/config_category.cpp @@ -275,6 +275,26 @@ void ConfigCategory::addItem(const std::string& name, const std::string descript m_items.push_back(new CategoryItem(name, description, def, value, options)); } +/** + * Set the display name of an item + * + * @param name The item name in the category + * @param displayName The display name to set + * @return true if the item was found + */ +bool ConfigCategory::setItemDisplayName(const std::string& name, const std::string& displayName) +{ + for (unsigned int i = 0; i < m_items.size(); i++) + { + if (name.compare(m_items[i]->m_name) == 0) + { + m_items[i]->m_displayName = displayName; + return true; + } + } + return false; +} + /** * Delete all the items from the configuration category having a specific type * @@ -777,6 +797,7 @@ ConfigCategory::CategoryItem::CategoryItem(const string& name, const Value& item) { m_name = name; + m_itemType = UnknownType; if (! item.IsObject()) { throw new ConfigMalformed(); diff --git a/C/common/filter_pipeline.cpp b/C/common/filter_pipeline.cpp index c780c3dd4d..c6f3637a0b 100644 --- a/C/common/filter_pipeline.cpp +++ b/C/common/filter_pipeline.cpp @@ -86,130 +86,131 @@ bool FilterPipeline::loadFilters(const string& categoryName) Logger::getLogger()->info("FilterPipeline::loadFilters(): categoryName=%s, filters=%s", categoryName.c_str(), filter.c_str()); if (!filter.empty()) { - std::vector> filterInfo; + std::vector> filterInfo; - // Remove \" and leading/trailing " - // TODO: improve/change this - filter.erase(remove(filter.begin(), filter.end(), '\\' ), filter.end()); - size_t i; - while (! (i = filter.find('"')) || (i = filter.rfind('"')) == static_cast(filter.size() - 1)) - { - filter.erase(i, 1); - } + // Remove \" and leading/trailing " + // TODO: improve/change this + filter.erase(remove(filter.begin(), filter.end(), '\\' ), filter.end()); + size_t i; + while (! 
(i = filter.find('"')) || (i = filter.rfind('"')) == static_cast(filter.size() - 1)) + { + filter.erase(i, 1); + } - //Parse JSON object for filters - Document theFilters; - theFilters.Parse(filter.c_str()); - // The "pipeline" property must be an array - if (theFilters.HasParseError() || - !theFilters.HasMember(JSON_CONFIG_PIPELINE_ELEM) || - !theFilters[JSON_CONFIG_PIPELINE_ELEM].IsArray()) - { - string errMsg("loadFilters: can not parse JSON '"); - errMsg += string(JSON_CONFIG_FILTER_ELEM) + "' property"; - Logger::getLogger()->fatal(errMsg.c_str()); - throw runtime_error(errMsg); - } - else - { - const Value& filterList = theFilters[JSON_CONFIG_PIPELINE_ELEM]; - if (!filterList.Size()) + //Parse JSON object for filters + Document theFilters; + theFilters.Parse(filter.c_str()); + // The "pipeline" property must be an array + if (theFilters.HasParseError() || + !theFilters.HasMember(JSON_CONFIG_PIPELINE_ELEM) || + !theFilters[JSON_CONFIG_PIPELINE_ELEM].IsArray()) { - // Empty array, just return true - return true; + string errMsg("loadFilters: can not parse JSON '"); + errMsg += string(JSON_CONFIG_FILTER_ELEM) + "' property"; + Logger::getLogger()->fatal(errMsg.c_str()); + throw runtime_error(errMsg); } + else + { + const Value& filterList = theFilters[JSON_CONFIG_PIPELINE_ELEM]; + if (!filterList.Size()) + { + // Empty array, just return true + return true; + } - // Prepare printable list of filters - StringBuffer buffer; - Writer writer(buffer); - filterList.Accept(writer); - string printableList(buffer.GetString()); + // Prepare printable list of filters + StringBuffer buffer; + Writer writer(buffer); + filterList.Accept(writer); + string printableList(buffer.GetString()); - string logMsg("loadFilters: found filter(s) "); - logMsg += printableList + " for plugin '"; - logMsg += categoryName + "'"; + string logMsg("loadFilters: found filter(s) "); + logMsg += printableList + " for plugin '"; + logMsg += categoryName + "'"; - Logger::getLogger()->info(logMsg.c_str()); + Logger::getLogger()->info(logMsg.c_str()); - // Try loading all filter plugins: abort on any error - for (Value::ConstValueIterator itr = filterList.Begin(); itr != filterList.End(); ++itr) - { - // Get "plugin" item fromn filterCategoryName - string filterCategoryName = itr->GetString(); - ConfigCategory filterDetails = mgtClient->getCategory(filterCategoryName); - if (!filterDetails.itemExists("plugin")) - { - string errMsg("loadFilters: 'plugin' item not found "); - errMsg += "in " + filterCategoryName + " category"; - Logger::getLogger()->fatal(errMsg.c_str()); - throw runtime_error(errMsg); - } - string filterName = filterDetails.getValue("plugin"); - PLUGIN_HANDLE filterHandle; - // Load filter plugin only: we don't call any plugin method right now - filterHandle = loadFilterPlugin(filterName); - if (!filterHandle) + // Try loading all filter plugins: abort on any error + for (Value::ConstValueIterator itr = filterList.Begin(); itr != filterList.End(); ++itr) { - string errMsg("Cannot load filter plugin '" + filterName + "'"); - Logger::getLogger()->fatal(errMsg.c_str()); - throw runtime_error(errMsg); + // Get "plugin" item fromn filterCategoryName + string filterCategoryName = itr->GetString(); + ConfigCategory filterDetails = mgtClient->getCategory(filterCategoryName); + if (!filterDetails.itemExists("plugin")) + { + string errMsg("loadFilters: 'plugin' item not found "); + errMsg += "in " + filterCategoryName + " category"; + Logger::getLogger()->fatal(errMsg.c_str()); + throw runtime_error(errMsg); + } + 
string filterName = filterDetails.getValue("plugin"); + PLUGIN_HANDLE filterHandle; + // Load filter plugin only: we don't call any plugin method right now + filterHandle = loadFilterPlugin(filterName); + if (!filterHandle) + { + string errMsg("Cannot load filter plugin '" + filterName + "'"); + Logger::getLogger()->fatal(errMsg.c_str()); + throw runtime_error(errMsg); + } + else + { + // Save filter handler: key is filterCategoryName + filterInfo.push_back(pair + (filterCategoryName, filterHandle)); + } } - else + + // We have kept filter default config in the filterInfo map + // Handle configuration for each filter + PluginManager *pluginManager = PluginManager::getInstance(); + for (vector>::iterator itr = filterInfo.begin(); + itr != filterInfo.end(); + ++itr) { - // Save filter handler: key is filterCategoryName - filterInfo.push_back(pair - (filterCategoryName, filterHandle)); - } - } + // Get plugin default configuration + string filterConfig = pluginManager->getInfo(itr->second)->config; - // We have kept filter default config in the filterInfo map - // Handle configuration for each filter - PluginManager *pluginManager = PluginManager::getInstance(); - for (vector>::iterator itr = filterInfo.begin(); - itr != filterInfo.end(); - ++itr) - { - // Get plugin default configuration - string filterConfig = pluginManager->getInfo(itr->second)->config; + // Create/Update default filter category items + DefaultConfigCategory filterDefConfig(categoryName + "_" + itr->first, filterConfig); + string filterDescription = "Configuration of '" + itr->first; + filterDescription += "' filter for plugin '" + categoryName + "'"; + filterDefConfig.setDescription(filterDescription); - // Update filter category items - DefaultConfigCategory filterDefConfig(itr->first, filterConfig); - string filterDescription = "Configuration of '" + itr->first; - filterDescription += "' filter for plugin '" + categoryName + "'"; - filterDefConfig.setDescription(filterDescription); + if (!mgtClient->addCategory(filterDefConfig, true)) + { + string errMsg("Cannot create/update '" + \ + categoryName + "' filter category"); + Logger::getLogger()->fatal(errMsg.c_str()); + throw runtime_error(errMsg); + } + children.push_back(categoryName + "_" + itr->first); - if (!mgtClient->addCategory(filterDefConfig, true)) - { - string errMsg("Cannot create/update '" + \ - categoryName + "' filter category"); - Logger::getLogger()->fatal(errMsg.c_str()); - throw runtime_error(errMsg); - } - children.push_back(categoryName + "_" + itr->first); + // Instantiate the FilterPlugin class + // in order to call plugin entry points + FilterPlugin* currentFilter = new FilterPlugin(itr->first, + itr->second); - // Instantiate the FilterPlugin class - // in order to call plugin entry points - FilterPlugin* currentFilter = new FilterPlugin(itr->first, - itr->second); - - // Add filter to filters vector - m_filters.push_back(currentFilter); + // Add filter to filters vector + m_filters.push_back(currentFilter); + } } } - } - /* - * Put all the new catregories in the Filter category parent - * Create an empty South category if one doesn't exist - */ - string parentName = categoryName + " Filters"; - DefaultConfigCategory filterConfig(parentName, string("{}")); - filterConfig.setDescription("Filters for " + categoryName); - mgtClient->addCategory(filterConfig, true); - mgtClient->addChildCategories(parentName, children); - vector children1; - children1.push_back(parentName); - mgtClient->addChildCategories(categoryName, children1); - return true; + + 
/* + * Put all the new categories in the Filter category parent + * Create an empty South category if one doesn't exist + */ + string parentName = categoryName + " Filters"; + DefaultConfigCategory filterConfig(parentName, string("{}")); + filterConfig.setDescription("Filters for " + categoryName); + mgtClient->addCategory(filterConfig, true); + mgtClient->addChildCategories(parentName, children); + vector<string> children1; + children1.push_back(parentName); + mgtClient->addChildCategories(categoryName, children1); + return true; } catch (ConfigItemNotFound* e) { diff --git a/C/common/include/config_category.h b/C/common/include/config_category.h index e95e4da9cf..7ba9031534 100644 --- a/C/common/include/config_category.h +++ b/C/common/include/config_category.h @@ -53,7 +53,7 @@ class ConfigCategories { class ConfigCategory { public: - enum ItemType { StringItem, EnumerationItem, JsonItem, BoolItem, NumberItem, DoubleItem, ScriptItem, CategoryType}; + enum ItemType { UnknownType, StringItem, EnumerationItem, JsonItem, BoolItem, NumberItem, DoubleItem, ScriptItem, CategoryType}; ConfigCategory(const std::string& name, const std::string& json); ConfigCategory() {}; @@ -74,6 +74,7 @@ class ConfigCategory { std::string getDescription() const { return m_description; }; unsigned int getCount() const { return m_items.size(); }; bool itemExists(const std::string& name) const; + bool setItemDisplayName(const std::string& name, const std::string& displayName); std::string getValue(const std::string& name) const; std::string getType(const std::string& name) const; std::string getDescription(const std::string& name) const; diff --git a/C/common/process.cpp b/C/common/process.cpp index a1282d1952..66bc518951 100644 --- a/C/common/process.cpp +++ b/C/common/process.cpp @@ -15,11 +15,59 @@ #include #include #include +#include <execinfo.h> +#include <dlfcn.h> +#include <cxxabi.h> +#include <signal.h> + #define LOG_SERVICE_NAME "FogLAMP Process" using namespace std; +/** + * Signal handler to log stack traces on fatal signals + */ +static void handler(int sig) +{ +Logger *logger = Logger::getLogger(); +void *array[20]; +char buf[1024]; +int size; + + // get void*'s for all entries on the stack + size = backtrace(array, 20); + + // print out all the frames to stderr + logger->fatal("Signal %d (%s) trapped:\n", sig, strsignal(sig)); + char **messages = backtrace_symbols(array, size); + for (int i = 0; i < size; i++) + { + Dl_info info; + if (dladdr(array[i], &info) && info.dli_sname) + { + char *demangled = NULL; + int status = -1; + if (info.dli_sname[0] == '_') + demangled = abi::__cxa_demangle(info.dli_sname, NULL, 0, &status); + snprintf(buf, sizeof(buf), "%-3d %*p %s + %zd---------", + i, int(2 + sizeof(void*) * 2), array[i], + status == 0 ? demangled : + info.dli_sname == 0 ? 
messages[i] : info.dli_sname, + (char *)array[i] - (char *)info.dli_saddr); + free(demangled); + } + else + { + snprintf(buf, sizeof(buf), "%-3d %*p %s---------", + i, int(2 + sizeof(void*) * 2), array[i], messages[i]); + } + logger->fatal("(%d) %s", i, buf); + } + free(messages); + exit(1); +} + // Destructor FogLampProcess::~FogLampProcess() { @@ -34,6 +82,12 @@ FogLampProcess::FogLampProcess(int argc, char** argv) : m_argc(argc), m_arg_vals((const char**) argv) { + signal(SIGSEGV, handler); + signal(SIGILL, handler); + signal(SIGBUS, handler); + signal(SIGFPE, handler); + signal(SIGABRT, handler); + string myName = LOG_SERVICE_NAME; m_logger = new Logger(myName); diff --git a/C/plugins/storage/sqlite/connection.cpp b/C/plugins/storage/sqlite/connection.cpp index eed12bf546..b02cf0503f 100644 --- a/C/plugins/storage/sqlite/connection.cpp +++ b/C/plugins/storage/sqlite/connection.cpp @@ -600,8 +600,16 @@ unsigned long nRows = 0, nCols = 0; Value value; if (!d.Parse(str).HasParseError()) { - // JSON parsing ok, use the document - value = Value(d, allocator); + if (d.IsNumber()) + { + // Set string + value = Value(str, allocator); + } + else + { + // JSON parsing ok, use the document + value = Value(d, allocator); + } } else { @@ -852,7 +860,7 @@ SQLBuffer jsonConstraints; } else { - sql.append("strftime('%Y-%m-%d %H:%M:%f', "); + sql.append("strftime('" F_DATEH24_MS "', "); sql.append((*itr)["column"].GetString()); sql.append(", 'utc')"); } @@ -1532,14 +1540,14 @@ bool Connection::formatDate(char *formatted_date, size_t buffer_size, const char // Extract up to seconds memset(&tm, 0, sizeof(tm)); - valid_date = strptime(date, "%Y-%m-%d %H:%M:%S", &tm); + valid_date = strptime(date, F_DATEH24_SEC, &tm); if (! valid_date) { return (false); } - strftime (formatted_date, buffer_size, "%Y-%m-%d %H:%M:%S", &tm); + strftime (formatted_date, buffer_size, F_DATEH24_SEC, &tm); // Work out the microseconds from the fractional part of the seconds char fractional[10] = {0}; @@ -1773,6 +1781,8 @@ bool add_row = false; * Fetch a block of readings from the reading table * It might not work with SQLite 3 * + * Fetch, used by the north side, returns timestamp in UTC. + * * NOTE : it expects to handle a date having a fixed format * with milliseconds, microseconds and timezone expressed, * like for example : @@ -1783,7 +1793,7 @@ bool Connection::fetchReadings(unsigned long id, unsigned int blksize, std::string& resultSet) { -char sqlbuffer[1100]; +char sqlbuffer[512]; char *zErrMsg = NULL; int rc; int retrieve; @@ -1812,7 +1822,6 @@ int retrieve; sql_cmd, id, blksize); - logSQL("ReadingsFetch", sqlbuffer); sqlite3_stmt *stmt; // Prepare the SQL statement and get the result set @@ -1854,6 +1863,8 @@ int retrieve; /** * Perform a query against the readings table * + * retrieveReadings, used by the API, returns timestamp in localtime. 
+ * */ bool Connection::retrieveReadings(const string& condition, string& resultSet) { @@ -1879,9 +1890,9 @@ bool isAggregate = false; asset_code, read_key, reading, - strftime('%Y-%m-%d %H:%M:%S', user_ts, 'localtime') || + strftime(')" F_DATEH24_SEC R"(', user_ts, 'localtime') || substr(user_ts, instr(user_ts, '.'), 7) AS user_ts, - strftime('%Y-%m-%d %H:%M:%f', ts, 'localtime') AS ts + strftime(')" F_DATEH24_MS R"(', ts, 'localtime') AS ts FROM foglamp.readings)"; sql.append(sql_cmd); @@ -1902,7 +1913,7 @@ bool isAggregate = false; sql.append(document["modifier"].GetString()); sql.append(' '); } - if (!jsonAggregates(document, document["aggregate"], sql, jsonConstraints)) + if (!jsonAggregates(document, document["aggregate"], sql, jsonConstraints, true)) { return false; } @@ -1929,7 +1940,23 @@ bool isAggregate = false; sql.append(", "); if (!itr->IsObject()) // Simple column name { - sql.append(itr->GetString()); + if (strcmp(itr->GetString() ,"user_ts") == 0) + { + // Display without TZ expression and microseconds also + sql.append(" strftime('" F_DATEH24_SEC "', user_ts, 'localtime') "); + sql.append(" || substr(user_ts, instr(user_ts, '.'), 7) "); + sql.append(" as user_ts "); + } + else if (strcmp(itr->GetString() ,"ts") == 0) + { + // Display without TZ expression and microseconds also + sql.append(" strftime('" F_DATEH24_MS "', ts, 'localtime') "); + sql.append(" as ts "); + } + else + { + sql.append(itr->GetString()); + } } else { @@ -1975,7 +2002,7 @@ bool isAggregate = false; { // Extract milliseconds and microseconds for the user_ts fields - sql.append("strftime('%Y-%m-%d %H:%M:%S', user_ts, 'utc') "); + sql.append("strftime('" F_DATEH24_SEC "', user_ts, 'utc') "); sql.append(" || substr(user_ts, instr(user_ts, '.'), 7) "); if (! itr->HasMember("alias")) { @@ -1985,7 +2012,7 @@ bool isAggregate = false; } else { - sql.append("strftime('%Y-%m-%d %H:%M:%f', "); + sql.append("strftime('" F_DATEH24_MS "', "); sql.append((*itr)["column"].GetString()); sql.append(", 'utc')"); if (! itr->HasMember("alias")) @@ -2001,7 +2028,7 @@ bool isAggregate = false; { // Extract milliseconds and microseconds for the user_ts fields - sql.append("strftime('%Y-%m-%d %H:%M:%S', user_ts, 'localtime') "); + sql.append("strftime('" F_DATEH24_SEC "', user_ts, 'localtime') "); sql.append(" || substr(user_ts, instr(user_ts, '.'), 7) "); if (! itr->HasMember("alias")) { @@ -2011,7 +2038,7 @@ bool isAggregate = false; } else { - sql.append("strftime('%Y-%m-%d %H:%M:%f', "); + sql.append("strftime('" F_DATEH24_MS "', "); sql.append((*itr)["column"].GetString()); sql.append(", 'localtime')"); if (! itr->HasMember("alias")) @@ -2035,7 +2062,7 @@ bool isAggregate = false; { // Extract milliseconds and microseconds for the user_ts fields - sql.append("strftime('%Y-%m-%d %H:%M:%S', user_ts, 'localtime') "); + sql.append("strftime('" F_DATEH24_SEC "', user_ts, 'localtime') "); sql.append(" || substr(user_ts, instr(user_ts, '.'), 7) "); if (! itr->HasMember("alias")) { @@ -2045,7 +2072,7 @@ bool isAggregate = false; } else { - sql.append("strftime('%Y-%m-%d %H:%M:%f', "); + sql.append("strftime('" F_DATEH24_MS "', "); sql.append((*itr)["column"].GetString()); sql.append(", 'localtime')"); if (! 
itr->HasMember("alias")) @@ -2095,9 +2122,9 @@ bool isAggregate = false; asset_code, read_key, reading, - strftime('%Y-%m-%d %H:%M:%S', user_ts, 'localtime') || + strftime(')" F_DATEH24_SEC R"(', user_ts, 'localtime') || substr(user_ts, instr(user_ts, '.'), 7) AS user_ts, - strftime('%Y-%m-%d %H:%M:%f', ts, 'localtime') AS ts + strftime(')" F_DATEH24_MS R"(', ts, 'localtime') AS ts FROM foglamp.)"; sql.append(sql_cmd); @@ -2471,7 +2498,8 @@ int blocks = 0; bool Connection::jsonAggregates(const Value& payload, const Value& aggregates, SQLBuffer& sql, - SQLBuffer& jsonConstraint) + SQLBuffer& jsonConstraint, + bool isTableReading) { if (aggregates.IsObject()) { @@ -2498,7 +2526,18 @@ bool Connection::jsonAggregates(const Value& payload, } else { - sql.append(col); + // an operation different from the 'count' is requested + if (isTableReading && (col.compare("user_ts") == 0) ) + { + sql.append("strftime('" F_DATEH24_SEC "', user_ts, 'localtime') "); + sql.append(" || substr(user_ts, instr(user_ts, '.'), 7) "); + } + else + { + sql.append("\""); + sql.append(col); + sql.append("\""); + } } } else if (aggregates.HasMember("json")) @@ -2631,7 +2670,19 @@ bool Connection::jsonAggregates(const Value& payload, sql.append('('); if (itr->HasMember("column")) { - sql.append((*itr)["column"].GetString()); + string column_name= (*itr)["column"].GetString(); + if (isTableReading && (column_name.compare("user_ts") == 0) ) + { + sql.append("strftime('" F_DATEH24_SEC "', user_ts, 'localtime') "); + sql.append(" || substr(user_ts, instr(user_ts, '.'), 7) "); + } + else + { + sql.append("\""); + sql.append(column_name); + sql.append("\""); + } + } else if (itr->HasMember("json")) { diff --git a/C/plugins/storage/sqlite/include/connection.h b/C/plugins/storage/sqlite/include/connection.h index 1eb04d8110..2b26164f6c 100644 --- a/C/plugins/storage/sqlite/include/connection.h +++ b/C/plugins/storage/sqlite/include/connection.h @@ -50,7 +50,8 @@ class Connection { bool jsonAggregates(const rapidjson::Value&, const rapidjson::Value&, SQLBuffer&, - SQLBuffer&); + SQLBuffer&, + bool isTableReading = false); bool returnJson(const rapidjson::Value&, SQLBuffer&, SQLBuffer&); char *trim(char *str); const char *escape(const char *); diff --git a/C/plugins/storage/sqlitememory/connection.cpp b/C/plugins/storage/sqlitememory/connection.cpp index 2fb752cbfe..72afc94908 100644 --- a/C/plugins/storage/sqlitememory/connection.cpp +++ b/C/plugins/storage/sqlitememory/connection.cpp @@ -39,13 +39,18 @@ using namespace rapidjson; #define _DB_NAME "/foglamp.sqlite" -#define F_TIMEH24_S "%H:%M:%S" -#define F_DATEH24_S "%Y-%m-%d %H:%M:%S" -#define F_DATEH24_M "%Y-%m-%d %H:%M" -#define F_DATEH24_H "%Y-%m-%d %H" +#define LEN_BUFFER_DATE 100 +#define F_TIMEH24_S "%H:%M:%S" +#define F_DATEH24_S "%Y-%m-%d %H:%M:%S" +#define F_DATEH24_M "%Y-%m-%d %H:%M" +#define F_DATEH24_H "%Y-%m-%d %H" // This is the default datetime format in FogLAMP: 2018-05-03 18:15:00.622 -#define F_DATEH24_MS "%Y-%m-%d %H:%M:%f" -#define SQLITE3_NOW "strftime('%Y-%m-%d %H:%M:%f', 'now', 'localtime')" +#define F_DATEH24_MS "%Y-%m-%d %H:%M:%f" +// Format up to seconds +#define F_DATEH24_SEC "%Y-%m-%d %H:%M:%S" +#define SQLITE3_NOW "strftime('%Y-%m-%d %H:%M:%f', 'now', 'localtime')" +// The default precision is milliseconds, it adds microseconds and timezone +#define SQLITE3_NOW_READING "strftime('%Y-%m-%d %H:%M:%f000+00:00', 'now')" #define SQLITE3_FOGLAMP_DATETIME_TYPE "DATETIME" static time_t connectErrorTime = 0; map sqliteDateFormat = { @@ -106,82 +111,118 @@ 
bool Connection::applyColumnDateTimeFormat(sqlite3_stmt *pStmt, int i, string& newDate) { - /** - * Handle here possible unformatted DATETIME column type - * If (column_name == column_original_name) AND - * (sqlite3_column_table_name() == "DATETIME") - * we assume the column has not been formatted - * by any datetime() or strftime() SQLite function. - * Thus we apply default FOGLAMP formatting: - * "%Y-%m-%d %H:%M:%f" with 'localtime' - */ + + bool apply_format = false; + string formatStmt = {}; if (sqlite3_column_database_name(pStmt, i) != NULL && - sqlite3_column_table_name(pStmt, i) != NULL && - (strcmp(sqlite3_column_origin_name(pStmt, i), - sqlite3_column_name(pStmt, i)) == 0)) + sqlite3_column_table_name(pStmt, i) != NULL) { - const char* pzDataType; - int retType = sqlite3_table_column_metadata(inMemory, - sqlite3_column_database_name(pStmt, i), - sqlite3_column_table_name(pStmt, i), - sqlite3_column_name(pStmt, i), - &pzDataType, - NULL, NULL, NULL, NULL); - - // Check whether to Apply dateformat - if (pzDataType != NULL && - retType == SQLITE_OK && - strcmp(pzDataType, SQLITE3_FOGLAMP_DATETIME_TYPE) == 0 && - strcmp(sqlite3_column_origin_name(pStmt, i), - sqlite3_column_name(pStmt, i)) == 0) - { - // Column metadata found and column datatype is "pzDataType" - string formatStmt = string("SELECT strftime('"); - formatStmt += string(F_DATEH24_MS); - formatStmt += "', '" + string((char *)sqlite3_column_text(pStmt, i)); - formatStmt += "')"; - char* zErrMsg = NULL; - // New formatted data - char formattedData[100] = ""; + if ((strcmp(sqlite3_column_origin_name(pStmt, i), "user_ts") == 0) && + (strcmp(sqlite3_column_table_name(pStmt, i), "readings") == 0) && + (strlen((char *) sqlite3_column_text(pStmt, i)) == 32)) + { - // Exec the format SQL - int rc = SQLexec(inMemory, - formatStmt.c_str(), - dateCallback, - formattedData, - &zErrMsg); + // Extract milliseconds and microseconds for the user_ts field of the readings table + formatStmt = string("SELECT strftime('"); + formatStmt += string(F_DATEH24_SEC); + formatStmt += "', '" + string((char *) sqlite3_column_text(pStmt, i)); + formatStmt += "')"; + formatStmt += " || substr('" + string((char *) sqlite3_column_text(pStmt, i)); + formatStmt += "', instr('" + string((char *) sqlite3_column_text(pStmt, i)); + formatStmt += "', '.'), 7)"; - if (rc == SQLITE_OK ) + apply_format = true; + } + else + { + /** + * Handle here possible unformatted DATETIME column type + * If (column_name == column_original_name) AND + * (sqlite3_column_table_name() == "DATETIME") + * we assume the column has not been formatted + * by any datetime() or strftime() SQLite function. 
+ * Thus we apply default FOGLAMP formatting: + * "%Y-%m-%d %H:%M:%f" + */ + if (sqlite3_column_database_name(pStmt, i) != NULL && + sqlite3_column_table_name(pStmt, i) != NULL && + (strcmp(sqlite3_column_origin_name(pStmt, i), + sqlite3_column_name(pStmt, i)) == 0)) { + const char *pzDataType; + int retType = sqlite3_table_column_metadata(inMemory, + sqlite3_column_database_name(pStmt, i), + sqlite3_column_table_name(pStmt, i), + sqlite3_column_name(pStmt, i), + &pzDataType, + NULL, NULL, NULL, NULL); + + // Check whether to Apply dateformat + if (pzDataType != NULL && + retType == SQLITE_OK && + strcmp(pzDataType, SQLITE3_FOGLAMP_DATETIME_TYPE) == 0 && + strcmp(sqlite3_column_origin_name(pStmt, i), + sqlite3_column_name(pStmt, i)) == 0) + { + // Column metadata found and column datatype is "pzDataType" + formatStmt = string("SELECT strftime('"); + formatStmt += string(F_DATEH24_MS); + formatStmt += "', '" + string((char *) sqlite3_column_text(pStmt, i)); + formatStmt += "')"; - + apply_format = true; - + } + else + { + // Format not done + // Just log the error if present + if (retType != SQLITE_OK) + { + Logger::getLogger()->error("SQLite3 failed " \ + "to call sqlite3_table_column_metadata() " \ + "for column '%s'", + sqlite3_column_name(pStmt, i)); + } + } + } + } + + if (apply_format) + { + + char* zErrMsg = NULL; + // New formatted data + char formattedData[100] = ""; + + // Exec the format SQL + int rc = SQLexec(inMemory, + formatStmt.c_str(), + dateCallback, + formattedData, + &zErrMsg); + + if (rc == SQLITE_OK ) + { + // Use new formatted datetime value + newDate.assign(formattedData); + + return true; + } else { - // Format not done - // Just log the error if present - if (retType != SQLITE_OK) - { - Logger::getLogger()->error("SQLite3 memory failed " \ - "to call sqlite3_table_column_metadata() " \ - "for column '%s'", - sqlite3_column_name(pStmt, i)); - } + Logger::getLogger()->error("SELECT dateformat '%s': error %s", + formatStmt.c_str(), + zErrMsg); + + sqlite3_free(zErrMsg); } + } + return false; } @@ -233,7 +274,68 @@ bool retCode; outFormat.append(colName); } - outFormat.append(")"); + outFormat.append(", 'localtime')"); // MR TRY THIS + retCode = true; + } + else + { + // Use column as is + outFormat.append(colName); + retCode = false; + } + + return retCode; +} + +/** + * Apply the specified date format + * using the available formats in SQLite3 + * for a specific column + * + * If the requested format is not available + * the input column is used as is. + * Additionally milliseconds could be rounded + * upon request. + * The routine returns false if the date format is not + * found and the caller might decide to raise an error + * or use the non-formatted value + * + * @param inFormat Input date format from application + * @param colName The column name to format + * @param outFormat The formatted column + * @return True if format has been applied or + * false if no format is in use. 
+ */ +static bool applyColumnDateFormatLocaltime(const string& inFormat, + const string& colName, + string& outFormat, + bool roundMs = false) + +{ +bool retCode; + // Get format, if any, from the supported formats map + const string format = sqliteDateFormat[inFormat]; + if (!format.empty()) + { + // Apply found format via SQLite3 strftime() + outFormat.append("strftime('"); + outFormat.append(format); + outFormat.append("', "); + + // Check whether we have to round milliseconds + if (roundMs == true && + format.back() == 'f') + { + outFormat.append("cast(round((julianday("); + outFormat.append(colName); + outFormat.append(") - 2440587.5)*86400 -0.00005, 3) AS FLOAT), 'unixepoch'"); + } + else + { + outFormat.append(colName); + } + + outFormat.append(", 'localtime')"); // MR force localtime retCode = true; } else @@ -287,13 +389,14 @@ Connection::Connection() */ const char *inMemoryConn = "file:?cache=shared"; + // UTC time as default const char * createReadings = "CREATE TABLE foglamp.readings (" \ "id INTEGER PRIMARY KEY AUTOINCREMENT," \ "asset_code character varying(50) NOT NULL," \ "read_key uuid UNIQUE," \ "reading JSON NOT NULL DEFAULT '{}'," \ - "user_ts DATETIME DEFAULT (STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW', 'localtime'))," \ - "ts DATETIME DEFAULT (STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW', 'localtime'))" \ + "user_ts DATETIME DEFAULT (STRFTIME('%Y-%m-%d %H:%M:%f+00:00', 'NOW' ))," \ + "ts DATETIME DEFAULT (STRFTIME('%Y-%m-%d %H:%M:%f+00:00', 'NOW' ))" \ ");"; const char * createReadingsFk = "CREATE INDEX fki_readings_fk1 ON readings (asset_code);"; @@ -398,7 +501,7 @@ unsigned long nRows = 0, nCols = 0; // Iterate over all the rows in the resultSet while ((rc = sqlite3_step(pStmt)) == SQLITE_ROW) { - // Get number of columns foir current row + // Get number of columns for current row nCols = sqlite3_column_count(pStmt); // Create the 'row' object Value row(kObjectType); @@ -549,11 +652,12 @@ int *nRows = (int *)data; } /** - * Perform a query against a common table + * Perform a query against the readings table + * + * retrieveReadings, used by the API, returns timestamp in localtime. * */ -bool Connection::retrieveReadings(const string& condition, - string& resultSet) +bool Connection::retrieveReadings(const string& condition, string& resultSet) { // Default template parameter uses UTF8 and MemoryPoolAllocator. 
Document document; @@ -570,7 +674,18 @@ SQLBuffer jsonConstraints; if (condition.empty()) { - sql.append("SELECT * FROM readings"); + const char *sql_cmd = R"( + SELECT + id, + asset_code, + read_key, + reading, + strftime(')" F_DATEH24_SEC R"(', user_ts, 'localtime') || + substr(user_ts, instr(user_ts, '.'), 7) AS user_ts, + strftime(')" F_DATEH24_MS R"(', ts, 'localtime') AS ts + FROM readings)"; + + sql.append(sql_cmd); } else { @@ -587,7 +702,7 @@ SQLBuffer jsonConstraints; sql.append(document["modifier"].GetString()); sql.append(' '); } - if (!jsonAggregates(document, document["aggregate"], sql, jsonConstraints)) + if (!jsonAggregates(document, document["aggregate"], sql, jsonConstraints, true)) { return false; } @@ -610,11 +725,29 @@ SQLBuffer jsonConstraints; } for (Value::ConstValueIterator itr = columns.Begin(); itr != columns.End(); ++itr) { + if (col) sql.append(", "); if (!itr->IsObject()) // Simple column name { - sql.append(itr->GetString()); + if (strcmp(itr->GetString() ,"user_ts") == 0) + { + // Display without TZ expression and microseconds also + sql.append(" strftime('" F_DATEH24_SEC "', user_ts, 'localtime') "); + sql.append(" || substr(user_ts, instr(user_ts, '.'), 7) "); + sql.append(" as user_ts "); + } + else if (strcmp(itr->GetString() ,"ts") == 0) + { + // Display without TZ expression and microseconds also + sql.append(" strftime('" F_DATEH24_MS "', ts, 'localtime') "); + sql.append(" as ts "); + } + else + { + sql.append(itr->GetString()); + } + } else { @@ -652,24 +785,95 @@ SQLBuffer jsonConstraints; return false; } // SQLite3 doesnt support time zone formatting - if (strcasecmp((*itr)["timezone"].GetString(), "utc") != 0) + const char *tz = (*itr)["timezone"].GetString(); + + if (strncasecmp(tz, "utc", 3) == 0) { - raiseError("retrieve", - "SQLite3 Memory plugin does not support timezones in qeueries"); - return false; + if (strcmp((*itr)["column"].GetString() ,"user_ts") == 0) + { + // Extract milliseconds and microseconds for the user_ts fields + + sql.append("strftime('" F_DATEH24_SEC "', user_ts, 'utc') "); + sql.append(" || substr(user_ts, instr(user_ts, '.'), 7) "); + if (! itr->HasMember("alias")) + { + sql.append(" AS "); + sql.append((*itr)["column"].GetString()); + } + } + else + { + sql.append("strftime('" F_DATEH24_MS "', "); + sql.append((*itr)["column"].GetString()); + sql.append(", 'utc')"); + if (! itr->HasMember("alias")) + { + sql.append(" AS "); + sql.append((*itr)["column"].GetString()); + } + } + } + else if (strncasecmp(tz, "localtime", 9) == 0) + { + if (strcmp((*itr)["column"].GetString() ,"user_ts") == 0) + { + // Extract milliseconds and microseconds for the user_ts fields + + sql.append("strftime('" F_DATEH24_SEC "', user_ts, 'localtime') "); + sql.append(" || substr(user_ts, instr(user_ts, '.'), 7) "); + if (! itr->HasMember("alias")) + { + sql.append(" AS "); + sql.append((*itr)["column"].GetString()); + } + } + else + { + sql.append("strftime('" F_DATEH24_MS "', "); + sql.append((*itr)["column"].GetString()); + sql.append(", 'localtime')"); + if (! 
itr->HasMember("alias")) + { + sql.append(" AS "); + sql.append((*itr)["column"].GetString()); + } + } } else { - sql.append("strftime('%Y-%m-%d %H:%M:%f', "); - sql.append((*itr)["column"].GetString()); - sql.append(", 'utc')"); - sql.append(" AS "); - sql.append((*itr)["column"].GetString()); + raiseError("retrieve", + "SQLite3 plugin does not support timezones in queries"); + return false; } + } else { - sql.append((*itr)["column"].GetString()); + + if (strcmp((*itr)["column"].GetString() ,"user_ts") == 0) + { + // Extract milliseconds and microseconds for the user_ts fields + + sql.append("strftime('" F_DATEH24_SEC "', user_ts, 'localtime') "); + sql.append(" || substr(user_ts, instr(user_ts, '.'), 7) "); + if (! itr->HasMember("alias")) + { + sql.append(" AS "); + sql.append((*itr)["column"].GetString()); + } + } + else + { + sql.append("strftime('" F_DATEH24_MS "', "); + sql.append((*itr)["column"].GetString()); + sql.append(", 'localtime')"); + if (! itr->HasMember("alias")) + { + sql.append(" AS "); + sql.append((*itr)["column"].GetString()); + } + } + } sql.append(' '); } @@ -705,7 +909,17 @@ SQLBuffer jsonConstraints; sql.append(document["modifier"].GetString()); sql.append(' '); } - sql.append(" * FROM foglamp."); + const char *sql_cmd = R"( + id, + asset_code, + read_key, + reading, + strftime(')" F_DATEH24_SEC R"(', user_ts, 'localtime') || + substr(user_ts, instr(user_ts, '.'), 7) AS user_ts, + strftime(')" F_DATEH24_MS R"(', ts, 'localtime') AS ts + FROM foglamp.)"; + + sql.append(sql_cmd); } sql.append("readings"); if (document.HasMember("where")) @@ -778,6 +992,123 @@ SQLBuffer jsonConstraints; } } + +/** + * Format a date to a fixed format with milliseconds, microseconds and + * timezone expressed, examples : + * + * case - formatted |2019-01-01 10:01:01.000000+00:00| date |2019-01-01 10:01:01| + * case - formatted |2019-02-01 10:02:01.000000+00:00| date |2019-02-01 10:02:01.0| + * case - formatted |2019-02-02 10:02:02.841000+00:00| date |2019-02-02 10:02:02.841| + * case - formatted |2019-02-03 10:02:03.123456+00:00| date |2019-02-03 10:02:03.123456| + * case - formatted |2019-03-01 10:03:01.100000+00:00| date |2019-03-01 10:03:01.1+00:00| + * case - formatted |2019-03-02 10:03:02.123000+00:00| date |2019-03-02 10:03:02.123+00:00| + * case - formatted |2019-03-03 10:03:03.123456+00:00| date |2019-03-03 10:03:03.123456+00:00| + * case - formatted |2019-03-04 10:03:04.123456+01:00| date |2019-03-04 10:03:04.123456+01:00| + * case - formatted |2019-03-05 10:03:05.123456-01:00| date |2019-03-05 10:03:05.123456-01:00| + * case - formatted |2019-03-04 10:03:04.123456+02:30| date |2019-03-04 10:03:04.123456+02:30| + * case - formatted |2019-03-05 10:03:05.123456-02:30| date |2019-03-05 10:03:05.123456-02:30| + * + * @param out false if the date is invalid + * + */ +bool Connection::formatDate(char *formatted_date, size_t buffer_size, const char *date) { + + struct timeval tv = {0}; + struct tm tm = {0}; + char *valid_date = nullptr; + + // Extract up to seconds + memset(&tm, 0, sizeof(tm)); + valid_date = strptime(date, F_DATEH24_SEC, &tm); + + if (! 
valid_date) + { + return (false); + } + + strftime (formatted_date, buffer_size, F_DATEH24_SEC, &tm); + + // Work out the microseconds from the fractional part of the seconds + char fractional[10] = {0}; + sscanf(date, "%*d-%*d-%*d %*d:%*d:%*d.%[0-9]*", fractional); + // Truncate to max 6 digits + fractional[6] = 0; + int multiplier = 6 - (int)strlen(fractional); + if (multiplier < 0) + multiplier = 0; + while (multiplier--) + strcat(fractional, "0"); + + strcat(formatted_date ,"."); + strcat(formatted_date ,fractional); + + // Handles timezone + char timezone_hour[5] = {0}; + char timezone_min[5] = {0}; + char sign[2] = {0}; + + sscanf(date, "%*d-%*d-%*d %*d:%*d:%*d.%*d-%2[0-9]:%2[0-9]", timezone_hour, timezone_min); + if (timezone_hour[0] != 0) + { + strcat(sign, "-"); + } + else + { + memset(timezone_hour, 0, sizeof(timezone_hour)); + memset(timezone_min, 0, sizeof(timezone_min)); + + sscanf(date, "%*d-%*d-%*d %*d:%*d:%*d.%*d+%2[0-9]:%2[0-9]", timezone_hour, timezone_min); + if (timezone_hour[0] != 0) + { + strcat(sign, "+"); + } + else + { + // No timezone is expressed in the source date + // the default UTC is added + strcat(formatted_date, "+00:00"); + } + } + + if (sign[0] != 0) + { + if (timezone_hour[0] != 0) + { + strcat(formatted_date, sign); + + // Pad with 0 if an hour having only 1 digit was provided + // +1 -> +01 + if (strlen(timezone_hour) == 1) + strcat(formatted_date, "0"); + + strcat(formatted_date, timezone_hour); + strcat(formatted_date, ":"); + } + + if (timezone_min[0] != 0) + { + strcat(formatted_date, timezone_min); + + // Pad with 0 if minutes having only 1 digit were provided + // 3 -> 30 + if (strlen(timezone_min) == 1) + strcat(formatted_date, "0"); + + } + else + { + // Minutes aren't expressed in the source date + strcat(formatted_date, "00"); + } + } + + + return (true); + + +} + /** * Append a set of readings to the readings table */ @@ -787,6 +1118,7 @@ int Connection::appendReadings(const char *readings) Document doc; SQLBuffer sql; int row = 0; +bool add_row = false; ParseResult ok = doc.Parse(readings); if (!ok) @@ -795,7 +1127,7 @@ int row = 0; return -1; } - sql.append("INSERT INTO foglamp.readings ( asset_code, read_key, reading, user_ts ) VALUES "); + sql.append("INSERT INTO foglamp.readings ( user_ts, asset_code, read_key, reading ) VALUES "); if (!doc.HasMember("readings")) { @@ -816,48 +1148,81 @@ int row = 0; "Each reading in the readings array must be an object"); return -1; } - if (row) - { - sql.append(", ("); - } - else - { - sql.append('('); - } - row++; - sql.append('\''); - sql.append((*itr)["asset_code"].GetString()); - // Python code is passing the string None when here is no read_key in the payload - if (itr->HasMember("read_key") && strcmp((*itr)["read_key"].GetString(), "None") != 0) - { - sql.append("', \'"); - sql.append((*itr)["read_key"].GetString()); - sql.append("', \'"); - } - else - { - // No "read_key" in this reading, insert NULL - sql.append("', NULL, '"); - } - StringBuffer buffer; - Writer writer(buffer); - (*itr)["reading"].Accept(writer); - sql.append(buffer.GetString()); - sql.append("\', "); + add_row = true; + + // Handles - user_ts const char *str = (*itr)["user_ts"].GetString(); if (strcmp(str, "now()") == 0) { - sql.append(SQLITE3_NOW); + if (row) + { + sql.append(", ("); + } + else + { + sql.append('('); + } + + sql.append(SQLITE3_NOW_READING); } else { + char formatted_date[LEN_BUFFER_DATE] = {0}; + if (! 
formatDate(formatted_date, sizeof(formatted_date), str) ) + { + raiseError("appendReadings", "Invalid date |%s|", str); + add_row = false; + } + else + { + if (row) + { + sql.append(", ("); + } + else + { + sql.append('('); + } + + sql.append('\''); + sql.append(formatted_date); + sql.append('\''); + } + } + + if (add_row) + { + row++; + + // Handles - asset_code + sql.append(",\'"); + sql.append((*itr)["asset_code"].GetString()); + + // Handles - read_key + // Python code is passing the string None when here is no read_key in the payload + if (itr->HasMember("read_key") && strcmp((*itr)["read_key"].GetString(), "None") != 0) + { + sql.append("', \'"); + sql.append((*itr)["read_key"].GetString()); + sql.append("', \'"); + } + else + { + // No "read_key" in this reading, insert NULL + sql.append("', NULL, '"); + } + + // Handles - reading + StringBuffer buffer; + Writer writer(buffer); + (*itr)["reading"].Accept(writer); + sql.append(buffer.GetString()); sql.append('\''); - sql.append(escape(str)); - sql.append('\''); + + sql.append(')'); } - sql.append(')'); } sql.append(';'); @@ -894,32 +1259,46 @@ int row = 0; /** * Fetch a block of readings from the reading table + * + * Fetch, used by the north side, returns timestamp in UTC. + * + * NOTE : it expects to handle a date having a fixed format + * with milliseconds, microseconds and timezone expressed, + * like for example : + * + * 2019-01-11 15:45:01.123456+01:00 */ bool Connection::fetchReadings(unsigned long id, unsigned int blksize, std::string& resultSet) { -char sqlbuffer[300]; +char sqlbuffer[512]; char *zErrMsg = NULL; int rc; int retrieve; + // SQL command to extract the data from the foglamp.readings + const char *sql_cmd = R"( + SELECT + id, + asset_code, + read_key, + reading, + strftime('%%Y-%%m-%%d %%H:%%M:%%S', user_ts, 'utc') || + substr(user_ts, instr(user_ts, '.'), 7) AS user_ts, + strftime('%%Y-%%m-%%d %%H:%%M:%%f', ts, 'utc') AS ts + FROM foglamp.readings + WHERE id >= %lu + ORDER BY id ASC + LIMIT %u; + )"; + /* * This query assumes datetime values are in 'localtime' */ - snprintf(sqlbuffer, sizeof(sqlbuffer), - "SELECT id, " \ - "asset_code, " \ - "read_key, " \ - "reading, " \ - "strftime('%%Y-%%m-%%d %%H:%%M:%%f', user_ts, 'utc') AS \"user_ts\", " \ - "strftime('%%Y-%%m-%%d %%H:%%M:%%f', ts, 'utc') AS \"ts\" " \ - "FROM foglamp.readings " \ - "WHERE id >= %lu " \ - "ORDER BY id ASC " \ - "LIMIT %u;", + sql_cmd, id, blksize); @@ -1143,7 +1522,8 @@ long numReadings = 0; bool Connection::jsonAggregates(const Value& payload, const Value& aggregates, SQLBuffer& sql, - SQLBuffer& jsonConstraint) + SQLBuffer& jsonConstraint, + bool isTableReading) { if (aggregates.IsObject()) { @@ -1159,11 +1539,32 @@ bool Connection::jsonAggregates(const Value& payload, "Missing property \"column\" or \"json\""); return false; } + string column_name = aggregates["column"].GetString(); + sql.append(aggregates["operation"].GetString()); sql.append('('); if (aggregates.HasMember("column")) { - sql.append(aggregates["column"].GetString()); + if (strcmp(aggregates["operation"].GetString(), "count") != 0) + { + // an operation different from the 'count' is requested + if (isTableReading && (column_name.compare("user_ts") == 0) ) + { + sql.append("strftime('" F_DATEH24_SEC "', user_ts, 'localtime') "); + sql.append(" || substr(user_ts, instr(user_ts, '.'), 7) "); + } + else + { + sql.append("\""); + sql.append(column_name); + sql.append("\""); + } + } + else + { + // 'count' operation is requested + sql.append(column_name); + } } else if 
(aggregates.HasMember("json")) { @@ -1295,7 +1696,19 @@ bool Connection::jsonAggregates(const Value& payload, sql.append('('); if (itr->HasMember("column")) { - sql.append((*itr)["column"].GetString()); + string column_name= (*itr)["column"].GetString(); + if (isTableReading && (column_name.compare("user_ts") == 0) ) + { + sql.append("strftime('" F_DATEH24_SEC "', user_ts, 'localtime') "); + sql.append(" || substr(user_ts, instr(user_ts, '.'), 7) "); + } + else + { + sql.append("\""); + sql.append(column_name); + sql.append("\""); + } + } else if (itr->HasMember("json")) { @@ -1540,7 +1953,7 @@ bool Connection::jsonAggregates(const Value& payload, } /** - * Process the modifers for limit, skip, sort and group + * Process the modifiers for limit, skip, sort and group */ bool Connection::jsonModifiers(const Value& payload, SQLBuffer& sql) { @@ -1718,7 +2131,7 @@ bool Connection::jsonModifiers(const Value& payload, SQLBuffer& sql) * */ bool Connection::jsonWhereClause(const Value& whereClause, - SQLBuffer& sql) + SQLBuffer& sql, bool convertLocaltime) { if (!whereClause.IsObject()) { @@ -1755,7 +2168,10 @@ bool Connection::jsonWhereClause(const Value& whereClause, } sql.append("< datetime('now', '-"); sql.append(whereClause["value"].GetInt()); - sql.append(" seconds', 'localtime')"); + if (convertLocaltime) + sql.append(" seconds', 'localtime')"); // Get value in localtime + else + sql.append(" seconds')"); // Get value in UTC by asking for no timezone } else if (!cond.compare("newer")) { @@ -1767,7 +2183,10 @@ bool Connection::jsonWhereClause(const Value& whereClause, } sql.append("> datetime('now', '-"); sql.append(whereClause["value"].GetInt()); - sql.append(" seconds', 'localtime')"); + if (convertLocaltime) + sql.append(" seconds', 'localtime')"); // Get value in localtime + else + sql.append(" seconds')"); // Get value in UTC by asking for no timezone } else if (!cond.compare("in") || !cond.compare("not in")) { @@ -1779,8 +2198,8 @@ bool Connection::jsonWhereClause(const Value& whereClause, sql.append(" ( "); int field = 0; for (Value::ConstValueIterator itr = whereClause["value"].Begin(); - itr != whereClause["value"].End(); - ++itr) + itr != whereClause["value"].End(); + ++itr) { if (field) { @@ -1811,9 +2230,9 @@ bool Connection::jsonWhereClause(const Value& whereClause, else { string message("The \"value\" of a \"" + \ - cond + \ - "\" condition array element must be " \ - "a string, integer or double."); + cond + \ + "\" condition array element must be " \ + "a string, integer or double."); raiseError("where clause", message.c_str()); return false; } @@ -1839,7 +2258,7 @@ bool Connection::jsonWhereClause(const Value& whereClause, } else if (whereClause["value"].IsString()) { sql.append('\''); - sql.append(whereClause["value"].GetString()); + sql.append(escape(whereClause["value"].GetString())); sql.append('\''); } } @@ -1847,7 +2266,7 @@ bool Connection::jsonWhereClause(const Value& whereClause, if (whereClause.HasMember("and")) { sql.append(" AND "); - if (!jsonWhereClause(whereClause["and"], sql)) + if (!jsonWhereClause(whereClause["and"], sql, convertLocaltime)) { return false; } @@ -1855,7 +2274,7 @@ bool Connection::jsonWhereClause(const Value& whereClause, if (whereClause.HasMember("or")) { sql.append(" OR "); - if (!jsonWhereClause(whereClause["or"], sql)) + if (!jsonWhereClause(whereClause["or"], sql, convertLocaltime)) { return false; } diff --git a/C/plugins/storage/sqlitememory/include/connection.h b/C/plugins/storage/sqlitememory/include/connection.h index 
a50e5c9bfe..7cf538abf5 100644 --- a/C/plugins/storage/sqlitememory/include/connection.h +++ b/C/plugins/storage/sqlitememory/include/connection.h @@ -28,6 +28,7 @@ class Connection { unsigned long sent, std::string& results); long tableSize(const std::string& table); void setTrace(bool flag) { m_logSQL = flag; }; + static bool formatDate(char *formatted_date, size_t formatted_date_size, const char *date); private: int SQLexec(sqlite3 *db, const char *sql, int (*callback)(void*,int,char**,char**), @@ -36,12 +37,13 @@ class Connection { void raiseError(const char *operation, const char *reason,...); sqlite3 *inMemory; // Handle for :memory: database int mapResultSet(void *res, std::string& resultSet); - bool jsonWhereClause(const rapidjson::Value& whereClause, SQLBuffer&); + bool jsonWhereClause(const rapidjson::Value& whereClause, SQLBuffer&, bool convertLocaltime = false); bool jsonModifiers(const rapidjson::Value&, SQLBuffer&); - bool jsonAggregates(const rapidjson::Value&, - const rapidjson::Value&, - SQLBuffer&, - SQLBuffer&); + bool jsonAggregates(const rapidjson::Value&, + const rapidjson::Value&, + SQLBuffer&, + SQLBuffer&, + bool isTableReading = false); bool returnJson(const rapidjson::Value&, SQLBuffer&, SQLBuffer&); char *trim(char *str); const char *escape(const char *); diff --git a/C/services/south-plugin-interfaces/python/python_plugin_interface.cpp b/C/services/south-plugin-interfaces/python/python_plugin_interface.cpp index 191301b9a9..b937a6e609 100644 --- a/C/services/south-plugin-interfaces/python/python_plugin_interface.cpp +++ b/C/services/south-plugin-interfaces/python/python_plugin_interface.cpp @@ -611,20 +611,14 @@ static void logErrorMessage() //Get error message PyObject *pType, *pValue, *pTraceback; PyErr_Fetch(&pType, &pValue, &pTraceback); + PyErr_NormalizeException(&pType, &pValue, &pTraceback); - // NOTE from : - // https://docs.python.org/2/c-api/exceptions.html - // - // The value and traceback object may be NULL - // even when the type object is not. + PyObject* str_exc_value = PyObject_Repr(pValue); + PyObject* pyExcValueStr = PyUnicode_AsEncodedString(str_exc_value, "utf-8", "Error ~"); const char* pErrorMessage = pValue ? - PyBytes_AsString(pValue) : + PyBytes_AsString(pyExcValueStr) : "no error description."; - - Logger::getLogger()->fatal("logErrorMessage: Error '%s' ", - pErrorMessage ? 
- pErrorMessage : - "no description"); + Logger::getLogger()->fatal("logErrorMessage: Error '%s' ", pErrorMessage); // Reset error PyErr_Clear(); @@ -633,6 +627,8 @@ static void logErrorMessage() Py_CLEAR(pType); Py_CLEAR(pValue); Py_CLEAR(pTraceback); + Py_CLEAR(str_exc_value); + Py_CLEAR(pyExcValueStr); } }; diff --git a/C/services/south/include/defaults.h b/C/services/south/include/defaults.h index e990db64e7..b1e7c21ab0 100644 --- a/C/services/south/include/defaults.h +++ b/C/services/south/include/defaults.h @@ -12,13 +12,17 @@ static struct { const char *name; + const char *displayName; const char *description; const char *type; const char *value; } defaults[] = { - { "readingsPerSec", "Number of readings to generate per sec", "integer", "1" }, - { "maxSendLatency", "Maximum time to spend filling buffer before sending", "integer", "5000" }, - { "bufferThreshold", "Number of readings to buffer before sending", "integer", "100" }, - { NULL, NULL, NULL, NULL } + { "maxSendLatency", "Maximum Reading Latency (mS)", + "Maximum time to spend filling buffer before sending", "integer", "5000" }, + { "bufferThreshold", "Maximum buffered Readings", + "Number of readings to buffer before sending", "integer", "100" }, + { "readingsPerSec", "Reading Rate", + "Number of readings to generate per interval", "integer", "1" }, + { NULL, NULL, NULL, NULL, NULL } }; #endif diff --git a/C/services/south/include/south_service.h b/C/services/south/include/south_service.h index e7caf14617..f8b105b11c 100644 --- a/C/services/south/include/south_service.h +++ b/C/services/south/include/south_service.h @@ -37,7 +37,7 @@ class SouthService : public ServiceHandler { private: void addConfigDefaults(DefaultConfigCategory& defaults); bool loadPlugin(); - int createTimerFd(int usecs); + int createTimerFd(struct timeval rate); void createConfigCategories(DefaultConfigCategory configCategory, std::string parent_name,std::string current_name); private: SouthPlugin *southPlugin; @@ -48,7 +48,7 @@ class SouthService : public ServiceHandler { ConfigCategory m_config; ConfigCategory m_configAdvanced; ManagementClient *m_mgtClient; - unsigned long m_readingsPerSec; + unsigned long m_readingsPerSec; // May not be per second, new rate defines time units unsigned int m_threshold; unsigned long m_timeout; Ingest *m_ingest; diff --git a/C/services/south/south.cpp b/C/services/south/south.cpp index d8e252c88f..087f9529b6 100644 --- a/C/services/south/south.cpp +++ b/C/services/south/south.cpp @@ -285,7 +285,19 @@ void SouthService::start(string& coreAddress, unsigned short corePort) // Get and ingest data if (! 
southPlugin->isAsync()) { - m_timerfd = createTimerFd(1000000/(int)m_readingsPerSec); // interval to be passed is in usecs + string units = m_configAdvanced.getValue("units"); + unsigned long dividend = 1000000; + if (units.compare("second") == 0) + dividend = 1000000; + else if (units.compare("minute") == 0) + dividend = 60000000; + else if (units.compare("hour") == 0) + dividend = 3600000000; + unsigned long usecs = dividend / m_readingsPerSec; + struct timeval rate; + rate.tv_sec = (int)(usecs / 1000000); + rate.tv_usec = (int)(usecs % 1000000); + m_timerfd = createTimerFd(rate); // interval to be passed is in usecs if (m_timerfd < 0) { logger->fatal("Could not create timer FD"); @@ -537,11 +549,23 @@ void SouthService::configChange(const string& categoryName, const string& catego m_configAdvanced = ConfigCategory(m_name+"Advanced", category); try { unsigned long newval = (unsigned long)strtol(m_configAdvanced.getValue("readingsPerSec").c_str(), NULL, 10); + string units = m_configAdvanced.getValue("units"); + unsigned long dividend = 1000000; + if (units.compare("second") == 0) + dividend = 1000000; + else if (units.compare("minute") == 0) + dividend = 60000000; + else if (units.compare("hour") == 0) + dividend = 3600000000; if (newval != m_readingsPerSec) { m_readingsPerSec = newval; close(m_timerfd); - m_timerfd = createTimerFd(1000000/(int)m_readingsPerSec); // interval to be passed is in usecs + unsigned long usecs = dividend / m_readingsPerSec; + struct timeval rate; + rate.tv_sec = (int)(usecs / 1000000); + rate.tv_usec = (int)(usecs % 1000000); + m_timerfd = createTimerFd(rate); // interval to be passed is in usecs } } catch (ConfigItemNotFound e) { logger->error("Failed to update poll interval following configuration change"); @@ -572,13 +596,21 @@ void SouthService::addConfigDefaults(DefaultConfigCategory& defaultConfig) for (int i = 0; defaults[i].name; i++) { defaultConfig.addItem(defaults[i].name, defaults[i].description, - defaults[i].type, defaults[i].value, defaults[i].value); + defaults[i].type, defaults[i].value, defaults[i].value); + defaultConfig.setItemDisplayName(defaults[i].name, defaults[i].displayName); } + /* Add the reading rate units */ + vector rateUnits = { "second", "minute", "hour" }; + defaultConfig.addItem("units", "Reading Rate Per", + "second", "second", rateUnits); + defaultConfig.setItemDisplayName("units", "Reading Rate Per"); + /* Add the set of logging levels to the service */ vector logLevels = { "error", "warning", "info", "debug" }; defaultConfig.addItem("logLevel", "Minimum logging level reported", "warning", "warning", logLevels); + defaultConfig.setItemDisplayName("logLevel", "Minimum Log Level"); } /** @@ -587,7 +619,7 @@ void SouthService::addConfigDefaults(DefaultConfigCategory& defaultConfig) * * @param usecs Time in micro-secs after which data would be available on the timer FD */ -int SouthService::createTimerFd(int usecs) +int SouthService::createTimerFd(struct timeval rate) { int fd = -1; struct itimerspec new_value; @@ -596,16 +628,16 @@ int SouthService::createTimerFd(int usecs) if (clock_gettime(CLOCK_REALTIME, &now) == -1) Logger::getLogger()->error("clock_gettime"); - new_value.it_value.tv_sec = now.tv_sec; - new_value.it_value.tv_nsec = now.tv_nsec + usecs*1000; + new_value.it_value.tv_sec = now.tv_sec + rate.tv_sec; + new_value.it_value.tv_nsec = now.tv_nsec + rate.tv_usec*1000; if (new_value.it_value.tv_nsec >= 1000000000) { new_value.it_value.tv_sec += new_value.it_value.tv_nsec/1000000000; new_value.it_value.tv_nsec %= 
1000000000; } - new_value.it_interval.tv_sec = 0; - new_value.it_interval.tv_nsec = usecs*1000; + new_value.it_interval.tv_sec = rate.tv_sec; + new_value.it_interval.tv_nsec = rate.tv_usec*1000; if (new_value.it_interval.tv_nsec >= 1000000000) { new_value.it_interval.tv_sec += new_value.it_interval.tv_nsec/1000000000; diff --git a/C/services/storage/storage.cpp b/C/services/storage/storage.cpp index 66a0f75fed..9062be9711 100644 --- a/C/services/storage/storage.cpp +++ b/C/services/storage/storage.cpp @@ -18,11 +18,59 @@ #include #include #include +#include <execinfo.h> +#include <dlfcn.h> +#include <cxxabi.h> +#include <signal.h> extern int makeDaemon(void); using namespace std; +/** + * Signal handler to log stack traces on fatal signals + */ +static void handler(int sig) +{ +Logger *logger = Logger::getLogger(); +void *array[20]; +char buf[1024]; +int size; + + // get void*'s for all entries on the stack + size = backtrace(array, 20); + + // print out all the frames to stderr + logger->fatal("Signal %d (%s) trapped:\n", sig, strsignal(sig)); + char **messages = backtrace_symbols(array, size); + for (int i = 0; i < size; i++) + { + Dl_info info; + if (dladdr(array[i], &info) && info.dli_sname) + { + char *demangled = NULL; + int status = -1; + if (info.dli_sname[0] == '_') + demangled = abi::__cxa_demangle(info.dli_sname, NULL, 0, &status); + snprintf(buf, sizeof(buf), "%-3d %*p %s + %zd---------", + i, int(2 + sizeof(void*) * 2), array[i], + status == 0 ? demangled : + info.dli_sname == 0 ? messages[i] : info.dli_sname, + (char *)array[i] - (char *)info.dli_saddr); + free(demangled); + } + else + { + snprintf(buf, sizeof(buf), "%-3d %*p %s---------", + i, int(2 + sizeof(void*) * 2), array[i], messages[i]); + } + logger->fatal("(%d) %s", i, buf); + } + free(messages); + exit(1); +} + + /** * Storage service main entry point */ @@ -122,6 +170,12 @@ unsigned short servicePort; config = new StorageConfiguration(); logger = new Logger(myName); + signal(SIGSEGV, handler); + signal(SIGILL, handler); + signal(SIGBUS, handler); + signal(SIGFPE, handler); + signal(SIGABRT, handler); + if (config->getValue("port") == NULL) { servicePort = 0; // default to a dynamic port diff --git a/C/tasks/north/sending_process/sending.cpp b/C/tasks/north/sending_process/sending.cpp index 0c068c156a..e5d2d96749 100644 --- a/C/tasks/north/sending_process/sending.cpp +++ b/C/tasks/north/sending_process/sending.cpp @@ -70,28 +70,36 @@ static const string sendingDefaultConfig = "\"enable\": {" "\"description\": \"A switch that can be used to enable or disable execution of " "the sending process.\", \"type\": \"boolean\", \"default\": \"true\" , \"readonly\": \"true\" }," - "\"duration\": {" - "\"description\": \"How long the sending process should run (in seconds) before stopping.\", " - "\"type\": \"integer\", \"default\": \"60\" , \"order\": \"7\", \"displayName\" : \"Duration\" }, " - "\"blockSize\": {" - "\"description\": \"The size of a block of readings to send in each transmission.\", " - "\"type\": \"integer\", \"default\": \"500\", \"order\": \"8\", \"displayName\" : \"Readings Block Size\" }, " - "\"sleepInterval\": {" - "\"description\": \"A period of time, expressed in seconds, " - "to wait between attempts to send readings when there are no " - "readings to be sent.\", \"type\": \"integer\", \"default\": \"1\", \"order\": \"11\", \"displayName\" : \"Sleep Interval\" }, " "\"streamId\": {" "\"description\": \"Identifies the specific stream to handle and the related information," " among them the ID of the last object streamed.\", " "\"type\": 
\"integer\", \"default\": \"0\", " - "\"readonly\": \"true\" }, " - "\"memoryBufferSize\": {" - "\"description\": \"Number of elements of blockSize size to be buffered in memory\"," - "\"type\": \"integer\", " - "\"default\": \"10\", " - "\"order\": \"12\", \"displayName\" : \"Memory Buffer Size\" ," - "\"readonly\": \"false\" " - "} " + "\"readonly\": \"true\" } " + "}"; + +// Sending process advanced configuration +static const string sendingAdvancedConfig = + "{" \ + "\"duration\": {" \ + "\"description\": \"How long the sending process " \ + "should run (in seconds) before stopping.\", " \ + "\"type\": \"integer\", \"default\": \"60\" , " \ + "\"order\": \"7\", \"displayName\" : \"Duration\" }, " \ + "\"blockSize\": {" \ + "\"description\": \"The size of a block of readings to send " \ + "in each transmission.\", " \ + "\"type\": \"integer\", \"default\": \"500\", \"order\": \"8\", " \ + "\"displayName\" : \"Readings Block Size\" }, " \ + "\"sleepInterval\": {" \ + "\"description\": \"A period of time, expressed in seconds, " \ + "to wait between attempts to send readings when there are no " \ + "readings to be sent.\", \"type\": \"integer\", \"default\": \"1\", " \ + "\"order\": \"11\", \"displayName\" : \"Sleep Interval\" }, " \ + "\"memoryBufferSize\": {" \ + "\"description\": \"Number of elements of blockSize size to be buffered in memory\", " \ + "\"type\": \"integer\", \"default\": \"10\", " \ + "\"order\": \"12\", \"displayName\" : \"Memory Buffer Size\" ," \ + "\"readonly\": \"false\" } " \ "}"; volatile std::sig_atomic_t signalReceived = 0; @@ -773,6 +781,7 @@ ConfigCategory SendingProcess::fetchConfiguration(const std::string& defaultConf #endif ConfigCategory configuration; + ConfigCategory advancedConfiguration; try { // Create category, with "default" values only DefaultConfigCategory category(categoryName, @@ -791,19 +800,36 @@ ConfigCategory SendingProcess::fetchConfiguration(const std::string& defaultConf } // Create/Update hierarchical configuration categories - createConfigCategories(category, PARENT_CONFIGURATION_KEY, categoryName, CONFIG_CATEGORY_DESCRIPTION); + createConfigCategories(category, + PARENT_CONFIGURATION_KEY, + categoryName, + CONFIG_CATEGORY_DESCRIPTION); + + // Create advanced configuration category + string advancedCatName = categoryName + string("Advanced"); + DefaultConfigCategory defConfigAdvanced(advancedCatName, + sendingAdvancedConfig); + // Set/Updaqte advanced configuration category + this->getManagementClient()->addCategory(defConfigAdvanced, true); + // Set advanced configuration category as child pf parent categoryName + vector children1; + children1.push_back(advancedCatName); + this->getManagementClient()->addChildCategories(categoryName, children1); // Get the category with values and defaults configuration = this->getManagementClient()->getCategory(categoryName); + // Get the advanced category with values and defaults + advancedConfiguration = this->getManagementClient()->getCategory(advancedCatName); + /** - * Handle the sending process parameters here + * Handle the sending process parameters here: + * fetch the Advanced configuration */ - - string blockSize = configuration.getValue("blockSize"); - string duration = configuration.getValue("duration"); - string sleepInterval = configuration.getValue("sleepInterval"); - string memoryBufferSize = configuration.getValue("memoryBufferSize"); + string blockSize = advancedConfiguration.getValue("blockSize"); + string duration = advancedConfiguration.getValue("duration"); + string 
diff --git a/C/tasks/north/sending_process/sending_process.cpp b/C/tasks/north/sending_process/sending_process.cpp
index ee3ff2a8a7..b8ce5ae4ed 100644
--- a/C/tasks/north/sending_process/sending_process.cpp
+++ b/C/tasks/north/sending_process/sending_process.cpp
@@ -45,6 +45,12 @@ condition_variable cond_var;
 // Buffer max elements
 unsigned long memoryBufferSize;
 
+// Exit code:
+// 0 = success (some data sent)
+// 1 = 100% failure sending data to north server
+// 2 = internal errors
+int exitCode = 1;
+
 // Used to identify logs
 const string LOG_SERVICE_NAME = "SendingProcess/sending_process";
 
@@ -80,7 +86,7 @@ int main(int argc, char** argv)
 	{
 		cerr << "Exception in " << argv[0] << " : " << e.what() << endl;
 		// Return failure for class instance/configuration etc
-		exit(1);
+		exit(2);
 	}
 	// Catch all exceptions
 	catch (...)
@@ -88,11 +94,11 @@ int main(int argc, char** argv)
 		std::exception_ptr p = std::current_exception();
 		string name = (p ? p.__cxa_exception_type()->name() : "null");
 		cerr << "Generic Exception in " << argv[0] << " : " << name << endl;
-		exit(1);
+		exit(2);
 	}
 
 	// Return success
-	exit(0);
+	exit(exitCode);
 }
 
 /**
@@ -432,10 +438,12 @@ static void sendDataThread(SendingProcess *sendData)
 			if (sentReadings)
 			{
 				processUpdate = true;
+				exitCode = 0;
 			}
 		}
 		else
 		{
+			exitCode = 0;
 			// We have an empty readings set: check last id
 			if (sendData->m_last_read_id.at(sendIdx) > 0)
 			{
@@ -445,6 +453,8 @@ static void sendDataThread(SendingProcess *sendData)
 
 		if (processUpdate)
 		{
+			exitCode = 0;
+
 			/** Sending done */
 			sendData->setUpdateDb(true);
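With exitCode initialised to 1 and cleared to 0 as soon as any block is sent (or there was nothing to send), a caller can distinguish the three outcomes. A sketch of consuming this convention (Python, with a hypothetical invocation path and task name):

    import subprocess

    rc = subprocess.run(["./sending_process", "--name", "North Sending"]).returncode
    if rc == 0:
        print("success: at least some data was sent, or there was nothing to send")
    elif rc == 1:
        print("100% failure sending data to the north server")
    else:  # 2
        print("internal error while starting or configuring the task")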
The packages needed to buil
 - sqlite3
 - libsqlite3-dev
 
-On Ubuntu-based Linux distributions the packages can be installed with *apt-get*:
+On Ubuntu-based Linux distributions the packages can be installed either with the provided `requirements.sh `_ script or manually with *apt-get*:
 ::
    apt-get install avahi-daemon curl
    apt-get install cmake g++ make build-essential autoconf automake uuid-dev
diff --git a/VERSION b/VERSION
index 90bd74bd9a..4861170611 100755
--- a/VERSION
+++ b/VERSION
@@ -1,2 +1,2 @@
-foglamp_version=1.5.0
+foglamp_version=1.5.1
 foglamp_schema=26
diff --git a/docs/91_version_history.rst b/docs/91_version_history.rst
index 414cf0f737..4a7ded010d 100644
--- a/docs/91_version_history.rst
+++ b/docs/91_version_history.rst
@@ -25,6 +25,21 @@ Version History
 FogLAMP v1
 ==========
 
+v1.5.1
+-------
+
+Release Date: 2019-03-12
+
+- **FogLAMP Core**
+
+    - Bug Fix: plugin loading errors
+
+
+- **GUI**
+
+    - Bug Fix: uptime shows up to 24 hour clock only
+
+
 v1.5.0
 -------
diff --git a/docs/92_downloads.rst b/docs/92_downloads.rst
index b5eaa9928d..c25837f578 100644
--- a/docs/92_downloads.rst
+++ b/docs/92_downloads.rst
@@ -27,17 +27,17 @@
 https://github.com/foglamp/storage-postgres
 
-.. |intel 1.5.0 Ubuntu 16.04| raw:: html
+.. |intel 1.5.1 Ubuntu 16.04| raw:: html
 
-    v1.5.0 Ubuntu 16.04
+    v1.5.1 Ubuntu 16.04
 
-.. |intel 1.5.0 Ubuntu 18.04| raw:: html
+.. |intel 1.5.1 Ubuntu 18.04| raw:: html
 
-    v1.5.0 Ubuntu 18.04
+    v1.5.1 Ubuntu 18.04
 
-.. |arm 1.5.0| raw:: html
+.. |arm 1.5.1| raw:: html
 
-    v1.5.0 ARM
+    v1.5.1 ARM
 
@@ -54,14 +54,14 @@ We have created Debian for Intel and ARM architectures. The packages have been t
 FogLAMP Debian Packages for Intel
 ---------------------------------
 
-- |intel 1.5.0 Ubuntu 16.04|
-- |intel 1.5.0 Ubuntu 18.04|
+- |intel 1.5.1 Ubuntu 16.04|
+- |intel 1.5.1 Ubuntu 18.04|
 
 FogLAMP Debian Packages for ARM
 -------------------------------
 
-- |arm 1.5.0|
+- |arm 1.5.1|
 
 Download/Clone from GitHub
diff --git a/python/foglamp/common/configuration_manager.py b/python/foglamp/common/configuration_manager.py
index 8aecdfa942..6de3199abd 100644
--- a/python/foglamp/common/configuration_manager.py
+++ b/python/foglamp/common/configuration_manager.py
@@ -13,6 +13,7 @@
 import ipaddress
 import datetime
 import os
+from math import *
 
 from foglamp.common.storage_client.payload_builder import PayloadBuilder
 from foglamp.common.storage_client.storage_client import StorageClientAsync
@@ -218,7 +219,7 @@ async def _validate_category_val(self, category_name, category_val, set_value_va
                     .format(category_name, item_name, type(item_val)))
 
             optional_item_entries = {'readonly': 0, 'order': 0, 'length': 0, 'maximum': 0, 'minimum': 0,
-                                     'deprecated': 0, 'displayName': 0}
+                                     'deprecated': 0, 'displayName': 0, 'rule': 0}
             expected_item_entries = {'description': 0, 'default': 0, 'type': 0}
 
             if require_entry_value:
@@ -269,7 +270,7 @@ def get_entry_val(k):
                     if (self._validate_type_value('integer', entry_val) or self._validate_type_value('float', entry_val)) is False:
                         raise ValueError('For {} category, entry value must be an integer or float for item name '
                                          '{}; got {}'.format(category_name, entry_name, type(entry_val)))
-                elif entry_name == 'displayName':
+                elif entry_name == 'rule' or entry_name == 'displayName':
                     if not isinstance(entry_val, str):
                         raise ValueError('For {} category, entry value must be string for item name {}; got {}'
                                          .format(category_name, entry_name, type(entry_val)))
@@ -467,7 +468,11 @@ async def update_configuration_item_bulk(self, category_name, config_item_list):
         for item_name, new_val in
config_item_list.items(): if item_name not in cat_info: raise KeyError('{} config item not found'.format(item_name)) - + # Evaluate new_val as per rule if defined + if 'rule' in cat_info[item_name]: + rule = cat_info[item_name]['rule'].replace("value", new_val) + if eval(rule) is False: + raise ValueError('Proposed value for item_name {} is not allowed as per rule defined'.format(item_name)) if cat_info[item_name]['type'] == 'JSON': if isinstance(new_val, dict): pass @@ -692,6 +697,11 @@ async def set_category_item_value_entry(self, category_name, item_name, new_valu raise TypeError('Unrecognized value name for item_name {}'.format(item_name)) new_value_entry = self._clean(storage_value_entry['type'], new_value_entry) + # Evaluate new_value_entry as per rule if defined + if 'rule' in storage_value_entry: + rule = storage_value_entry['rule'].replace("value", new_value_entry) + if eval(rule) is False: + raise ValueError('Proposed value for item_name {} is not allowed as per rule defined'.format(item_name)) await self._update_value_val(category_name, item_name, new_value_entry) # always get value from storage cat_item = await self._read_item_val(category_name, item_name) @@ -786,6 +796,12 @@ async def create_category(self, category_name, category_value, category_descript try: # validate new category_val, set "value" from default category_val_prepared = await self._validate_category_val(category_name, category_value, True) + # Evaluate value as per rule if defined + for item_name in category_val_prepared: + if 'rule' in category_val_prepared[item_name]: + rule = category_val_prepared[item_name]['rule'].replace("value", category_val_prepared[item_name]['value']) + if eval(rule) is False: + raise ValueError('For {} category, Proposed value for item_name {} is not allowed as per rule defined'.format(category_name, item_name)) # check if category_name is already in storage category_val_storage = await self._read_category_val(category_name) if category_val_storage is None: diff --git a/python/foglamp/common/process.py b/python/foglamp/common/process.py index 4ad156faf6..14a394773a 100644 --- a/python/foglamp/common/process.py +++ b/python/foglamp/common/process.py @@ -18,26 +18,25 @@ __license__ = "Apache 2.0" __version__ = "${VERSION}" - _logger = logger.setup(__name__) class ArgumentParserError(Exception): """ Override default exception to not terminate application """ - pass + def __init__(self, message): + self.message = message -class SilentArgParse(argparse.ArgumentParser): + def __str__(self): + fmt = '%(message)s' + return fmt % dict(message=self.message) + +class SilentArgParse(argparse.ArgumentParser): def error(self, message): """ Override default error functionality to not terminate application """ raise ArgumentParserError(message) - def silent_arg_parse(self, argument_name): - self.add_argument(argument_name) - parser_result = self.parse_known_args() - return list(vars(parser_result[0]).values())[0] - class FoglampProcess(ABC): """ FoglampProcess for all non-core python processes. 
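Stepping back to the configuration manager changes above: an item may now carry a 'rule' entry, a Python expression over the token value; the manager substitutes the proposed value textually and rejects it when the expression evaluates false, and the wildcard math import makes functions such as floor available inside rules. A sketch of that check (check_rule is a hypothetical helper, not part of the codebase):

    from math import *  # the configuration manager does the same, so rules may use math names

    def check_rule(rule, new_val):
        # mirrors the manager's replace-then-eval approach
        return eval(rule.replace("value", new_val))

    assert check_rule("value < 100", "42") is True
    assert check_rule("floor(value) == 3", "3.7") is True
    assert check_rule("value < 100", "120") is False

Because the substitution is textual this only behaves for numeric literals, and since rules are passed to eval() they must come from trusted configuration.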
@@ -73,25 +72,37 @@ def __init__(self): --port [core microservice management port] --name [process name] """ - + self._start_time = time.time() - try: - self._core_management_host = self.get_arg_value("--address") - self._core_management_port = self.get_arg_value("--port") - self._name = self.get_arg_value("--name") - except ArgumentParserError: + try: + parser = SilentArgParse() + parser.add_argument("--name", required=True) + parser.add_argument("--address", required=True) + parser.add_argument("--port", required=True, type=int) + namespace, args = parser.parse_known_args() + self._name = getattr(namespace, 'name') + self._core_management_host = getattr(namespace, 'address') + self._core_management_port = getattr(namespace, 'port') + r = range(1, 65536) + if self._core_management_port not in r: + raise ArgumentParserError("Invalid Port: {}".format(self._core_management_port)) + for item in args: + if item.startswith('--'): + kv = item.split('=') + if len(kv) == 2: + if len(kv[1].strip()) == 0: + raise ArgumentParserError("Invalid value {} for optional arg {}".format(kv[1], kv[0])) + + except ArgumentParserError as ex: + _logger.error("Arg parser error: %s", str(ex)) raise - if self._core_management_host is None: - raise ValueError("--address is not specified") - elif self._core_management_port is None: - raise ValueError("--port is not specified") - elif self._name is None: - raise ValueError("--name is not specified") - self._core_microservice_management_client = MicroserviceManagementClient(self._core_management_host,self._core_management_port) + self._core_microservice_management_client = MicroserviceManagementClient(self._core_management_host, + self._core_management_port) - self._readings_storage_async = ReadingsStorageClientAsync(self._core_management_host, self._core_management_port) + self._readings_storage_async = ReadingsStorageClientAsync(self._core_management_host, + self._core_management_port) self._storage_async = StorageClientAsync(self._core_management_host, self._core_management_port) # pure virtual method run() to be implemented by child class @@ -99,25 +110,6 @@ def __init__(self): def run(self): pass - def get_arg_value(self, argument_name): - """ Parses command line arguments for a single argument of name argument_name. Returns the value of the argument specified or None if argument was not specified. 
- - Keyword Arguments: - argument_name -- name of command line argument to retrieve value for - - Return Values: - Argument value (as a string) - None (if argument was not passed) - - Side Effects: - None - - Known Exceptions: - ArgumentParserError - """ - parser = SilentArgParse() - return parser.silent_arg_parse(argument_name) - def get_services_from_core(self, name=None, _type=None): return self._core_microservice_management_client.get_services(name, _type) diff --git a/python/foglamp/common/storage_client/payload_builder.py b/python/foglamp/common/storage_client/payload_builder.py index 4ac7ef349c..0178ae5fbb 100644 --- a/python/foglamp/common/storage_client/payload_builder.py +++ b/python/foglamp/common/storage_client/payload_builder.py @@ -55,9 +55,12 @@ def verify_condition(arg): retval = False if isinstance(arg, list): if len(arg) == 3: - # TODO: Implement LIKE and IN later when support becomes available in storage service - if arg[1] in ['<', '>', '=', '>=', '<=', '!=', 'newer', 'older']: + # TODO: Implement LIKE later when support becomes available in storage service + if arg[1] in ['<', '>', '=', '>=', '<=', '!=', 'newer', 'older', 'in', 'not in']: retval = True + if arg[1] in ['in', 'not in']: + if isinstance(arg[2], list): + retval = True return retval @staticmethod diff --git a/python/foglamp/common/utils.py b/python/foglamp/common/utils.py index 85e71b0af6..6a17b105a3 100644 --- a/python/foglamp/common/utils.py +++ b/python/foglamp/common/utils.py @@ -6,6 +6,7 @@ """Common utilities""" +import datetime __author__ = "Amarendra K Sinha" __copyright__ = "Copyright (c) 2017 OSIsoft, LLC" @@ -34,3 +35,11 @@ def check_reserved(string): if s in reserved: return False return True + + +def local_timestamp(): + """ + :return: str - current time stamp with microseconds and machine timezone info + :example '2018-05-08 14:06:40.517313+05:30' + """ + return str(datetime.datetime.now(datetime.timezone.utc).astimezone()) diff --git a/python/foglamp/plugins/common/shim/shim.py b/python/foglamp/plugins/common/shim/shim.py index 5bef308239..511b13d779 100644 --- a/python/foglamp/plugins/common/shim/shim.py +++ b/python/foglamp/plugins/common/shim/shim.py @@ -43,7 +43,9 @@ def plugin_info(): def plugin_init(config): _LOGGER.info("plugin_init called") handle = _plugin.plugin_init(json.loads(config)) - return handle + # TODO: FOGL-1827 - Config item value must be respected as per type given + revised_handle = _revised_config_for_json_item(handle) + return revised_handle def plugin_poll(handle): @@ -54,7 +56,9 @@ def plugin_poll(handle): def plugin_reconfigure(handle, new_config): _LOGGER.info("plugin_reconfigure") new_handle = _plugin.plugin_reconfigure(handle, json.loads(new_config)) - return new_handle + # TODO: FOGL-1827 - Config item value must be respected as per type given + revised_handle = _revised_config_for_json_item(new_handle) + return revised_handle def plugin_shutdown(handle): @@ -70,3 +74,19 @@ def plugin_start(handle): def plugin_register_ingest(handle, callback, ingest_ref): _LOGGER.info("plugin_register_ingest") return _plugin.plugin_register_ingest(handle, callback, ingest_ref) + + +def _revised_config_for_json_item(config): + # South C server sends "config" argument as string in which all JSON type items' components, + # 'default' and 'value', gets converted to dict during json.loads(). Hence we need to restore + # them to str, which is the required format for configuration items. 
+ revised_config_handle = {} + for k, v in config.items(): + if isinstance(v, dict): + if 'type' in v and v['type'] == 'JSON': + if isinstance(v['default'], dict): + v['default'] = json.dumps(v['default']) + if isinstance(v['value'], dict): + v['value'] = json.dumps(v['value']) + revised_config_handle.update({k: v}) + return revised_config_handle diff --git a/python/foglamp/services/common/microservice_management/routes.py b/python/foglamp/services/common/microservice_management/routes.py index 97c938fccd..f104f9f203 100644 --- a/python/foglamp/services/common/microservice_management/routes.py +++ b/python/foglamp/services/common/microservice_management/routes.py @@ -27,6 +27,7 @@ def setup(app, obj, is_core=False): app.router.add_route('GET', '/foglamp/service/category', obj.get_configuration_categories) app.router.add_route('POST', '/foglamp/service/category', obj.create_configuration_category) app.router.add_route('GET', '/foglamp/service/category/{category_name}', obj.get_configuration_category) + app.router.add_route('DELETE', '/foglamp/service/category/{category_name}', obj.delete_configuration_category) app.router.add_route('GET', '/foglamp/service/category/{category_name}/children', obj.get_child_category) app.router.add_route('POST', '/foglamp/service/category/{category_name}/children', obj.create_child_category) app.router.add_route('GET', '/foglamp/service/category/{category_name}/{config_item}', obj.get_configuration_item) diff --git a/python/foglamp/services/core/api/configuration.py b/python/foglamp/services/core/api/configuration.py index 8355c2fefb..4cd0bff08d 100644 --- a/python/foglamp/services/core/api/configuration.py +++ b/python/foglamp/services/core/api/configuration.py @@ -25,7 +25,7 @@ _help = """ -------------------------------------------------------------------------------- | GET POST | /foglamp/category | - | GET PUT | /foglamp/category/{category_name} | + | GET PUT DELETE | /foglamp/category/{category_name} | | GET POST PUT | /foglamp/category/{category_name}/{config_item} | | DELETE | /foglamp/category/{category_name}/{config_item}/value | | POST | /foglamp/category/{category_name}/{config_item}/upload | @@ -143,19 +143,40 @@ async def create_category(request): if data.get('children'): r = await cf_mgr.create_child_category(category_name, data.get('children')) result.update(r) - except (KeyError, ValueError, TypeError) as ex: raise web.HTTPBadRequest(reason=str(ex)) - except LookupError as ex: raise web.HTTPNotFound(reason=str(ex)) - except Exception as ex: raise web.HTTPException(reason=str(ex)) - return web.json_response(result) +async def delete_category(request): + """ + Args: + request: category_name required + Returns: + Success message on successful deletion + Raises: + TypeError/ValueError/Exception on error + :Example: + curl -X DELETE http://localhost:8081/foglamp/category/{category_name} + """ + category_name = request.match_info.get('category_name', None) + category_name = urllib.parse.unquote(category_name) if category_name is not None else None + + try: + cf_mgr = ConfigurationManager(connect.get_storage_async()) + await cf_mgr.delete_category_and_children_recursively(category_name) + except (ValueError, TypeError) as ex: + raise web.HTTPBadRequest(reason=ex) + except Exception as ex: + raise web.HTTPInternalServerError(reason=ex) + else: + return web.json_response({'result': 'Category {} deleted successfully.'.format(category_name)}) + + async def get_category_item(request): """ Args: diff --git a/python/foglamp/services/core/api/filters.py 
b/python/foglamp/services/core/api/filters.py index 60ac749fee..8451003eb5 100644 --- a/python/foglamp/services/core/api/filters.py +++ b/python/foglamp/services/core/api/filters.py @@ -241,7 +241,7 @@ async def add_filters_pipeline(request: web.Request) -> web.Response: # Config update for filter pipeline and a change callback after category children creation await cf_mgr.set_category_item_value_entry(user_name, config_item, {'pipeline': new_list}) else: # No existing filters, hence create new item 'config_item' and add the "pipeline" array as a string - new_item = dict({config_item: {'description': 'Filter pipeline', 'type': 'JSON', 'default': {}}}) + new_item = dict({config_item: {'description': 'Filter pipeline', 'type': 'JSON', 'default': {}, 'readonly':'true'}}) new_item[config_item]['default'] = json.dumps({'pipeline': filter_list}) await _add_child_filters(storage, cf_mgr, user_name, filter_list) await cf_mgr.create_category(category_name=user_name, category_value=new_item, keep_original_items=True) diff --git a/python/foglamp/services/core/api/notification.py b/python/foglamp/services/core/api/notification.py index 7e0e7f3667..ee7f76c888 100644 --- a/python/foglamp/services/core/api/notification.py +++ b/python/foglamp/services/core/api/notification.py @@ -59,6 +59,15 @@ async def get_plugin(request): return web.json_response({'rules': rule_plugins, 'delivery': delivery_plugins}) +async def get_type(request): + """ GET the list of available notification types + + :Example: + curl -X GET http://localhost:8081/foglamp/notification/type + """ + return web.json_response({'notification_type': NOTIFICATION_TYPE}) + + async def get_notification(request): """ GET an existing notification @@ -217,6 +226,7 @@ async def post_notification(request): storage = connect.get_storage_async() config_mgr = ConfigurationManager(storage) notification_config = { + "description": description, "rule": rule, "channel": channel, "notification_type": notification_type, diff --git a/python/foglamp/services/core/api/scheduler.py b/python/foglamp/services/core/api/scheduler.py index a40fe26524..da107710c9 100644 --- a/python/foglamp/services/core/api/scheduler.py +++ b/python/foglamp/services/core/api/scheduler.py @@ -73,19 +73,25 @@ async def get_scheduled_process(request): a list of all the defined scheduled_processes from scheduled_processes table :Example: - curl -X GET http://localhost:8081/foglamp/schedule/process/purge + curl -X GET http://localhost:8081/foglamp/schedule/process/purge + curl -X GET http://localhost:8081/foglamp/schedule/process/purge%2Cbackup%2Crestore + curl -X GET http://localhost:8081/foglamp/schedule/process/purge%2Cbackup%2Cstats%20collector """ - scheduled_process_name = request.match_info.get('scheduled_process_name', None) - - payload = PayloadBuilder().SELECT("name").WHERE(["name", "=", scheduled_process_name]).payload() + scheduled_process_names = request.match_info.get('scheduled_process_name', None) + scheduled_process_name = scheduled_process_names.split(',') + payload = PayloadBuilder().SELECT("name").WHERE(["name", "in", scheduled_process_name]).payload() _storage = connect.get_storage_async() scheduled_process = await _storage.query_tbl_with_payload('scheduled_processes', payload) if len(scheduled_process['rows']) == 0: raise web.HTTPNotFound(reason='No such Scheduled Process: {}.'.format(scheduled_process_name)) - return web.json_response(scheduled_process['rows'][0].get("name")) + if len(scheduled_process['rows']) == 1: + retval = 
scheduled_process['rows'][0].get("name") + else: + retval = scheduled_process['rows'] + return web.json_response(retval) ################################# diff --git a/python/foglamp/services/core/routes.py b/python/foglamp/services/core/routes.py index a51e98ae67..4c021f078b 100644 --- a/python/foglamp/services/core/routes.py +++ b/python/foglamp/services/core/routes.py @@ -61,6 +61,7 @@ def setup(app): app.router.add_route('POST', '/foglamp/category', api_configuration.create_category) app.router.add_route('GET', '/foglamp/category/{category_name}', api_configuration.get_category) app.router.add_route('PUT', '/foglamp/category/{category_name}', api_configuration.update_configuration_item_bulk) + app.router.add_route('DELETE', '/foglamp/category/{category_name}', api_configuration.delete_category) app.router.add_route('POST', '/foglamp/category/{category_name}/children', api_configuration.create_child_category) app.router.add_route('GET', '/foglamp/category/{category_name}/children', api_configuration.get_child_category) app.router.add_route('DELETE', '/foglamp/category/{category_name}/children/{child_category}', api_configuration.delete_child_category) @@ -168,6 +169,7 @@ def setup(app): # Notification app.router.add_route('GET', '/foglamp/notification', notification.get_notifications) app.router.add_route('GET', '/foglamp/notification/plugin', notification.get_plugin) + app.router.add_route('GET', '/foglamp/notification/type', notification.get_type) app.router.add_route('GET', '/foglamp/notification/{notification_name}', notification.get_notification) app.router.add_route('POST', '/foglamp/notification', notification.post_notification) app.router.add_route('PUT', '/foglamp/notification/{notification_name}', notification.put_notification) diff --git a/python/foglamp/services/core/scheduler/scheduler.py b/python/foglamp/services/core/scheduler/scheduler.py index be2a94679b..060711271e 100644 --- a/python/foglamp/services/core/scheduler/scheduler.py +++ b/python/foglamp/services/core/scheduler/scheduler.py @@ -17,18 +17,19 @@ import subprocess import signal from typing import List -from foglamp.common.configuration_manager import ConfigurationManager + from foglamp.common import logger +from foglamp.common import utils as common_utils from foglamp.common.audit_logger import AuditLogger -from foglamp.services.core.scheduler.entities import * -from foglamp.services.core.scheduler.exceptions import * from foglamp.common.storage_client.exceptions import * from foglamp.common.storage_client.payload_builder import PayloadBuilder from foglamp.common.storage_client.storage_client import StorageClientAsync +from foglamp.common.configuration_manager import ConfigurationManager +from foglamp.services.core.scheduler.entities import * +from foglamp.services.core.scheduler.exceptions import * from foglamp.services.core.service_registry.service_registry import ServiceRegistry from foglamp.services.core.service_registry import exceptions as service_registry_exceptions from foglamp.services.common import utils -from foglamp.plugins.common import utils as timedateUtils __author__ = "Terris Linenbach, Amarendra K Sinha, Massimiliano Pinto" __copyright__ = "Copyright (c) 2017-2018 OSIsoft, LLC" @@ -264,7 +265,7 @@ async def _wait_for_task_completion(self, task_process: _TaskProcess) -> None: update_payload = PayloadBuilder() \ .SET(exit_code=exit_code, state=int(state), - end_time=str(timedateUtils.local_timestamp())) \ + end_time=str(common_utils.local_timestamp())) \ .WHERE(['id', '=', 
str(task_process.task_id)]) \
             .payload()
         try:
@@ -337,7 +338,7 @@ async def _start_task(self, schedule: _ScheduleRow) -> None:
                 schedule_name=schedule.name,
                 process_name=schedule.process_name,
                 state=int(Task.State.RUNNING),
-                start_time=str(timedateUtils.local_timestamp())) \
+                start_time=str(common_utils.local_timestamp())) \
             .payload()
         try:
             self._logger.debug('Database command: %s', insert_payload)
diff --git a/python/foglamp/services/core/server.py b/python/foglamp/services/core/server.py
index 3c78523e6e..f1e2b5db7d 100755
--- a/python/foglamp/services/core/server.py
+++ b/python/foglamp/services/core/server.py
@@ -1260,6 +1260,11 @@ async def create_configuration_category(cls, request):
         res = await conf_api.create_category(request)
         return res
 
+    @classmethod
+    async def delete_configuration_category(cls, request):
+        res = await conf_api.delete_category(request)
+        return res
+
     @classmethod
     async def create_child_category(cls, request):
         res = await conf_api.create_child_category(request)
diff --git a/python/foglamp/tasks/statistics/statistics_history.py b/python/foglamp/tasks/statistics/statistics_history.py
index f5a08da3e9..f6343c2880 100644
--- a/python/foglamp/tasks/statistics/statistics_history.py
+++ b/python/foglamp/tasks/statistics/statistics_history.py
@@ -10,12 +10,12 @@
     Fetch information from the statistics table, compute delta and
     stores the delta value (statistics.value - statistics.previous_value) in the statistics_history table
 """
+import json
 
 from foglamp.common.storage_client.payload_builder import PayloadBuilder
 from foglamp.common import logger
 from foglamp.common.process import FoglampProcess
-from foglamp.plugins.common import utils
-
+from foglamp.common import utils as common_utils
 
 __author__ = "Ori Shadmon, Ashish Jabble"
 __copyright__ = "Copyright (c) 2017 OSI Soft, LLC"
@@ -45,17 +45,15 @@ async def _insert_into_stats_history(self, key='', value=0, history_ts=None):
         payload = PayloadBuilder().INSERT(key=key, value=value, history_ts=history_ts).payload()
         await self._storage_async.insert_into_tbl("statistics_history", payload)
 
-    async def _update_previous_value(self, key='', value=0):
+    async def _bulk_update_previous_value(self, payload):
         """ UPDATE previous_value of column to have the same value as snapshot
-            Query: UPDATE statistics SET previous_value = value WHERE key = key
         Args:
-            key: Key which previous_value gets update
-            value: value at snapshot
+            payload: dict containing statistics keys and previous values
+            Query: UPDATE statistics SET previous_value = value WHERE key = key
         """
-        payload = PayloadBuilder().SET(previous_value=value).WHERE(["key", "=", key]).payload()
-        await self._storage_async.update_tbl("statistics", payload)
+        await self._storage_async.update_tbl("statistics", json.dumps(payload, sort_keys=False))
 
     async def run(self):
         """ SELECT against the statistics table, to get a snapshot of the data at that moment.
@@ -64,12 +62,16 @@ async def run(self):
             1. INSERT the delta between `value` and `previous_value` into statistics_history
             2. UPDATE the previous_value in statistics table to be equal to statistics.value at snapshot
         """
-        current_time = utils.local_timestamp()
+        current_time = common_utils.local_timestamp()
         results = await self._storage_async.query_tbl("statistics")
+        payload = {"updates": []}
         for r in results['rows']:
             key = r['key']
             value = int(r["value"])
             previous_value = int(r["previous_value"])
             delta = value - previous_value
+            # TODO: Once FOGL-1973 is done, use bulk insert for statistics_history
             await self._insert_into_stats_history(key=key, value=delta, history_ts=current_time)
-            await self._update_previous_value(key=key, value=value)
+            payload_item = PayloadBuilder().SET(previous_value=value).WHERE(["key", "=", key]).payload()
+            payload['updates'].append(json.loads(payload_item))
+        await self._bulk_update_previous_value(payload)
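The loop above now collects one SET/WHERE payload per statistics key and ships them in a single {"updates": [...]} document. A sketch of what gets assembled (the keys and values here are illustrative):

    import json
    from foglamp.common.storage_client.payload_builder import PayloadBuilder

    payload = {"updates": []}
    for key, value in [("READINGS", 120), ("BUFFERED", 30)]:
        item = PayloadBuilder().SET(previous_value=value).WHERE(["key", "=", key]).payload()
        payload["updates"].append(json.loads(item))

    # a single storage call then applies every update at once:
    # await storage.update_tbl("statistics", json.dumps(payload))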
diff --git a/requirements.sh b/requirements.sh
new file mode 100755
index 0000000000..1330f6ad45
--- /dev/null
+++ b/requirements.sh
@@ -0,0 +1,34 @@
+#!/usr/bin/env bash
+
+##--------------------------------------------------------------------
+## Copyright (c) 2019 Dianomic Systems
+##
+## Licensed under the Apache License, Version 2.0 (the "License");
+## you may not use this file except in compliance with the License.
+## You may obtain a copy of the License at
+##
+##     http://www.apache.org/licenses/LICENSE-2.0
+##
+## Unless required by applicable law or agreed to in writing, software
+## distributed under the License is distributed on an "AS IS" BASIS,
+## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+## See the License for the specific language governing permissions and
+## limitations under the License.
+##--------------------------------------------------------------------
+
+##
+## Author: Ashish Jabble
+##
+
+
+set -e
+
+sudo apt update
+sudo apt -y upgrade
+
+sudo apt install -y avahi-daemon curl
+sudo apt install -y cmake g++ make build-essential autoconf automake uuid-dev
+sudo apt install -y libtool libboost-dev libboost-system-dev libboost-thread-dev libpq-dev libssl-dev libz-dev
+sudo apt install -y python-dbus python-dev python3-dev python3-pip
+sudo apt install -y sqlite3 libsqlite3-dev
+# sudo apt install -y postgresql
diff --git a/tests/system/python/README.rst b/tests/system/python/README.rst
index d4586eadc2..b20e195379 100644
--- a/tests/system/python/README.rst
+++ b/tests/system/python/README.rst
@@ -85,7 +85,8 @@ custom options ::
     $ pytest --help
     ...
     custom options:
-
+      --storage-plugin=STORAGE_PLUGIN
+                            Database plugin to use for tests
       --south-branch=SOUTH_BRANCH
                             south branch name
       --north-branch=NORTH_BRANCH
@@ -139,7 +140,13 @@ custom options ::
       --kafka-rest-port=KAFKA_REST_PORT
                             Kafka REST Proxy Port
 
+Using different storage engine
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+By default the system tests run with the sqlite database.
+To use the postgres storage plugin instead, pass the option below and the tests will be
+executed using the postgres database and the postgres storage engine::
+
+   $ pytest test_smoke.py --storage-plugin=postgres
 
 Test test_e2e_coap_PI and test_e2e_csv_PI
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
diff --git a/tests/system/python/api/test_configuration.py b/tests/system/python/api/test_configuration.py
new file mode 100644
index 0000000000..6c66831c56
--- /dev/null
+++ b/tests/system/python/api/test_configuration.py
@@ -0,0 +1,162 @@
+# -*- coding: utf-8 -*-
+
+# FOGLAMP_BEGIN
+# See: http://foglamp.readthedocs.io/
+# FOGLAMP_END
+
+""" Test Configuration REST API """
+
+
+import http.client
+import json
+from urllib.parse import quote
+import time
+
+__author__ = "Praveen Garg"
+__copyright__ = "Copyright (c) 2019 Dianomic Systems"
+__license__ = "Apache 2.0"
+__version__ = "${VERSION}"
+
+
+class TestConfiguration:
+
+    def test_default(self, foglamp_url, reset_and_start_foglamp, wait_time):
+        conn = http.client.HTTPConnection(foglamp_url)
+
+        conn.request("GET", '/foglamp/category')
+        r = conn.getresponse()
+        assert 200 == r.status
+        r = r.read().decode()
+        jdoc = json.loads(r)
+        assert len(jdoc)
+
+        # Utilities parent key creation
+        time.sleep(wait_time)
+
+        conn.request("GET", '/foglamp/category?root=true')
+        r = conn.getresponse()
+        assert 200 == r.status
+        r = r.read().decode()
+        jdoc = json.loads(r)
+        cats = jdoc["categories"]
+        assert 3 == len(cats)
+        assert {'key': 'General', 'displayName': 'General', 'description': 'General'} == cats[0]
+        assert {'key': 'Advanced', 'displayName': 'Advanced', 'description': 'Advanced'} == cats[1]
+        assert {'key': 'Utilities', 'displayName': 'Utilities', 'description': 'Utilities'} == cats[2]
+
+        conn.request("GET", '/foglamp/category?root=true&children=true')
+        r = conn.getresponse()
+        assert 200 == r.status
+        r = r.read().decode()
+        jdoc = json.loads(r)
+        assert 3 == len(jdoc["categories"])
+
+        expected_with_utilities = [
+            {'children': [{'children': [], 'displayName': 'Admin API', 'key': 'rest_api',
+                           'description': 'FogLAMP Admin and User REST API'},
+                          {'children': [], 'displayName': 'FogLAMP Service', 'key': 'service',
+                           'description': 'FogLAMP Service'}
+                          ],
+             'displayName': 'General', 'key': 'General', 'description': 'General'
+             },
+            {'children': [{'children': [], 'displayName': 'Scheduler', 'key': 'SCHEDULER',
+                           'description': 'Scheduler configuration'},
+                          {'children': [], 'displayName': 'Service Monitor', 'key': 'SMNTR',
+                           'description': 'Service Monitor'}],
+             'displayName': 'Advanced', 'key': 'Advanced', 'description': 'Advanced'
+             },
+            {'children': [],
+             'displayName': 'Utilities', 'key': 'Utilities', 'description': 'Utilities'
+             }
+        ]
+
+        assert expected_with_utilities == jdoc["categories"]
+
+    def test_get_category(self, foglamp_url):
+        conn = http.client.HTTPConnection(foglamp_url)
+        conn.request("GET", '/foglamp/category/rest_api')
+        r = conn.getresponse()
+        assert 200 == r.status
+        r = r.read().decode()
+        jdoc = json.loads(r)
+        assert len(jdoc)
+        for k, v in jdoc.items():
+            assert 'type' in v
+            assert 'value' in v
+            assert 'default' in v
+            assert 'description' in v
+
+            assert 'displayName' in v
+
+    def test_create_category(self, foglamp_url):
+        payload = {'key': 'pub #1', 'description': 'a publisher', 'display_name': 'Pub #1'}
+        conf = {'check': {'type': 'boolean', 'description': 'A Boolean check', 'default': 'False'}}
+        payload.update({'value': conf})
+        conn = http.client.HTTPConnection(foglamp_url)
+        conn.request('POST', '/foglamp/category',
body=json.dumps(payload)) + r = conn.getresponse() + assert 200 == r.status + r = r.read().decode() + jdoc = json.loads(r) + assert "pub #1" == jdoc['key'] + assert "a publisher" == jdoc['description'] + assert "Pub #1" == jdoc['displayName'] + expected_value = {'check': { + 'type': 'boolean', 'default': 'false', 'value': 'false', 'description': 'A Boolean check'} + } + assert expected_value == jdoc['value'] + + def test_get_category_item(self, foglamp_url): + conn = http.client.HTTPConnection(foglamp_url) + encoded_url = '/foglamp/category/{}/check'.format(quote('pub #1')) + conn.request("GET", encoded_url) + r = conn.getresponse() + assert 200 == r.status + r = r.read().decode() + jdoc = json.loads(r) + assert 'boolean' == jdoc['type'] + assert 'A Boolean check' == jdoc['description'] + assert 'false' == jdoc['value'] + + def test_set_configuration_item(self, foglamp_url): + conn = http.client.HTTPConnection(foglamp_url) + encoded_url = '/foglamp/category/{}/check'.format(quote('pub #1')) + conn.request("PUT", encoded_url, body=json.dumps({"value": "true"})) + r = conn.getresponse() + assert 200 == r.status + r = r.read().decode() + jdoc = json.loads(r) + assert 'boolean' == jdoc['type'] + + conn.request("GET", encoded_url) + r = conn.getresponse() + assert 200 == r.status + r = r.read().decode() + jdoc = json.loads(r) + assert 'boolean' == jdoc['type'] + assert 'true' == jdoc['value'] + assert 'false' == jdoc['default'] + + def test_update_configuration_item_bulk(self, foglamp_url): + pass + + def test_add_configuration_item(self, foglamp_url): + pass + + def test_delete_configuration_item_value(self, foglamp_url): + pass + + def test_get_child_category(self, foglamp_url): + pass + + def test_create_child_category(self, foglamp_url): + pass + + def test_delete_child_category(self, foglamp_url): + pass + + def test_delete_parent_category(self, foglamp_url): + pass + + def test_upload_script(self, foglamp_url): + pass diff --git a/tests/system/python/conftest.py b/tests/system/python/conftest.py index d3be62394d..4c39040c97 100644 --- a/tests/system/python/conftest.py +++ b/tests/system/python/conftest.py @@ -25,12 +25,19 @@ @pytest.fixture -def reset_and_start_foglamp(): - """Fixture that kills foglamp, reset database and starts foglamp again""" +def reset_and_start_foglamp(storage_plugin): + """Fixture that kills foglamp, reset database and starts foglamp again + storage_plugin: Fixture that defines the storage plugin to be used for tests + """ - # TODO: allow to sed storage.json and use postgres database plugin assert os.environ.get('FOGLAMP_ROOT') is not None + subprocess.run(["$FOGLAMP_ROOT/scripts/foglamp kill"], shell=True, check=True) + if storage_plugin == 'postgres': + subprocess.run(["sed -i 's/sqlite/postgres/g' $FOGLAMP_ROOT/data/etc/storage.json"], shell=True, check=True) + else: + subprocess.run(["sed -i 's/postgres/sqlite/g' $FOGLAMP_ROOT/data/etc/storage.json"], shell=True, check=True) + subprocess.run(["echo YES | $FOGLAMP_ROOT/scripts/foglamp reset"], shell=True, check=True) subprocess.run(["$FOGLAMP_ROOT/scripts/foglamp start"], shell=True) stat = subprocess.run(["$FOGLAMP_ROOT/scripts/foglamp status"], shell=True, stdout=subprocess.PIPE) @@ -238,9 +245,9 @@ def _add_filter(filter_plugin, filter_plugin_branch, filter_name, filter_config, r = conn.getresponse() assert 200 == r.status res = r.read().decode() - expected = "Filter pipeline {{'pipeline': ['{}']}} updated successfully".format(filter_name) jdoc = json.loads(res) - assert expected == jdoc["result"] + # Asset 
newly added filter exist in request's response + assert filter_name in jdoc["result"] return _add_filter @@ -274,14 +281,37 @@ def _disable_sch(foglamp_url, sch_name): def pytest_addoption(parser): - parser.addoption("--south-branch", action="store", default="develop", - help="south branch name") - parser.addoption("--north-branch", action="store", default="develop", - help="north branch name") + parser.addoption("--storage-plugin", action="store", default="sqlite", + help="Database plugin to use for tests") parser.addoption("--foglamp-url", action="store", default="localhost:8081", help="FogLAMP client api url") parser.addoption("--use-pip-cache", action="store", default=False, help="use pip cache is requirement is available") + parser.addoption("--wait-time", action="store", default=5, type=int, + help="Generic wait time between processes to run") + parser.addoption("--retries", action="store", default=3, type=int, + help="Number of tries for polling") + + # South/North Args + parser.addoption("--south-branch", action="store", default="develop", + help="south branch name") + parser.addoption("--north-branch", action="store", default="develop", + help="north branch name") + parser.addoption("--south-service-name", action="store", default="southSvc #1", + help="Name of the South Service") + parser.addoption("--asset-name", action="store", default="SystemTest", + help="Name of asset") + + # Filter Args + parser.addoption("--filter-branch", action="store", default="develop", help="Filter plugin repo branch") + parser.addoption("--filter-name", action="store", default="Meta #1", help="Filter name to be added to pipeline") + + # External Services Arg foglamp-service-* e.g. foglamp-service-notification + parser.addoption("--service-branch", action="store", default="develop", + help="service branch name") + # Notify Arg + parser.addoption("--notify-branch", action="store", default="develop", help="Notify plugin repo branch") + # PI Config parser.addoption("--pi-host", action="store", default="pi-server", help="PI Server Host Name/IP") @@ -308,20 +338,6 @@ def pytest_addoption(parser): parser.addoption("--ocs-token", action="store", default="ocs_north_0001", help="Token of OCS account") - parser.addoption("--south-service-name", action="store", default="southSvc #1", - help="Name of the South Service") - parser.addoption("--asset-name", action="store", default="SystemTest", - help="Name of asset") - - parser.addoption("--wait-time", action="store", default=5, type=int, - help="Generic wait time between processes to run") - parser.addoption("--retries", action="store", default=3, type=int, - help="Number of tries for polling") - - # Filter Args - parser.addoption("--filter-branch", action="store", default="develop", help="Filter plugin repo branch") - parser.addoption("--filter-name", action="store", default="Meta #1", help="Filter name to be added to pipeline") - # Kafka Config parser.addoption("--kafka-host", action="store", default="localhost", help="Kafka Server Host Name/IP") @@ -331,6 +347,11 @@ def pytest_addoption(parser): parser.addoption("--kafka-rest-port", action="store", default="8082", help="Kafka Rest Proxy Port") +@pytest.fixture +def storage_plugin(request): + return request.config.getoption("--storage-plugin") + + @pytest.fixture def south_branch(request): return request.config.getoption("--south-branch") @@ -341,11 +362,21 @@ def north_branch(request): return request.config.getoption("--north-branch") +@pytest.fixture +def service_branch(request): + return 
request.config.getoption("--service-branch") + + @pytest.fixture def filter_branch(request): return request.config.getoption("--filter-branch") +@pytest.fixture +def notify_branch(request): + return request.config.getoption("--notify-branch") + + @pytest.fixture def use_pip_cache(request): return request.config.getoption("--use-pip-cache") diff --git a/tests/system/python/e2e/test_e2e_coap_OCS.py b/tests/system/python/e2e/test_e2e_coap_OCS.py index f137ddb6dd..dab69e5daf 100644 --- a/tests/system/python/e2e/test_e2e_coap_OCS.py +++ b/tests/system/python/e2e/test_e2e_coap_OCS.py @@ -153,6 +153,7 @@ def _read_data_from_ocs(ocs_client_id, ocs_client_secret, ocs_tenant, ocs_namesp return _read_data_from_ocs +@pytest.mark.skip(reason="OCS is currently disabled!") class TestE2EOCS: def test_end_to_end(self, start_south_north, read_data_from_ocs, foglamp_url, wait_time, retries, ocs_client_id, ocs_client_secret, ocs_tenant, ocs_namespace, asset_name="endToEndCoAP"): diff --git a/tests/system/python/e2e/test_e2e_csv_multi_filter_pi.py b/tests/system/python/e2e/test_e2e_csv_multi_filter_pi.py new file mode 100644 index 0000000000..e1235efdee --- /dev/null +++ b/tests/system/python/e2e/test_e2e_csv_multi_filter_pi.py @@ -0,0 +1,166 @@ +# -*- coding: utf-8 -*- + +# FOGLAMP_BEGIN +# See: http://foglamp.readthedocs.io/ +# FOGLAMP_END + +""" Test end to end flow with: + Playback south plugin + Delta, RMS, Rate, Scale, Asset & Metadata filter plugins + PI Server (C) plugin +""" + + +import http.client +import os +import json +import time +import pytest + + +__author__ = "Vaibhav Singhal" +__copyright__ = "Copyright (c) 2019 Dianomic Systems" +__license__ = "Apache 2.0" +__version__ = "${VERSION}" + + +SVC_NAME = "playfilter" +CSV_NAME = "sample.csv" +CSV_HEADERS = "ivalue" +CSV_DATA = "10,20,21,40" + +NORTH_TASK_NAME = "NorthReadingsTo_PI" + + +class TestE2eCsvMultiFltrPi: + + @pytest.fixture + def start_south_north(self, reset_and_start_foglamp, add_south, enable_schedule, remove_directories, + remove_data_file, south_branch, foglamp_url, add_filter, filter_branch, + start_north_pi_server_c, pi_host, pi_port, pi_token, asset_name="e2e_csv_filter_pi"): + """ This fixture clone a south and north repo and starts both south and north instance + + reset_and_start_foglamp: Fixture that resets and starts foglamp, no explicit invocation, called at start + add_south: Fixture that adds a south service with given configuration with enabled or disabled mode + remove_directories: Fixture that remove directories created during the tests + remove_data_file: Fixture that remove data file created during the tests + """ + + # Define configuration of foglamp south playback service + south_config = {"assetName": {"value": "{}".format(asset_name)}, + "csvFilename": {"value": "{}".format(CSV_NAME)}, + "ingestMode": {"value": "batch"}} + + # Define the CSV data and create expected lists to be verified later + csv_file_path = os.path.join(os.path.expandvars('${FOGLAMP_ROOT}'), 'data/{}'.format(CSV_NAME)) + with open(csv_file_path, 'w') as f: + f.write(CSV_HEADERS) + for _items in CSV_DATA.split(","): + f.write("\n{}".format(_items)) + + south_plugin = "playback" + add_south(south_plugin, south_branch, foglamp_url, service_name=SVC_NAME, + config=south_config, start_service=False) + + filter_cfg_scale = {"enable": "true"} + # I/P 10, 20, 21, 40 -> O/P 1000, 2000, 2100, 4000 + add_filter("scale", filter_branch, "fscale", filter_cfg_scale, foglamp_url, SVC_NAME) + + # I/P asset_name : e2e_csv_filter_pi > O/P e2e_filters + 
filter_cfg_asset = {"config": {"rules": [{"new_asset_name": "e2e_filters", + "action": "rename", + "asset_name": asset_name}]}, + "enable": "true"} + add_filter("asset", filter_branch, "fasset", filter_cfg_asset, foglamp_url, SVC_NAME) + + # I/P 1000, 2000, 2100, 4000 -> O/P 2000, 2100, 4000 + filter_cfg_rate = {"trigger": "ivalue > 1200", "untrigger": "ivalue < 1100", "preTrigger": "0", "enable": "true"} + add_filter("rate", filter_branch, "frate", filter_cfg_rate, foglamp_url, SVC_NAME) + + # I/P 1000, 2000, 2100, 4000 -> O/P 2000, 4000 + # Delta in 1st pair (2000-1000) = 1000 (> 20% of 1000) so 2000 is output + # Delta in second pair (2100-2000) = 100 (<20% of 2000) so 2100 not in output + # Delta in third pair (4000-2100) = 1900 (>20% of 2100) so 4000 in output + filter_cfg_delta = {"tolerance": "20", "enable": "true"} + add_filter("delta", filter_branch, "fdelta", filter_cfg_delta , foglamp_url, SVC_NAME) + + # I/P 2000, 4000 -> O/P rms=3162.2776601684, rms_peak=2000 + filter_cfg_rms = {"assetName": "%a_RMS", "samples": "2", "peak": "true", "enable": "true"} + add_filter("rms", filter_branch, "frms", filter_cfg_rms, foglamp_url, SVC_NAME) + + filter_cfg_meta = {"enable": "true"} + add_filter("metadata", filter_branch, "fmeta", filter_cfg_meta, foglamp_url, SVC_NAME) + + # Since playback plugin reads all csv data at once, we cant keep it in enable mode before filter add + # enable service when all filters all applied + enable_schedule(foglamp_url, SVC_NAME) + + start_north_pi_server_c(foglamp_url, pi_host, pi_port, pi_token) + + yield self.start_south_north + + remove_directories("/tmp/foglamp-south-{}".format(south_plugin)) + filters = ["scale", "asset", "rate", "delta", "rms", "metadata"] + for fltr in filters: + remove_directories("/tmp/foglamp-filter-{}".format(fltr)) + + remove_data_file(csv_file_path) + + def test_end_to_end(self, start_south_north, disable_schedule, foglamp_url, read_data_from_pi, pi_host, pi_admin, + pi_passwd, pi_db, wait_time, retries): + """ Test that data is inserted in FogLAMP using playback south plugin & + Delta, RMS, Rate, Scale, Asset & Metadata filters, and sent to PI + start_south_north: Fixture that starts FogLAMP with south service, add filter and north instance + Assertions: + on endpoint GET /foglamp/asset + on endpoint GET /foglamp/asset/ with applied data processing filter value + data received from PI is same as data sent""" + + time.sleep(wait_time) + conn = http.client.HTTPConnection(foglamp_url) + self._verify_ingest(conn) + + # disable schedule to stop the service and sending data + disable_schedule(foglamp_url, SVC_NAME) + + self._verify_egress(read_data_from_pi, pi_host, pi_admin, pi_passwd, pi_db, wait_time, retries) + + def _verify_ingest(self, conn): + + conn.request("GET", '/foglamp/asset') + r = conn.getresponse() + assert 200 == r.status + r = r.read().decode() + jdoc = json.loads(r) + assert 1 == len(jdoc) + assert "e2e_filters_RMS" == jdoc[0]["assetCode"] + assert 0 < jdoc[0]["count"] + + conn.request("GET", '/foglamp/asset/{}'.format("e2e_filters_RMS")) + r = conn.getresponse() + assert 200 == r.status + r = r.read().decode() + jdoc = json.loads(r) + assert 0 < len(jdoc) + + read = jdoc[0]["reading"] + assert 2000.0 == read["ivaluepeak"] + assert 3162.2776601684 == read["ivalue"] + assert "value" == read["name"] + + def _verify_egress(self, read_data_from_pi, pi_host, pi_admin, pi_passwd, pi_db, wait_time, retries): + + retry_count = 0 + data_from_pi = None + while (data_from_pi is None or data_from_pi == []) and retry_count < 
retries: + data_from_pi = read_data_from_pi(pi_host, pi_admin, pi_passwd, pi_db, + "e2e_filters_RMS", {"ivalue", "ivaluepeak", "name"}) + retry_count += 1 + time.sleep(wait_time * 2) + + if data_from_pi is None or retry_count == retries: + assert False, "Failed to read data from PI" + + assert 3162.2776601684 == data_from_pi["ivalue"][-1] + assert 2000 == data_from_pi["ivaluepeak"][-1] + assert "value" == data_from_pi["name"][-1] diff --git a/tests/system/python/e2e/test_e2e_expr_pi.py b/tests/system/python/e2e/test_e2e_expr_pi.py index 61a2d50073..4469d3cd6f 100644 --- a/tests/system/python/e2e/test_e2e_expr_pi.py +++ b/tests/system/python/e2e/test_e2e_expr_pi.py @@ -47,18 +47,14 @@ def start_south_north(self, reset_and_start_foglamp, add_south, enable_schedule, "stepX": {"value": "0"}} add_south(SOUTH_PLUGIN, south_branch, foglamp_url, service_name=SVC_NAME, config=cfg, - plugin_lang=SOUTH_PLUGIN_LANGUAGE, start_service=False) + plugin_lang=SOUTH_PLUGIN_LANGUAGE, start_service=True) filter_cfg = {"enable": "true"} filter_plugin = "metadata" add_filter(filter_plugin, filter_branch, filter_name, filter_cfg, foglamp_url, SVC_NAME) - enable_schedule(foglamp_url, SVC_NAME) + # enable_schedule(foglamp_url, SVC_NAME) - # FIXME: FOGL-2417 - # We need to make north PI sending process to handle the case, to send and retrieve applied filter data - # in running service, so that we don't need to add south service in disabled mode And enable after applying - # filter pipeline start_north_pi_server_c(foglamp_url, pi_host, pi_port, pi_token) yield self.start_south_north @@ -103,7 +99,8 @@ def _verify_ingest(self, conn): assert 0 < len(jdoc) read = jdoc[0]["reading"] - assert 1.61977519054386 == read["Expression"] + # FOGL-2438 values like tan(45) = 1.61977519054386 gets truncated to 1.6197751905 with ingest + assert 1.6197751905 == read["Expression"] # verify filter is applied and we have {name: value} pair added by metadata filter assert "value" == read["name"] @@ -125,4 +122,5 @@ def _verify_egress(self, read_data_from_pi, pi_host, pi_admin, pi_passwd, pi_db, assert isinstance(data_from_pi["name"], list) assert isinstance(data_from_pi["Expression"], list) assert "value" in data_from_pi["name"] - assert 1.61977519054386 in data_from_pi["Expression"] + # FOGL-2438 values like tan(45) = 1.61977519054386 gets truncated to 1.6197751905 with ingest + assert 1.6197751905 in data_from_pi["Expression"] diff --git a/tests/system/python/e2e/test_e2e_notification_service_with_plugins.py b/tests/system/python/e2e/test_e2e_notification_service_with_plugins.py new file mode 100644 index 0000000000..d81d5fc4b1 --- /dev/null +++ b/tests/system/python/e2e/test_e2e_notification_service_with_plugins.py @@ -0,0 +1,301 @@ +# -*- coding: utf-8 -*- + +# FOGLAMP_BEGIN +# See: http://foglamp.readthedocs.io/ +# FOGLAMP_END + +""" Test end to end flow with: + Notification service And OverMaxRule in built rule plugin + notify-python35 delivery plugin +""" + +import os +import time +import subprocess +import http.client +import json +import pytest + + +__author__ = "Ashish Jabble" +__copyright__ = "Copyright (c) 2019 Dianomic Systems" +__license__ = "Apache 2.0" +__version__ = "${VERSION}" + + +SERVICE = "notification" +SERVICE_NAME = "NotificationServer #1" +NOTIFY_PLUGIN = "python35" +NOTIFY_RULE = "OverMaxRule" + + +def _configure_and_start_service(service_branch, foglamp_url, remove_directories): + try: + subprocess.run(["$FOGLAMP_ROOT/tests/system/python/scripts/install_c_service {} {}" + .format(service_branch, SERVICE)], 
shell=True, check=True)
+    except subprocess.CalledProcessError:
+        assert False, "{} installation failed".format(SERVICE)
+    finally:
+        remove_directories("/tmp/foglamp-service-{}".format(SERVICE))
+
+    # Start service
+    conn = http.client.HTTPConnection(foglamp_url)
+    data = {"name": SERVICE_NAME,
+            "type": "notification",
+            "enabled": "true"
+            }
+    conn.request("POST", '/foglamp/service', json.dumps(data))
+    r = conn.getresponse()
+    assert 200 == r.status
+    r = r.read().decode()
+    jdoc = json.loads(r)
+    assert 2 == len(jdoc)
+    assert SERVICE_NAME == jdoc['name']
+
+
+def _install_notify_plugin(notify_branch, plugin_name, remove_directories):
+    try:
+        subprocess.run(["$FOGLAMP_ROOT/tests/system/python/scripts/install_c_plugin {} notify {}".format(
+            notify_branch, plugin_name)], shell=True, check=True)
+    except subprocess.CalledProcessError:
+        assert False, "{} installation failed".format(plugin_name)
+    finally:
+        remove_directories("/tmp/foglamp-notify-{}".format(plugin_name))
+
+
+def _get_result(foglamp_url, path):
+    conn = http.client.HTTPConnection(foglamp_url)
+    conn.request("GET", path)
+    r = conn.getresponse()
+    assert 200 == r.status
+    r = r.read().decode()
+    jdoc = json.loads(r)
+    return jdoc
+
+
+def _verify_service(foglamp_url, status):
+    jdoc = _get_result(foglamp_url, '/foglamp/service')
+    svc = jdoc['services'][2]
+    assert SERVICE_NAME == svc['name']
+    assert SERVICE.capitalize() == svc['type']
+    assert status == svc['status']
+
+
+def _verify_audit_log_entry(foglamp_url, path, name, severity='INFORMATION'):
+    jdoc = _get_result(foglamp_url, path)
+    audit_detail = jdoc['audit'][0]
+    assert 1 == jdoc['totalCount']
+    assert severity == audit_detail['severity']
+    assert name == audit_detail['details']['name']
+
+
+def _add_notification_instance(foglamp_url, payload):
+    conn = http.client.HTTPConnection(foglamp_url)
+    conn.request("POST", '/foglamp/notification', json.dumps(payload))
+    r = conn.getresponse()
+    assert 200 == r.status
+    r = r.read().decode()
+    jdoc = json.loads(r)
+    assert "Notification {} created successfully".format(payload['name']) == jdoc['result']
+
+
+class TestNotificationService:
+
+    def test_service(self, reset_and_start_foglamp, service_branch, foglamp_url, wait_time, retries, remove_directories):
+        _configure_and_start_service(service_branch, foglamp_url, remove_directories)
+
+        retry_count = 0
+        # only two services are up by default, i.e. core and storage
+        default_registry_count = 2
+        service_registry = []
+        while len(service_registry) != 3 and retry_count < retries:
+            svc = _get_result(foglamp_url, '/foglamp/service')
+            service_registry = svc['services']
+            retry_count += 1
+            time.sleep(wait_time * 2)
+
+        if len(service_registry) == default_registry_count:
+            assert False, "Failed to start the {} service".format(SERVICE)
+
+        _verify_service(foglamp_url, status='running')
+
+        _verify_audit_log_entry(foglamp_url, '/foglamp/audit?source=NTFST', name=SERVICE_NAME)
+
+    def test_get_default_notification_plugins(self, foglamp_url, remove_directories):
+        remove_directories(os.environ['FOGLAMP_ROOT'] + '/plugins/notificationDelivery')
+        remove_directories(os.environ['FOGLAMP_ROOT'] + '/cmake_build/C/plugins/notificationDelivery')
+        jdoc = _get_result(foglamp_url, '/foglamp/notification/plugin')
+        assert [] == jdoc['delivery']
+        assert 1 == len(jdoc['rules'])
+        assert NOTIFY_RULE == jdoc['rules'][0]['name']
+
+
+class TestNotificationCRUD:
+
+    # FIXME: FOGL-2434 Add name with some special character
+    @pytest.mark.parametrize("data", [
+        {"name":
"Test1", "description": "Test 1 notification", "rule": NOTIFY_RULE, + "channel": NOTIFY_PLUGIN, "enabled": "false", "notification_type": "retriggered"}, + {"name": "Test2", "description": "Test 2 notification", "rule": NOTIFY_RULE, + "channel": NOTIFY_PLUGIN, "enabled": "false", "notification_type": "toggled"}, + {"name": "Test3", "description": "Test 3 notification", "rule": NOTIFY_RULE, + "channel": NOTIFY_PLUGIN, "enabled": "false", "notification_type": "one shot"} + ]) + def test_create_notification_instances_with_default_rule_and_channel_python35(self, foglamp_url, notify_branch, + data, + remove_directories): + # FIXME: Handle in a better way; we need below code once for a test + if data['name'] == 'Test1': + _install_notify_plugin(notify_branch, NOTIFY_PLUGIN, remove_directories) + _add_notification_instance(foglamp_url, data) + + def test_inbuilt_rule_plugin_and_notify_python35_delivery(self, foglamp_url): + jdoc = _get_result(foglamp_url, '/foglamp/notification/plugin') + assert 1 == len(jdoc['delivery']) + assert NOTIFY_PLUGIN == jdoc['delivery'][0]['name'] + assert 1 == len(jdoc['rules']) + assert NOTIFY_RULE == jdoc['rules'][0]['name'] + + def test_get_notifications_and_audit_entry(self, foglamp_url): + jdoc = _get_result(foglamp_url, '/foglamp/notification') + assert 3 == len(jdoc['notifications']) + + jdoc = _get_result(foglamp_url, '/foglamp/audit?source=NTFAD') + assert 3 == jdoc['totalCount'] + + def test_update_notification(self, foglamp_url, name="Test1"): + conn = http.client.HTTPConnection(foglamp_url) + data = {"notification_type": "toggled"} + conn.request("PUT", '/foglamp/notification/{}'.format(name), json.dumps(data)) + r = conn.getresponse() + assert 200 == r.status + r = r.read().decode() + jdoc = json.loads(r) + assert "Notification {} updated successfully".format(name) == jdoc["result"] + + # Verify updated notification info + jdoc = _get_result(foglamp_url, '/foglamp/notification/{}'.format(name)) + assert "toggled" == jdoc['notification']['notificationType'] + + def test_delete_notification(self, foglamp_url, name="Test3"): + conn = http.client.HTTPConnection(foglamp_url) + conn.request("DELETE", '/foglamp/notification/{}'.format(name)) + r = conn.getresponse() + assert 200 == r.status + r = r.read().decode() + jdoc = json.loads(r) + assert "Notification {} deleted successfully.".format(name) == jdoc["result"] + + # Verify only two notifications should exist NOT 3 + jdoc = _get_result(foglamp_url, '/foglamp/notification') + notifications = jdoc['notifications'] + assert 2 == len(notifications) + assert "Notification Test1" == notifications[0]['name'] + assert "Notification Test2" == notifications[1]['name'] + + +class TestSentAndReceiveNotification: + FOGBENCH_TEMPLATE = "fogbench-template.json" + SENSOR_VALUE = 20 + SOUTH_PLUGIN_NAME = "coap" + ASSET_NAME = "{}".format(SOUTH_PLUGIN_NAME) + + @pytest.fixture + def start_south(self, add_south, remove_data_file, remove_directories, south_branch, foglamp_url, wait_time): + """ This fixture clone a south repo and starts south instance + add_south: Fixture that starts any south service with given configuration + remove_data_file: Fixture that remove data file created during the tests + remove_directories: Fixture that remove directories created during the tests """ + + fogbench_template_path = self.prepare_template_reading_from_fogbench() + + add_south(self.SOUTH_PLUGIN_NAME, south_branch, foglamp_url, service_name=self.SOUTH_PLUGIN_NAME) + + yield self.start_south + + # Cleanup code that runs after the test is 
over + remove_data_file(fogbench_template_path) + remove_directories("/tmp/foglamp-south-{}".format(self.SOUTH_PLUGIN_NAME)) + + def prepare_template_reading_from_fogbench(self): + """ Define the template file for fogbench readings """ + + fogbench_template_path = os.path.join( + os.path.expandvars('${FOGLAMP_ROOT}'), 'data/{}'.format(self.FOGBENCH_TEMPLATE)) + with open(fogbench_template_path, "w") as f: + f.write( + '[{"name": "%s", "sensor_values": ' + '[{"name": "sensor", "type": "number", "min": %d, "max": %d, "precision": 0}]}]' % ( + self.ASSET_NAME, self.SENSOR_VALUE, self.SENSOR_VALUE)) + + return fogbench_template_path + + def ingest_readings_from_fogbench(self, foglamp_url, wait_time): + conn = http.client.HTTPConnection(foglamp_url) + subprocess.run(["cd $FOGLAMP_ROOT/extras/python; python3 -m fogbench -t ../../data/{}; cd -" + .format(self.FOGBENCH_TEMPLATE)], shell=True, check=True) + time.sleep(wait_time) + conn.request("GET", '/foglamp/asset') + r = conn.getresponse() + assert 200 == r.status + r = r.read().decode() + val = json.loads(r) + assert 1 == len(val) + assert self.ASSET_NAME == val[0]["assetCode"] + assert 1 == val[0]["count"] + + conn.request("GET", '/foglamp/asset/{}'.format(self.ASSET_NAME)) + r = conn.getresponse() + assert 200 == r.status + r = r.read().decode() + val = json.loads(r) + assert 1 == len(val) + assert {'sensor': self.SENSOR_VALUE} == val[0]["reading"] + + def configure_rule_with_latest_eval_type(self, foglamp_url, cat_name): + conn = http.client.HTTPConnection(foglamp_url) + data = {"asset": self.ASSET_NAME, + "datapoint": "sensor", + "evaluation_type": "latest", + "trigger_value": str(self.SENSOR_VALUE), + } + conn.request("PUT", '/foglamp/category/rule{}'.format(cat_name), json.dumps(data)) + r = conn.getresponse() + assert 200 == r.status + + def enable_notification(self, foglamp_url, cat_name, is_enabled=True): + _enabled = "true" if is_enabled else "false" + data = {"value": _enabled} + conn = http.client.HTTPConnection(foglamp_url) + conn.request("PUT", '/foglamp/category/{}/enable'.format(cat_name), json.dumps(data)) + r = conn.getresponse() + assert 200 == r.status + + def test_sent_and_receive_notification(self, foglamp_url, start_south, wait_time): + data = {"name": "Test4", "description": "Test4_Notification", "rule": NOTIFY_RULE, "channel": NOTIFY_PLUGIN, + "enabled": False, "notification_type": "retriggered"} + name = data['name'] + _add_notification_instance(foglamp_url, data) + self.configure_rule_with_latest_eval_type(foglamp_url, name) + self.enable_notification(foglamp_url, name) + + time.sleep(wait_time) + self.ingest_readings_from_fogbench(foglamp_url, wait_time) + + _verify_audit_log_entry(foglamp_url, '/foglamp/audit?source=NTFSN', name=name) + + +class TestStartStopNotificationService: + + def test_shutdown_service_with_schedule_disable(self, foglamp_url, disable_schedule, wait_time): + disable_schedule(foglamp_url, SERVICE_NAME) + + _verify_service(foglamp_url, status='shutdown') + time.sleep(wait_time) + _verify_audit_log_entry(foglamp_url, '/foglamp/audit?source=NTFSD', name=SERVICE_NAME) + + def test_restart_notification_service(self, foglamp_url, enable_schedule): + enable_schedule(foglamp_url, SERVICE_NAME) + + _verify_service(foglamp_url, status='running') + _verify_audit_log_entry(foglamp_url, '/foglamp/audit?source=NTFST', name=SERVICE_NAME) diff --git a/tests/system/python/e2e/test_e2e_vary_asset_http_pi.py b/tests/system/python/e2e/test_e2e_vary_asset_http_pi.py new file mode 100644 index 0000000000..2909a89480 
--- /dev/null +++ b/tests/system/python/e2e/test_e2e_vary_asset_http_pi.py @@ -0,0 +1,145 @@ +# -*- coding: utf-8 -*- + +# FOGLAMP_BEGIN +# See: http://foglamp.readthedocs.io/ +# FOGLAMP_END + +""" Test system/python/e2e/test_e2e_vary_asset_http_pi.py + +""" + +import http.client +import json +import time +from datetime import datetime, timezone +import uuid +import pytest + + +__author__ = "Vaibhav Singhal" +__copyright__ = "Copyright (c) 2019 Dianomic Systems" +__license__ = "Apache 2.0" +__version__ = "${VERSION}" + + +class TestE2EAssetHttpPI: + + @pytest.fixture + def start_south_north(self, reset_and_start_foglamp, add_south, start_north_pi_server_c, remove_directories, + south_branch, foglamp_url, pi_host, pi_port, pi_token): + """ This fixture clones a south repo and starts both south and north instances + reset_and_start_foglamp: Fixture that resets and starts foglamp, no explicit invocation, called at start + add_south: Fixture that adds a south service with given configuration + start_north_pi_server_c: Fixture that starts PI north task + remove_directories: Fixture that removes directories created during the tests""" + + south_plugin = "http" + add_south("http_south", south_branch, foglamp_url, config={"assetNamePrefix": {"value": ""}}, + service_name="http_south") + start_north_pi_server_c(foglamp_url, pi_host, pi_port, pi_token) + + yield self.start_south_north + + # Cleanup code that runs after the caller test is over + remove_directories("/tmp/foglamp-south-{}".format(south_plugin)) + + def test_end_to_end(self, start_south_north, read_data_from_pi, foglamp_url, pi_host, pi_admin, pi_passwd, pi_db, + wait_time, retries): + """ Test that data is inserted in FogLAMP and sent to PI + start_south_north: Fixture that starts FogLAMP with south and north instance + read_data_from_pi: Fixture to read data from PI + Assertions: + on endpoint GET /foglamp/asset + on endpoint GET /foglamp/asset/<asset code> + data received from PI is the same as data sent""" + + conn = http.client.HTTPConnection(foglamp_url) + + # Send data to foglamp-south-http + conn_http_south = http.client.HTTPConnection("localhost:6683") + + asset_name = "e2e_varying" + # 2 lists having mixed data, simulating different sensors + # (sensors coming up and down, sensors throwing int and float data) + sensor_data = [{"a": 1}, {"a": 2, "b": 3}, {"b": 4}] + sensor_data_2 = [{"b": 1.1}, {"a2": 2, "b2": 3}, {"a": 4.0}] + for d in sensor_data + sensor_data_2: + tm = str(datetime.now(timezone.utc).astimezone()) + data = [{"asset": "{}".format(asset_name), "timestamp": "{}".format(tm), "key": str(uuid.uuid4()), + "readings": d}] + conn_http_south.request("POST", '/sensor-reading', json.dumps(data)) + r = conn_http_south.getresponse() + assert 200 == r.status + r = r.read().decode() + jdoc = json.loads(r) + assert {'result': 'success'} == jdoc + + # Allow some buffer so that data is ingested before retrieval + time.sleep(wait_time) + + conn.request("GET", '/foglamp/asset') + r = conn.getresponse() + assert 200 == r.status + r = r.read().decode() + retval = json.loads(r) + assert len(retval) == 1 + assert asset_name == retval[0]["assetCode"] + assert 6 == retval[0]["count"] + + conn.request("GET", '/foglamp/asset/{}'.format(asset_name)) + r = conn.getresponse() + assert 200 == r.status + r = r.read().decode() + retval = json.loads(r) + + assert sensor_data_2[2] == retval[0]["reading"] + assert sensor_data_2[1] == retval[1]["reading"] + assert sensor_data_2[0] == retval[2]["reading"] + assert sensor_data[2] == retval[3]["reading"] + assert
sensor_data[1] == retval[4]["reading"] + assert sensor_data[0] == retval[5]["reading"] + + # Allow some buffer so that data is ingested in PI before fetching using PI Web API + time.sleep(wait_time) + + retry_count = 0 + data_from_pi = None + while (data_from_pi is None or data_from_pi == []) and retry_count < retries: + data_from_pi = read_data_from_pi(pi_host, pi_admin, pi_passwd, pi_db, asset_name, {"a", "b", "a2", "b2"}) + retry_count += 1 + time.sleep(wait_time*2) + + if data_from_pi is None or retry_count == retries: + assert False, "Failed to read data from PI" + + assert data_from_pi["b"][-1] == 0.0 + assert data_from_pi["b"][-2] == 0.0 + assert data_from_pi["b"][-3] == sensor_data_2[0]["b"] + assert data_from_pi["b"][-4] == sensor_data[2]["b"] + assert data_from_pi["b"][-5] == sensor_data[1]["b"] + assert data_from_pi["b"][-6] == 0.0 + + assert data_from_pi["a"][-1] == sensor_data_2[2]["a"] + assert data_from_pi["a"][-2] == 0.0 + assert data_from_pi["a"][-3] == 0.0 + assert data_from_pi["a"][-4] == 0.0 + assert data_from_pi["a"][-5] == sensor_data[1]["a"] + assert data_from_pi["a"][-6] == sensor_data[0]["a"] + + assert data_from_pi["b2"][-1] == 0.0 + assert data_from_pi["b2"][-2] == sensor_data_2[1]["b2"] + assert data_from_pi["b2"][-3] == 0.0 + assert data_from_pi["b2"][-4] == 0.0 + assert data_from_pi["b2"][-5] == 0.0 + assert data_from_pi["b2"][-6] == 0.0 + + assert data_from_pi["a2"][-1] == 0.0 + assert data_from_pi["a2"][-2] == sensor_data_2[1]["a2"] + assert data_from_pi["a2"][-3] == 0.0 + assert data_from_pi["a2"][-4] == 0.0 + assert data_from_pi["a2"][-5] == 0.0 + assert data_from_pi["a2"][-6] == 0.0 + + + + diff --git a/tests/system/python/scripts/install_c_service b/tests/system/python/scripts/install_c_service new file mode 100755 index 0000000000..26254c8416 --- /dev/null +++ b/tests/system/python/scripts/install_c_service @@ -0,0 +1,38 @@ +#!/usr/bin/env bash +set -e + +__author__="Ashish Jabble" +__copyright__="Copyright (c) 2019 Dianomic Systems" +__license__="Apache 2.0" +__version__="1.0.0" + +########################################################################################## +# Usage text for this script +# $FOGLAMP_ROOT/tests/system/python/scripts/install_c_service {BRANCH_NAME} {SERVICE_NAME} +########################################################################################## + +BRANCH_NAME=$1 +SERVICE_NAME=$2 + +[[ -z "${BRANCH_NAME}" ]] && echo "Branch name not found." && exit 1 +[[ -z "${SERVICE_NAME}" ]] && echo "Service name not found." && exit 1 + +REPO_NAME=foglamp-service-${SERVICE_NAME} + +clean () { + rm -rf /tmp/${REPO_NAME} + rm -rf ${FOGLAMP_ROOT}/services/foglamp.services.${SERVICE_NAME} +} + +clone_repo () { + git clone -b ${BRANCH_NAME} --single-branch https://github.com/foglamp/${REPO_NAME}.git /tmp/${REPO_NAME} +} + +install_binary_file () { + mkdir -p /tmp/${REPO_NAME}/build; cd /tmp/${REPO_NAME}/build; cmake -DFOGLAMP_INSTALL=${FOGLAMP_ROOT} ..; make -j4 && make install; cd - +} + +clean +clone_repo +install_binary_file +echo "${SERVICE_NAME} service is installed." 
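As a side note on how this script is consumed: the notification test above does not run it by hand but shells out to it via subprocess.run. A minimal sketch of that invocation, mirroring _configure_and_start_service() and assuming FOGLAMP_ROOT is exported and that "develop" is an existing branch of the foglamp-service-notification repo (both hypothetical here), would be:

    import subprocess

    branch, service = "develop", "notification"  # hypothetical arguments
    # shell=True lets $FOGLAMP_ROOT expand in the command string; check=True raises
    # CalledProcessError on a non-zero exit, which the test turns into an assertion failure.
    subprocess.run(["$FOGLAMP_ROOT/tests/system/python/scripts/install_c_service {} {}"
                    .format(branch, service)], shell=True, check=True)

The same pattern applies to the install_c_plugin companion script used by _install_notify_plugin().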
diff --git a/tests/system/python/scripts/install_python_plugin b/tests/system/python/scripts/install_python_plugin index 1a49b3637d..9b659486e7 100755 --- a/tests/system/python/scripts/install_python_plugin +++ b/tests/system/python/scripts/install_python_plugin @@ -41,7 +41,11 @@ clone_repo () { } copy_file_and_requirement () { - cp -r /tmp/${REPO_NAME}/python/foglamp/plugins/${PLUGIN_TYPE}/${PLUGIN_NAME} $FOGLAMP_ROOT/python/foglamp/plugins/${PLUGIN_TYPE}/ + if [ "$PLUGIN_NAME" = "http" ]; then + cp -r /tmp/${REPO_NAME}/python/foglamp/plugins/${PLUGIN_TYPE}/${PLUGIN_NAME}_south $FOGLAMP_ROOT/python/foglamp/plugins/${PLUGIN_TYPE}/ + else + cp -r /tmp/${REPO_NAME}/python/foglamp/plugins/${PLUGIN_TYPE}/${PLUGIN_NAME} $FOGLAMP_ROOT/python/foglamp/plugins/${PLUGIN_TYPE}/ + fi req_file=$(find /tmp/${REPO_NAME} -name requirement*.txt) [ ! -z "${req_file}" ] && pip3 install --user -Ir ${req_file} ${USE_PIP_CACHE} || echo "No such external dependency needed for ${PLUGIN_NAME} plugin." } diff --git a/tests/unit/C/CMakeLists.txt b/tests/unit/C/CMakeLists.txt new file mode 100644 index 0000000000..120f5376ca --- /dev/null +++ b/tests/unit/C/CMakeLists.txt @@ -0,0 +1,97 @@ +cmake_minimum_required(VERSION 2.6) + +set(CMAKE_CXX_FLAGS "-std=c++11 -O3") +set(UUIDLIB -luuid) +set(COMMONLIB -ldl) + +set(BOOST_COMPONENTS system thread) +# Late 2017 TODO: remove the following checks and always use std::regex +if("${CMAKE_CXX_COMPILER_ID}" STREQUAL "GNU") + if (CMAKE_CXX_COMPILER_VERSION VERSION_LESS 4.9) + set(BOOST_COMPONENTS ${BOOST_COMPONENTS} regex) + set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -DUSE_BOOST_REGEX") + endif() +endif() +find_package(Boost 1.53.0 COMPONENTS ${BOOST_COMPONENTS} REQUIRED) +include_directories(SYSTEM ${Boost_INCLUDE_DIR}) + +# Find python3.x dev/lib package +find_package(PythonLibs 3 REQUIRED) + +include_directories(../../../C/common/include) +include_directories(../../../C/plugins/common/include) +include_directories(../../../C/services/common/include) +include_directories(../../../C/thirdparty/rapidjson/include) +include_directories(../../../C/thirdparty/Simple-Web-Server) +# sqlitememory plugin +include_directories(../../../C/plugins/storage/common/include) +include_directories(../../../C/plugins/storage/sqlitememory/include) + +# Add Python 3.x header files +include_directories(${PYTHON_INCLUDE_DIRS}) + +set(CMAKE_LIBRARY_OUTPUT_DIRECTORY ${PROJECT_BINARY_DIR}/../lib) + +# Find source files +file(GLOB COMMON_LIB_SOURCES ../../../C/common/*.cpp) + +# Create shared library +add_library(common-lib SHARED ${COMMON_LIB_SOURCES}) +target_link_libraries(common-lib ${UUIDLIB}) +target_link_libraries(common-lib ${Boost_LIBRARIES}) +set_target_properties(common-lib PROPERTIES SOVERSION 1) + + +# Find source files +file(GLOB SERVICES_COMMON_LIB_SOURCES ../../../C/services/common/*.cpp) + +# Create shared library +add_library(services-common-lib SHARED ${SERVICES_COMMON_LIB_SOURCES}) +target_link_libraries(services-common-lib ${COMMONLIB}) +target_link_libraries(services-common-lib ${PYTHON_LIBRARIES}) +set_target_properties(services-common-lib PROPERTIES SOVERSION 1) + + +# Find source files +file(GLOB PLUGINS_COMMON_LIB_SOURCES ../../../C/plugins/common/*.cpp) + +# Create shared library +add_library(plugins-common-lib SHARED ${PLUGINS_COMMON_LIB_SOURCES}) +target_link_libraries(plugins-common-lib ${Boost_LIBRARIES} common-lib services-common-lib z ssl crypto) + +set_target_properties(plugins-common-lib PROPERTIES SOVERSION 1) + +# +# storage-common-lib +# +set(LIB_NAME 
storage-common-lib) +set(DLLIB -ldl) + +# Find source files +file(GLOB STORAGE_COMMON_LIB_SOURCE ../../../C/plugins/storage/common/*.cpp) + +# Create shared library +add_library(${LIB_NAME} SHARED ${STORAGE_COMMON_LIB_SOURCE}) +target_link_libraries(${LIB_NAME} ${DLLIB}) +set_target_properties(${LIB_NAME} PROPERTIES SOVERSION 1) + + +# +# sqlitememory plugin +# +set(LIB_NAME sqlitememory) + +# Check Sqlite3 required version +set(CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH} "${CMAKE_CURRENT_SOURCE_DIR}") +find_package(sqlite3) + +# Find source files +file(GLOB SQLITEMEMORY_PLUGIN_SOURCES ../../../C/plugins/storage/sqlitememory/*.cpp) + +# Create shared library +add_library(${LIB_NAME} SHARED ${SQLITEMEMORY_PLUGIN_SOURCES}) +target_link_libraries(${LIB_NAME} ${UUIDLIB}) +target_link_libraries(${LIB_NAME} ${Boost_LIBRARIES}) +target_link_libraries(${LIB_NAME} -lsqlite3) +set_target_properties(${LIB_NAME} PROPERTIES SOVERSION 1) + diff --git a/tests/unit/C/Findsqlite3.cmake b/tests/unit/C/Findsqlite3.cmake new file mode 100644 index 0000000000..c20a28888a --- /dev/null +++ b/tests/unit/C/Findsqlite3.cmake @@ -0,0 +1,26 @@ +# This CMake file locates the SQLite3 development libraries +# +# The following variables are set: +# SQLITE_FOUND - If the SQLite library was found +# SQLITE_LIBRARIES - Path to the shared library +# SQLITE_INCLUDE_DIR - Path to SQLite headers +# SQLITE_VERSION - Library version + +set(SQLITE_MIN_VERSION "3.11.0") +find_path(SQLITE_INCLUDE_DIR sqlite3.h) +find_library(SQLITE_LIBRARIES NAMES libsqlite3.so) + +if (SQLITE_INCLUDE_DIR AND SQLITE_LIBRARIES) + execute_process(COMMAND grep ".*#define.*SQLITE_VERSION " ${SQLITE_INCLUDE_DIR}/sqlite3.h + COMMAND sed "s/.*\"\\(.*\\)\".*/\\1/" + OUTPUT_VARIABLE SQLITE_VERSION + OUTPUT_STRIP_TRAILING_WHITESPACE) + if ("${SQLITE_VERSION}" VERSION_LESS "${SQLITE_MIN_VERSION}") + message(FATAL_ERROR "SQLite3 version >= ${SQLITE_MIN_VERSION} required, found version ${SQLITE_VERSION}") + else() + message(STATUS "Found SQLite version ${SQLITE_VERSION}: ${SQLITE_LIBRARIES}") + set(SQLITE_FOUND TRUE) + endif() +else() + message(FATAL_ERROR "Could not find SQLite") +endif() diff --git a/tests/unit/C/common/CMakeLists.txt b/tests/unit/C/common/CMakeLists.txt index e91bbc8756..26866b23ed 100644 --- a/tests/unit/C/common/CMakeLists.txt +++ b/tests/unit/C/common/CMakeLists.txt @@ -25,7 +25,10 @@ include_directories(../../../../C/services/common/include) include_directories(../../../../C/thirdparty/rapidjson/include) include_directories(../../../../C/thirdparty/Simple-Web-Server) -file(GLOB test_sources "../../../../C/common/*.cpp" "../../../../C/plugins/common/*.cpp" "../../../../C/services/common/*.cpp") +set(COMMON_LIB common-lib) +set(SERVICE_COMMON_LIB services-common-lib) +set(PLUGINS_COMMON_LIB plugins-common-lib) + file(GLOB unittests "*.cpp") # Find python3.x dev/lib package @@ -34,14 +37,18 @@ find_package(PythonLibs 3 REQUIRED) # Add Python 3.x header files include_directories(${PYTHON_INCLUDE_DIRS}) +link_directories(${PROJECT_BINARY_DIR}/../../lib) # Link runTests with what we want to test and the GTest and pthread library -add_executable(RunTests ${test_sources} ${unittests}) +add_executable(RunTests ${unittests}) target_link_libraries(RunTests ${GTEST_LIBRARIES} pthread) target_link_libraries(RunTests ${Boost_LIBRARIES}) target_link_libraries(RunTests ${UUIDLIB}) target_link_libraries(RunTests ${COMMONLIB}) target_link_libraries(RunTests -lssl -lcrypto -lz) +target_link_libraries(RunTests ${COMMON_LIB}) +target_link_libraries(RunTests ${SERVICE_COMMON_LIB})
+target_link_libraries(RunTests ${PLUGINS_COMMON_LIB}) # Add Python 3.x library target_link_libraries(RunTests ${PYTHON_LIBRARIES}) diff --git a/tests/unit/C/plugins/common/CMakeLists.txt b/tests/unit/C/plugins/common/CMakeLists.txt index 5d13cb8d08..b3353b263a 100644 --- a/tests/unit/C/plugins/common/CMakeLists.txt +++ b/tests/unit/C/plugins/common/CMakeLists.txt @@ -25,9 +25,10 @@ include_directories(../../../../../C/services/common/include) include_directories(../../../../../C/thirdparty/rapidjson/include) include_directories(../../../../../C/thirdparty/Simple-Web-Server) -file(GLOB common_sources "../../../../../C/common/*.cpp") -file(GLOB plugin_common_sources "../../../../../C/plugins/common/*.cpp") -file(GLOB services_common_sources "../../../../../C/services/common/*.cpp") +set(COMMON_LIB common-lib) +set(SERVICE_COMMON_LIB services-common-lib) +set(PLUGINS_COMMON_LIB plugins-common-lib) + file(GLOB unittests "*.cpp") # Find python3.x dev/lib package @@ -36,13 +37,18 @@ find_package(PythonLibs 3 REQUIRED) # Add Python 3.x header files include_directories(${PYTHON_INCLUDE_DIRS}) +link_directories(${PROJECT_BINARY_DIR}/../../../lib) + # Link runTests with what we want to test and the GTest and pthread library -add_executable(RunTests ${common_sources} ${plugin_common_sources} ${services_common_sources} ${unittests}) +add_executable(RunTests ${unittests}) target_link_libraries(RunTests ${GTEST_LIBRARIES} pthread) -target_link_libraries(RunTests ${Boost_LIBRARIES}) -target_link_libraries(RunTests ${UUIDLIB}) -target_link_libraries(RunTests ${COMMONLIB}) -target_link_libraries(RunTests -lssl -lcrypto -lz) +target_link_libraries(RunTests ${Boost_LIBRARIES}) +target_link_libraries(RunTests ${UUIDLIB}) +target_link_libraries(RunTests ${COMMONLIB}) +target_link_libraries(RunTests -lssl -lcrypto -lz) +target_link_libraries(RunTests ${COMMON_LIB}) +target_link_libraries(RunTests ${SERVICE_COMMON_LIB}) +target_link_libraries(RunTests ${PLUGINS_COMMON_LIB}) # Add Python 3.x library target_link_libraries(RunTests ${PYTHON_LIBRARIES}) diff --git a/tests/unit/C/plugins/storage/sqlitememory/CMakeLists.txt b/tests/unit/C/plugins/storage/sqlitememory/CMakeLists.txt new file mode 100644 index 0000000000..d4557c0c18 --- /dev/null +++ b/tests/unit/C/plugins/storage/sqlitememory/CMakeLists.txt @@ -0,0 +1,45 @@ +# Project configuration +project(RunTests) +cmake_minimum_required(VERSION 2.6) +set(CMAKE_CXX_FLAGS "-std=c++11 -O3") + +# libraries +set(PG_LIB pq) + +# FogLAMP libraries +set(COMMON_LIB common-lib) +set(SERVICE_COMMON_LIB services-common-lib) +set(PLUGINS_COMMON_LIB plugins-common-lib) +set(PLUGIN_SQLITEMEMORY sqlitememory) +set(STORAGE_COMMON_LIB storage-common-lib) + +# Locate GTest +find_package(GTest REQUIRED) + +# Include files +include_directories(${GTEST_INCLUDE_DIRS}) +include_directories(../../../../../../C/common/include) +include_directories(../../../../../../C/services/common/include) +include_directories(../../../../../../C/plugins/storage/common/include) +include_directories(../../../../../../C/plugins/storage/sqlitememory/include) +include_directories(../../../../../../C/thirdparty/rapidjson/include) + +# Source files +file(GLOB test_sources tests.cpp) + +# Exe creation +link_directories( + ${PROJECT_BINARY_DIR}/../../../../lib +) + +add_executable(${PROJECT_NAME} ${test_sources}) + +target_link_libraries(${PROJECT_NAME} ${COMMON_LIB}) +target_link_libraries(${PROJECT_NAME} ${SERVICE_COMMON_LIB}) +target_link_libraries(${PROJECT_NAME} ${PLUGINS_COMMON_LIB}) + 
+target_link_libraries(${PROJECT_NAME} ${PLUGIN_SQLITEMEMORY}) +target_link_libraries(${PROJECT_NAME} ${STORAGE_COMMON_LIB}) +target_link_libraries(${PROJECT_NAME} ${PG_LIB}) + +target_link_libraries(${PROJECT_NAME} ${GTEST_LIBRARIES} pthread) \ No newline at end of file diff --git a/tests/unit/C/plugins/storage/sqlitememory/README.rst b/tests/unit/C/plugins/storage/sqlitememory/README.rst new file mode 100644 index 0000000000..4423a3ef32 --- /dev/null +++ b/tests/unit/C/plugins/storage/sqlitememory/README.rst @@ -0,0 +1,21 @@ +***************************************************** +Unit Test for SQLite In-Memory Storage Plugin +***************************************************** + +Requires the Google Unit Test framework + +Install with: +:: + sudo apt-get install libgtest-dev + cd /usr/src/gtest + cmake CMakeLists.txt + sudo make + sudo make install + +To build the unit test: +:: + mkdir build + cd build + cmake .. + make + ./RunTests diff --git a/tests/unit/C/plugins/storage/sqlitememory/tests.cpp b/tests/unit/C/plugins/storage/sqlitememory/tests.cpp new file mode 100644 index 0000000000..b3f84160b3 --- /dev/null +++ b/tests/unit/C/plugins/storage/sqlitememory/tests.cpp @@ -0,0 +1,75 @@ +#include <string> +#include <string.h> +#include "gtest/gtest.h" +#include <logger.h> +#include <connection.h> +#include <connection_manager.h> + +using namespace std; + +int main(int argc, char **argv) { + testing::InitGoogleTest(&argc, argv); + return RUN_ALL_TESTS(); +} + + +class RowFormatDate { + public: + const char *test_case; + const char *expected; + bool result; + + RowFormatDate(const char *p1, const char *p2, bool p3) { + test_case = p1; + expected = p2; + result = p3; + }; +}; + +class TestFormatDate : public ::testing::TestWithParam<RowFormatDate> { }; + +TEST_P(TestFormatDate, TestConversions) +{ + Logger::getLogger()->setMinLevel("debug"); + + RowFormatDate const& p = GetParam(); + + char formatted_date[50] = {0}; + memset(formatted_date, 0, sizeof(formatted_date)); + bool result = Connection::formatDate(formatted_date, sizeof(formatted_date), p.test_case); + + string test_case = formatted_date; + string expected = p.expected; + + ASSERT_EQ(test_case, expected); + ASSERT_EQ(result, p.result); +} + +INSTANTIATE_TEST_CASE_P( + TestConversions, + TestFormatDate, + ::testing::Values( + // Test cases Expected + RowFormatDate("2019-01-01 10:01:01" ,"2019-01-01 10:01:01.000000+00:00", true), + RowFormatDate("2019-02-01 10:02:01.0" ,"2019-02-01 10:02:01.000000+00:00", true), + RowFormatDate("2019-02-02 10:02:02.841" ,"2019-02-02 10:02:02.841000+00:00", true), + RowFormatDate("2019-02-03 10:02:03.123456" ,"2019-02-03 10:02:03.123456+00:00", true), + + RowFormatDate("2019-03-01 10:03:01.1+00:00" ,"2019-03-01 10:03:01.100000+00:00", true), + RowFormatDate("2019-03-02 10:03:02.123+00:00" ,"2019-03-02 10:03:02.123000+00:00", true), + + RowFormatDate("2019-03-03 10:03:03.123456+00:00" ,"2019-03-03 10:03:03.123456+00:00", true), + RowFormatDate("2019-03-04 10:03:04.123456+01:00" ,"2019-03-04 10:03:04.123456+01:00", true), + RowFormatDate("2019-03-05 10:03:05.123456-01:00" ,"2019-03-05 10:03:05.123456-01:00", true), + RowFormatDate("2019-03-04 10:03:04.123456+02:30" ,"2019-03-04 10:03:04.123456+02:30", true), + RowFormatDate("2019-03-05 10:03:05.123456-02:30" ,"2019-03-05 10:03:05.123456-02:30", true), + + // Timestamp truncated + RowFormatDate("2017-10-11 15:10:51.927191906" ,"2017-10-11 15:10:51.927191+00:00", true), + + // Bad cases + RowFormatDate("xxx", "", false), + RowFormatDate("2019-50-50 10:01:01.0", "", false) + ) +); diff --git a/tests/unit/C/scripts/RunAllTests.sh
b/tests/unit/C/scripts/RunAllTests.sh index e0e4283115..3b68b4d833 100755 --- a/tests/unit/C/scripts/RunAllTests.sh +++ b/tests/unit/C/scripts/RunAllTests.sh @@ -18,7 +18,14 @@ cd $FOGLAMP_ROOT/tests/unit/C if [ ! -d results ] ; then mkdir results fi -cmakefile=`find . -name CMakeLists.txt` + +if [ -f "./CMakeLists.txt" ] ; then + echo -n "Compiling libraries..." + (rm -rf build && mkdir build && cd build && cmake .. && make ${jobs} && cd ..) >/dev/null + echo "done" +fi + +cmakefile=`find . -name CMakeLists.txt | grep -v "\.\/CMakeLists.txt"` for f in $cmakefile; do dir=`dirname $f` echo Testing $dir diff --git a/tests/unit/C/services/core/CMakeLists.txt b/tests/unit/C/services/core/CMakeLists.txt index b235121ab2..0c2ac21b5f 100644 --- a/tests/unit/C/services/core/CMakeLists.txt +++ b/tests/unit/C/services/core/CMakeLists.txt @@ -25,9 +25,10 @@ include_directories(../../../../../C/services/core/include) include_directories(../../../../../C/thirdparty/rapidjson/include) include_directories(../../../../../C/thirdparty/Simple-Web-Server) +set(COMMON_LIB common-lib) +set(SERVICE_COMMON_LIB services-common-lib) + file(GLOB test_sources "../../../../../C/services/core/*.cpp") -file(GLOB common_services "../../../../../C/services/common/*.cpp") -file(GLOB common_sources "../../../../../C/common/*.cpp") file(GLOB unittests "*.cpp") # Find python3.x dev/lib package @@ -36,12 +37,17 @@ find_package(PythonLibs 3 REQUIRED) # Add Python 3.x header files include_directories(${PYTHON_INCLUDE_DIRS}) +link_directories(${PROJECT_BINARY_DIR}/../../../lib) + # Link runTests with what we want to test and the GTest and pthread library -add_executable(RunTests ${test_sources} ${common_services} ${common_sources} ${unittests}) +add_executable(RunTests ${test_sources} ${unittests}) target_link_libraries(RunTests ${GTEST_LIBRARIES} pthread) target_link_libraries(RunTests ${Boost_LIBRARIES}) target_link_libraries(RunTests ${UUIDLIB}) target_link_libraries(RunTests ${COMMONLIB}) +target_link_libraries(RunTests -lssl -lcrypto -lz) +target_link_libraries(RunTests ${COMMON_LIB}) +target_link_libraries(RunTests ${SERVICE_COMMON_LIB}) # Add Python 3.x library target_link_libraries(RunTests ${PYTHON_LIBRARIES}) diff --git a/tests/unit/C/services/storage/sqlite/expected/103 b/tests/unit/C/services/storage/sqlite/expected/103 deleted file mode 100644 index 7bbe797ef1..0000000000 --- a/tests/unit/C/services/storage/sqlite/expected/103 +++ /dev/null @@ -1 +0,0 @@ -{ "response" : "appended", "readings_added" : 4 } \ No newline at end of file diff --git a/tests/unit/C/services/storage/sqlite/expected/104 b/tests/unit/C/services/storage/sqlite/expected/104 deleted file mode 100644 index 3beac32be3..0000000000 --- a/tests/unit/C/services/storage/sqlite/expected/104 +++ /dev/null @@ -1 +0,0 @@ -{"count":4,"rows":[{"asset_code":"msec__001_OK","read_key":"f1cfff7a-3769-4f47-9ded-00000000001","reading":{"value":1},"user_ts":"2019-01-01 10:01:01.000000"},{"asset_code":"msec__002_OK","read_key":"f1cfff7a-3769-4f47-9ded-00000000002","reading":{"value":1},"user_ts":"2019-01-02 10:02:01.000000"},{"asset_code":"msec__003_OK","read_key":"f1cfff7a-3769-4f47-9ded-00000000003","reading":{"value":1},"user_ts":"2019-01-03 10:02:02.841000"},{"asset_code":"msec__004_OK","read_key":"f1cfff7a-3769-4f47-9ded-00000000004","reading":{"value":1},"user_ts":"2019-01-04 11:03:05.123456"}]} \ No newline at end of file diff --git a/tests/unit/C/services/storage/sqlite/expected/1 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/1 similarity index 100% 
rename from tests/unit/C/services/storage/sqlite/expected/1 rename to tests/unit/C/services/storage/sqlite/expected_ETC_UTC/1 diff --git a/tests/unit/C/services/storage/sqlite/expected/10 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/10 similarity index 100% rename from tests/unit/C/services/storage/sqlite/expected/10 rename to tests/unit/C/services/storage/sqlite/expected_ETC_UTC/10 diff --git a/tests/unit/C/services/storage/sqlite/expected/100 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/100 similarity index 100% rename from tests/unit/C/services/storage/sqlite/expected/100 rename to tests/unit/C/services/storage/sqlite/expected_ETC_UTC/100 diff --git a/tests/unit/C/services/storage/sqlite/expected/101 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/101 similarity index 100% rename from tests/unit/C/services/storage/sqlite/expected/101 rename to tests/unit/C/services/storage/sqlite/expected_ETC_UTC/101 diff --git a/tests/unit/C/services/storage/sqlite/expected/102 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/102 similarity index 100% rename from tests/unit/C/services/storage/sqlite/expected/102 rename to tests/unit/C/services/storage/sqlite/expected_ETC_UTC/102 diff --git a/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/103 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/103 new file mode 100644 index 0000000000..d84da94821 --- /dev/null +++ b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/103 @@ -0,0 +1 @@ +{ "response" : "appended", "readings_added" : 11 } \ No newline at end of file diff --git a/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/104 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/104 new file mode 100644 index 0000000000..b0e610c297 --- /dev/null +++ b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/104 @@ -0,0 +1 @@ +{"count":11,"rows":[{"asset_code":"msec_003_OK","read_key":"f1cfff7a-3769-4f47-9ded-000000000003","reading":{"value":3},"user_ts":"2019-01-01 10:01:01.000000"},{"asset_code":"msec_004_OK","read_key":"f1cfff7a-3769-4f47-9ded-000000000004","reading":{"value":4},"user_ts":"2019-01-02 10:02:01.000000"},{"asset_code":"msec_005_OK","read_key":"f1cfff7a-3769-4f47-9ded-000000000005","reading":{"value":5},"user_ts":"2019-01-03 10:02:02.841000"},{"asset_code":"msec_006_OK","read_key":"f1cfff7a-3769-4f47-9ded-000000000006","reading":{"value":6},"user_ts":"2019-01-04 10:03:05.123456"},{"asset_code":"msec_007_OK","read_key":"f1cfff7a-3769-4f47-9ded-000000000007","reading":{"value":7},"user_ts":"2019-01-04 10:03:05.100000"},{"asset_code":"msec_008_OK","read_key":"f1cfff7a-3769-4f47-9ded-000000000008","reading":{"value":8},"user_ts":"2019-01-04 10:03:05.123000"},{"asset_code":"msec_009_OK","read_key":"f1cfff7a-3769-4f47-9ded-000000000009","reading":{"value":9},"user_ts":"2019-03-03 10:03:03.123456"},{"asset_code":"msec_010_OK","read_key":"f1cfff7a-3769-4f47-9ded-000000000010","reading":{"value":10},"user_ts":"2019-03-04 09:03:04.123456"},{"asset_code":"msec_011_OK","read_key":"f1cfff7a-3769-4f47-9ded-000000000011","reading":{"value":11},"user_ts":"2019-03-05 11:03:05.123456"},{"asset_code":"msec_012_OK","read_key":"f1cfff7a-3769-4f47-9ded-000000000012","reading":{"value":12},"user_ts":"2019-03-04 07:33:04.123456"},{"asset_code":"msec_013_OK","read_key":"f1cfff7a-3769-4f47-9ded-000000000013","reading":{"value":13},"user_ts":"2019-03-05 12:33:05.123456"}]} \ No newline at end of file diff --git a/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/105 
b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/105 new file mode 100644 index 0000000000..d5ef46e060 --- /dev/null +++ b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/105 @@ -0,0 +1 @@ +{"count":1,"rows":[{"reading":{"value":9},"user_ts":"2019-03-03 10:03:03.123456"}]} \ No newline at end of file diff --git a/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/106 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/106 new file mode 100644 index 0000000000..36b92e7724 --- /dev/null +++ b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/106 @@ -0,0 +1 @@ +{"count":1,"rows":[{"reading":{"value":9},"user_ts_alias":"2019-03-03 10:03:03.123456"}]} \ No newline at end of file diff --git a/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/107 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/107 new file mode 100644 index 0000000000..761333c73d --- /dev/null +++ b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/107 @@ -0,0 +1 @@ +{"count":1,"rows":[{"user_ts_min":"2019-03-03 10:03:03.123456"}]} \ No newline at end of file diff --git a/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/108 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/108 new file mode 100644 index 0000000000..96421c5796 --- /dev/null +++ b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/108 @@ -0,0 +1 @@ +{"count":1,"rows":[{"user_ts_min":"2019-03-03 10:03:03.123456","user_ts_max":"2019-03-03 10:03:03.123456"}]} \ No newline at end of file diff --git a/tests/unit/C/services/storage/sqlite/expected/11 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/11 similarity index 100% rename from tests/unit/C/services/storage/sqlite/expected/11 rename to tests/unit/C/services/storage/sqlite/expected_ETC_UTC/11 diff --git a/tests/unit/C/services/storage/sqlite/expected/12 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/12 similarity index 100% rename from tests/unit/C/services/storage/sqlite/expected/12 rename to tests/unit/C/services/storage/sqlite/expected_ETC_UTC/12 diff --git a/tests/unit/C/services/storage/sqlite/expected/13 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/13 similarity index 100% rename from tests/unit/C/services/storage/sqlite/expected/13 rename to tests/unit/C/services/storage/sqlite/expected_ETC_UTC/13 diff --git a/tests/unit/C/services/storage/sqlite/expected/14 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/14 similarity index 100% rename from tests/unit/C/services/storage/sqlite/expected/14 rename to tests/unit/C/services/storage/sqlite/expected_ETC_UTC/14 diff --git a/tests/unit/C/services/storage/sqlite/expected/15 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/15 similarity index 100% rename from tests/unit/C/services/storage/sqlite/expected/15 rename to tests/unit/C/services/storage/sqlite/expected_ETC_UTC/15 diff --git a/tests/unit/C/services/storage/sqlite/expected/16 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/16 similarity index 100% rename from tests/unit/C/services/storage/sqlite/expected/16 rename to tests/unit/C/services/storage/sqlite/expected_ETC_UTC/16 diff --git a/tests/unit/C/services/storage/sqlite/expected/18 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/18 similarity index 100% rename from tests/unit/C/services/storage/sqlite/expected/18 rename to tests/unit/C/services/storage/sqlite/expected_ETC_UTC/18 diff --git a/tests/unit/C/services/storage/sqlite/expected/19 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/19 similarity index 100% rename from 
tests/unit/C/services/storage/sqlite/expected/19 rename to tests/unit/C/services/storage/sqlite/expected_ETC_UTC/19 diff --git a/tests/unit/C/services/storage/sqlite/expected/2 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/2 similarity index 100% rename from tests/unit/C/services/storage/sqlite/expected/2 rename to tests/unit/C/services/storage/sqlite/expected_ETC_UTC/2 diff --git a/tests/unit/C/services/storage/sqlite/expected/20 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/20 similarity index 100% rename from tests/unit/C/services/storage/sqlite/expected/20 rename to tests/unit/C/services/storage/sqlite/expected_ETC_UTC/20 diff --git a/tests/unit/C/services/storage/sqlite/expected/21 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/21 similarity index 100% rename from tests/unit/C/services/storage/sqlite/expected/21 rename to tests/unit/C/services/storage/sqlite/expected_ETC_UTC/21 diff --git a/tests/unit/C/services/storage/sqlite/expected/22 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/22 similarity index 100% rename from tests/unit/C/services/storage/sqlite/expected/22 rename to tests/unit/C/services/storage/sqlite/expected_ETC_UTC/22 diff --git a/tests/unit/C/services/storage/sqlite/expected/23 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/23 similarity index 100% rename from tests/unit/C/services/storage/sqlite/expected/23 rename to tests/unit/C/services/storage/sqlite/expected_ETC_UTC/23 diff --git a/tests/unit/C/services/storage/sqlite/expected/24 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/24 similarity index 100% rename from tests/unit/C/services/storage/sqlite/expected/24 rename to tests/unit/C/services/storage/sqlite/expected_ETC_UTC/24 diff --git a/tests/unit/C/services/storage/sqlite/expected/25 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/25 similarity index 100% rename from tests/unit/C/services/storage/sqlite/expected/25 rename to tests/unit/C/services/storage/sqlite/expected_ETC_UTC/25 diff --git a/tests/unit/C/services/storage/sqlite/expected/26 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/26 similarity index 100% rename from tests/unit/C/services/storage/sqlite/expected/26 rename to tests/unit/C/services/storage/sqlite/expected_ETC_UTC/26 diff --git a/tests/unit/C/services/storage/sqlite/expected/27 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/27 similarity index 100% rename from tests/unit/C/services/storage/sqlite/expected/27 rename to tests/unit/C/services/storage/sqlite/expected_ETC_UTC/27 diff --git a/tests/unit/C/services/storage/sqlite/expected/28 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/28 similarity index 100% rename from tests/unit/C/services/storage/sqlite/expected/28 rename to tests/unit/C/services/storage/sqlite/expected_ETC_UTC/28 diff --git a/tests/unit/C/services/storage/sqlite/expected/29 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/29 similarity index 100% rename from tests/unit/C/services/storage/sqlite/expected/29 rename to tests/unit/C/services/storage/sqlite/expected_ETC_UTC/29 diff --git a/tests/unit/C/services/storage/sqlite/expected/3 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/3 similarity index 100% rename from tests/unit/C/services/storage/sqlite/expected/3 rename to tests/unit/C/services/storage/sqlite/expected_ETC_UTC/3 diff --git a/tests/unit/C/services/storage/sqlite/expected/30 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/30 similarity index 100% rename from 
tests/unit/C/services/storage/sqlite/expected/30 rename to tests/unit/C/services/storage/sqlite/expected_ETC_UTC/30 diff --git a/tests/unit/C/services/storage/sqlite/expected/31 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/31 similarity index 100% rename from tests/unit/C/services/storage/sqlite/expected/31 rename to tests/unit/C/services/storage/sqlite/expected_ETC_UTC/31 diff --git a/tests/unit/C/services/storage/sqlite/expected/32 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/32 similarity index 100% rename from tests/unit/C/services/storage/sqlite/expected/32 rename to tests/unit/C/services/storage/sqlite/expected_ETC_UTC/32 diff --git a/tests/unit/C/services/storage/sqlite/expected/33 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/33 similarity index 100% rename from tests/unit/C/services/storage/sqlite/expected/33 rename to tests/unit/C/services/storage/sqlite/expected_ETC_UTC/33 diff --git a/tests/unit/C/services/storage/sqlite/expected/34 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/34 similarity index 100% rename from tests/unit/C/services/storage/sqlite/expected/34 rename to tests/unit/C/services/storage/sqlite/expected_ETC_UTC/34 diff --git a/tests/unit/C/services/storage/sqlite/expected/35 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/35 similarity index 100% rename from tests/unit/C/services/storage/sqlite/expected/35 rename to tests/unit/C/services/storage/sqlite/expected_ETC_UTC/35 diff --git a/tests/unit/C/services/storage/sqlite/expected/37 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/37 similarity index 100% rename from tests/unit/C/services/storage/sqlite/expected/37 rename to tests/unit/C/services/storage/sqlite/expected_ETC_UTC/37 diff --git a/tests/unit/C/services/storage/sqlite/expected/38 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/38 similarity index 100% rename from tests/unit/C/services/storage/sqlite/expected/38 rename to tests/unit/C/services/storage/sqlite/expected_ETC_UTC/38 diff --git a/tests/unit/C/services/storage/sqlite/expected/39 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/39 similarity index 100% rename from tests/unit/C/services/storage/sqlite/expected/39 rename to tests/unit/C/services/storage/sqlite/expected_ETC_UTC/39 diff --git a/tests/unit/C/services/storage/sqlite/expected/4 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/4 similarity index 100% rename from tests/unit/C/services/storage/sqlite/expected/4 rename to tests/unit/C/services/storage/sqlite/expected_ETC_UTC/4 diff --git a/tests/unit/C/services/storage/sqlite/expected/40 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/40 similarity index 100% rename from tests/unit/C/services/storage/sqlite/expected/40 rename to tests/unit/C/services/storage/sqlite/expected_ETC_UTC/40 diff --git a/tests/unit/C/services/storage/sqlite/expected/41 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/41 similarity index 100% rename from tests/unit/C/services/storage/sqlite/expected/41 rename to tests/unit/C/services/storage/sqlite/expected_ETC_UTC/41 diff --git a/tests/unit/C/services/storage/sqlite/expected/42 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/42 similarity index 100% rename from tests/unit/C/services/storage/sqlite/expected/42 rename to tests/unit/C/services/storage/sqlite/expected_ETC_UTC/42 diff --git a/tests/unit/C/services/storage/sqlite/expected/43 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/43 similarity index 100% rename from 
tests/unit/C/services/storage/sqlite/expected/43 rename to tests/unit/C/services/storage/sqlite/expected_ETC_UTC/43 diff --git a/tests/unit/C/services/storage/sqlite/expected/44 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/44 similarity index 100% rename from tests/unit/C/services/storage/sqlite/expected/44 rename to tests/unit/C/services/storage/sqlite/expected_ETC_UTC/44 diff --git a/tests/unit/C/services/storage/sqlite/expected/45 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/45 similarity index 100% rename from tests/unit/C/services/storage/sqlite/expected/45 rename to tests/unit/C/services/storage/sqlite/expected_ETC_UTC/45 diff --git a/tests/unit/C/services/storage/sqlite/expected/46 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/46 similarity index 100% rename from tests/unit/C/services/storage/sqlite/expected/46 rename to tests/unit/C/services/storage/sqlite/expected_ETC_UTC/46 diff --git a/tests/unit/C/services/storage/sqlite/expected/47 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/47 similarity index 100% rename from tests/unit/C/services/storage/sqlite/expected/47 rename to tests/unit/C/services/storage/sqlite/expected_ETC_UTC/47 diff --git a/tests/unit/C/services/storage/sqlite/expected/48 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/48 similarity index 100% rename from tests/unit/C/services/storage/sqlite/expected/48 rename to tests/unit/C/services/storage/sqlite/expected_ETC_UTC/48 diff --git a/tests/unit/C/services/storage/sqlite/expected/49 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/49 similarity index 100% rename from tests/unit/C/services/storage/sqlite/expected/49 rename to tests/unit/C/services/storage/sqlite/expected_ETC_UTC/49 diff --git a/tests/unit/C/services/storage/sqlite/expected/5 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/5 similarity index 100% rename from tests/unit/C/services/storage/sqlite/expected/5 rename to tests/unit/C/services/storage/sqlite/expected_ETC_UTC/5 diff --git a/tests/unit/C/services/storage/sqlite/expected/50 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/50 similarity index 100% rename from tests/unit/C/services/storage/sqlite/expected/50 rename to tests/unit/C/services/storage/sqlite/expected_ETC_UTC/50 diff --git a/tests/unit/C/services/storage/sqlite/expected/51 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/51 similarity index 100% rename from tests/unit/C/services/storage/sqlite/expected/51 rename to tests/unit/C/services/storage/sqlite/expected_ETC_UTC/51 diff --git a/tests/unit/C/services/storage/sqlite/expected/52 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/52 similarity index 100% rename from tests/unit/C/services/storage/sqlite/expected/52 rename to tests/unit/C/services/storage/sqlite/expected_ETC_UTC/52 diff --git a/tests/unit/C/services/storage/sqlite/expected/53 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/53 similarity index 100% rename from tests/unit/C/services/storage/sqlite/expected/53 rename to tests/unit/C/services/storage/sqlite/expected_ETC_UTC/53 diff --git a/tests/unit/C/services/storage/sqlite/expected/54 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/54 similarity index 100% rename from tests/unit/C/services/storage/sqlite/expected/54 rename to tests/unit/C/services/storage/sqlite/expected_ETC_UTC/54 diff --git a/tests/unit/C/services/storage/sqlite/expected/55 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/55 similarity index 100% rename from 
tests/unit/C/services/storage/sqlite/expected/55 rename to tests/unit/C/services/storage/sqlite/expected_ETC_UTC/55 diff --git a/tests/unit/C/services/storage/sqlite/expected/56 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/56 similarity index 100% rename from tests/unit/C/services/storage/sqlite/expected/56 rename to tests/unit/C/services/storage/sqlite/expected_ETC_UTC/56 diff --git a/tests/unit/C/services/storage/sqlite/expected/57 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/57 similarity index 100% rename from tests/unit/C/services/storage/sqlite/expected/57 rename to tests/unit/C/services/storage/sqlite/expected_ETC_UTC/57 diff --git a/tests/unit/C/services/storage/sqlite/expected/58 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/58 similarity index 100% rename from tests/unit/C/services/storage/sqlite/expected/58 rename to tests/unit/C/services/storage/sqlite/expected_ETC_UTC/58 diff --git a/tests/unit/C/services/storage/sqlite/expected/59 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/59 similarity index 100% rename from tests/unit/C/services/storage/sqlite/expected/59 rename to tests/unit/C/services/storage/sqlite/expected_ETC_UTC/59 diff --git a/tests/unit/C/services/storage/sqlite/expected/6 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/6 similarity index 100% rename from tests/unit/C/services/storage/sqlite/expected/6 rename to tests/unit/C/services/storage/sqlite/expected_ETC_UTC/6 diff --git a/tests/unit/C/services/storage/sqlite/expected/60 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/60 similarity index 100% rename from tests/unit/C/services/storage/sqlite/expected/60 rename to tests/unit/C/services/storage/sqlite/expected_ETC_UTC/60 diff --git a/tests/unit/C/services/storage/sqlite/expected/61 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/61 similarity index 100% rename from tests/unit/C/services/storage/sqlite/expected/61 rename to tests/unit/C/services/storage/sqlite/expected_ETC_UTC/61 diff --git a/tests/unit/C/services/storage/sqlite/expected/62 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/62 similarity index 100% rename from tests/unit/C/services/storage/sqlite/expected/62 rename to tests/unit/C/services/storage/sqlite/expected_ETC_UTC/62 diff --git a/tests/unit/C/services/storage/sqlite/expected/63 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/63 similarity index 100% rename from tests/unit/C/services/storage/sqlite/expected/63 rename to tests/unit/C/services/storage/sqlite/expected_ETC_UTC/63 diff --git a/tests/unit/C/services/storage/sqlite/expected/64 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/64 similarity index 100% rename from tests/unit/C/services/storage/sqlite/expected/64 rename to tests/unit/C/services/storage/sqlite/expected_ETC_UTC/64 diff --git a/tests/unit/C/services/storage/sqlite/expected/65 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/65 similarity index 100% rename from tests/unit/C/services/storage/sqlite/expected/65 rename to tests/unit/C/services/storage/sqlite/expected_ETC_UTC/65 diff --git a/tests/unit/C/services/storage/sqlite/expected/66 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/66 similarity index 100% rename from tests/unit/C/services/storage/sqlite/expected/66 rename to tests/unit/C/services/storage/sqlite/expected_ETC_UTC/66 diff --git a/tests/unit/C/services/storage/sqlite/expected/67 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/67 similarity index 100% rename from 
tests/unit/C/services/storage/sqlite/expected/67
rename to tests/unit/C/services/storage/sqlite/expected_ETC_UTC/67
diff --git a/tests/unit/C/services/storage/sqlite/expected/68 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/68
similarity index 100%
rename from tests/unit/C/services/storage/sqlite/expected/68
rename to tests/unit/C/services/storage/sqlite/expected_ETC_UTC/68
diff --git a/tests/unit/C/services/storage/sqlite/expected/69 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/69
similarity index 100%
rename from tests/unit/C/services/storage/sqlite/expected/69
rename to tests/unit/C/services/storage/sqlite/expected_ETC_UTC/69
diff --git a/tests/unit/C/services/storage/sqlite/expected/7 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/7
similarity index 100%
rename from tests/unit/C/services/storage/sqlite/expected/7
rename to tests/unit/C/services/storage/sqlite/expected_ETC_UTC/7
diff --git a/tests/unit/C/services/storage/sqlite/expected/70 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/70
similarity index 100%
rename from tests/unit/C/services/storage/sqlite/expected/70
rename to tests/unit/C/services/storage/sqlite/expected_ETC_UTC/70
diff --git a/tests/unit/C/services/storage/sqlite/expected/71 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/71
similarity index 100%
rename from tests/unit/C/services/storage/sqlite/expected/71
rename to tests/unit/C/services/storage/sqlite/expected_ETC_UTC/71
diff --git a/tests/unit/C/services/storage/sqlite/expected/72 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/72
similarity index 100%
rename from tests/unit/C/services/storage/sqlite/expected/72
rename to tests/unit/C/services/storage/sqlite/expected_ETC_UTC/72
diff --git a/tests/unit/C/services/storage/sqlite/expected/73 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/73
similarity index 100%
rename from tests/unit/C/services/storage/sqlite/expected/73
rename to tests/unit/C/services/storage/sqlite/expected_ETC_UTC/73
diff --git a/tests/unit/C/services/storage/sqlite/expected/74 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/74
similarity index 100%
rename from tests/unit/C/services/storage/sqlite/expected/74
rename to tests/unit/C/services/storage/sqlite/expected_ETC_UTC/74
diff --git a/tests/unit/C/services/storage/sqlite/expected/75 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/75
similarity index 100%
rename from tests/unit/C/services/storage/sqlite/expected/75
rename to tests/unit/C/services/storage/sqlite/expected_ETC_UTC/75
diff --git a/tests/unit/C/services/storage/sqlite/expected/76 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/76
similarity index 100%
rename from tests/unit/C/services/storage/sqlite/expected/76
rename to tests/unit/C/services/storage/sqlite/expected_ETC_UTC/76
diff --git a/tests/unit/C/services/storage/sqlite/expected/77 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/77
similarity index 100%
rename from tests/unit/C/services/storage/sqlite/expected/77
rename to tests/unit/C/services/storage/sqlite/expected_ETC_UTC/77
diff --git a/tests/unit/C/services/storage/sqlite/expected/78 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/78
similarity index 100%
rename from tests/unit/C/services/storage/sqlite/expected/78
rename to tests/unit/C/services/storage/sqlite/expected_ETC_UTC/78
diff --git a/tests/unit/C/services/storage/sqlite/expected/79 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/79
similarity index 100%
rename from tests/unit/C/services/storage/sqlite/expected/79
rename to tests/unit/C/services/storage/sqlite/expected_ETC_UTC/79
diff --git a/tests/unit/C/services/storage/sqlite/expected/8 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/8
similarity index 100%
rename from tests/unit/C/services/storage/sqlite/expected/8
rename to tests/unit/C/services/storage/sqlite/expected_ETC_UTC/8
diff --git a/tests/unit/C/services/storage/sqlite/expected/80 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/80
similarity index 100%
rename from tests/unit/C/services/storage/sqlite/expected/80
rename to tests/unit/C/services/storage/sqlite/expected_ETC_UTC/80
diff --git a/tests/unit/C/services/storage/sqlite/expected/81 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/81
similarity index 100%
rename from tests/unit/C/services/storage/sqlite/expected/81
rename to tests/unit/C/services/storage/sqlite/expected_ETC_UTC/81
diff --git a/tests/unit/C/services/storage/sqlite/expected/82 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/82
similarity index 100%
rename from tests/unit/C/services/storage/sqlite/expected/82
rename to tests/unit/C/services/storage/sqlite/expected_ETC_UTC/82
diff --git a/tests/unit/C/services/storage/sqlite/expected/83 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/83
similarity index 100%
rename from tests/unit/C/services/storage/sqlite/expected/83
rename to tests/unit/C/services/storage/sqlite/expected_ETC_UTC/83
diff --git a/tests/unit/C/services/storage/sqlite/expected/84 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/84
similarity index 100%
rename from tests/unit/C/services/storage/sqlite/expected/84
rename to tests/unit/C/services/storage/sqlite/expected_ETC_UTC/84
diff --git a/tests/unit/C/services/storage/sqlite/expected/85 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/85
similarity index 100%
rename from tests/unit/C/services/storage/sqlite/expected/85
rename to tests/unit/C/services/storage/sqlite/expected_ETC_UTC/85
diff --git a/tests/unit/C/services/storage/sqlite/expected/86 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/86
similarity index 100%
rename from tests/unit/C/services/storage/sqlite/expected/86
rename to tests/unit/C/services/storage/sqlite/expected_ETC_UTC/86
diff --git a/tests/unit/C/services/storage/sqlite/expected/87 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/87
similarity index 100%
rename from tests/unit/C/services/storage/sqlite/expected/87
rename to tests/unit/C/services/storage/sqlite/expected_ETC_UTC/87
diff --git a/tests/unit/C/services/storage/sqlite/expected/88 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/88
similarity index 100%
rename from tests/unit/C/services/storage/sqlite/expected/88
rename to tests/unit/C/services/storage/sqlite/expected_ETC_UTC/88
diff --git a/tests/unit/C/services/storage/sqlite/expected/89 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/89
similarity index 100%
rename from tests/unit/C/services/storage/sqlite/expected/89
rename to tests/unit/C/services/storage/sqlite/expected_ETC_UTC/89
diff --git a/tests/unit/C/services/storage/sqlite/expected/9 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/9
similarity index 100%
rename from tests/unit/C/services/storage/sqlite/expected/9
rename to tests/unit/C/services/storage/sqlite/expected_ETC_UTC/9
diff --git a/tests/unit/C/services/storage/sqlite/expected/90 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/90
similarity index 100%
rename from tests/unit/C/services/storage/sqlite/expected/90
rename to tests/unit/C/services/storage/sqlite/expected_ETC_UTC/90
diff --git a/tests/unit/C/services/storage/sqlite/expected/91 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/91
similarity index 100%
rename from tests/unit/C/services/storage/sqlite/expected/91
rename to tests/unit/C/services/storage/sqlite/expected_ETC_UTC/91
diff --git a/tests/unit/C/services/storage/sqlite/expected/92 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/92
similarity index 100%
rename from tests/unit/C/services/storage/sqlite/expected/92
rename to tests/unit/C/services/storage/sqlite/expected_ETC_UTC/92
diff --git a/tests/unit/C/services/storage/sqlite/expected/93 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/93
similarity index 100%
rename from tests/unit/C/services/storage/sqlite/expected/93
rename to tests/unit/C/services/storage/sqlite/expected_ETC_UTC/93
diff --git a/tests/unit/C/services/storage/sqlite/expected/94 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/94
similarity index 100%
rename from tests/unit/C/services/storage/sqlite/expected/94
rename to tests/unit/C/services/storage/sqlite/expected_ETC_UTC/94
diff --git a/tests/unit/C/services/storage/sqlite/expected/95 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/95
similarity index 100%
rename from tests/unit/C/services/storage/sqlite/expected/95
rename to tests/unit/C/services/storage/sqlite/expected_ETC_UTC/95
diff --git a/tests/unit/C/services/storage/sqlite/expected/96 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/96
similarity index 100%
rename from tests/unit/C/services/storage/sqlite/expected/96
rename to tests/unit/C/services/storage/sqlite/expected_ETC_UTC/96
diff --git a/tests/unit/C/services/storage/sqlite/expected/97 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/97
similarity index 100%
rename from tests/unit/C/services/storage/sqlite/expected/97
rename to tests/unit/C/services/storage/sqlite/expected_ETC_UTC/97
diff --git a/tests/unit/C/services/storage/sqlite/expected/98 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/98
similarity index 100%
rename from tests/unit/C/services/storage/sqlite/expected/98
rename to tests/unit/C/services/storage/sqlite/expected_ETC_UTC/98
diff --git a/tests/unit/C/services/storage/sqlite/expected/99 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/99
similarity index 100%
rename from tests/unit/C/services/storage/sqlite/expected/99
rename to tests/unit/C/services/storage/sqlite/expected_ETC_UTC/99
diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/1 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/1
new file mode 100644
index 0000000000..960917c42d
--- /dev/null
+++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/1
@@ -0,0 +1 @@
+{"count":1,"rows":[{"id":1,"key":"TEST1","description":"A test row","data":{"json":"test1"}}]}
\ No newline at end of file
diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/10 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/10
new file mode 100644
index 0000000000..89b4f4207d
--- /dev/null
+++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/10
@@ -0,0 +1 @@
+{"count":1,"rows":[{"max_id":1}]}
\ No newline at end of file
diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/100 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/100
new file mode 100644
index 0000000000..124e6704de
--- /dev/null
+++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/100
@@ -0,0 +1 @@
+{"count":0,"rows":[]}
\ No newline at end of file
diff --git a/tests/system/python/api/.gitkeep b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/101
similarity index 100%
rename from tests/system/python/api/.gitkeep
rename to tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/101
diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/102 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/102
new file mode 100644
index 0000000000..804a3da17d
--- /dev/null
+++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/102
@@ -0,0 +1 @@
+{ "removed" : 100, "unsentPurged" : 100, "unsentRetained" : 1, "readings" : 1 }
\ No newline at end of file
diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/103 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/103
new file mode 100644
index 0000000000..d84da94821
--- /dev/null
+++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/103
@@ -0,0 +1 @@
+{ "response" : "appended", "readings_added" : 11 }
\ No newline at end of file
diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/104 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/104
new file mode 100644
index 0000000000..cc1e058c38
--- /dev/null
+++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/104
@@ -0,0 +1 @@
+{"count":11,"rows":[{"asset_code":"msec_003_OK","read_key":"f1cfff7a-3769-4f47-9ded-000000000003","reading":{"value":3},"user_ts":"2019-01-01 11:01:01.000000"},{"asset_code":"msec_004_OK","read_key":"f1cfff7a-3769-4f47-9ded-000000000004","reading":{"value":4},"user_ts":"2019-01-02 11:02:01.000000"},{"asset_code":"msec_005_OK","read_key":"f1cfff7a-3769-4f47-9ded-000000000005","reading":{"value":5},"user_ts":"2019-01-03 11:02:02.841000"},{"asset_code":"msec_006_OK","read_key":"f1cfff7a-3769-4f47-9ded-000000000006","reading":{"value":6},"user_ts":"2019-01-04 11:03:05.123456"},{"asset_code":"msec_007_OK","read_key":"f1cfff7a-3769-4f47-9ded-000000000007","reading":{"value":7},"user_ts":"2019-01-04 11:03:05.100000"},{"asset_code":"msec_008_OK","read_key":"f1cfff7a-3769-4f47-9ded-000000000008","reading":{"value":8},"user_ts":"2019-01-04 11:03:05.123000"},{"asset_code":"msec_009_OK","read_key":"f1cfff7a-3769-4f47-9ded-000000000009","reading":{"value":9},"user_ts":"2019-03-03 11:03:03.123456"},{"asset_code":"msec_010_OK","read_key":"f1cfff7a-3769-4f47-9ded-000000000010","reading":{"value":10},"user_ts":"2019-03-04 10:03:04.123456"},{"asset_code":"msec_011_OK","read_key":"f1cfff7a-3769-4f47-9ded-000000000011","reading":{"value":11},"user_ts":"2019-03-05 12:03:05.123456"},{"asset_code":"msec_012_OK","read_key":"f1cfff7a-3769-4f47-9ded-000000000012","reading":{"value":12},"user_ts":"2019-03-04 08:33:04.123456"},{"asset_code":"msec_013_OK","read_key":"f1cfff7a-3769-4f47-9ded-000000000013","reading":{"value":13},"user_ts":"2019-03-05 13:33:05.123456"}]}
\ No newline at end of file
diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/105 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/105
new file mode 100644
index 0000000000..6e1d09a064
--- /dev/null
+++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/105
@@ -0,0 +1 @@
+{"count":1,"rows":[{"reading":{"value":9},"user_ts":"2019-03-03 11:03:03.123456"}]}
\ No newline at end of file
diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/106 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/106
new file mode 100644
index 0000000000..19ea46d15f
--- /dev/null
+++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/106
@@ -0,0 +1 @@
+{"count":1,"rows":[{"reading":{"value":9},"user_ts_alias":"2019-03-03 11:03:03.123456"}]}
\ No newline at end of file
diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/107 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/107
new file mode 100644
index 0000000000..d570db1c2a
--- /dev/null
+++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/107
@@ -0,0 +1 @@
+{"count":1,"rows":[{"user_ts_min":"2019-03-03 11:03:03.123456"}]}
\ No newline at end of file
diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/108 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/108
new file mode 100644
index 0000000000..5a1706da6e
--- /dev/null
+++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/108
@@ -0,0 +1 @@
+{"count":1,"rows":[{"user_ts_min":"2019-03-03 11:03:03.123456","user_ts_max":"2019-03-03 11:03:03.123456"}]}
\ No newline at end of file
diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/11 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/11
new file mode 100644
index 0000000000..7d66483ebe
--- /dev/null
+++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/11
@@ -0,0 +1 @@
+{ "response" : "inserted", "rows_affected" : 1 }
\ No newline at end of file
diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/12 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/12
new file mode 100644
index 0000000000..1ebeaeaef3
--- /dev/null
+++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/12
@@ -0,0 +1 @@
+{"count":1,"rows":[{"id":2,"key":"TEST2","description":"An inserted row","data":{"json":"inserted object"}}]}
\ No newline at end of file
diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/13 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/13
new file mode 100644
index 0000000000..27530161d2
--- /dev/null
+++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/13
@@ -0,0 +1 @@
+{ "entryPoint" : "insert", "message" : "table foglamp.test has no column named Nonexistant", "retryable" : false}
\ No newline at end of file
diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/14 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/14
new file mode 100644
index 0000000000..95390e2964
--- /dev/null
+++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/14
@@ -0,0 +1,2 @@
+{ "entryPoint" : "insert", "message" : "Failed to parse JSON payload
+", "retryable" : false}
\ No newline at end of file
diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/15 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/15
new file mode 100644
index 0000000000..29146ee48a
--- /dev/null
+++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/15
@@ -0,0 +1 @@
+{ "response" : "deleted", "rows_affected" : 1 }
\ No newline at end of file
diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/16 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/16
new file mode 100644
index 0000000000..124e6704de
--- /dev/null
+++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/16
@@ -0,0 +1 @@
+{"count":0,"rows":[]}
\ No newline at end of file
diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/18 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/18
new file mode 100644
index 0000000000..7d66483ebe
--- /dev/null
+++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/18
@@ -0,0 +1 @@
+{ "response" : "inserted", "rows_affected" : 1 }
\ No newline at end of file
diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/19 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/19
new file mode 100644
index 0000000000..960917c42d
--- /dev/null
+++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/19
@@ -0,0 +1 @@
+{"count":1,"rows":[{"id":1,"key":"TEST1","description":"A test row","data":{"json":"test1"}}]}
\ No newline at end of file
diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/2 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/2
new file mode 100644
index 0000000000..960917c42d
--- /dev/null
+++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/2
@@ -0,0 +1 @@
+{"count":1,"rows":[{"id":1,"key":"TEST1","description":"A test row","data":{"json":"test1"}}]}
\ No newline at end of file
diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/20 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/20
new file mode 100644
index 0000000000..1ebeaeaef3
--- /dev/null
+++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/20
@@ -0,0 +1 @@
+{"count":1,"rows":[{"id":2,"key":"TEST2","description":"An inserted row","data":{"json":"inserted object"}}]}
\ No newline at end of file
diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/21 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/21
new file mode 100644
index 0000000000..8b2d266eb9
--- /dev/null
+++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/21
@@ -0,0 +1 @@
+{ "entryPoint" : "where clause", "message" : "The \"where\" object is missing a \"column\" property", "retryable" : false}
\ No newline at end of file
diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/22 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/22
new file mode 100644
index 0000000000..8584b11223
--- /dev/null
+++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/22
@@ -0,0 +1 @@
+{ "entryPoint" : "where clause", "message" : "The \"where\" object is missing a \"condition\" property", "retryable" : false}
\ No newline at end of file
diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/23 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/23
new file mode 100644
index 0000000000..299df5fdf4
--- /dev/null
+++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/23
@@ -0,0 +1 @@
+{ "entryPoint" : "where clause", "message" : "The \"where\" object is missing a \"value\" property", "retryable" : false}
\ No newline at end of file
diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/24 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/24
new file mode 100644
index 0000000000..305ed629e9
--- /dev/null
+++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/24
@@ -0,0 +1 @@
+{ "entryPoint" : "where clause", "message" : "The \"where\" property must be a JSON object", "retryable" : false}
\ No newline at end of file
diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/25 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/25
new file mode 100644
index 0000000000..1ebeaeaef3
--- /dev/null
+++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/25
@@ -0,0 +1 @@
+{"count":1,"rows":[{"id":2,"key":"TEST2","description":"An inserted row","data":{"json":"inserted object"}}]}
\ No newline at end of file
diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/26 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/26
new file mode 100644
index 0000000000..831678c175
--- /dev/null
+++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/26
@@ -0,0 +1 @@
+{ "entryPoint" : "Select sort", "message" : "Missing property \"column\"", "retryable" : false}
\ No newline at end of file
diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/27 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/27
new file mode 100644
index 0000000000..a8848cbc86
--- /dev/null
+++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/27
@@ -0,0 +1 @@
+{ "response" : "updated", "rows_affected" : 1 }
\ No newline at end of file
diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/28 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/28
new file mode 100644
index 0000000000..fd1a64c2c5
--- /dev/null
+++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/28
@@ -0,0 +1 @@
+{"count":1,"rows":[{"id":2,"key":"TEST2","description":"updated description","data":{"json":"inserted object"}}]}
\ No newline at end of file
diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/29 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/29
new file mode 100644
index 0000000000..a8848cbc86
--- /dev/null
+++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/29
@@ -0,0 +1 @@
+{ "response" : "updated", "rows_affected" : 1 }
\ No newline at end of file
diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/3 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/3
new file mode 100644
index 0000000000..124e6704de
--- /dev/null
+++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/3
@@ -0,0 +1 @@
+{"count":0,"rows":[]}
\ No newline at end of file
diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/30 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/30
new file mode 100644
index 0000000000..40f3368d49
--- /dev/null
+++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/30
@@ -0,0 +1 @@
+{"count":1,"rows":[{"id":2,"key":"UPDA","description":"updated description","data":{"json":"inserted object"}}]}
\ No newline at end of file
diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/31 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/31
new file mode 100644
index 0000000000..81a6486e91
--- /dev/null
+++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/31
@@ -0,0 +1 @@
+{ "entryPoint" : "update", "message" : "Missing values or expressions object in payload", "retryable" : false}
\ No newline at end of file
diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/32 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/32
new file mode 100644
index 0000000000..d985f72c2d
--- /dev/null
+++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/32
@@ -0,0 +1 @@
+{"count":2,"rows":[{"count_id":1,"key":"UPDA"},{"count_id":1,"key":"TEST1"}]}
\ No newline at end of file
diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/33 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/33
new file mode 100644
index 0000000000..e2fdaddfd5
--- /dev/null
+++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/33
@@ -0,0 +1 @@
+{ "error" : "Unsupported URL: /foglamp/nothing" }
\ No newline at end of file
diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/34 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/34
new file mode 100644
index 0000000000..4584abf305
--- /dev/null
+++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/34
@@ -0,0 +1 @@
+{ "entryPoint" : "retrieve", "message" : "no such table: foglamp.doesntexist", "retryable" : false}
\ No newline at end of file
diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/35 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/35
new file mode 100644
index 0000000000..54fc4416b2
--- /dev/null
+++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/35
@@ -0,0 +1 @@
+{ "entryPoint" : "retrieve", "message" : "no such column: doesntexist", "retryable" : false}
\ No newline at end of file
diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/37 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/37
new file mode 100644
index 0000000000..303973146f
--- /dev/null
+++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/37
@@ -0,0 +1 @@
+{ "response" : "appended", "readings_added" : 2 }
\ No newline at end of file
diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/38 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/38
new file mode 100644
index 0000000000..b2d0997987
--- /dev/null
+++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/38
@@ -0,0 +1 @@
+{"count":2,"rows":[{"id":966457,"asset_code":"MyAsset","read_key":"5b3be500-ff95-41ae-b5a4-cc99d08bef40","reading":{"rate":18.4},"user_ts":"2017-09-21 15:00:09.025655+01","ts":"2017-10-04 11:38:39.368881+01"},{"id":966458,"asset_code":"MyAsset","read_key":"5b3be500-ff95-41ae-b5a4-cc99d18bef40","reading":{"rate":45.1},"user_ts":"2017-09-21 15:03:09.025655+01","ts":"2017-10-04 11:38:39.368881+01"}]}
\ No newline at end of file
diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/39 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/39
new file mode 100644
index 0000000000..124e6704de
--- /dev/null
+++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/39
@@ -0,0 +1 @@
+{"count":0,"rows":[]}
\ No newline at end of file
diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/4 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/4
new file mode 100644
index 0000000000..960917c42d
--- /dev/null
+++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/4
@@ -0,0 +1 @@
+{"count":1,"rows":[{"id":1,"key":"TEST1","description":"A test row","data":{"json":"test1"}}]}
\ No newline at end of file
diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/40 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/40
new file mode 100644
index 0000000000..61ef4e298c
--- /dev/null
+++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/40
@@ -0,0 +1 @@
+{ "error" : "Missing query parameter count" }
\ No newline at end of file
diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/41 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/41
new file mode 100644
index 0000000000..fb5f172fa7
--- /dev/null
+++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/41
@@ -0,0 +1 @@
+{ "error" : "Missing query parameter id" }
\ No newline at end of file
diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/42 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/42
new file mode 100644
index 0000000000..da2cfd9351
--- /dev/null
+++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/42
@@ -0,0 +1 @@
+{ "removed" : 2, "unsentPurged" : 2, "unsentRetained" : 0, "readings" : 0 }
\ No newline at end of file
diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/43 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/43
new file mode 100644
index 0000000000..960917c42d
--- /dev/null
+++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/43
@@ -0,0 +1 @@
+{"count":1,"rows":[{"id":1,"key":"TEST1","description":"A test row","data":{"json":"test1"}}]}
\ No newline at end of file
diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/44 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/44
new file mode 100644
index 0000000000..8017dbc45c
--- /dev/null
+++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/44
@@ -0,0 +1 @@
+{"count":1,"rows":[{"min_id":1,"max_id":1}]}
\ No newline at end of file
diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/45 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/45
new file mode 100644
index 0000000000..3cf1b258b8
--- /dev/null
+++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/45
@@ -0,0 +1 @@
+{"count":1,"rows":[{"key":"TEST1","description":"A test row"}]}
\ No newline at end of file
diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/46 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/46
new file mode 100644
index 0000000000..bf8e659fa0
--- /dev/null
+++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/46
@@ -0,0 +1 @@
+{"count":1,"rows":[{"key":"TEST1","MyDescription":"A test row"}]}
\ No newline at end of file
diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/47 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/47
new file mode 100644
index 0000000000..d44fea2a6f
--- /dev/null
+++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/47
@@ -0,0 +1 @@
+{"count":1,"rows":[{"key":"TEST1","JSONvalue":"test1"}]}
\ No newline at end of file
diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/48 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/48
new file mode 100644
index 0000000000..90b3a11539
--- /dev/null
+++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/48
@@ -0,0 +1 @@
+{"count":9,"rows":[{"key":"TEST1","description":"A test row","time":"14:14:26"},{"key":"TEST2","description":"A test row","time":"14:14:27"},{"key":"TEST3","description":"A test row","time":"13:14:28"},{"key":"TEST4","description":"A test row","time":"13:14:29"},{"key":"TEST5","description":"A test row","time":"13:15:00"},{"key":"TEST6","description":"A test row","time":"13:15:33"},{"key":"TEST7","description":"A test row","time":"13:16:20"},{"key":"TEST8","description":"A test row","time":"07:14:30"},{"key":"TEST9","description":"A test row","time":"23:14:30"}]}
\ No newline at end of file
diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/49 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/49
new file mode 100644
index 0000000000..fe9af5ecf6
--- /dev/null
+++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/49
@@ -0,0 +1 @@
+{"count":8,"rows":[{"key":"TEST2","description":"A test row","timestamp":"2017-10-10 14:14:27"},{"key":"TEST3","description":"A test row","timestamp":"2017-10-10 13:14:28"},{"key":"TEST4","description":"A test row","timestamp":"2017-10-10 13:14:29"},{"key":"TEST5","description":"A test row","timestamp":"2017-10-10 13:15:00"},{"key":"TEST6","description":"A test row","timestamp":"2017-10-10 13:15:33"},{"key":"TEST7","description":"A test row","timestamp":"2017-10-10 13:16:20"},{"key":"TEST8","description":"A test row","timestamp":"2017-10-10 07:14:30"},{"key":"TEST9","description":"A test row","timestamp":"2017-10-10 23:14:30"}]}
\ No newline at end of file
diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/5 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/5
new file mode 100644
index 0000000000..124e6704de
--- /dev/null
+++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/5
@@ -0,0 +1 @@
+{"count":0,"rows":[]}
\ No newline at end of file
diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/50 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/50
new file mode 100644
index 0000000000..2d3ef169f5
--- /dev/null
+++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/50
@@ -0,0 +1 @@
+{ "entryPoint" : "retrieve", "message" : "return object must have either a column or json property", "retryable" : false}
\ No newline at end of file
diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/51 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/51
new file mode 100644
index 0000000000..24d3191c54
--- /dev/null
+++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/51
@@ -0,0 +1 @@
+{ "entryPoint" : "retrieve", "message" : "The json property is missing a properties property", "retryable" : false}
\ No newline at end of file
diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/52 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/52
new file mode 100644
index 0000000000..844af318e4
--- /dev/null
+++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/52
@@ -0,0 +1 @@
+{"count":1,"rows":[{"Entries":9}]}
\ No newline at end of file
diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/53 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/53
new file mode 100644
index 0000000000..cfd0e7926b
--- /dev/null
+++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/53
@@ -0,0 +1 @@
+{"count":1,"rows":[{"sum_id":43}]}
\ No newline at end of file
diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/54 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/54
new file mode 100644
index 0000000000..21df9c0d48
--- /dev/null
+++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/54
@@ -0,0 +1 @@
+{ "response" : "appended", "readings_added" : 100 }
\ No newline at end of file
diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/55 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/55
new file mode 100644
index 0000000000..c5748d1a33
--- /dev/null
+++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/55
@@ -0,0 +1 @@
+{"count":1,"rows":[{"min":1,"max":98,"average":52.55,"asset_code":"MyAsset"}]}
\ No newline at end of file
diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/56 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/56
new file mode 100644
index 0000000000..2e72fb850e
--- /dev/null
+++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/56
@@ -0,0 +1 @@
+{"count":2,"rows":[{"min":1,"max":98,"average":53.7721518987342,"asset_code":"MyAsset","timestamp":"2017-10-11 15:10:51"},{"min":2,"max":96,"average":47.9523809523809,"asset_code":"MyAsset","timestamp":"2017-10-11 15:10:51"}]}
\ No newline at end of file
diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/57 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/57
new file mode 100644
index 0000000000..22751a50d2
--- /dev/null
+++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/57
@@ -0,0 +1 @@
+{"count":2,"rows":[{"min":1,"max":98,"average":53.7721518987342,"asset_code":"MyAsset","bucket":"2017-10-11 15:10:51"},{"min":2,"max":96,"average":47.9523809523809,"asset_code":"MyAsset","bucket":"2017-10-11 15:10:51"}]}
\ No newline at end of file
diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/58 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/58
new file mode 100644
index 0000000000..b69be284c3
--- /dev/null
+++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/58
@@ -0,0 +1 @@
+{"count":6,"rows":[{"id":3,"key":"TEST3","description":"A test row","data":{"prop1":"test1","obj1":{"p1":"v1","p2":"v2"}},"ts":"2017-10-10 11:14:28.622"},{"id":4,"key":"TEST4","description":"A test row","data":{"prop1":"test1","obj1":{"p1":"v1","p2":"v2"}},"ts":"2017-10-10 11:14:29.622"},{"id":5,"key":"TEST5","description":"A test row","data":{"prop1":"test1","obj1":{"p1":"v1","p2":"v2"}},"ts":"2017-10-10 11:15:00.622"},{"id":6,"key":"TEST6","description":"A test row","data":{"prop1":"test1","obj1":{"p1":"v1","p2":"v2"}},"ts":"2017-10-10 11:15:33.622"},{"id":6,"key":"TEST7","description":"A test row","data":{"prop1":"test1","obj1":{"p1":"v1","p2":"v2"}},"ts":"2017-10-10 11:16:20.622"},{"id":8,"key":"TEST8","description":"A test row","data":{"prop1":"test1","obj1":{"p1":"v1","p2":"v2"}},"ts":"2017-10-10 05:14:30.622"}]}
\ No newline at end of file
diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/59 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/59
new file mode 100644
index 0000000000..ef2efe563d
--- /dev/null
+++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/59
@@ -0,0 +1 @@
+{"count":4,"rows":[{"id":1,"key":"TEST1","description":"A test row","data":{"prop1":"test1","obj1":{"p1":"v1","p2":"v2"}},"ts":"2017-10-10 12:14:26.622"},{"id":2,"key":"TEST2","description":"A test row","data":{"prop1":"test1","obj1":{"p1":"v1","p2":"v2"}},"ts":"2017-10-10 12:14:27.422"},{"id":8,"key":"TEST8","description":"A test row","data":{"prop1":"test1","obj1":{"p1":"v1","p2":"v2"}},"ts":"2017-10-10 05:14:30.622"},{"id":9,"key":"TEST9","description":"A test row","data":{"prop1":"test1","obj1":{"p1":"v1","p2":"v2"}},"ts":"2017-10-10 21:14:30.622"}]}
\ No newline at end of file
diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/6 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/6
new file mode 100644
index 0000000000..124e6704de
--- /dev/null
+++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/6
@@ -0,0 +1 @@
+{"count":0,"rows":[]}
\ No newline at end of file
diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/60 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/60
new file mode 100644
index 0000000000..4488386053
--- /dev/null
+++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/60
@@ -0,0 +1 @@
+{"count":9,"rows":[{"id":1,"key":"TEST1","description":"A test row","data":{"prop1":"test1","obj1":{"p1":"v1","p2":"v2"}},"ts":"2017-10-10 12:14:26.622"},{"id":2,"key":"TEST2","description":"A test row","data":{"prop1":"test1","obj1":{"p1":"v1","p2":"v2"}},"ts":"2017-10-10 12:14:27.422"},{"id":3,"key":"TEST3","description":"A test row","data":{"prop1":"test1","obj1":{"p1":"v1","p2":"v2"}},"ts":"2017-10-10 11:14:28.622"},{"id":4,"key":"TEST4","description":"A test row","data":{"prop1":"test1","obj1":{"p1":"v1","p2":"v2"}},"ts":"2017-10-10 11:14:29.622"},{"id":5,"key":"TEST5","description":"A test row","data":{"prop1":"test1","obj1":{"p1":"v1","p2":"v2"}},"ts":"2017-10-10 11:15:00.622"},{"id":6,"key":"TEST6","description":"A test row","data":{"prop1":"test1","obj1":{"p1":"v1","p2":"v2"}},"ts":"2017-10-10 11:15:33.622"},{"id":6,"key":"TEST7","description":"A test row","data":{"prop1":"test1","obj1":{"p1":"v1","p2":"v2"}},"ts":"2017-10-10 11:16:20.622"},{"id":8,"key":"TEST8","description":"A test row","data":{"prop1":"test1","obj1":{"p1":"v1","p2":"v2"}},"ts":"2017-10-10 05:14:30.622"},{"id":9,"key":"TEST9","description":"A test row","data":{"prop1":"test1","obj1":{"p1":"v1","p2":"v2"}},"ts":"2017-10-10 21:14:30.622"}]}
\ No newline at end of file
diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/61 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/61
new file mode 100644
index 0000000000..7300a791ac
--- /dev/null
+++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/61
@@ -0,0 +1 @@
+{ "entryPoint" : "update", "message" : "No rows where updated", "retryable" : false}
\ No newline at end of file
diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/62 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/62
new file mode 100644
index 0000000000..4488386053
--- /dev/null
+++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/62
@@ -0,0 +1 @@
+{"count":9,"rows":[{"id":1,"key":"TEST1","description":"A test row","data":{"prop1":"test1","obj1":{"p1":"v1","p2":"v2"}},"ts":"2017-10-10 12:14:26.622"},{"id":2,"key":"TEST2","description":"A test row","data":{"prop1":"test1","obj1":{"p1":"v1","p2":"v2"}},"ts":"2017-10-10 12:14:27.422"},{"id":3,"key":"TEST3","description":"A test row","data":{"prop1":"test1","obj1":{"p1":"v1","p2":"v2"}},"ts":"2017-10-10 11:14:28.622"},{"id":4,"key":"TEST4","description":"A test row","data":{"prop1":"test1","obj1":{"p1":"v1","p2":"v2"}},"ts":"2017-10-10 11:14:29.622"},{"id":5,"key":"TEST5","description":"A test row","data":{"prop1":"test1","obj1":{"p1":"v1","p2":"v2"}},"ts":"2017-10-10 11:15:00.622"},{"id":6,"key":"TEST6","description":"A test row","data":{"prop1":"test1","obj1":{"p1":"v1","p2":"v2"}},"ts":"2017-10-10 11:15:33.622"},{"id":6,"key":"TEST7","description":"A test row","data":{"prop1":"test1","obj1":{"p1":"v1","p2":"v2"}},"ts":"2017-10-10 11:16:20.622"},{"id":8,"key":"TEST8","description":"A test row","data":{"prop1":"test1","obj1":{"p1":"v1","p2":"v2"}},"ts":"2017-10-10 05:14:30.622"},{"id":9,"key":"TEST9","description":"A test row","data":{"prop1":"test1","obj1":{"p1":"v1","p2":"v2"}},"ts":"2017-10-10 21:14:30.622"}]}
\ No newline at end of file
diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/63 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/63
new file mode 100644
index 0000000000..f62302483f
--- /dev/null
+++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/63
@@ -0,0 +1 @@
+{"count":1,"rows":[{"Count":100,"asset_code":"MyAsset"}]}
\ No newline at end of file
diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/64 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/64
new file mode 100644
index 0000000000..4401963f82
--- /dev/null
+++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/64
@@ -0,0 +1 @@
+{"count":100,"rows":[{"timestamp":"2017-10-11 17:10:51.927","Rate":90},{"timestamp":"2017-10-11 17:10:51.930","Rate":13},{"timestamp":"2017-10-11 17:10:51.933","Rate":84},{"timestamp":"2017-10-11 17:10:51.936","Rate":96},{"timestamp":"2017-10-11 17:10:51.939","Rate":2},{"timestamp":"2017-10-11 17:10:51.942","Rate":54},{"timestamp":"2017-10-11 17:10:51.946","Rate":28},{"timestamp":"2017-10-11 17:10:51.949","Rate":3},{"timestamp":"2017-10-11 17:10:51.952","Rate":77},{"timestamp":"2017-10-11 17:10:51.955","Rate":38},{"timestamp":"2017-10-11 17:10:51.959","Rate":26},{"timestamp":"2017-10-11 17:10:51.963","Rate":86},{"timestamp":"2017-10-11 17:10:51.966","Rate":39},{"timestamp":"2017-10-11 17:10:51.970","Rate":57},{"timestamp":"2017-10-11 17:10:51.973","Rate":73},{"timestamp":"2017-10-11 17:10:51.979","Rate":22},{"timestamp":"2017-10-11 17:10:51.982","Rate":34},{"timestamp":"2017-10-11 17:10:51.986","Rate":78},{"timestamp":"2017-10-11 17:10:51.990","Rate":20},{"timestamp":"2017-10-11 17:10:51.993","Rate":70},{"timestamp":"2017-10-11 17:10:51.996","Rate":17},{"timestamp":"2017-10-11 17:10:52.000","Rate":2},{"timestamp":"2017-10-11 17:10:52.005","Rate":18},{"timestamp":"2017-10-11 17:10:52.009","Rate":52},{"timestamp":"2017-10-11 17:10:52.012","Rate":62},{"timestamp":"2017-10-11 17:10:52.015","Rate":47},{"timestamp":"2017-10-11 17:10:52.019","Rate":73},{"timestamp":"2017-10-11 17:10:52.022","Rate":9},{"timestamp":"2017-10-11 17:10:52.026","Rate":66},{"timestamp":"2017-10-11 17:10:52.029","Rate":30},{"timestamp":"2017-10-11 17:10:52.031","Rate":70},{"timestamp":"2017-10-11 17:10:52.034","Rate":41},{"timestamp":"2017-10-11 17:10:52.037","Rate":2},{"timestamp":"2017-10-11 17:10:52.040","Rate":69},{"timestamp":"2017-10-11 17:10:52.043","Rate":98},{"timestamp":"2017-10-11 17:10:52.046","Rate":13},{"timestamp":"2017-10-11 17:10:52.050","Rate":91},{"timestamp":"2017-10-11 17:10:52.053","Rate":18},{"timestamp":"2017-10-11 17:10:52.056","Rate":78},{"timestamp":"2017-10-11 17:10:52.059","Rate":70},{"timestamp":"2017-10-11 17:10:52.062","Rate":48},{"timestamp":"2017-10-11 17:10:52.066","Rate":94},{"timestamp":"2017-10-11 17:10:52.070","Rate":79},{"timestamp":"2017-10-11 17:10:52.073","Rate":87},{"timestamp":"2017-10-11 17:10:52.075","Rate":60},{"timestamp":"2017-10-11 17:10:52.078","Rate":48},{"timestamp":"2017-10-11 17:10:52.081","Rate":88},{"timestamp":"2017-10-11 17:10:52.084","Rate":3},{"timestamp":"2017-10-11 17:10:52.086","Rate":93},{"timestamp":"2017-10-11 17:10:52.089","Rate":83},{"timestamp":"2017-10-11 17:10:52.092","Rate":76},{"timestamp":"2017-10-11 17:10:52.095","Rate":97},{"timestamp":"2017-10-11 17:10:52.098","Rate":31},{"timestamp":"2017-10-11 17:10:52.100","Rate":49},{"timestamp":"2017-10-11 17:10:52.103","Rate":36},{"timestamp":"2017-10-11 17:10:52.106","Rate":15},{"timestamp":"2017-10-11 17:10:52.109","Rate":67},{"timestamp":"2017-10-11 17:10:52.111","Rate":67},{"timestamp":"2017-10-11 17:10:52.114","Rate":94},{"timestamp":"2017-10-11 17:10:52.116","Rate":68},{"timestamp":"2017-10-11 17:10:52.119","Rate":22},{"timestamp":"2017-10-11 17:10:52.122","Rate":54},{"timestamp":"2017-10-11 17:10:52.124","Rate":94},{"timestamp":"2017-10-11 17:10:52.127","Rate":49},{"timestamp":"2017-10-11 17:10:52.130","Rate":59},{"timestamp":"2017-10-11 17:10:52.132","Rate":6},{"timestamp":"2017-10-11 17:10:52.135","Rate":82},{"timestamp":"2017-10-11 17:10:52.137","Rate":5},{"timestamp":"2017-10-11 17:10:52.140","Rate":1},{"timestamp":"2017-10-11 17:10:52.142","Rate":53},{"timestamp":"2017-10-11 17:10:52.145","Rate":69},{"timestamp":"2017-10-11 17:10:52.147","Rate":97},{"timestamp":"2017-10-11 17:10:52.150","Rate":58},{"timestamp":"2017-10-11 17:10:52.153","Rate":76},{"timestamp":"2017-10-11 17:10:52.157","Rate":81},{"timestamp":"2017-10-11 17:10:52.160","Rate":30},{"timestamp":"2017-10-11 17:10:52.163","Rate":4},{"timestamp":"2017-10-11 17:10:52.165","Rate":67},{"timestamp":"2017-10-11 17:10:52.169","Rate":5},{"timestamp":"2017-10-11 17:10:52.171","Rate":72},{"timestamp":"2017-10-11 17:10:52.174","Rate":20},{"timestamp":"2017-10-11 17:10:52.176","Rate":58},{"timestamp":"2017-10-11 17:10:52.179","Rate":75},{"timestamp":"2017-10-11 17:10:52.182","Rate":74},{"timestamp":"2017-10-11 17:10:52.184","Rate":60},{"timestamp":"2017-10-11 17:10:52.187","Rate":96},{"timestamp":"2017-10-11 17:10:52.189","Rate":30},{"timestamp":"2017-10-11 17:10:52.192","Rate":40},{"timestamp":"2017-10-11 17:10:52.195","Rate":33},{"timestamp":"2017-10-11 17:10:52.197","Rate":87},{"timestamp":"2017-10-11 17:10:52.200","Rate":67},{"timestamp":"2017-10-11 17:10:52.203","Rate":40},{"timestamp":"2017-10-11 17:10:52.206","Rate":44},{"timestamp":"2017-10-11 17:10:52.208","Rate":7},{"timestamp":"2017-10-11 17:10:52.211","Rate":52},{"timestamp":"2017-10-11 17:10:52.214","Rate":93},{"timestamp":"2017-10-11 17:10:52.219","Rate":43},{"timestamp":"2017-10-11 17:10:52.222","Rate":66},{"timestamp":"2017-10-11 17:10:52.225","Rate":8},{"timestamp":"2017-10-11 17:10:52.228","Rate":79}]}
\ No newline at end of file
diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/65 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/65
new file mode 100644
index 0000000000..a9bebbfede
--- /dev/null
+++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/65
@@ -0,0 +1 @@
+{ "response" : "updated", "rows_affected" : 4 }
\ No newline at end of file
diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/66 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/66
new file mode 100644
index 0000000000..6bcad7bc37
--- /dev/null
+++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/66
@@ -0,0 +1 @@
+{"count":4,"rows":[{"id":106,"key":"TEST6","description":"Updated with expression","data":{"prop1":"test1","obj1":{"p1":"v1","p2":"v2"}},"ts":"2017-10-10 11:15:33.622"},{"id":106,"key":"TEST7","description":"Updated with expression","data":{"prop1":"test1","obj1":{"p1":"v1","p2":"v2"}},"ts":"2017-10-10 11:16:20.622"},{"id":108,"key":"TEST8","description":"Updated with expression","data":{"prop1":"test1","obj1":{"p1":"v1","p2":"v2"}},"ts":"2017-10-10 05:14:30.622"},{"id":109,"key":"TEST9","description":"Updated with expression","data":{"prop1":"test1","obj1":{"p1":"v1","p2":"v2"}},"ts":"2017-10-10 21:14:30.622"}]}
\ No newline at end of file
diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/67 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/67
new file mode 100644
index 0000000000..c3fd30822b
--- /dev/null
+++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/67
@@ -0,0 +1 @@
+{"count":2,"rows":[{"description":"A test row"},{"description":"Updated with expression"}]}
\ No newline at end of file
diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/68 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/68
new file mode 100644
index 0000000000..a8848cbc86
--- /dev/null
+++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/68
@@ -0,0 +1 @@
+{ "response" : "updated", "rows_affected" : 1 }
\ No newline at end of file
diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/69 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/69
new file mode 100644
index 0000000000..a5d4634456
--- /dev/null
+++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/69
@@ -0,0 +1 @@
+{"count":2,"rows":[{"id":2,"key":"UPDA","description":"updated description","data":{"json":"inserted object"}},{"id":1,"key":"TEST1","description":"A test row","data":{"json":"new value"}}]}
\ No newline at end of file
diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/7 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/7
new file mode 100644
index 0000000000..379611dc02
--- /dev/null
+++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/7
@@ -0,0 +1 @@
+{"count":1,"rows":[{"count_id":1}]}
\ No newline at end of file
diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/70 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/70
new file mode 100644
index 0000000000..7d66483ebe
--- /dev/null
+++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/70
@@ -0,0 +1 @@
+{ "response" : "inserted", "rows_affected" : 1 }
\ No newline at end of file
diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/71 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/71
new file mode 100644
index 0000000000..a8848cbc86
--- /dev/null
+++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/71
@@ -0,0 +1 @@
+{ "response" : "updated", "rows_affected" : 1 }
\ No newline at end of file
diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/72 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/72
new file mode 100644
index 0000000000..858823dc3c
--- /dev/null
+++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/72
@@ -0,0 +1 @@
+{"count":1,"rows":[{"id":4,"key":"Admin","description":"URL of the admin API","data":{"url":{"value":"new value"}}}]}
\ No newline at end of file
diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/73 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/73
new file mode 100644
index 0000000000..7d66483ebe
--- /dev/null
+++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/73
@@ -0,0 +1 @@
+{ "response" : "inserted", "rows_affected" : 1 }
\ No newline at end of file
diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/74 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/74
new file mode 100644
index 0000000000..4bde9235b1
--- /dev/null
+++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/74
@@ -0,0 +1 @@
+{"count":1,"rows":[{"Count":1}]}
\ No newline at end of file
diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/75 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/75
new file mode 100644
index 0000000000..973df944e3
--- /dev/null
+++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/75
@@ -0,0 +1 @@
+{"count":9,"rows":[{"id":1,"key":"TEST1","description":"A test row","data":{"prop1":"test1","obj1":{"p1":"v1","p2":"v2"}},"ts":"2017-10-10 12:14:26.622"},{"id":2,"key":"TEST2","description":"A test row","data":{"prop1":"test1","obj1":{"p1":"v1","p2":"v2"}},"ts":"2017-10-10 12:14:27.422"},{"id":3,"key":"TEST3","description":"A test row","data":{"prop1":"test1","obj1":{"p1":"v1","p2":"v2"}},"ts":"2017-10-10 11:14:28.622"},{"id":4,"key":"TEST4","description":"A test row","data":{"prop1":"test1","obj1":{"p1":"v1","p2":"v2"}},"ts":"2017-10-10 11:14:29.622"},{"id":5,"key":"TEST5","description":"A test row","data":{"prop1":"test1","obj1":{"p1":"v1","p2":"v2"}},"ts":"2017-10-10 11:15:00.622"},{"id":106,"key":"TEST6","description":"Updated with expression","data":{"prop1":"test1","obj1":{"p1":"v1","p2":"v2"}},"ts":"2017-10-10 11:15:33.622"},{"id":106,"key":"TEST7","description":"Updated with expression","data":{"prop1":"test1","obj1":{"p1":"v1","p2":"v2"}},"ts":"2017-10-10 11:16:20.622"},{"id":108,"key":"TEST8","description":"Updated with expression","data":{"prop1":"test1","obj1":{"p1":"v1","p2":"v2"}},"ts":"2017-10-10 05:14:30.622"},{"id":109,"key":"TEST9","description":"Updated with expression","data":{"prop1":"test1","obj1":{"p1":"v1","p2":"v2"}},"ts":"2017-10-10 21:14:30.622"}]}
\ No newline at end of file
diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/76 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/76
new file mode 100644
index 0000000000..7e9c80429f
--- /dev/null
+++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/76
@@ -0,0 +1 @@
+{ "entryPoint" : "where clause", "message" : "The \"value\" of an \"newer\" condition must be an integer", "retryable" : false}
\ No newline at end of file
diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/77 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/77
new file mode 100644
index 0000000000..88ff5140f2
--- /dev/null
+++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/77
@@ -0,0 +1 @@
+{"count":5,"rows":[{"id":1,"key":"TEST1","description":"A test row","data":{"prop1":"test1","obj1":{"p1":"v1","p2":"v2"}},"ts":"2017-10-10 12:14:26.622"},{"id":2,"key":"TEST2","description":"A test row","data":{"prop1":"test1","obj1":{"p1":"v1","p2":"v2"}},"ts":"2017-10-10 12:14:27.422"},{"id":3,"key":"TEST3","description":"A test row","data":{"prop1":"test1","obj1":{"p1":"v1","p2":"v2"}},"ts":"2017-10-10 11:14:28.622"},{"id":4,"key":"TEST4","description":"A test row","data":{"prop1":"test1","obj1":{"p1":"v1","p2":"v2"}},"ts":"2017-10-10 11:14:29.622"},{"id":5,"key":"TEST5","description":"A test row","data":{"prop1":"test1","obj1":{"p1":"v1","p2":"v2"}},"ts":"2017-10-10 11:15:00.622"}]}
\ No newline at end of file
diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/78 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/78
new file mode 100644
index 0000000000..543994f16c
--- /dev/null
+++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/78
@@ -0,0 +1 @@
+{"count":2,"rows":[{"min":2,"max":96,"average":47.9523809523809,"user_ts":"2017-10-11 17:10:51"},{"min":1,"max":98,"average":53.7721518987342,"user_ts":"2017-10-11 17:10:52"}]}
\ No newline at end of file
diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/79 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/79
new file mode 100644
index 0000000000..7d66483ebe
--- /dev/null
+++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/79
@@ -0,0 +1 @@
+{ "response" : "inserted", "rows_affected" : 1 }
\ No newline at end of file
diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/8 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/8
new file mode 100644
index 0000000000..cd34346b4c
--- /dev/null
+++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/8
@@ -0,0 +1 @@
+{"count":1,"rows":[{"avg_id":1.0}]}
\ No newline at end of file
diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/80 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/80
new file mode 100644
index 0000000000..29146ee48a
--- /dev/null
+++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/80
@@ -0,0 +1 @@
+{ "response" : "deleted", "rows_affected" : 1 }
\ No newline at end of file
diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/81 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/81
new file mode 100644
index 0000000000..c614b3d6e0
--- /dev/null
+++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/81
@@ -0,0 +1 @@
+{ "entryPoint" : "appendReadings", "message" : "Payload is missing a readings array", "retryable" : false}
\ No newline at end of file
diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/82 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/82
new file mode 100644
index 0000000000..124e6704de
--- /dev/null
+++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/82
@@ -0,0 +1 @@
+{"count":0,"rows":[]}
\ No newline at end of file
diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/83 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/83
new file mode 100644
index 0000000000..4401963f82
--- /dev/null
+++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/83
@@ -0,0 +1 @@
+{"count":100,"rows":[{"timestamp":"2017-10-11 17:10:51.927","Rate":90},{"timestamp":"2017-10-11 17:10:51.930","Rate":13},{"timestamp":"2017-10-11 17:10:51.933","Rate":84},{"timestamp":"2017-10-11 17:10:51.936","Rate":96},{"timestamp":"2017-10-11 17:10:51.939","Rate":2},{"timestamp":"2017-10-11 17:10:51.942","Rate":54},{"timestamp":"2017-10-11 17:10:51.946","Rate":28},{"timestamp":"2017-10-11 17:10:51.949","Rate":3},{"timestamp":"2017-10-11 17:10:51.952","Rate":77},{"timestamp":"2017-10-11 17:10:51.955","Rate":38},{"timestamp":"2017-10-11 17:10:51.959","Rate":26},{"timestamp":"2017-10-11 17:10:51.963","Rate":86},{"timestamp":"2017-10-11 17:10:51.966","Rate":39},{"timestamp":"2017-10-11 17:10:51.970","Rate":57},{"timestamp":"2017-10-11 17:10:51.973","Rate":73},{"timestamp":"2017-10-11 17:10:51.979","Rate":22},{"timestamp":"2017-10-11 17:10:51.982","Rate":34},{"timestamp":"2017-10-11 17:10:51.986","Rate":78},{"timestamp":"2017-10-11 17:10:51.990","Rate":20},{"timestamp":"2017-10-11 17:10:51.993","Rate":70},{"timestamp":"2017-10-11 17:10:51.996","Rate":17},{"timestamp":"2017-10-11 17:10:52.000","Rate":2},{"timestamp":"2017-10-11 17:10:52.005","Rate":18},{"timestamp":"2017-10-11 17:10:52.009","Rate":52},{"timestamp":"2017-10-11 17:10:52.012","Rate":62},{"timestamp":"2017-10-11 17:10:52.015","Rate":47},{"timestamp":"2017-10-11 17:10:52.019","Rate":73},{"timestamp":"2017-10-11 17:10:52.022","Rate":9},{"timestamp":"2017-10-11 17:10:52.026","Rate":66},{"timestamp":"2017-10-11 17:10:52.029","Rate":30},{"timestamp":"2017-10-11 17:10:52.031","Rate":70},{"timestamp":"2017-10-11 17:10:52.034","Rate":41},{"timestamp":"2017-10-11 17:10:52.037","Rate":2},{"timestamp":"2017-10-11 17:10:52.040","Rate":69},{"timestamp":"2017-10-11 17:10:52.043","Rate":98},{"timestamp":"2017-10-11 17:10:52.046","Rate":13},{"timestamp":"2017-10-11 17:10:52.050","Rate":91},{"timestamp":"2017-10-11 17:10:52.053","Rate":18},{"timestamp":"2017-10-11 17:10:52.056","Rate":78},{"timestamp":"2017-10-11 17:10:52.059","Rate":70},{"timestamp":"2017-10-11 17:10:52.062","Rate":48},{"timestamp":"2017-10-11 17:10:52.066","Rate":94},{"timestamp":"2017-10-11 17:10:52.070","Rate":79},{"timestamp":"2017-10-11 17:10:52.073","Rate":87},{"timestamp":"2017-10-11 17:10:52.075","Rate":60},{"timestamp":"2017-10-11 17:10:52.078","Rate":48},{"timestamp":"2017-10-11 17:10:52.081","Rate":88},{"timestamp":"2017-10-11 17:10:52.084","Rate":3},{"timestamp":"2017-10-11 17:10:52.086","Rate":93},{"timestamp":"2017-10-11 17:10:52.089","Rate":83},{"timestamp":"2017-10-11 17:10:52.092","Rate":76},{"timestamp":"2017-10-11 17:10:52.095","Rate":97},{"timestamp":"2017-10-11 17:10:52.098","Rate":31},{"timestamp":"2017-10-11 17:10:52.100","Rate":49},{"timestamp":"2017-10-11 17:10:52.103","Rate":36},{"timestamp":"2017-10-11 17:10:52.106","Rate":15},{"timestamp":"2017-10-11 17:10:52.109","Rate":67},{"timestamp":"2017-10-11 17:10:52.111","Rate":67},{"timestamp":"2017-10-11 17:10:52.114","Rate":94},{"timestamp":"2017-10-11 17:10:52.116","Rate":68},{"timestamp":"2017-10-11 17:10:52.119","Rate":22},{"timestamp":"2017-10-11 17:10:52.122","Rate":54},{"timestamp":"2017-10-11 17:10:52.124","Rate":94},{"timestamp":"2017-10-11 17:10:52.127","Rate":49},{"timestamp":"2017-10-11 17:10:52.130","Rate":59},{"timestamp":"2017-10-11 17:10:52.132","Rate":6},{"timestamp":"2017-10-11 17:10:52.135","Rate":82},{"timestamp":"2017-10-11 17:10:52.137","Rate":5},{"timestamp":"2017-10-11 17:10:52.140","Rate":1},{"timestamp":"2017-10-11 17:10:52.142","Rate":53},{"timestamp":"2017-10-11 17:10:52.145","Rate":69},{"timestamp":"2017-10-11 17:10:52.147","Rate":97},{"timestamp":"2017-10-11 17:10:52.150","Rate":58},{"timestamp":"2017-10-11 17:10:52.153","Rate":76},{"timestamp":"2017-10-11 17:10:52.157","Rate":81},{"timestamp":"2017-10-11 17:10:52.160","Rate":30},{"timestamp":"2017-10-11 17:10:52.163","Rate":4},{"timestamp":"2017-10-11 17:10:52.165","Rate":67},{"timestamp":"2017-10-11 17:10:52.169","Rate":5},{"timestamp":"2017-10-11 17:10:52.171","Rate":72},{"timestamp":"2017-10-11 17:10:52.174","Rate":20},{"timestamp":"2017-10-11 17:10:52.176","Rate":58},{"timestamp":"2017-10-11 17:10:52.179","Rate":75},{"timestamp":"2017-10-11 17:10:52.182","Rate":74},{"timestamp":"2017-10-11 17:10:52.184","Rate":60},{"timestamp":"2017-10-11 17:10:52.187","Rate":96},{"timestamp":"2017-10-11 17:10:52.189","Rate":30},{"timestamp":"2017-10-11 17:10:52.192","Rate":40},{"timestamp":"2017-10-11 17:10:52.195","Rate":33},{"timestamp":"2017-10-11 17:10:52.197","Rate":87},{"timestamp":"2017-10-11 17:10:52.200","Rate":67},{"timestamp":"2017-10-11 17:10:52.203","Rate":40},{"timestamp":"2017-10-11 17:10:52.206","Rate":44},{"timestamp":"2017-10-11 17:10:52.208","Rate":7},{"timestamp":"2017-10-11 17:10:52.211","Rate":52},{"timestamp":"2017-10-11 17:10:52.214","Rate":93},{"timestamp":"2017-10-11 17:10:52.219","Rate":43},{"timestamp":"2017-10-11 17:10:52.222","Rate":66},{"timestamp":"2017-10-11 17:10:52.225","Rate":8},{"timestamp":"2017-10-11 17:10:52.228","Rate":79}]}
\ No newline at end of file
diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/84 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/84
new file mode 100644
index 0000000000..6e3d42ac03
--- /dev/null
+++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/84
@@ -0,0 +1 @@
+{ "entryPoint" : "limit", "message" : "Limit must be specfied as an integer", "retryable" : false}
\ No newline at end of file
diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/85 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/85
new file mode 100644
index 0000000000..8d705bbb14
--- /dev/null
+++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/85
@@ -0,0 +1 @@
+{ "entryPoint" : "skip", "message" : "Skip must be specfied as an integer", "retryable" : false}
\ No newline at end of file
diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/86 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/86
new file mode 100644
index 0000000000..4522ec1b85
--- /dev/null
+++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/86
@@ -0,0 +1 @@
+{ "entryPoint" : "retrieve", "message" : "unrecognized token: \":\"", "retryable" : false}
\ No newline at end of file
diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/87 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/87
new file mode 100644
index 0000000000..1d6fe91f5d
--- /dev/null
+++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/87
@@ -0,0 +1 @@
+{ "entryPoint" : "retrieve", "message" : "SQLite3 plugin does not support timezones in qeueries", "retryable" : false}
\ No newline at end of file
diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/88 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/88
new file mode 100644
index 0000000000..a8848cbc86
--- /dev/null
+++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/88
@@ -0,0 +1 @@
+{ "response" : "updated", "rows_affected" : 1 }
\ No newline at end of file
diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/89 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/89
new file mode 100644
index 0000000000..93af3344da
--- /dev/null
+++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/89
@@ -0,0 +1 @@
+{"count":1,"rows":[{"description":"added'some'ch'''ars'"}]}
\ No newline at end of file
diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/9 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/9
new file mode 100644
index 0000000000..f951388d78
--- /dev/null
+++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/9
@@ -0,0 +1 @@
+{"count":1,"rows":[{"min_id":1}]}
\ No newline at end of file
diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/90 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/90
new file mode 100644
index 0000000000..a8848cbc86
--- /dev/null
+++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/90
@@ -0,0 +1 @@
+{ "response" : "updated", "rows_affected" : 1 }
\ No newline at end of file
diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/91 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/91
new file mode 100644
index 0000000000..54cbb9d707
--- /dev/null
+++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/91
@@ -0,0 +1 @@
+{"count":1,"rows":[{"min":1,"max":98,"average":52.55,"timestamp":"2017-10-11 17:10"}]}
\ No newline at end of file
diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/92 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/92
new file mode 100644
index 0000000000..124e6704de
--- /dev/null
+++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/92
@@ -0,0 +1 @@
+{"count":0,"rows":[]}
\ No newline at end of file
diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/93 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/93
new file mode 100644
index 0000000000..a4bfc7b379
--- /dev/null
+++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/93
@@ -0,0 +1 @@
+{"count":1,"rows":[{"min":"","max":"","average":""}]}
\ No newline at end of file
0000000000..10da128d7a --- /dev/null +++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/94 @@ -0,0 +1 @@ +{"count":1,"rows":[{"min":1,"max":98,"average":52.55,"timestamp":"2017-10-11 17"}]} \ No newline at end of file diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/95 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/95 new file mode 100644 index 0000000000..2eeb09a60b --- /dev/null +++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/95 @@ -0,0 +1 @@ +{ "response" : "appended", "readings_added" : 1 } \ No newline at end of file diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/96 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/96 new file mode 100644 index 0000000000..124e6704de --- /dev/null +++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/96 @@ -0,0 +1 @@ +{"count":0,"rows":[]} \ No newline at end of file diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/97 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/97 new file mode 100644 index 0000000000..22f00fc70c --- /dev/null +++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/97 @@ -0,0 +1 @@ +{"count":1,"rows":[{"count_id":10}]} \ No newline at end of file diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/98 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/98 new file mode 100644 index 0000000000..69ab957da3 --- /dev/null +++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/98 @@ -0,0 +1 @@ +{ "entryPoint" : "where clause", "message" : "The \"value\" of a \"in\" condition must be an array and must not be empty.", "retryable" : false} \ No newline at end of file diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/99 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/99 new file mode 100644 index 0000000000..cfddbd8a1a --- /dev/null +++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/99 @@ -0,0 +1 @@ +{"count":1,"rows":[{"min":1,"max":98,"average":52.9207920792079,"asset_code":"MyAsset"}]} \ No newline at end of file diff --git a/tests/unit/C/services/storage/sqlite/payloads/msec_add_readings_user_ts.json b/tests/unit/C/services/storage/sqlite/payloads/msec_add_readings_user_ts.json index 4116a5fb1b..31c62f9761 100644 --- a/tests/unit/C/services/storage/sqlite/payloads/msec_add_readings_user_ts.json +++ b/tests/unit/C/services/storage/sqlite/payloads/msec_add_readings_user_ts.json @@ -1,12 +1,21 @@ { "readings" : [ - { "user_ts" : "xxx", "asset_code": "msec__001_BAD", "read_key" : "f1cfff7a-3769-4f47-9ded-10000000001", "reading" : { "value" : 1 }}, - { "user_ts" : "2019-30-07 10:17:17.123456+00", "asset_code": "msec__002_BAD", "read_key" : "f1cfff7a-3769-4f47-9ded-10000000001", "reading" : { "value" : 1 }}, + { "user_ts" : "xxx", "asset_code": "msec_001_BAD", "read_key" : "f1cfff7a-3769-4f47-9ded-100000000001", "reading" : { "value" : 1 }}, + { "user_ts" : "2019-30-07 10:17:17.123456+00", "asset_code": "msec_002_BAD", "read_key" : "f1cfff7a-3769-4f47-9ded-100000000002", "reading" : { "value" : 2 }}, - { "user_ts" : "2019-01-01 10:01:01", "asset_code": "msec__001_OK", "read_key" : "f1cfff7a-3769-4f47-9ded-00000000001", "reading" : { "value" : 1 }}, - { "user_ts" : "2019-01-02 10:02:01.0", "asset_code": "msec__002_OK", "read_key" : "f1cfff7a-3769-4f47-9ded-00000000002", "reading" : { "value" : 1 }}, - { "user_ts" : "2019-01-03 10:02:02.841", "asset_code": "msec__003_OK", "read_key" : 
"f1cfff7a-3769-4f47-9ded-00000000003", "reading" : { "value" : 1 }}, - { "user_ts" : "2019-01-04 10:03:05.123456-01:00","asset_code": "msec__004_OK", "read_key" : "f1cfff7a-3769-4f47-9ded-00000000004", "reading" : { "value" : 1 }} + { "user_ts" : "2019-01-01 10:01:01", "asset_code": "msec_003_OK", "read_key" : "f1cfff7a-3769-4f47-9ded-000000000003", "reading" : { "value" : 3 }}, + { "user_ts" : "2019-01-02 10:02:01.0", "asset_code": "msec_004_OK", "read_key" : "f1cfff7a-3769-4f47-9ded-000000000004", "reading" : { "value" : 4 }}, + { "user_ts" : "2019-01-03 10:02:02.841", "asset_code": "msec_005_OK", "read_key" : "f1cfff7a-3769-4f47-9ded-000000000005", "reading" : { "value" : 5 }}, + { "user_ts" : "2019-01-04 10:03:05.123456", "asset_code": "msec_006_OK", "read_key" : "f1cfff7a-3769-4f47-9ded-000000000006", "reading" : { "value" : 6 }}, + + { "user_ts" : "2019-01-04 10:03:05.1+00:00", "asset_code": "msec_007_OK", "read_key" : "f1cfff7a-3769-4f47-9ded-000000000007", "reading" : { "value" : 7 }}, + { "user_ts" : "2019-01-04 10:03:05.123+00:00", "asset_code": "msec_008_OK", "read_key" : "f1cfff7a-3769-4f47-9ded-000000000008", "reading" : { "value" : 8 }}, + + { "user_ts" : "2019-03-03 10:03:03.123456+00:00","asset_code": "msec_009_OK", "read_key" : "f1cfff7a-3769-4f47-9ded-000000000009", "reading" : { "value" : 9 }}, + { "user_ts" : "2019-03-04 10:03:04.123456+01:00","asset_code": "msec_010_OK", "read_key" : "f1cfff7a-3769-4f47-9ded-000000000010", "reading" : { "value" :10 }}, + { "user_ts" : "2019-03-05 10:03:05.123456-01:00","asset_code": "msec_011_OK", "read_key" : "f1cfff7a-3769-4f47-9ded-000000000011", "reading" : { "value" :11 }}, + { "user_ts" : "2019-03-04 10:03:04.123456+02:30","asset_code": "msec_012_OK", "read_key" : "f1cfff7a-3769-4f47-9ded-000000000012", "reading" : { "value" :12 }}, + { "user_ts" : "2019-03-05 10:03:05.123456-02:30","asset_code": "msec_013_OK", "read_key" : "f1cfff7a-3769-4f47-9ded-000000000013", "reading" : { "value" :13 }} ] } \ No newline at end of file diff --git a/tests/unit/C/services/storage/sqlite/payloads/msec_query_asset_aggmin.json b/tests/unit/C/services/storage/sqlite/payloads/msec_query_asset_aggmin.json new file mode 100644 index 0000000000..06ac638b2a --- /dev/null +++ b/tests/unit/C/services/storage/sqlite/payloads/msec_query_asset_aggmin.json @@ -0,0 +1,14 @@ +{ + "aggregate": + { + "operation": "min", + "column": "user_ts", + "alias": "user_ts_min" + }, + "where": + { + "column": "asset_code", + "condition": "=", + "value": "msec_009_OK" + } +} \ No newline at end of file diff --git a/tests/unit/C/services/storage/sqlite/payloads/msec_query_asset_aggminarray.json b/tests/unit/C/services/storage/sqlite/payloads/msec_query_asset_aggminarray.json new file mode 100644 index 0000000000..ae838350f1 --- /dev/null +++ b/tests/unit/C/services/storage/sqlite/payloads/msec_query_asset_aggminarray.json @@ -0,0 +1,20 @@ +{ + "aggregate": [ + { + "operation": "min", + "column": "user_ts", + "alias": "user_ts_min" + }, + { + "operation": "max", + "column": "user_ts", + "alias": "user_ts_max" + } + ], + "where": { + "column": "asset_code", + "condition": "=", + "value": "msec_009_OK" + } +} + diff --git a/tests/unit/C/services/storage/sqlite/payloads/msec_query_asset_alias.json b/tests/unit/C/services/storage/sqlite/payloads/msec_query_asset_alias.json new file mode 100644 index 0000000000..d99088d7b1 --- /dev/null +++ b/tests/unit/C/services/storage/sqlite/payloads/msec_query_asset_alias.json @@ -0,0 +1,15 @@ +{ + "where" : { + "column" : "asset_code", + 
"condition" : "=", + "value" : "msec_009_OK" + }, + + "return" : [ + "reading", + { + "column" : "user_ts", + "alias" : "user_ts_alias" + } + ] +} \ No newline at end of file diff --git a/tests/unit/C/services/storage/sqlite/payloads/msec_query_asset_noalias.json b/tests/unit/C/services/storage/sqlite/payloads/msec_query_asset_noalias.json new file mode 100644 index 0000000000..df3a033c8d --- /dev/null +++ b/tests/unit/C/services/storage/sqlite/payloads/msec_query_asset_noalias.json @@ -0,0 +1,15 @@ +{ + "where" : { + "column" : "asset_code", + "condition" : "=", + "value" : "msec_009_OK" + }, + + "return" : [ + "reading", + { + "column" : "user_ts" + } + + ] +} \ No newline at end of file diff --git a/tests/unit/C/services/storage/sqlite/testRunner.sh b/tests/unit/C/services/storage/sqlite/testRunner.sh index 7e8be98cc3..ebc80274ee 100755 --- a/tests/unit/C/services/storage/sqlite/testRunner.sh +++ b/tests/unit/C/services/storage/sqlite/testRunner.sh @@ -1,19 +1,59 @@ -#!/bin/sh - -# Select the required plugin, either persistent storage or in memory -export FOGLAMP_DATA=./plugin_cfg/sqlite -#export FOGLAMP_DATA=./plugin_cfg/sqlitememory +#!/usr/bin/env bash +# Default values +export FOGLAMP_DATA=./plugin_cfg/sqlite # Select the persistent storage plugin +export storage_exec="" export TZ='Etc/UTC' -if [ $# -eq 1 ] ; then - echo Starting storage layer $1 - $1 -elif [ "${FOGLAMP_ROOT}" != "" ] ; then +# +# evaluates : FOGLAMP_DATA, storage_exec, TZ, and expected_dir +# +if [[ "$@" != "" ]]; +then + # Handles input parameters + SCRIPT_NAME=`basename $0` + options=`getopt -o c:s:t: --long configuration:,storage_exec:,timezone: -n "$SCRIPT_NAME" -- "$@"` + eval set -- "$options" + + while true ; do + case "$1" in + -c|--configuration) + export FOGLAMP_DATA="$2" + shift 2 + ;; + + -s|--storage_exec) + export storage_exec="$2" + shift 2 + ;; + + -t|--timezone) + export TZ="$2" + shift 2 + ;; + --) + shift + break + ;; + esac + done +fi + +# Converts '/' to '_' and to upper case +step1="${TZ/\//_}" +expected_dir="expected_${step1^^}" + +if [[ "$storage_exec" != "" ]] ; then + echo "Starting storage layer :$storage_exec:" + echo "timezone :$TZ:" + echo "configuration :$FOGLAMP_DATA:" + echo "database file :$DEFAULT_SQLITE_DB_FILE:" + $storage_exec +elif [[ "${FOGLAMP_ROOT}" != "" ]] ; then echo "Starting storage service in :$FOGLAMP_ROOT:" - echo "timezone :$TZ" + echo "timezone :$TZ:" echo "configuration :$FOGLAMP_DATA:" - echo "database :$DEFAULT_SQLITE_DB_FILE:" + echo "database file :$DEFAULT_SQLITE_DB_FILE:" $FOGLAMP_ROOT/services/foglamp.services.storage sleep 1 else @@ -21,6 +61,9 @@ else exit 1 fi +# +# Main +# export IFS="," testNum=1 n_failed=0 @@ -41,11 +84,11 @@ else curlstate=$? fi if [ "$optional" = "" ] ; then - if [ ! -f expected/$testNum ]; then + if [ ! -f ${expected_dir}/$testNum ]; then n_unchecked=`expr $n_unchecked + 1` - echo Missing expected results for test $testNum - result unchecked + echo Missing expected results in :${expected_dir}: for test $testNum - result unchecked else - cmp -s results/$testNum expected/$testNum + cmp -s results/$testNum ${expected_dir}/$testNum if [ $? 
-ne "0" ]; then echo Failed n_failed=`expr $n_failed + 1` @@ -57,7 +100,7 @@ if [ "$optional" = "" ] ; then fi ( unset IFS - echo " " Expected: "`cat expected/$testNum`" >> failed + echo " " Expected: "`cat ${expected_dir}/$testNum`" >> failed echo " " Got: "`cat results/$testNum`" >> failed ) echo >> failed diff --git a/tests/unit/C/services/storage/sqlite/testset b/tests/unit/C/services/storage/sqlite/testset index 18daf92982..fa672be38f 100644 --- a/tests/unit/C/services/storage/sqlite/testset +++ b/tests/unit/C/services/storage/sqlite/testset @@ -102,4 +102,8 @@ Query Readings IN operator bad values,PUT,http://localhost:8080/storage/reading/ microseconds - Purge Readings,PUT,http://localhost:8080/storage/reading/purge?age=1&sent=0&flags=purge, microseconds - Add Readings,POST,http://localhost:8080/storage/reading,msec_add_readings_user_ts.json microseconds - Query Readings,PUT,http://localhost:8080/storage/reading/query,msec_query_readings.json +microseconds - Query asset NO alias,PUT,http://localhost:8080/storage/reading/query,msec_query_asset_noalias.json +microseconds - Query asset alias,PUT,http://localhost:8080/storage/reading/query,msec_query_asset_alias.json +microseconds - Query asset aggregate min,PUT,http://localhost:8080/storage/reading/query,msec_query_asset_aggmin.json +microseconds - Query asset aggregate min array,PUT,http://localhost:8080/storage/reading/query,msec_query_asset_aggminarray.json Shutdown,POST,http://localhost:1081/foglamp/service/shutdown,,checkstate diff --git a/tests/unit/python/foglamp/common/storage_client/data/payload_condition_in.json b/tests/unit/python/foglamp/common/storage_client/data/payload_condition_in.json new file mode 100644 index 0000000000..8c7fe7115e --- /dev/null +++ b/tests/unit/python/foglamp/common/storage_client/data/payload_condition_in.json @@ -0,0 +1,7 @@ + { + "where": { + "column": "plugin_type", + "condition": "in", + "value": ["north", "south", "rule", "delivery", "filter"] + } + } \ No newline at end of file diff --git a/tests/unit/python/foglamp/common/storage_client/data/payload_condition_not_in.json b/tests/unit/python/foglamp/common/storage_client/data/payload_condition_not_in.json new file mode 100644 index 0000000000..5456a74b9f --- /dev/null +++ b/tests/unit/python/foglamp/common/storage_client/data/payload_condition_not_in.json @@ -0,0 +1,7 @@ + { + "where": { + "column": "plugin_type", + "condition": "not in", + "value": ["north", "south"] + } + } \ No newline at end of file diff --git a/tests/unit/python/foglamp/common/storage_client/test_payload_builder.py b/tests/unit/python/foglamp/common/storage_client/test_payload_builder.py index f24fc0496b..bd881a5796 100644 --- a/tests/unit/python/foglamp/common/storage_client/test_payload_builder.py +++ b/tests/unit/python/foglamp/common/storage_client/test_payload_builder.py @@ -86,8 +86,9 @@ def test_from_payload(self, test_input, expected): (["id", "<=", 99], _payload("data/payload_conditions5.json")), (["id", "!=", "False"], _payload("data/payload_conditions6.json")), (["ts", "newer", 3600], _payload("data/payload_newer_condition.json")), - (["ts", "older", 600], _payload("data/payload_older_condition.json")) - + (["ts", "older", 600], _payload("data/payload_older_condition.json")), + (["plugin_type", "in", ['north', 'south', 'rule', 'delivery', 'filter']], _payload("data/payload_condition_in.json")), + (["plugin_type", "not in", ['north', 'south']], _payload("data/payload_condition_not_in.json")) ]) def test_conditions_payload(self, test_input, expected): res = 
diff --git a/tests/unit/python/foglamp/common/test_common_utils.py b/tests/unit/python/foglamp/common/test_common_utils.py
new file mode 100644
index 0000000000..9e841797e8
--- /dev/null
+++ b/tests/unit/python/foglamp/common/test_common_utils.py
@@ -0,0 +1,34 @@
+# -*- coding: utf-8 -*-
+
+# FOGLAMP_BEGIN
+# See: http://foglamp.readthedocs.io/
+# FOGLAMP_END
+
+""" Unit tests for common utils """
+
+import pytest
+from foglamp.common import utils as common_utils
+from collections import Counter
+
+
+@pytest.allure.feature("unit")
+@pytest.allure.story("common", "utils")
+class TestCommonUtils:
+    @pytest.mark.parametrize("test_string, expected", [
+        ("Gabbar&Gang", False),
+        ("with;Sambha", False),
+        ("andothers,halkats", False),
+        ("@Rampur", False),
+        ("triedloot/arson", False),
+        ("For$Gold", False),
+        ("Andlot{more", False),
+        ("Andmore}", False),
+        ("Veeru+Jai", False),
+        ("Gaonwale,Thakur", False),
+        ("=resisted", False),
+        ("successfully:", False),
+        ("any attack!", True),
+    ])
+    def test_check_reserved(self, test_string, expected):
+        actual = common_utils.check_reserved(test_string)
+        assert expected == actual
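
The test_check_reserved cases above pin down which characters common_utils.check_reserved treats as reserved. A minimal sketch consistent with those cases, assuming a simple reserved-character scan (the reserved set below is inferred from the False inputs; the actual implementation in foglamp.common.utils may differ):

    def check_reserved(string_value):
        # Inferred from the parametrised cases: these characters make a
        # string unacceptable, while letters, spaces and '!' are allowed
        reserved = set("&;,@/${}+=:")
        return not any(ch in reserved for ch in string_value)

    assert check_reserved("any attack!") is True   # no reserved characters
    assert check_reserved("Veeru+Jai") is False    # '+' is reserved
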
diff --git a/tests/unit/python/foglamp/common/test_configuration_manager.py b/tests/unit/python/foglamp/common/test_configuration_manager.py
index 11e063f441..afd8ce3d24 100644
--- a/tests/unit/python/foglamp/common/test_configuration_manager.py
+++ b/tests/unit/python/foglamp/common/test_configuration_manager.py
@@ -835,6 +835,52 @@ async def test__merge_category_vals_no_mutual_items_include_original(self, reset
         assert test_config_storage is not c_return_value
         assert test_config_new is not test_config_storage
 
+    @pytest.mark.parametrize("payload, message", [
+        ((2, 'catvalue', 'catdesc'), "category_name must be a string"),
+        (('catname', 'catvalue', 3), "category_description must be a string")
+    ])
+    async def test_bad_create_category(self, reset_singleton, payload, message):
+        storage_client_mock = MagicMock(spec=StorageClientAsync)
+        c_mgr = ConfigurationManager(storage_client_mock)
+        with pytest.raises(Exception) as excinfo:
+            await c_mgr.create_category(category_name=payload[0], category_value=payload[1], category_description=payload[2])
+        assert excinfo.type is TypeError
+        assert message == str(excinfo.value)
+
+    @pytest.mark.parametrize("rule", [
+        'value * 3 == 6',
+        'value > 4',
+        'value % 2 == 0',
+        'value * (value + 1) == 9',
+        '(value + 1) / (value - 1) >= 3',
+        'sqrt(value) < 1',
+        'pow(value, value) != 27',
+        'value ^ value == 2',
+        'factorial(value) != 6',
+        'fabs(value) != 3.0',
+        'ceil(value) != 3',
+        'floor(value) != 3',
+        'sin(value) <= 0',
+        'degrees(value) < 171'
+    ])
+    @pytest.mark.asyncio
+    async def test_bad_rule_create_category(self, reset_singleton, rule):
+
+        async def async_mock(return_value):
+            return return_value
+
+        d = {'info': {'rule': rule, 'default': '3', 'type': 'integer', 'description': 'Test', 'value': '3'}}
+        storage_client_mock = MagicMock(spec=StorageClientAsync)
+        c_mgr = ConfigurationManager(storage_client_mock)
+        with patch.object(_logger, 'exception') as log_exc:
+            with patch.object(ConfigurationManager, '_validate_category_val', side_effect=[async_mock(d), Exception()]) as valpatch:
+                with pytest.raises(Exception) as excinfo:
+                    await c_mgr.create_category('catname', 'catvalue', 'catdesc')
+                assert excinfo.type is ValueError
+                assert 'For catname category, Proposed value for item_name info is not allowed as per rule defined' == str(excinfo.value)
+        valpatch.assert_called_once_with('catname', 'catvalue', True)
+        assert 1 == log_exc.call_count
+
 @pytest.mark.asyncio
 async def test_create_category_good_newval_bad_storageval_good_update(self, reset_singleton):
 
@@ -966,13 +1012,13 @@ async def async_mock(return_value):
         storage_client_mock = MagicMock(spec=StorageClientAsync)
         c_mgr = ConfigurationManager(storage_client_mock)
-        with patch.object(ConfigurationManager, '_validate_category_val', return_value=async_mock(None)) as valpatch:
+        with patch.object(ConfigurationManager, '_validate_category_val', return_value=async_mock({})) as valpatch:
             with patch.object(ConfigurationManager, '_read_category_val', return_value=async_mock(None)) as readpatch:
                 with patch.object(ConfigurationManager, '_create_new_category', return_value=async_mock(None)) as createpatch:
                     with patch.object(ConfigurationManager, '_run_callbacks', return_value=async_mock(None)) as callbackpatch:
                         await c_mgr.create_category('catname', 'catvalue', "catdesc")
                     callbackpatch.assert_called_once_with('catname')
-                createpatch.assert_called_once_with('catname', None, 'catdesc', None)
+                createpatch.assert_called_once_with('catname', {}, 'catdesc', None)
             readpatch.assert_called_once_with('catname')
         valpatch.assert_called_once_with('catname', 'catvalue', True)
 
@@ -985,18 +1031,18 @@ async def async_mock(return_value):
         storage_client_mock = MagicMock(spec=StorageClientAsync)
         c_mgr = ConfigurationManager(storage_client_mock)
         with patch.object(_logger, 'exception') as log_exc:
-            with patch.object(ConfigurationManager, '_validate_category_val', return_value=async_mock(None)) as valpatch:
+            with patch.object(ConfigurationManager, '_validate_category_val', return_value=async_mock({})) as valpatch:
                 with patch.object(ConfigurationManager, '_read_category_val', return_value=async_mock(None)) as readpatch:
                     with patch.object(ConfigurationManager, '_create_new_category', side_effect=StorageServerError(None, None, None)) as createpatch:
                         with patch.object(ConfigurationManager, '_run_callbacks') as callbackpatch:
                             with pytest.raises(StorageServerError):
                                 await c_mgr.create_category('catname', 'catvalue', "catdesc")
                             callbackpatch.assert_not_called()
-                        createpatch.assert_called_once_with('catname', None, 'catdesc', None)
+                        createpatch.assert_called_once_with('catname', {}, 'catdesc', None)
                     readpatch.assert_called_once_with('catname')
                 valpatch.assert_called_once_with('catname', 'catvalue', True)
             assert 1 == log_exc.call_count
-            log_exc.assert_called_once_with('Unable to create new category based on category_name %s and category_description %s and category_json_schema %s', 'catname', 'catdesc', None)
+            log_exc.assert_called_once_with('Unable to create new category based on category_name %s and category_description %s and category_json_schema %s', 'catname', 'catdesc', {})
 
 @pytest.mark.asyncio
 async def test_create_category_good_newval_keyerror_bad_create(self, reset_singleton):
 
@@ -1007,18 +1053,18 @@ async def async_mock(return_value):
         storage_client_mock = MagicMock(spec=StorageClientAsync)
         c_mgr = ConfigurationManager(storage_client_mock)
         with patch.object(_logger, 'exception') as log_exc:
-            with patch.object(ConfigurationManager, '_validate_category_val', return_value=async_mock(None)) as valpatch:
+            with patch.object(ConfigurationManager, '_validate_category_val', return_value=async_mock({})) as valpatch:
                 with patch.object(ConfigurationManager, '_read_category_val', return_value=async_mock(None)) as readpatch:
                    with patch.object(ConfigurationManager, '_create_new_category', side_effect=KeyError()) as createpatch:
                        with patch.object(ConfigurationManager, '_run_callbacks') as callbackpatch:
                            with pytest.raises(KeyError):
                                await c_mgr.create_category('catname', 'catvalue', "catdesc")
                            callbackpatch.assert_not_called()
-                        createpatch.assert_called_once_with('catname', None, 'catdesc', None)
+                        createpatch.assert_called_once_with('catname', {}, 'catdesc', None)
                    readpatch.assert_called_once_with('catname')
                valpatch.assert_called_once_with('catname', 'catvalue', True)
            assert 1 == log_exc.call_count
-            log_exc.assert_called_once_with('Unable to create new category based on category_name %s and category_description %s and category_json_schema %s', 'catname', 'catdesc', None)
+            log_exc.assert_called_once_with('Unable to create new category based on category_name %s and category_description %s and category_json_schema %s', 'catname', 'catdesc', {})
 
 @pytest.mark.asyncio
 async def test_create_category_bad_newval(self, reset_singleton):
@@ -1191,6 +1237,25 @@ async def async_mock():
         readpatch.assert_called_once_with(category_name, item_name)
         assert 1 == log_exc.call_count
 
+    async def test_set_category_item_value_entry_with_rule_optional_attribute(self):
+
+        async def async_mock():
+            return {'rule': 'value*3==9', 'default': '3', 'description': 'Test', 'value': '3', 'type': 'integer'}
+
+        storage_client_mock = MagicMock(spec=StorageClientAsync)
+        c_mgr = ConfigurationManager(storage_client_mock)
+        category_name = 'catname'
+        item_name = 'info'
+        new_value_entry = '13'
+        with patch.object(_logger, 'exception') as log_exc:
+            with patch.object(ConfigurationManager, '_read_item_val', return_value=async_mock()) as readpatch:
+                with pytest.raises(Exception) as excinfo:
+                    await c_mgr.set_category_item_value_entry(category_name, item_name, new_value_entry)
+                assert excinfo.type is ValueError
+                assert 'Proposed value for item_name {} is not allowed as per rule defined'.format(item_name) == str(excinfo.value)
+            readpatch.assert_called_once_with(category_name, item_name)
+        assert 1 == log_exc.call_count
+
 @pytest.mark.asyncio
 async def test_get_all_category_names_good(self, reset_singleton):
@@ -2495,3 +2560,27 @@ async def async_mock(return_value):
             patch_audit.assert_not_called()
             patch_update.assert_not_called()
         patch_get_all_items.assert_called_once_with(category_name)
+
+    @pytest.mark.parametrize("config_item_list", [
+        {'info': "2"},
+        {'info': "2", "info1": "9"},
+        {'info1': "2", "info": "9"}
+    ])
+    async def test_update_configuration_item_bulk_with_rule_optional_attribute(self, config_item_list,
+                                                                               category_name='testcat'):
+        async def async_mock(return_value):
+            return return_value
+
+        cat_info = {'info': {'rule': 'value*3==9', 'default': '3', 'description': 'Test', 'value': '3',
+                             'type': 'integer'}, 'info1': {'default': '3', 'description': 'Test', 'value': '3',
+                                                           'type': 'integer'}}
+        storage_client_mock = MagicMock(spec=StorageClientAsync)
+        c_mgr = ConfigurationManager(storage_client_mock)
+        with patch.object(c_mgr, 'get_category_all_items', return_value=async_mock(cat_info)) as patch_get_all_items:
+            with patch.object(_logger, 'exception') as patch_log_exc:
+                with pytest.raises(Exception) as exc_info:
+                    await c_mgr.update_configuration_item_bulk(category_name, config_item_list)
+                assert exc_info.type is ValueError
+                assert 'Proposed value for item_name info is not allowed as per rule defined' == str(exc_info.value)
+            assert 1 == patch_log_exc.call_count
+        patch_get_all_items.assert_called_once_with(category_name)
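
The optional `rule` attribute exercised by the three new tests above is a boolean expression over the proposed item value: '3' satisfies 'value*3==9', while '13' is rejected with the ValueError the tests assert. A sketch of that evaluation, assuming an eval-style substitution of `value` with math helpers in scope (the passes_rule helper is illustrative, not the actual ConfigurationManager code):

    import math

    def passes_rule(rule, proposed_value):
        # Expose sqrt/ceil/floor/sin/degrees/... used by the parametrised rules
        names = {k: getattr(math, k) for k in dir(math) if not k.startswith('_')}
        names['value'] = float(proposed_value)
        return bool(eval(rule, {'__builtins__': {}}, names))

    assert passes_rule('value*3==9', '3') is True    # accepted
    assert passes_rule('value*3==9', '13') is False  # rejected, as the tests expect
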
diff --git a/tests/unit/python/foglamp/common/test_process.py b/tests/unit/python/foglamp/common/test_process.py
index f138d35115..2a56fdcd23 100644
--- a/tests/unit/python/foglamp/common/test_process.py
+++ b/tests/unit/python/foglamp/common/test_process.py
@@ -1,11 +1,12 @@
 # -*- coding: utf-8 -*-
 
 import pytest
+import sys
 from unittest.mock import patch
 
 from foglamp.common.storage_client.storage_client import ReadingsStorageClientAsync, StorageClientAsync
-from foglamp.common.process import FoglampProcess, SilentArgParse, ArgumentParserError
+from foglamp.common.process import FoglampProcess, ArgumentParserError
 from foglamp.common.microservice_management_client.microservice_management_client import MicroserviceManagementClient
 
@@ -28,15 +29,15 @@ class FoglampProcessImp(FoglampProcess):
             fp = FoglampProcessImp()
 
     @pytest.mark.parametrize('argslist',
-                             [([ArgumentParserError()]),
-                              (['corehost', ArgumentParserError()]),
-                              (['corehost', 0, ArgumentParserError()])
+                             [(['pytest']),
+                              (['pytest', '--address', 'corehost']),
+                              (['pytest', '--address', 'corehost', '--port', '32333'])
                              ])
     def test_constructor_missing_args(self, argslist):
         class FoglampProcessImp(FoglampProcess):
             def run(self):
                 pass
-        with patch.object(SilentArgParse, 'silent_arg_parse', side_effect=argslist):
+        with patch.object(sys, 'argv', argslist):
             with pytest.raises(ArgumentParserError) as excinfo:
                 fp = FoglampProcessImp()
             assert '' in str(
@@ -46,16 +47,16 @@ def test_constructor_good(self):
         class FoglampProcessImp(FoglampProcess):
             def run(self):
                 pass
-        with patch.object(SilentArgParse, 'silent_arg_parse', side_effect=['corehost', 0, 'sname']):
+        with patch.object(sys, 'argv', ['pytest', '--address', 'corehost', '--port', '32333', '--name', 'sname']):
             with patch.object(MicroserviceManagementClient, '__init__', return_value=None) as mmc_patch:
                 with patch.object(ReadingsStorageClientAsync, '__init__', return_value=None) as rsc_async_patch:
                     with patch.object(StorageClientAsync, '__init__', return_value=None) as sc_async_patch:
                         fp = FoglampProcessImp()
-                        mmc_patch.assert_called_once_with('corehost', 0)
-                        rsc_async_patch.assert_called_once_with('corehost', 0)
-                        sc_async_patch.assert_called_once_with('corehost', 0)
+                        mmc_patch.assert_called_once_with('corehost', 32333)
+                        rsc_async_patch.assert_called_once_with('corehost', 32333)
+                        sc_async_patch.assert_called_once_with('corehost', 32333)
                         assert fp._core_management_host is 'corehost'
-                        assert fp._core_management_port is 0
+                        assert fp._core_management_port == 32333
                         assert fp._name is 'sname'
                         assert hasattr(fp, '_core_microservice_management_client')
                         assert hasattr(fp, '_readings_storage_async')
@@ -66,7 +67,7 @@ def test_get_services_from_core(self):
         class FoglampProcessImp(FoglampProcess):
             def run(self):
                 pass
-        with patch.object(SilentArgParse, 'silent_arg_parse', side_effect=['corehost', 0, 'sname']):
+        with patch.object(sys, 'argv', ['pytest', '--address', 'corehost', '--port', '32333', '--name', 'sname']):
             with patch.object(MicroserviceManagementClient, '__init__', return_value=None) as mmc_patch:
                 with patch.object(MicroserviceManagementClient, 'get_services', return_value=None) as get_patch:
                     with patch.object(ReadingsStorageClientAsync, '__init__',
@@ -80,7 +81,7 @@ def test_register_service_with_core(self):
         class FoglampProcessImp(FoglampProcess):
             def run(self):
                 pass
-        with patch.object(SilentArgParse, 'silent_arg_parse', side_effect=['corehost', 0, 'sname']):
+        with patch.object(sys, 'argv', ['pytest', '--address', 'corehost', '--port', '32333', '--name', 'sname']):
             with patch.object(MicroserviceManagementClient, '__init__',
return_value=None) as mmc_patch: with patch.object(MicroserviceManagementClient, 'register_service', return_value=None) as register_patch: with patch.object(ReadingsStorageClientAsync, '__init__', @@ -94,7 +95,7 @@ def test_unregister_service_with_core(self): class FoglampProcessImp(FoglampProcess): def run(self): pass - with patch.object(SilentArgParse, 'silent_arg_parse', side_effect=['corehost', 0, 'sname']): + with patch.object(sys, 'argv', ['pytest', '--address', 'corehost', '--port', '32333', '--name', 'sname']): with patch.object(MicroserviceManagementClient, '__init__', return_value=None) as mmc_patch: with patch.object(MicroserviceManagementClient, 'unregister_service', return_value=None) as unregister_patch: with patch.object(ReadingsStorageClientAsync, '__init__', diff --git a/tests/unit/python/foglamp/services/common/test_microservice.py b/tests/unit/python/foglamp/services/common/test_microservice.py index e58c87b35b..ca78182ddb 100644 --- a/tests/unit/python/foglamp/services/common/test_microservice.py +++ b/tests/unit/python/foglamp/services/common/test_microservice.py @@ -2,11 +2,12 @@ import pytest import time -from unittest.mock import patch, MagicMock +from unittest.mock import patch from aiohttp import web import asyncio +import sys from foglamp.common.storage_client.storage_client import ReadingsStorageClientAsync, StorageClientAsync -from foglamp.common.process import FoglampProcess, SilentArgParse, ArgumentParserError +from foglamp.common.process import FoglampProcess from foglamp.services.common.microservice import FoglampMicroservice, _logger from foglamp.common.microservice_management_client.microservice_management_client import MicroserviceManagementClient @@ -91,7 +92,7 @@ async def add_track(self): pass with patch.object(asyncio, 'get_event_loop', return_value=loop): - with patch.object(SilentArgParse, 'silent_arg_parse', side_effect=['corehost', 0, 'sname']): + with patch.object(sys, 'argv', ['pytest', '--address', 'corehost', '--port', '32333', '--name', 'sname']): with patch.object(MicroserviceManagementClient, '__init__', return_value=None) as mmc_patch: with patch.object(MicroserviceManagementClient, 'create_configuration_category', return_value=None): with patch.object(MicroserviceManagementClient, 'create_child_category', @@ -108,7 +109,7 @@ async def add_track(self): fm = FoglampMicroserviceImp() # from FoglampProcess assert fm._core_management_host is 'corehost' - assert fm._core_management_port is 0 + assert fm._core_management_port == 32333 assert fm._name is 'sname' assert hasattr(fm, '_core_microservice_management_client') assert hasattr(fm, '_readings_storage_async') @@ -145,7 +146,7 @@ async def add_track(self): pass with patch.object(asyncio, 'get_event_loop', return_value=loop): - with patch.object(SilentArgParse, 'silent_arg_parse', side_effect=['corehost', 0, 'sname']): + with patch.object(sys, 'argv', ['pytest', '--address', 'corehost', '--port', '32333', '--name', 'sname']): with patch.object(MicroserviceManagementClient, '__init__', return_value=None) as mmc_patch: with patch.object(MicroserviceManagementClient, 'create_configuration_category', return_value=None): with patch.object(MicroserviceManagementClient, 'create_child_category', @@ -183,7 +184,7 @@ async def add_track(self): pass with patch.object(asyncio, 'get_event_loop', return_value=loop): - with patch.object(SilentArgParse, 'silent_arg_parse', side_effect=['corehost', 0, 'sname']): + with patch.object(sys, 'argv', ['pytest', '--address', 'corehost', '--port', '32333', '--name', 
'sname']): with patch.object(MicroserviceManagementClient, '__init__', return_value=None) as mmc_patch: with patch.object(MicroserviceManagementClient, 'create_configuration_category', return_value=None): with patch.object(MicroserviceManagementClient, 'create_child_category', diff --git a/tests/unit/python/foglamp/services/core/api/test_configuration.py b/tests/unit/python/foglamp/services/core/api/test_configuration.py index c162b36aec..dd29922ed0 100644 --- a/tests/unit/python/foglamp/services/core/api/test_configuration.py +++ b/tests/unit/python/foglamp/services/core/api/test_configuration.py @@ -688,3 +688,19 @@ async def async_mock(return_value): assert result == json_response patch_get_all_items.assert_called_once_with(category_name) patch_update_bulk.assert_called_once_with(category_name, payload) + + async def test_delete_configuration(self, client, category_name='rest_api'): + result = {'result': 'Category {} deleted successfully.'.format(category_name)} + storage_client_mock = MagicMock(StorageClientAsync) + c_mgr = ConfigurationManager(storage_client_mock) + with patch.object(connect, 'get_storage_async', return_value=storage_client_mock): + with patch.object(c_mgr, 'delete_category_and_children_recursively', return_value=asyncio.sleep(.1)) as patch_delete_cat: + resp = await client.delete('/foglamp/category/{}'.format(category_name)) + assert 200 == resp.status + r = await resp.text() + json_response = json.loads(r) + assert result == json_response + assert 1 == patch_delete_cat.call_count + args, kwargs = patch_delete_cat.call_args + assert category_name == args[0] + diff --git a/tests/unit/python/foglamp/services/core/api/test_filters.py b/tests/unit/python/foglamp/services/core/api/test_filters.py index 8385565004..1c42fe1c2f 100644 --- a/tests/unit/python/foglamp/services/core/api/test_filters.py +++ b/tests/unit/python/foglamp/services/core/api/test_filters.py @@ -583,7 +583,7 @@ async def test_add_filter_pipeline_without_filter_config(self, client): args, kwargs = _add_child_patch.call_args assert user == args[2] assert ['AssetFilter'] == args[3] - create_cat_patch.assert_called_once_with(category_name='bench', category_value={'filter': {'description': 'Filter pipeline', 'type': 'JSON', 'default': '{"pipeline": ["AssetFilter"]}'}}, keep_original_items=True) + create_cat_patch.assert_called_once_with(category_name='bench', category_value={'filter': {'description': 'Filter pipeline', 'readonly' : 'true', 'type': 'JSON', 'default': '{"pipeline": ["AssetFilter"]}'}}, keep_original_items=True) query_tbl_patch.assert_called_once_with('filters', '{"where": {"column": "name", "condition": "=", "value": "AssetFilter"}}') get_cat_info_patch.assert_called_once_with(category_name=user) diff --git a/tests/unit/python/foglamp/services/core/api/test_notification.py b/tests/unit/python/foglamp/services/core/api/test_notification.py index 4af3ae1585..5dd6b37771 100644 --- a/tests/unit/python/foglamp/services/core/api/test_notification.py +++ b/tests/unit/python/foglamp/services/core/api/test_notification.py @@ -316,6 +316,14 @@ async def test_get_plugin(self, mocker, client): json_response = json.loads(result) assert rules_and_delivery == json_response + async def test_get_type(self, client): + notification_type = {'notification_type': NOTIFICATION_TYPE} + resp = await client.get('/foglamp/notification/type') + assert 200 == resp.status + result = await resp.text() + json_response = json.loads(result) + assert notification_type == json_response + async def test_get_notification(self, 
mocker, client): r = list(filter(lambda rules: rules['name'] == notification_config['rule']['value'], rule_config)) c = list(filter(lambda channels: channels['name'] == notification_config['channel']['value'], delivery_config)) @@ -389,7 +397,7 @@ async def test_post_notification(self, mocker, client): assert 200 == resp.status result = await resp.json() assert result['result'].endswith("Notification {} created successfully".format("Test Notification")) - update_configuration_item_bulk_calls = [call('Test Notification', {'enable': 'false', 'rule': 'threshold', 'notification_type': 'one shot', 'channel': 'email'})] + update_configuration_item_bulk_calls = [call('Test Notification', {'enable': 'false', 'rule': 'threshold', 'description': 'Test Notification', 'channel': 'email', 'notification_type': 'one shot'})] update_configuration_item_bulk.assert_has_calls(update_configuration_item_bulk_calls, any_order=True) async def test_post_notification2(self, mocker, client): @@ -413,9 +421,10 @@ async def test_post_notification2(self, mocker, client): assert 200 == resp.status result = await resp.json() assert result['result'].endswith("Notification {} created successfully".format("Test Notification")) - update_configuration_item_bulk_calls = [call('Test Notification', {'notification_type': 'one shot', 'enable': 'false', 'channel': 'email', 'rule': 'threshold'}), - call('ruleTest Notification', {'window': '100'}), - call('deliveryTest Notification', {'server': 'pop'})] + update_configuration_item_bulk_calls = [call('Test Notification', {'description': 'Test Notification', 'rule': 'threshold', 'channel': 'email', + 'notification_type': 'one shot', 'enable': 'false'}), + call('ruleTest Notification', {'window': '100'}), + call('deliveryTest Notification', {'server': 'pop'})] update_configuration_item_bulk.assert_has_calls(update_configuration_item_bulk_calls, any_order=True) async def test_post_notification_exception(self, mocker, client): diff --git a/tests/unit/python/foglamp/services/core/api/test_scheduler_api.py b/tests/unit/python/foglamp/services/core/api/test_scheduler_api.py index fbb4cac098..471c31262e 100644 --- a/tests/unit/python/foglamp/services/core/api/test_scheduler_api.py +++ b/tests/unit/python/foglamp/services/core/api/test_scheduler_api.py @@ -69,7 +69,7 @@ async def mock_coro(): async def test_get_scheduled_process(self, client): storage_client_mock = MagicMock(StorageClientAsync) - payload = '{"return": ["name"], "where": {"column": "name", "condition": "=", "value": "purge"}}' + payload = '{"return": ["name"], "where": {"column": "name", "condition": "in", "value": ["purge"]}}' response = {'rows': [{'name': 'purge'}], 'count': 1} with patch.object(connect, 'get_storage_async', return_value=storage_client_mock): with patch.object(storage_client_mock, 'query_tbl_with_payload', @@ -89,7 +89,7 @@ async def test_get_scheduled_process_bad_data(self, client): return_value=mock_coro_response(response)): resp = await client.get('/foglamp/schedule/process/bla') assert 404 == resp.status - assert 'No such Scheduled Process: bla.' == resp.reason + assert "No such Scheduled Process: ['bla']." 
== resp.reason class TestSchedules: diff --git a/tests/unit/python/foglamp/tasks/north/test_sending_process.py b/tests/unit/python/foglamp/tasks/north/test_sending_process.py index 1444b988dd..65ce3c76c5 100644 --- a/tests/unit/python/foglamp/tasks/north/test_sending_process.py +++ b/tests/unit/python/foglamp/tasks/north/test_sending_process.py @@ -18,7 +18,6 @@ from foglamp.common.audit_logger import AuditLogger from foglamp.common.storage_client.storage_client import StorageClientAsync, ReadingsStorageClientAsync from foglamp.tasks.north.sending_process import SendingProcess -from foglamp.common.process import FoglampProcess, SilentArgParse, ArgumentParserError from foglamp.common.microservice_management_client.microservice_management_client import MicroserviceManagementClient __author__ = "Stefano Simonelli" @@ -55,7 +54,7 @@ async def mock_audit_failure(): def fixture_sp(event_loop): """" Configures the sending process instance for the tests """ - with patch.object(SilentArgParse, 'silent_arg_parse', side_effect=['corehost', 0, 'sname']): + with patch.object(sys, 'argv', ['pytest', '--address', 'corehost', '--port', '32333', '--name', 'sname']): with patch.object(MicroserviceManagementClient, '__init__', return_value=None) as mmc_patch: with patch.object(ReadingsStorageClientAsync, '__init__', return_value=None) as rsc_async_patch: with patch.object(StorageClientAsync, '__init__', return_value=None) as sc_async_patch: @@ -77,7 +76,7 @@ def fixture_sp(event_loop): sp._task_fetch_data_sem = asyncio.Semaphore(0) sp._task_send_data_sem = asyncio.Semaphore(0) - + return sp @@ -367,7 +366,7 @@ async def test_is_stream_id_valid(self, async def test_is_north_valid(self, plugin_file, plugin_type, plugin_name, expected_result, event_loop): """Tests the possible cases of the function is_north_valid """ - with patch.object(SilentArgParse, 'silent_arg_parse', side_effect=['corehost', 0, 'sname']): + with patch.object(sys, 'argv', ['pytest', '--address', 'corehost', '--port', '32333', '--name', 'sname']): with patch.object(MicroserviceManagementClient, '__init__', return_value=None) as mmc_patch: with patch.object(ReadingsStorageClientAsync, '__init__', return_value=None) as rsc_async_patch: with patch.object(StorageClientAsync, '__init__', return_value=None) as sc_async_patch: @@ -392,7 +391,7 @@ async def mock_coroutine(): return True # Checks the Readings handling - with patch.object(SilentArgParse, 'silent_arg_parse', side_effect=['corehost', 0, 'sname']): + with patch.object(sys, 'argv', ['pytest', '--address', 'corehost', '--port', '32333', '--name', 'sname']): with patch.object(MicroserviceManagementClient, '__init__', return_value=None) as mmc_patch: with patch.object(ReadingsStorageClientAsync, '__init__', return_value=None) as rsc_async_patch: with patch.object(StorageClientAsync, '__init__', return_value=None) as sc_async_patch: @@ -472,7 +471,7 @@ async def mock_coroutine(): return p_rows # Checks the Readings handling - with patch.object(SilentArgParse, 'silent_arg_parse', side_effect=['corehost', 0, 'sname']): + with patch.object(sys, 'argv', ['pytest', '--address', 'corehost', '--port', '32333', '--name', 'sname']): with patch.object(MicroserviceManagementClient, '__init__', return_value=None) as mmc_patch: with patch.object(ReadingsStorageClientAsync, '__init__', return_value=None) as rsc_async_patch: with patch.object(StorageClientAsync, '__init__', return_value=None) as sc_async_patch: @@ -558,7 +557,7 @@ async def test_transform_in_memory_data_readings(self, """ Unit test for - 
_transform_in_memory_data_readings""" # Checks the Readings handling - with patch.object(SilentArgParse, 'silent_arg_parse', side_effect=['corehost', 0, 'sname']): + with patch.object(sys, 'argv', ['pytest', '--address', 'corehost', '--port', '32333', '--name', 'sname']): with patch.object(MicroserviceManagementClient, '__init__', return_value=None) as mmc_patch: with patch.object(ReadingsStorageClientAsync, '__init__', return_value=None) as rsc_async_patch: with patch.object(StorageClientAsync, '__init__', return_value=None) as sc_async_patch: @@ -729,7 +728,7 @@ async def test_load_data_into_memory_statistics(self, """Test _load_data_into_memory handling and transformations for the statistics """ # Checks the Statistics handling - with patch.object(SilentArgParse, 'silent_arg_parse', side_effect=['corehost', 0, 'sname']): + with patch.object(sys, 'argv', ['pytest', '--address', 'corehost', '--port', '32333', '--name', 'sname']): with patch.object(MicroserviceManagementClient, '__init__', return_value=None) as mmc_patch: with patch.object(ReadingsStorageClientAsync, '__init__', return_value=None) as rsc_async_patch: with patch.object(StorageClientAsync, '__init__', return_value=None) as sc_async_patch: @@ -822,7 +821,7 @@ async def test_transform_in_memory_data_statistics(self, """ Unit test for - _transform_in_memory_data_statistics""" # Checks the Statistics handling - with patch.object(SilentArgParse, 'silent_arg_parse', side_effect=['corehost', 0, 'sname']): + with patch.object(sys, 'argv', ['pytest', '--address', 'corehost', '--port', '32333', '--name', 'sname']): with patch.object(MicroserviceManagementClient, '__init__', return_value=None) as mmc_patch: with patch.object(ReadingsStorageClientAsync, '__init__', return_value=None) as rsc_async_patch: with patch.object(StorageClientAsync, '__init__', return_value=None) as sc_async_patch: @@ -860,7 +859,7 @@ async def mock_query_tbl_row_2(): rows = {"rows": [{"last_object": 10}, {"last_object": 11}]} return rows - with patch.object(SilentArgParse, 'silent_arg_parse', side_effect=['corehost', 0, 'sname']): + with patch.object(sys, 'argv', ['pytest', '--address', 'corehost', '--port', '32333', '--name', 'sname']): with patch.object(MicroserviceManagementClient, '__init__', return_value=None) as mmc_patch: with patch.object(ReadingsStorageClientAsync, '__init__', return_value=None) as rsc_async_patch: with patch.object(StorageClientAsync, '__init__', return_value=None) as sc_async_patch: @@ -927,7 +926,7 @@ async def mock_task(): return True - with patch.object(SilentArgParse, 'silent_arg_parse', side_effect=['corehost', 0, 'sname']): + with patch.object(sys, 'argv', ['pytest', '--address', 'corehost', '--port', '32333', '--name', 'sname']): with patch.object(MicroserviceManagementClient, '__init__', return_value=None) as mmc_patch: with patch.object(ReadingsStorageClientAsync, '__init__', return_value=None) as rsc_async_patch: with patch.object(StorageClientAsync, '__init__', return_value=None) as sc_async_patch: @@ -1101,7 +1100,7 @@ async def retrieve_rows(idx): return p_rows[idx] # GIVEN - with patch.object(SilentArgParse, 'silent_arg_parse', side_effect=['corehost', 0, 'sname']): + with patch.object(sys, 'argv', ['pytest', '--address', 'corehost', '--port', '32333', '--name', 'sname']): with patch.object(MicroserviceManagementClient, '__init__', return_value=None) as mmc_patch: with patch.object(ReadingsStorageClientAsync, '__init__', return_value=None) as rsc_async_patch: with patch.object(StorageClientAsync, '__init__', 
return_value=None) as sc_async_patch: @@ -1289,7 +1288,7 @@ async def retrieve_rows(idx): return p_rows[idx] # GIVEN - with patch.object(SilentArgParse, 'silent_arg_parse', side_effect=['corehost', 0, 'sname']): + with patch.object(sys, 'argv', ['pytest', '--address', 'corehost', '--port', '32333', '--name', 'sname']): with patch.object(MicroserviceManagementClient, '__init__', return_value=None) as mmc_patch: with patch.object(ReadingsStorageClientAsync, '__init__', return_value=None) as rsc_async_patch: with patch.object(StorageClientAsync, '__init__', return_value=None) as sc_async_patch: @@ -1433,7 +1432,7 @@ async def mock_retrieve_rows(idx): return p_rows[idx] # GIVEN - with patch.object(SilentArgParse, 'silent_arg_parse', side_effect=['corehost', 0, 'sname']): + with patch.object(sys, 'argv', ['pytest', '--address', 'corehost', '--port', '32333', '--name', 'sname']): with patch.object(MicroserviceManagementClient, '__init__', return_value=None) as mmc_patch: with patch.object(ReadingsStorageClientAsync, '__init__', return_value=None) as rsc_async_patch: with patch.object(StorageClientAsync, '__init__', return_value=None) as sc_async_patch: @@ -1602,7 +1601,7 @@ async def mock_retrieve_rows(idx): return p_rows[idx] # GIVEN - with patch.object(SilentArgParse, 'silent_arg_parse', side_effect=['corehost', 0, 'sname']): + with patch.object(sys, 'argv', ['pytest', '--address', 'corehost', '--port', '32333', '--name', 'sname']): with patch.object(MicroserviceManagementClient, '__init__', return_value=None) as mmc_patch: with patch.object(ReadingsStorageClientAsync, '__init__', return_value=None) as rsc_async_patch: with patch.object(StorageClientAsync, '__init__', return_value=None) as sc_async_patch: @@ -1786,7 +1785,7 @@ async def mock_send_rows(x): return p_send_result[x]["data_sent"], p_send_result[x]["new_last_object_id"], p_send_result[x]["num_sent"] # GIVEN - with patch.object(SilentArgParse, 'silent_arg_parse', side_effect=['corehost', 0, 'sname']): + with patch.object(sys, 'argv', ['pytest', '--address', 'corehost', '--port', '32333', '--name', 'sname']): with patch.object(MicroserviceManagementClient, '__init__', return_value=None) as mmc_patch: with patch.object(ReadingsStorageClientAsync, '__init__', return_value=None) as rsc_async_patch: with patch.object(StorageClientAsync, '__init__', return_value=None) as sc_async_patch: @@ -1832,7 +1831,7 @@ async def mock_send_rows(x): # Lets the _task_fetch_data to run for a while await asyncio.sleep(3) - + # Tear down sp._task_send_data_run = False sp._task_fetch_data_sem.release() @@ -1968,7 +1967,7 @@ async def mock_send_rows(x): return p_send_result[x]["data_sent"], p_send_result[x]["new_last_object_id"], p_send_result[x]["num_sent"] # GIVEN - with patch.object(SilentArgParse, 'silent_arg_parse', side_effect=['corehost', 0, 'sname']): + with patch.object(sys, 'argv', ['pytest', '--address', 'corehost', '--port', '32333', '--name', 'sname']): with patch.object(MicroserviceManagementClient, '__init__', return_value=None) as mmc_patch: with patch.object(ReadingsStorageClientAsync, '__init__', return_value=None) as rsc_async_patch: with patch.object(StorageClientAsync, '__init__', return_value=None) as sc_async_patch: @@ -2204,7 +2203,7 @@ async def mock_task(): """ Dummy async task """ return True - with patch.object(SilentArgParse, 'silent_arg_parse', side_effect=['corehost', 0, 'sname']): + with patch.object(sys, 'argv', ['pytest', '--address', 'corehost', '--port', '32333', '--name', 'sname']): with 
patch.object(MicroserviceManagementClient, '__init__', return_value=None) as mmc_patch: with patch.object(ReadingsStorageClientAsync, '__init__', return_value=None) as rsc_async_patch: with patch.object(StorageClientAsync, '__init__', return_value=None) as sc_async_patch: @@ -2230,7 +2229,7 @@ async def mock_task(): async def test_standard_plugins(self, plugin_file, plugin_type, plugin_name, event_loop): """Tests if the standard plugins are available and loadable and if they have the required methods """ - with patch.object(SilentArgParse, 'silent_arg_parse', side_effect=['corehost', 0, 'sname']): + with patch.object(sys, 'argv', ['pytest', '--address', 'corehost', '--port', '32333', '--name', 'sname']): with patch.object(MicroserviceManagementClient, '__init__', return_value=None) as mmc_patch: with patch.object(ReadingsStorageClientAsync, '__init__', return_value=None) as rsc_async_patch: with patch.object(StorageClientAsync, '__init__', return_value=None) as sc_async_patch: @@ -2290,7 +2289,7 @@ async def test_retrieve_configuration_good(self, expected_config): """ Unit tests - _retrieve_configuration - tests the transformations """ - with patch.object(SilentArgParse, 'silent_arg_parse', side_effect=['corehost', 0, 'sname']): + with patch.object(sys, 'argv', ['pytest', '--address', 'corehost', '--port', '32333', '--name', 'sname']): with patch.object(MicroserviceManagementClient, '__init__', return_value=None) as mmc_patch: with patch.object(ReadingsStorageClientAsync, '__init__', return_value=None) as rsc_async_patch: with patch.object(StorageClientAsync, '__init__', return_value=None) as sc_async_patch: @@ -2313,7 +2312,7 @@ async def test_retrieve_configuration_good(self, async def test_start_stream_not_valid(self, event_loop): """ Unit tests - _start - stream_id is not valid """ - with patch.object(SilentArgParse, 'silent_arg_parse', side_effect=['corehost', 0, 'sname']): + with patch.object(sys, 'argv', ['pytest', '--address', 'corehost', '--port', '32333', '--name', 'sname']): with patch.object(MicroserviceManagementClient, '__init__', return_value=None) as mmc_patch: with patch.object(ReadingsStorageClientAsync, '__init__', return_value=None) as rsc_async_patch: with patch.object(StorageClientAsync, '__init__', return_value=None) as sc_async_patch: @@ -2338,7 +2337,7 @@ async def mock_stat_key(): async def mock_master_stat_key(): return 'Readings Sent' - with patch.object(SilentArgParse, 'silent_arg_parse', side_effect=['corehost', 0, 'sname']): + with patch.object(sys, 'argv', ['pytest', '--address', 'corehost', '--port', '32333', '--name', 'sname']): with patch.object(MicroserviceManagementClient, '__init__', return_value=None) as mmc_patch: with patch.object(ReadingsStorageClientAsync, '__init__', return_value=None) as rsc_async_patch: with patch.object(StorageClientAsync, '__init__', return_value=None) as sc_async_patch: @@ -2374,7 +2373,7 @@ async def mock_stat_key(): async def mock_master_stat_key(): return 'Readings Sent' - with patch.object(SilentArgParse, 'silent_arg_parse', side_effect=['corehost', 0, 'sname']): + with patch.object(sys, 'argv', ['pytest', '--address', 'corehost', '--port', '32333', '--name', 'sname']): with patch.object(MicroserviceManagementClient, '__init__', return_value=None) as mmc_patch: with patch.object(ReadingsStorageClientAsync, '__init__', return_value=None) as rsc_async_patch: with patch.object(StorageClientAsync, '__init__', return_value=None) as sc_async_patch: @@ -2414,7 +2413,7 @@ async def mock_stat_key(): async def 
mock_master_stat_key(): return 'Readings Sent' - with patch.object(SilentArgParse, 'silent_arg_parse', side_effect=['corehost', 0, 'sname']): + with patch.object(sys, 'argv', ['pytest', '--address', 'corehost', '--port', '32333', '--name', 'sname']): with patch.object(MicroserviceManagementClient, '__init__', return_value=None) as mmc_patch: with patch.object(ReadingsStorageClientAsync, '__init__', return_value=None) as rsc_async_patch: with patch.object(StorageClientAsync, '__init__', return_value=None) as sc_async_patch: diff --git a/tests/unit/python/foglamp/tasks/statistics/test_statistics_history.py b/tests/unit/python/foglamp/tasks/statistics/test_statistics_history.py index 93fb019552..ae420dd2ec 100644 --- a/tests/unit/python/foglamp/tasks/statistics/test_statistics_history.py +++ b/tests/unit/python/foglamp/tasks/statistics/test_statistics_history.py @@ -9,13 +9,13 @@ import asyncio from unittest.mock import patch, MagicMock import pytest -from datetime import datetime + import ast from foglamp.common import logger from foglamp.common.storage_client.storage_client import StorageClientAsync from foglamp.tasks.statistics.statistics_history import StatisticsHistory from foglamp.common.process import FoglampProcess -from foglamp.plugins.common import utils +from foglamp.common import utils as common_utils __author__ = "Vaibhav Singhal" __copyright__ = "Copyright (c) 2017 OSIsoft, LLC" @@ -55,7 +55,7 @@ async def test_insert_into_stats_history(self): sh = StatisticsHistory() sh._storage_async = MagicMock(spec=StorageClientAsync) with patch.object(sh._storage_async, "insert_into_tbl", return_value=mock_coro(None)) as patch_storage: - ts = utils.local_timestamp() + ts = common_utils.local_timestamp() await sh._insert_into_stats_history(key='Bla', value=1, history_ts=ts) args, kwargs = patch_storage.call_args assert "statistics_history" == args[0] @@ -68,13 +68,14 @@ async def test_update_previous_value(self): with patch.object(logger, "setup"): sh = StatisticsHistory() sh._storage_async = MagicMock(spec=StorageClientAsync) + payload = {'updates': [{'where': {'value': 'Bla', 'condition': '=', 'column': 'key'}, 'values': {'previous_value': 1}}]} with patch.object(sh._storage_async, "update_tbl", return_value=mock_coro(None)) as patch_storage: - await sh._update_previous_value(key='Bla', value=1) + await sh._bulk_update_previous_value(payload) args, kwargs = patch_storage.call_args assert "statistics" == args[0] payload = ast.literal_eval(args[1]) - assert "Bla" == payload["where"]["value"] - assert 1 == payload["values"]["previous_value"] + assert "Bla" == payload["updates"][0]["where"]["value"] + assert 1 == payload["updates"][0]["values"]["previous_value"] async def test_run(self): with patch.object(FoglampProcess, '__init__'): @@ -92,9 +93,9 @@ async def test_run(self): } with patch.object(sh._storage_async, "query_tbl", return_value=mock_coro(retval)) as mock_keys: with patch.object(sh, "_insert_into_stats_history", return_value=mock_coro(None)) as mock_insert_history: - with patch.object(sh, "_update_previous_value", return_value=mock_coro(None)) as mock_update: + with patch.object(sh, "_bulk_update_previous_value", return_value=mock_coro(None)) as mock_update: await sh.run() - assert 2 == mock_update.call_count + assert 1 == mock_update.call_count args, kwargs = mock_insert_history.call_args assert "READINGS" == kwargs["key"] mock_keys.assert_called_once_with('statistics')
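
The statistics-history change above folds the per-key _update_previous_value calls into a single _bulk_update_previous_value call whose payload carries an 'updates' list, which is why run() now performs one update instead of two. A sketch of assembling that payload for several keys at once (build_bulk_previous_value_payload is a hypothetical helper; the dict shape mirrors the payload asserted in test_update_previous_value):

    def build_bulk_previous_value_payload(stats):
        # stats maps a statistics key to its latest value,
        # e.g. {'READINGS': 1, 'PURGED': 2}
        return {
            'updates': [
                {
                    'where': {'column': 'key', 'condition': '=', 'value': key},
                    'values': {'previous_value': value}
                }
                for key, value in stats.items()
            ]
        }

    payload = build_bulk_previous_value_payload({'Bla': 1})
    assert payload['updates'][0]['where']['value'] == 'Bla'
    assert payload['updates'][0]['values']['previous_value'] == 1
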