From cec5980d75aab3f633bd20c99f50ef9bdc3c5a79 Mon Sep 17 00:00:00 2001 From: stefano Date: Fri, 5 Apr 2019 14:40:37 +0200 Subject: [PATCH 001/161] FOGL-2340: test id 61 fixed - old messaged referenced --- tests/unit/C/services/storage/sqlite/expected_ETC_UTC/61 | 2 +- tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/61 | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/61 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/61 index 7300a791ac..0e21938850 100644 --- a/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/61 +++ b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/61 @@ -1 +1 @@ -{ "entryPoint" : "update", "message" : "No rows where updated", "retryable" : false} \ No newline at end of file +{ "entryPoint" : "update", "message" : "Not all updates within transaction succeeded", "retryable" : false} \ No newline at end of file diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/61 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/61 index 7300a791ac..0e21938850 100644 --- a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/61 +++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/61 @@ -1 +1 @@ -{ "entryPoint" : "update", "message" : "No rows where updated", "retryable" : false} \ No newline at end of file +{ "entryPoint" : "update", "message" : "Not all updates within transaction succeeded", "retryable" : false} \ No newline at end of file From 4cef4cf87e499611675a71c71235c462dc2d9ba0 Mon Sep 17 00:00:00 2001 From: stefano Date: Fri, 5 Apr 2019 16:05:11 +0200 Subject: [PATCH 002/161] FOGL-2340: test id 65 - fixed the number of rows affected returned by the update --- .../storage/sqlite/common/connection.cpp | 65 ++++++++++++++++++- 1 file changed, 62 insertions(+), 3 deletions(-) diff --git a/C/plugins/storage/sqlite/common/connection.cpp b/C/plugins/storage/sqlite/common/connection.cpp index 8f48923851..32538f49a2 
100644 --- a/C/plugins/storage/sqlite/common/connection.cpp +++ b/C/plugins/storage/sqlite/common/connection.cpp @@ -1166,9 +1166,21 @@ int Connection::update(const string& table, const string& payload) Document document; SQLBuffer sql; + + // FIXME_I: + Logger::getLogger()->setMinLevel("debug"); + Logger::getLogger()->debug( + "DBG update 1.0 : table |%s| payload |%s| ", + table.c_str(), + payload.c_str()); + int row = 0; ostringstream convert; + // FIXME_I: + Logger::getLogger()->debug("DBG update 2.0"); + + std::size_t arr = payload.find("updates"); bool changeReqd = (arr == std::string::npos || arr > 8); if (changeReqd) @@ -1185,17 +1197,29 @@ SQLBuffer sql; } else { + // FIXME_I: + Logger::getLogger()->debug("DBG update 2.1"); + + Value &updates = document["updates"]; if (!updates.IsArray()) { raiseError("update", "Payload is missing the updates array"); return -1; } - + + // FIXME_I: + Logger::getLogger()->debug("DBG update 2.2"); + + sql.append("BEGIN TRANSACTION;"); int i=0; for (Value::ConstValueIterator iter = updates.Begin(); iter != updates.End(); ++iter,++i) { + // FIXME_I: + Logger::getLogger()->debug("DBG update 2.3"); + + if (!iter->IsObject()) { raiseError("update", @@ -1461,6 +1485,10 @@ SQLBuffer sql; } } sql.append(';'); + + // FIXME_I: + Logger::getLogger()->debug("DBG update 2.9"); + row++; } } @@ -1471,6 +1499,12 @@ SQLBuffer sql; char *zErrMsg = NULL; int rc; + + // FIXME_I: + Logger::getLogger()->setMinLevel("debug"); + Logger::getLogger()->debug( + "DBG 1.1 : query |%s| ", query); + // Exec the UPDATE statement: no callback, no result set m_writeAccessOngoing.fetch_add(1); rc = SQLexec(dbHandle, @@ -1512,11 +1546,36 @@ SQLBuffer sql; int update = sqlite3_changes(dbHandle); + // FIXME_I: + Logger::getLogger()->setMinLevel("debug"); + Logger::getLogger()->debug( + "DBG 1.2 : update |%d| row |%d| ", update, row); + + //# FIXME_I + Logger::getLogger()->debug("DBG 1.2 : brfore raiseError"); + + int return_value=0; + if (update == 0) + { 
raiseError("update", "Not all updates within transaction succeeded"); + return_value = -1; + } + else + { + return_value = (row == 1 ? update : row); + } - // Return the status - return (update ? row : -1); + //# FIXME_I + Logger::getLogger()->debug("DBG 1.2 : after raiseError"); + Logger::getLogger()->debug("DBG 1.2 : return_value |%d| ", return_value); + + // Returns the number of rows affected, cases : + // + // 1) update == 0, no update, returns -1 + // 2) single command SQL that could affects multiple rows, returns 'update' + // 3) multiple SQL commands packed and executed in one SQLexec, returns 'row' + return (return_value); } // Return failure From e7603005a882a23114f855bac2152d315957ad49 Mon Sep 17 00:00:00 2001 From: stefano Date: Fri, 5 Apr 2019 16:39:28 +0200 Subject: [PATCH 003/161] FOGL-2340: update cases - added unit tests --- .../storage/sqlite/common/connection.cpp | 25 ------------- .../storage/sqlite/expected_ETC_UTC/109 | 1 + .../storage/sqlite/expected_EUROPE_ROME/109 | 1 + .../sqlite/payloads/update_multi_rows.json | 36 +++++++++++++++++++ 4 files changed, 38 insertions(+), 25 deletions(-) create mode 100644 tests/unit/C/services/storage/sqlite/expected_ETC_UTC/109 create mode 100644 tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/109 create mode 100644 tests/unit/C/services/storage/sqlite/payloads/update_multi_rows.json diff --git a/C/plugins/storage/sqlite/common/connection.cpp b/C/plugins/storage/sqlite/common/connection.cpp index 32538f49a2..17fa32eb2b 100644 --- a/C/plugins/storage/sqlite/common/connection.cpp +++ b/C/plugins/storage/sqlite/common/connection.cpp @@ -1177,10 +1177,6 @@ SQLBuffer sql; int row = 0; ostringstream convert; - // FIXME_I: - Logger::getLogger()->debug("DBG update 2.0"); - - std::size_t arr = payload.find("updates"); bool changeReqd = (arr == std::string::npos || arr > 8); if (changeReqd) @@ -1197,10 +1193,6 @@ SQLBuffer sql; } else { - // FIXME_I: - Logger::getLogger()->debug("DBG update 2.1"); - - 
Value &updates = document["updates"]; if (!updates.IsArray()) { @@ -1208,18 +1200,10 @@ SQLBuffer sql; return -1; } - // FIXME_I: - Logger::getLogger()->debug("DBG update 2.2"); - - sql.append("BEGIN TRANSACTION;"); int i=0; for (Value::ConstValueIterator iter = updates.Begin(); iter != updates.End(); ++iter,++i) { - // FIXME_I: - Logger::getLogger()->debug("DBG update 2.3"); - - if (!iter->IsObject()) { raiseError("update", @@ -1485,10 +1469,6 @@ SQLBuffer sql; } } sql.append(';'); - - // FIXME_I: - Logger::getLogger()->debug("DBG update 2.9"); - row++; } } @@ -1499,7 +1479,6 @@ SQLBuffer sql; char *zErrMsg = NULL; int rc; - // FIXME_I: Logger::getLogger()->setMinLevel("debug"); Logger::getLogger()->debug( @@ -1551,9 +1530,6 @@ SQLBuffer sql; Logger::getLogger()->debug( "DBG 1.2 : update |%d| row |%d| ", update, row); - //# FIXME_I - Logger::getLogger()->debug("DBG 1.2 : brfore raiseError"); - int return_value=0; if (update == 0) @@ -1567,7 +1543,6 @@ SQLBuffer sql; } //# FIXME_I - Logger::getLogger()->debug("DBG 1.2 : after raiseError"); Logger::getLogger()->debug("DBG 1.2 : return_value |%d| ", return_value); // Returns the number of rows affected, cases : diff --git a/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/109 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/109 new file mode 100644 index 0000000000..43244568e9 --- /dev/null +++ b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/109 @@ -0,0 +1 @@ +{ "response" : "updated", "rows_affected" : 3 } \ No newline at end of file diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/109 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/109 new file mode 100644 index 0000000000..43244568e9 --- /dev/null +++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/109 @@ -0,0 +1 @@ +{ "response" : "updated", "rows_affected" : 3 } \ No newline at end of file diff --git a/tests/unit/C/services/storage/sqlite/payloads/update_multi_rows.json 
b/tests/unit/C/services/storage/sqlite/payloads/update_multi_rows.json new file mode 100644 index 0000000000..17baae6142 --- /dev/null +++ b/tests/unit/C/services/storage/sqlite/payloads/update_multi_rows.json @@ -0,0 +1,36 @@ +{ + "updates" : [ + { + "condition": { + "column": "id", + "condition": "=", + "value": 1 + }, + "values": { + "description": "update multi rows - 1" + } + }, + { + "condition": { + "column": "id", + "condition": "=", + "value": 2 + }, + "values": { + "description": "update multi rows - 2" + } + }, + { + "condition": { + "column": "id", + "condition": "=", + "value": 3 + }, + "values": { + "description": "update multi rows - 3" + } + } + + ] +} + From dbbc5c42a70d0a22e69f464acaf5280f4171a901 Mon Sep 17 00:00:00 2001 From: stefano Date: Fri, 5 Apr 2019 16:40:10 +0200 Subject: [PATCH 004/161] FOGL-2340: update cases - added unit tests. --- tests/unit/C/services/storage/sqlite/testset | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/tests/unit/C/services/storage/sqlite/testset b/tests/unit/C/services/storage/sqlite/testset index adf49ade3d..d67af8fe5c 100644 --- a/tests/unit/C/services/storage/sqlite/testset +++ b/tests/unit/C/services/storage/sqlite/testset @@ -58,11 +58,11 @@ Query Readings Timebucket 1,PUT,http://localhost:8080/storage/reading/query,quer Multi And,PUT,http://localhost:8080/storage/table/test2/query,multi_and.json Multi Or,PUT,http://localhost:8080/storage/table/test2/query,multi_or.json Multi Mixed,PUT,http://localhost:8080/storage/table/test2/query,multi_mised.json -Update Bad Condition,PUT,http://localhost:8080/storage/table/test2,update_bad.json +update - Update Bad Condition,PUT,http://localhost:8080/storage/table/test2,update_bad.json Read back,GET,http://localhost:8080/storage/table/test2, Count Assets,PUT,http://localhost:8080/storage/reading/query,count_assets.json Reading Rate,PUT,http://localhost:8080/storage/reading/query,reading_property.json -Update 
expression,PUT,http://localhost:8080/storage/table/test2,update_expression.json +update - Update expression,PUT,http://localhost:8080/storage/table/test2,update_expression.json Read back update,PUT,http://localhost:8080/storage/table/test2/query,read_id_1xx.json Distinct,PUT,http://localhost:8080/storage/table/test2/query,where_distinct.json Update JSON,PUT,http://localhost:8080/storage/table/test,update_json.json @@ -106,4 +106,5 @@ microseconds - Query asset NO alias,PUT,http://localhost:8080/storage/reading/qu microseconds - Query asset alias,PUT,http://localhost:8080/storage/reading/query,msec_query_asset_alias.json microseconds - Query asset aggregate min,PUT,http://localhost:8080/storage/reading/query,msec_query_asset_aggmin.json microseconds - Query asset aggregate min array,PUT,http://localhost:8080/storage/reading/query,msec_query_asset_aggminarray.json +update - Update multi rows,PUT,http://localhost:8080/storage/table/test2,update_multi_rows.json Shutdown,POST,http://localhost:1081/foglamp/service/shutdown,,checkstate From 2638fd3e1857d5165cba4b05ba7b1fb3ba8169b8 Mon Sep 17 00:00:00 2001 From: stefano Date: Fri, 5 Apr 2019 17:19:31 +0200 Subject: [PATCH 005/161] FOGL-2340: added unit tests x insert - single/multi rows --- .../storage/sqlite/common/connection.cpp | 26 +++---------------- .../storage/sqlite/expected_ETC_UTC/110 | 1 + .../storage/sqlite/expected_ETC_UTC/111 | 1 + .../storage/sqlite/expected_EUROPE_ROME/110 | 1 + .../storage/sqlite/expected_EUROPE_ROME/111 | 1 + .../storage/sqlite/payloads/insert_1row.json | 6 +++++ .../sqlite/payloads/insert_multi_rows.json | 20 ++++++++++++++ tests/unit/C/services/storage/sqlite/testset | 2 ++ 8 files changed, 35 insertions(+), 23 deletions(-) create mode 100644 tests/unit/C/services/storage/sqlite/expected_ETC_UTC/110 create mode 100644 tests/unit/C/services/storage/sqlite/expected_ETC_UTC/111 create mode 100644 tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/110 create mode 100644 
tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/111 create mode 100644 tests/unit/C/services/storage/sqlite/payloads/insert_1row.json create mode 100644 tests/unit/C/services/storage/sqlite/payloads/insert_multi_rows.json diff --git a/C/plugins/storage/sqlite/common/connection.cpp b/C/plugins/storage/sqlite/common/connection.cpp index 17fa32eb2b..80c69c0012 100644 --- a/C/plugins/storage/sqlite/common/connection.cpp +++ b/C/plugins/storage/sqlite/common/connection.cpp @@ -985,8 +985,9 @@ Document document; ostringstream convert; std::size_t arr = data.find("inserts"); -// Check first the 'inserts' property in JSON data -bool stdInsert = (arr == std::string::npos || arr > 8); + // Check first the 'inserts' property in JSON data + bool stdInsert = (arr == std::string::npos || arr > 8); + // If input data is not an array of iserts // create an array with one element if (stdInsert) @@ -1166,14 +1167,6 @@ int Connection::update(const string& table, const string& payload) Document document; SQLBuffer sql; - - // FIXME_I: - Logger::getLogger()->setMinLevel("debug"); - Logger::getLogger()->debug( - "DBG update 1.0 : table |%s| payload |%s| ", - table.c_str(), - payload.c_str()); - int row = 0; ostringstream convert; @@ -1479,11 +1472,6 @@ SQLBuffer sql; char *zErrMsg = NULL; int rc; - // FIXME_I: - Logger::getLogger()->setMinLevel("debug"); - Logger::getLogger()->debug( - "DBG 1.1 : query |%s| ", query); - // Exec the UPDATE statement: no callback, no result set m_writeAccessOngoing.fetch_add(1); rc = SQLexec(dbHandle, @@ -1525,11 +1513,6 @@ SQLBuffer sql; int update = sqlite3_changes(dbHandle); - // FIXME_I: - Logger::getLogger()->setMinLevel("debug"); - Logger::getLogger()->debug( - "DBG 1.2 : update |%d| row |%d| ", update, row); - int return_value=0; if (update == 0) @@ -1542,9 +1525,6 @@ SQLBuffer sql; return_value = (row == 1 ? 
update : row); } - //# FIXME_I - Logger::getLogger()->debug("DBG 1.2 : return_value |%d| ", return_value); - // Returns the number of rows affected, cases : // // 1) update == 0, no update, returns -1 diff --git a/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/110 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/110 new file mode 100644 index 0000000000..7d66483ebe --- /dev/null +++ b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/110 @@ -0,0 +1 @@ +{ "response" : "inserted", "rows_affected" : 1 } \ No newline at end of file diff --git a/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/111 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/111 new file mode 100644 index 0000000000..72324daa9f --- /dev/null +++ b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/111 @@ -0,0 +1 @@ +{ "response" : "inserted", "rows_affected" : 2 } \ No newline at end of file diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/110 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/110 new file mode 100644 index 0000000000..7d66483ebe --- /dev/null +++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/110 @@ -0,0 +1 @@ +{ "response" : "inserted", "rows_affected" : 1 } \ No newline at end of file diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/111 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/111 new file mode 100644 index 0000000000..72324daa9f --- /dev/null +++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/111 @@ -0,0 +1 @@ +{ "response" : "inserted", "rows_affected" : 2 } \ No newline at end of file diff --git a/tests/unit/C/services/storage/sqlite/payloads/insert_1row.json b/tests/unit/C/services/storage/sqlite/payloads/insert_1row.json new file mode 100644 index 0000000000..c4805e6a11 --- /dev/null +++ b/tests/unit/C/services/storage/sqlite/payloads/insert_1row.json @@ -0,0 +1,6 @@ +{ + "id" : 1000, + "key" : "INSERT_1_1", + "description" : "insert - 1 rows", + 
"data" : { "json" : "inserted" } +} diff --git a/tests/unit/C/services/storage/sqlite/payloads/insert_multi_rows.json b/tests/unit/C/services/storage/sqlite/payloads/insert_multi_rows.json new file mode 100644 index 0000000000..1a573c430f --- /dev/null +++ b/tests/unit/C/services/storage/sqlite/payloads/insert_multi_rows.json @@ -0,0 +1,20 @@ +{ + "inserts" : [ + { + "id": 1000, + "key": "INSERT_2_1", + "description": "insert - multi rows", + "data": { + "json": "inserted" + } + }, + { + "id": 1001, + "key": "INSERT_2_2", + "description": "insert - multi rows", + "data": { + "json": "inserted" + } + } + ] +} diff --git a/tests/unit/C/services/storage/sqlite/testset b/tests/unit/C/services/storage/sqlite/testset index d67af8fe5c..f1441991da 100644 --- a/tests/unit/C/services/storage/sqlite/testset +++ b/tests/unit/C/services/storage/sqlite/testset @@ -107,4 +107,6 @@ microseconds - Query asset alias,PUT,http://localhost:8080/storage/reading/query microseconds - Query asset aggregate min,PUT,http://localhost:8080/storage/reading/query,msec_query_asset_aggmin.json microseconds - Query asset aggregate min array,PUT,http://localhost:8080/storage/reading/query,msec_query_asset_aggminarray.json update - Update multi rows,PUT,http://localhost:8080/storage/table/test2,update_multi_rows.json +insert - Insert 1 row,POST,http://localhost:8080/storage/table/test2,insert_1row.json +insert - Insert multi rows,POST,http://localhost:8080/storage/table/test2,insert_multi_rows.json Shutdown,POST,http://localhost:1081/foglamp/service/shutdown,,checkstate From 7bf4662eeacd43ca2a66f8a8543bf481e166786f Mon Sep 17 00:00:00 2001 From: Praveen Garg Date: Sun, 7 Apr 2019 00:55:43 +0530 Subject: [PATCH 006/161] added north egress system test with scaleset filter --- .../system/python/e2e/test_e2e_pi_scaleset.py | 176 ++++++++++++++++++ 1 file changed, 176 insertions(+) create mode 100644 tests/system/python/e2e/test_e2e_pi_scaleset.py diff --git 
a/tests/system/python/e2e/test_e2e_pi_scaleset.py b/tests/system/python/e2e/test_e2e_pi_scaleset.py new file mode 100644 index 0000000000..ccb271f66b --- /dev/null +++ b/tests/system/python/e2e/test_e2e_pi_scaleset.py @@ -0,0 +1,176 @@ +# -*- coding: utf-8 -*- + +# FOGLAMP_BEGIN +# See: http://foglamp.readthedocs.io/ +# FOGLAMP_END + +""" Test end to end flow with: + Ingress: HTTP south plugin + Egress: PI Server (C) plugin & scale-set filter plugin +""" + +import os +import subprocess +import http.client +import json +import time +import pytest +import utils + + +__author__ = "Praveen Garg" +__copyright__ = "Copyright (c) 2019 Dianomic Systems" +__license__ = "Apache 2.0" +__version__ = "${VERSION}" + + +SOUTH_PLUGIN = "http_south" +SVC_NAME = "Room #1" +ASSET_PREFIX = "http-" # default for HTTP South plugin +ASSET_NAME = "e1" + +TASK_NAME = "North v2 PI" + +FILTER_PLUGIN = "scale-set" +EGRESS_FILTER_NAME = "SS #1" + +READ_KEY = "temprature" +SENSOR_VALUE = 21 + +# scale(set) factor +SCALE = 9/5 +OFFSET = 32 +OUTPUT = (SENSOR_VALUE * SCALE) + OFFSET + + +class TestE2ePiEgressWithScalesetFilter: + + def get_ping_status(self, foglamp_url): + conn = http.client.HTTPConnection(foglamp_url) + conn.request("GET", '/foglamp/ping') + r = conn.getresponse() + assert 200 == r.status + r = r.read().decode() + jdoc = json.loads(r) + return jdoc + + def get_statistics_map(self, foglamp_url): + conn = http.client.HTTPConnection(foglamp_url) + conn.request("GET", '/foglamp/statistics') + r = conn.getresponse() + assert 200 == r.status + r = r.read().decode() + jdoc = json.loads(r) + return utils.serialize_stats_map(jdoc) + + @pytest.fixture + def start_south_north_with_filter(self, reset_and_start_foglamp, add_south, south_branch, + remove_data_file, remove_directories, + foglamp_url, add_filter, filter_branch, filter_name, + start_north_pi_server_c, pi_host, pi_port, pi_token): + """ This fixture clones given south & filter plugin repo, and starts south and PI north C instance 
with filter + + """ + fogbench_template_path = os.path.join( + os.path.expandvars('${FOGLAMP_ROOT}'), 'data/template.json') + with open(fogbench_template_path, "w") as f: + f.write( + '[{"name": "%s", "sensor_values": ' + '[{"name": "%s", "type": "number", "min": %d, "max": %d, "precision": 0}]}]' % ( + ASSET_NAME, READ_KEY, SENSOR_VALUE, SENSOR_VALUE)) + + add_south(SOUTH_PLUGIN, south_branch, foglamp_url, service_name=SVC_NAME) + + start_north_pi_server_c(foglamp_url, pi_host, pi_port, pi_token, taskname=TASK_NAME) + + filter_cfg = {"enable": "true", + "factors": json.dumps([ + { + "asset": "{}{}".format(ASSET_PREFIX, ASSET_NAME), + "datapoint": READ_KEY, + "scale": str(SCALE), + "offset": str(OFFSET) + }]) + } + + filter_cfg = {"enable": "true", + "factors": json.dumps([ + { + "asset": "{}{}".format(ASSET_PREFIX, ASSET_NAME), + "datapoint": READ_KEY, + "scale": SCALE, + "offset": OFFSET + }]) + } + + # add_filter(FILTER_PLUGIN, filter_branch, EGRESS_FILTER_NAME, filter_cfg, foglamp_url, TASK_NAME) + + yield self.start_south_north_with_filter + + remove_data_file(fogbench_template_path) + remove_directories("/tmp/foglamp-south-{}".format(ASSET_NAME.lower())) + remove_directories("/tmp/foglamp-filter-{}".format(FILTER_PLUGIN)) + + def test_end_to_end(self, start_south_north_with_filter, read_data_from_pi, foglamp_url, pi_host, pi_admin, + pi_passwd, pi_db, wait_time, retries, skip_verify_north_interface, + disable_schedule, enable_schedule): + + subprocess.run(["cd $FOGLAMP_ROOT/extras/python; python3 -m fogbench -t ../../data/template.json -p http; cd -"] + , shell=True, check=True) + # let the readings ingress + time.sleep(wait_time) + + self._verify_ping_and_statistics(foglamp_url, count=1) + + self._verify_ingest(foglamp_url, SENSOR_VALUE, read_count=1) + + if not skip_verify_north_interface: + self._verify_egress(read_data_from_pi, pi_host, pi_admin, pi_passwd, pi_db, wait_time, retries) + + def _verify_ping_and_statistics(self, foglamp_url, count): + 
ping_response = self.get_ping_status(foglamp_url) + assert count == ping_response["dataRead"] + # assert count == ping_response["dataSent"] + + actual_stats_map = self.get_statistics_map(foglamp_url) + key_asset_name_with_prefix = "{}{}".format(ASSET_PREFIX.upper(), ASSET_NAME.upper()) + assert count == actual_stats_map[key_asset_name_with_prefix] + assert count == actual_stats_map['READINGS'] + # assert count == actual_stats_map[TASK_NAME] + # assert count == actual_stats_map['Readings Sent'] + + def _verify_ingest(self, foglamp_url, value, read_count): + asset_name_with_prefix = "{}{}".format(ASSET_PREFIX, ASSET_NAME) + conn = http.client.HTTPConnection(foglamp_url) + conn.request("GET", '/foglamp/asset') + r = conn.getresponse() + assert 200 == r.status + r = r.read().decode() + jdoc = json.loads(r) + assert len(jdoc), "No asset found" + assert asset_name_with_prefix == jdoc[0]["assetCode"] + assert read_count == jdoc[0]["count"] + + conn.request("GET", '/foglamp/asset/{}'.format(asset_name_with_prefix)) + r = conn.getresponse() + assert 200 == r.status + r = r.read().decode() + jdoc = json.loads(r) + assert len(jdoc), "No asset found" + assert value == jdoc[0]["reading"][READ_KEY] + + def _verify_egress(self, read_data_from_pi, pi_host, pi_admin, pi_passwd, pi_db, wait_time, retries): + retry_count = 0 + data_from_pi = None + while (data_from_pi is None or data_from_pi == []) and retry_count < retries: + asset_name_with_prefix = "{}{}".format(ASSET_PREFIX, ASSET_NAME) + data_from_pi = read_data_from_pi(pi_host, pi_admin, pi_passwd, pi_db, asset_name_with_prefix, {READ_KEY}) + retry_count += 1 + time.sleep(wait_time * 2) + + if data_from_pi is None or retry_count == retries: + assert False, "Failed to read data from PI" + + assert READ_KEY in data_from_pi + assert isinstance(data_from_pi[READ_KEY], list) + assert OUTPUT in data_from_pi[READ_KEY] From 64530837d80fc08d08ec5ce8b042bfb5a903d775 Mon Sep 17 00:00:00 2001 From: Praveen Garg Date: Sun, 7 Apr 2019 
23:20:47 +0530 Subject: [PATCH 007/161] fixed filter factors config item | FIXME: requirement of factors key for factors itself is redundant & confusing --- .../system/python/e2e/test_e2e_pi_scaleset.py | 22 +++++-------------- 1 file changed, 6 insertions(+), 16 deletions(-) diff --git a/tests/system/python/e2e/test_e2e_pi_scaleset.py b/tests/system/python/e2e/test_e2e_pi_scaleset.py index ccb271f66b..242f541009 100644 --- a/tests/system/python/e2e/test_e2e_pi_scaleset.py +++ b/tests/system/python/e2e/test_e2e_pi_scaleset.py @@ -84,26 +84,16 @@ def start_south_north_with_filter(self, reset_and_start_foglamp, add_south, sout start_north_pi_server_c(foglamp_url, pi_host, pi_port, pi_token, taskname=TASK_NAME) filter_cfg = {"enable": "true", - "factors": json.dumps([ - { - "asset": "{}{}".format(ASSET_PREFIX, ASSET_NAME), - "datapoint": READ_KEY, - "scale": str(SCALE), - "offset": str(OFFSET) - }]) - } - - filter_cfg = {"enable": "true", - "factors": json.dumps([ + "factors": {"factors": [ { "asset": "{}{}".format(ASSET_PREFIX, ASSET_NAME), "datapoint": READ_KEY, "scale": SCALE, "offset": OFFSET - }]) + }]} } - # add_filter(FILTER_PLUGIN, filter_branch, EGRESS_FILTER_NAME, filter_cfg, foglamp_url, TASK_NAME) + add_filter(FILTER_PLUGIN, filter_branch, EGRESS_FILTER_NAME, filter_cfg, foglamp_url, TASK_NAME) yield self.start_south_north_with_filter @@ -130,14 +120,14 @@ def test_end_to_end(self, start_south_north_with_filter, read_data_from_pi, fogl def _verify_ping_and_statistics(self, foglamp_url, count): ping_response = self.get_ping_status(foglamp_url) assert count == ping_response["dataRead"] - # assert count == ping_response["dataSent"] + assert count == ping_response["dataSent"] actual_stats_map = self.get_statistics_map(foglamp_url) key_asset_name_with_prefix = "{}{}".format(ASSET_PREFIX.upper(), ASSET_NAME.upper()) assert count == actual_stats_map[key_asset_name_with_prefix] assert count == actual_stats_map['READINGS'] - # assert count == 
actual_stats_map[TASK_NAME] - # assert count == actual_stats_map['Readings Sent'] + assert count == actual_stats_map[TASK_NAME] + assert count == actual_stats_map['Readings Sent'] def _verify_ingest(self, foglamp_url, value, read_count): asset_name_with_prefix = "{}{}".format(ASSET_PREFIX, ASSET_NAME) From a1b217e4acd8a71fec8f90d8e24b086c6eab79c3 Mon Sep 17 00:00:00 2001 From: Praveen Garg Date: Mon, 8 Apr 2019 16:26:23 +0530 Subject: [PATCH 008/161] fixed test to send data after adding filter; added start_task param to north pi fixture --- tests/system/python/conftest.py | 5 +++-- tests/system/python/e2e/test_e2e_pi_scaleset.py | 10 +++++----- 2 files changed, 8 insertions(+), 7 deletions(-) diff --git a/tests/system/python/conftest.py b/tests/system/python/conftest.py index 5b827a00a0..1491be7654 100644 --- a/tests/system/python/conftest.py +++ b/tests/system/python/conftest.py @@ -113,9 +113,10 @@ def _add_foglamp_south(south_plugin, south_branch, foglamp_url, service_name="pl @pytest.fixture def start_north_pi_v2(): def _start_north_pi_server_c(foglamp_url, pi_host, pi_port, pi_token, north_plugin="PI_Server_V2", - taskname="NorthReadingsToPI"): + taskname="NorthReadingsToPI", start_task=True): """Start north task""" + _enabled = "true" if start_task else "false" conn = http.client.HTTPConnection(foglamp_url) data = {"name": taskname, "plugin": "{}".format(north_plugin), @@ -124,7 +125,7 @@ def _start_north_pi_server_c(foglamp_url, pi_host, pi_port, pi_token, north_plug "schedule_day": 0, "schedule_time": 0, "schedule_repeat": 30, - "schedule_enabled": "true", + "schedule_enabled": _enabled, "config": {"producerToken": {"value": pi_token}, "URL": {"value": "https://{}:{}/ingress/messages".format(pi_host, pi_port)} } diff --git a/tests/system/python/e2e/test_e2e_pi_scaleset.py b/tests/system/python/e2e/test_e2e_pi_scaleset.py index 242f541009..2886d8b2c9 100644 --- a/tests/system/python/e2e/test_e2e_pi_scaleset.py +++ 
b/tests/system/python/e2e/test_e2e_pi_scaleset.py @@ -34,7 +34,7 @@ FILTER_PLUGIN = "scale-set" EGRESS_FILTER_NAME = "SS #1" -READ_KEY = "temprature" +READ_KEY = "temperature" SENSOR_VALUE = 21 # scale(set) factor @@ -65,7 +65,7 @@ def get_statistics_map(self, foglamp_url): @pytest.fixture def start_south_north_with_filter(self, reset_and_start_foglamp, add_south, south_branch, - remove_data_file, remove_directories, + remove_data_file, remove_directories, enable_schedule, foglamp_url, add_filter, filter_branch, filter_name, start_north_pi_server_c, pi_host, pi_port, pi_token): """ This fixture clones given south & filter plugin repo, and starts south and PI north C instance with filter @@ -81,7 +81,7 @@ def start_south_north_with_filter(self, reset_and_start_foglamp, add_south, sout add_south(SOUTH_PLUGIN, south_branch, foglamp_url, service_name=SVC_NAME) - start_north_pi_server_c(foglamp_url, pi_host, pi_port, pi_token, taskname=TASK_NAME) + start_north_pi_server_c(foglamp_url, pi_host, pi_port, pi_token, taskname=TASK_NAME, start_task=False) filter_cfg = {"enable": "true", "factors": {"factors": [ @@ -94,6 +94,7 @@ def start_south_north_with_filter(self, reset_and_start_foglamp, add_south, sout } add_filter(FILTER_PLUGIN, filter_branch, EGRESS_FILTER_NAME, filter_cfg, foglamp_url, TASK_NAME) + enable_schedule(foglamp_url, TASK_NAME) yield self.start_south_north_with_filter @@ -102,8 +103,7 @@ def start_south_north_with_filter(self, reset_and_start_foglamp, add_south, sout remove_directories("/tmp/foglamp-filter-{}".format(FILTER_PLUGIN)) def test_end_to_end(self, start_south_north_with_filter, read_data_from_pi, foglamp_url, pi_host, pi_admin, - pi_passwd, pi_db, wait_time, retries, skip_verify_north_interface, - disable_schedule, enable_schedule): + pi_passwd, pi_db, wait_time, retries, skip_verify_north_interface): subprocess.run(["cd $FOGLAMP_ROOT/extras/python; python3 -m fogbench -t ../../data/template.json -p http; cd -"] , shell=True, check=True) From 
25eaf16d73988dee94e9e8c310426514c9631735 Mon Sep 17 00:00:00 2001 From: ashish-jabble Date: Mon, 8 Apr 2019 17:26:05 +0530 Subject: [PATCH 009/161] POST plugin route added and only tar format handled; pending .deb format --- .gitignore | 2 + .../services/core/api/plugins/__init__.py | 0 .../services/core/api/plugins/install.py | 179 ++++++++++++++++++ python/foglamp/services/core/routes.py | 5 +- 4 files changed, 184 insertions(+), 2 deletions(-) create mode 100644 python/foglamp/services/core/api/plugins/__init__.py create mode 100644 python/foglamp/services/core/api/plugins/install.py diff --git a/.gitignore b/.gitignore index 50e7aec937..cddc1a6dcb 100644 --- a/.gitignore +++ b/.gitignore @@ -14,6 +14,8 @@ data/etc/storage.json data/etc/certs/* data/var data/tmp +data/scripts +data/plugins # SQLite3 default db location and after migration data/*.db diff --git a/python/foglamp/services/core/api/plugins/__init__.py b/python/foglamp/services/core/api/plugins/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/foglamp/services/core/api/plugins/install.py b/python/foglamp/services/core/api/plugins/install.py new file mode 100644 index 0000000000..4875d0d61a --- /dev/null +++ b/python/foglamp/services/core/api/plugins/install.py @@ -0,0 +1,179 @@ +# -*- coding: utf-8 -*- + +# FOGLAMP_BEGIN +# See: http://foglamp.readthedocs.io/ +# FOGLAMP_END + +import os +import subprocess +import logging +import asyncio +from aiohttp import web +import aiohttp +import async_timeout +import tarfile +import shutil +import hashlib + +from foglamp.common import logger +from foglamp.common.common import _FOGLAMP_ROOT + + +__author__ = "Ashish Jabble" +__copyright__ = "Copyright (c) 2019 Dianomic Systems" +__license__ = "Apache 2.0" +__version__ = "${VERSION}" + + +_help = """ + ------------------------------------------------------------------------------- + | POST | /foglamp/plugins | + 
------------------------------------------------------------------------------- +""" +TIME_OUT = 120 +CHUNK_SIZE = 1024 +_LOGGER = logger.setup(__name__, level=logging.INFO) + +_PATH = _FOGLAMP_ROOT + '/data/plugins/' + + +# TODO: Add unit tests +async def add_plugin(request: web.Request) -> web.Response: + """ add plugin + + :Example: + curl -X POST http://localhost:8081/foglamp/plugins + data: + URL - The URL to pull the plugin file from + format - the format of the file. One of tar or package + compressed - option boolean this is used to indicate the package is a compressed gzip image + checksum - the checksum of the file, used to verify correct upload + """ + try: + data = await request.json() + url = data.get('url', None) + file_format = data.get('format', None) + compressed = data.get('compressed', None) + # TODO: Better if we add plugin type (mandatory) + _type = data.get('type', None) + checksum = data.get('checksum', None) + if not url or not file_format or not _type or not checksum: + raise TypeError('URL, checksum, plugin type and format are mandatory.') + # TODO: add later for notification delivery and rules plugins + if _type not in ['filter', 'north', 'south']: + raise ValueError("Invalid type you have supplied; Accepted types are filter, north, south") + if file_format not in ["tar", "deb"]: + raise ValueError("Invalid format you supplied; Accepted format are tar and deb") + if compressed: + if compressed not in ['true', 'false', True, False]: + raise ValueError('Only "true", "false", true, false are allowed for value of compressed.') + is_compressed = ((isinstance(compressed, str) and compressed.lower() in ['true']) or ( + (isinstance(compressed, bool) and compressed is True))) + + # All stuff goes into _PATH + if not os.path.exists(_PATH): + os.makedirs(_PATH) + + result = await download([url]) + # _LOGGER.info("Result {} ".format(result)) + file_name = result[0].split('Successfully downloaded ')[-1] + # _LOGGER.info("file_name {} 
{}".format(file_name, checksum)) + + # TODO: only validate with MD5sum. Do we need to validate with SHA1, SHA256, SHA512? + # if yes then we need to add checksum type attribute in request + if validate_checksum(checksum, file_name) is False: + raise ValueError("Checksum is failed.") + + # TODO: tar this downloaded file under _PATH and install plugin FOGLAMP_ROOT + _LOGGER.info("Found {} format with compressed {}".format(file_format, is_compressed)) + _files = extract_file(file_name, is_compressed) + _LOGGER.info("Files {} {}".format(_files, type(_files))) + if file_format == 'tar': + copy_file_install_requirement(_files, _type) + else: + install_debian(_files[0]) + except (TypeError, ValueError) as ex: + raise web.HTTPBadRequest(reason=str(ex)) + except Exception as ex: + raise web.HTTPException(reason=str(ex)) + else: + return web.json_response({"message": "{} and installed".format(result)}) + + +async def get_url(url: str, session: aiohttp.ClientSession) -> str: + file_name = str(url.split("/")[-1]) + async with async_timeout.timeout(TIME_OUT): + async with session.get(url) as response: + with open(_PATH + file_name, 'wb') as fd: + async for data in response.content.iter_chunked(CHUNK_SIZE): + fd.write(data) + return 'Successfully downloaded ' + file_name + + +async def download(urls: list) -> asyncio.gather: + async with aiohttp.ClientSession() as session: + tasks = [get_url(url, session) for url in urls] + return await asyncio.gather(*tasks) + + +def validate_checksum(checksum: str, file_name: str) -> bool: + original = hashlib.md5(open(_PATH + file_name, 'rb').read()).hexdigest() + return True if original == checksum else False + + +def extract_file(file_name: str, is_compressed: bool) -> list: + mode = "r:gz" if is_compressed else "r" + tar = tarfile.open(_PATH + file_name, mode) + _LOGGER.info("Extracted to {}".format(_PATH)) + tar.extractall(_PATH) + _LOGGER.info("Extraction Done!!") + # _LOGGER.info(tar.getnames()) + return tar.getnames() + + +def 
install_debian(file_name: str): + # FIXME: Not working seems like we need to manipulate in /etc/sudoers.d/foglamp file + # subprocess.run(["sudo cp {} /var/cache/apt/archives/.".format(file_name)], shell=True, check=True) + # subprocess.run(["sudo apt install /var/cache/apt/archives/{}".format(file_name)], shell=True, check=True) + pass + + +def copy_file_install_requirement(_file: list, _type: str): + _py = any(f.endswith(".py") for f in _file) + _so = any(f.endswith(".so") for f in _file) + _so_1 = any(f.endswith(".so.1") for f in _file) + + if _py: + _LOGGER.exception("Python plugin found") + + if _so: + if _so_1: + _LOGGER.exception("C plugin found") + else: + _LOGGER.exception("Not a valid C plugin") + + if not _py and not _so: + _LOGGER.exception("Invalid plugin found") + raise ValueError + + dir = [] + for s in _file: + print(s) + dir.append(s.split("/")[-1]) + + assert len(dir), "No data found" + plugin_name = dir[0] + _LOGGER.exception("plugin name {} dir {} ".format(plugin_name, dir)) + + _plugin_path = "python/foglamp/plugins" if _py else "plugins" + dest_path = "{}/{}/{}/".format(_FOGLAMP_ROOT, _plugin_path, _type) + _LOGGER.exception("dest {}".format(dest_path)) + + if os.path.exists(dest_path + plugin_name) and os.path.isdir(dest_path + plugin_name): + shutil.rmtree(dest_path + plugin_name) + shutil.copytree(_PATH + plugin_name, dest_path + plugin_name) + _LOGGER.exception("file copied....to {}".format(dest_path)) + + if "requirements.sh" in dir: + _LOGGER.exception("INSTALL PIP Dep.... 
{}".format(dest_path + plugin_name + "/" + "requirements.sh")) + subprocess.run(["sh {}".format(dest_path + plugin_name + "/" + "requirements.sh")], shell=True) diff --git a/python/foglamp/services/core/routes.py b/python/foglamp/services/core/routes.py index 4c021f078b..b38c9c818c 100644 --- a/python/foglamp/services/core/routes.py +++ b/python/foglamp/services/core/routes.py @@ -23,7 +23,7 @@ from foglamp.services.core.api import north from foglamp.services.core.api import filters from foglamp.services.core.api import notification - +from foglamp.services.core.api.plugins import install __author__ = "Ashish Jabble, Praveen Garg, Massimiliano Pinto" __copyright__ = "Copyright (c) 2017-2018 OSIsoft, LLC" @@ -154,8 +154,9 @@ def setup(app): # Get Syslog app.router.add_route('GET', '/foglamp/syslog', support.get_syslog_entries) - # Get Plugin + # Plugins (install, discovery) app.router.add_route('GET', '/foglamp/plugins/installed', plugin_discovery.get_plugins_installed) + app.router.add_route('POST', '/foglamp/plugins', install.add_plugin) # Filters app.router.add_route('POST', '/foglamp/filter', filters.create_filter) From f114e54c178c9e4c81307c6804e0cb7a5f845596 Mon Sep 17 00:00:00 2001 From: stefano Date: Mon, 8 Apr 2019 15:06:24 +0200 Subject: [PATCH 010/161] FOGL-2340: fixed test id 79/80 --- tests/unit/C/services/storage/sqlite/payloads/delete.json | 2 +- .../unit/C/services/storage/sqlite/payloads/error-fogl690.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/unit/C/services/storage/sqlite/payloads/delete.json b/tests/unit/C/services/storage/sqlite/payloads/delete.json index ffac71cc45..a203817641 100644 --- a/tests/unit/C/services/storage/sqlite/payloads/delete.json +++ b/tests/unit/C/services/storage/sqlite/payloads/delete.json @@ -2,6 +2,6 @@ "where" : { "column" : "key", "condition" : "=", - "value" : "DEVICE " + "value" : "DEVICE" } } diff --git a/tests/unit/C/services/storage/sqlite/payloads/error-fogl690.json 
b/tests/unit/C/services/storage/sqlite/payloads/error-fogl690.json index 4f4c40e8f4..4780a36fb3 100644 --- a/tests/unit/C/services/storage/sqlite/payloads/error-fogl690.json +++ b/tests/unit/C/services/storage/sqlite/payloads/error-fogl690.json @@ -1 +1 @@ -{"key": "DEVICE", "value": {"readings_insert_batch_size": {"type": "integer", "default": "100", "value": "100", "description": "The maximum number of readings in a batch of inserts"}, "max_concurrent_readings_inserts": {"type": "integer", "default": "5", "value": "5", "description": "The maximum number of concurrent processes that send batches of readings to storage"}, "readings_insert_batch_timeout_seconds": {"type": "integer", "default": "1", "value": "1", "description": "The number of seconds to wait for a readings list to reach the minimum batch size"}, "max_readings_insert_batch_connection_idle_seconds": {"type": "integer", "default": "60", "value": "60", "description": "Close storage connections used to insert readings when idle for this number of seconds"}, "readings_buffer_size": {"type": "integer", "default": "500", "value": "500", "description": "The maximum number of readings to buffer in memory"}, "write_statistics_frequency_seconds": {"type": "integer", "default": "5", "value": "5", "description": "The number of seconds to wait before writing readings-related statistics to storage"}, "max_readings_insert_batch_reconnect_wait_seconds": {"type": "integer", "default": "10", "value": "10", "description": "The maximum number of seconds to wait before reconnecting to storage when inserting readings"}}, "description": "Device server configuration"} +{"key": "DEVICE", "display_name": "DEVICE", "value": {"readings_insert_batch_size": {"type": "integer", "default": "100", "value": "100", "description": "The maximum number of readings in a batch of inserts"}, "max_concurrent_readings_inserts": {"type": "integer", "default": "5", "value": "5", "description": "The maximum number of concurrent processes that send 
batches of readings to storage"}, "readings_insert_batch_timeout_seconds": {"type": "integer", "default": "1", "value": "1", "description": "The number of seconds to wait for a readings list to reach the minimum batch size"}, "max_readings_insert_batch_connection_idle_seconds": {"type": "integer", "default": "60", "value": "60", "description": "Close storage connections used to insert readings when idle for this number of seconds"}, "readings_buffer_size": {"type": "integer", "default": "500", "value": "500", "description": "The maximum number of readings to buffer in memory"}, "write_statistics_frequency_seconds": {"type": "integer", "default": "5", "value": "5", "description": "The number of seconds to wait before writing readings-related statistics to storage"}, "max_readings_insert_batch_reconnect_wait_seconds": {"type": "integer", "default": "10", "value": "10", "description": "The maximum number of seconds to wait before reconnecting to storage when inserting readings"}}, "description": "Device server configuration"} From d209e22b61eb3e476d994903367359e41b697e7f Mon Sep 17 00:00:00 2001 From: ashish-jabble Date: Mon, 8 Apr 2019 19:35:20 +0530 Subject: [PATCH 011/161] auth token fixes for POST and PUT notification endpoint --- .../foglamp/services/core/api/notification.py | 20 +++++++++++++++---- 1 file changed, 16 insertions(+), 4 deletions(-) diff --git a/python/foglamp/services/core/api/notification.py b/python/foglamp/services/core/api/notification.py index 8ffe607169..5f3ac076e9 100644 --- a/python/foglamp/services/core/api/notification.py +++ b/python/foglamp/services/core/api/notification.py @@ -197,7 +197,13 @@ async def post_notification(request): try: # Get default config for rule and channel plugins url = '{}/plugin'.format(request.url) - list_plugins = json.loads(await _hit_get_url(url)) + try: + # When authentication is mandatory we need to pass token in request header + auth_token = request.token + except AttributeError: + auth_token = None + + 
list_plugins = json.loads(await _hit_get_url(url, auth_token)) r = list(filter(lambda rules: rules['name'] == rule, list_plugins['rules'])) c = list(filter(lambda channels: channels['name'] == channel, list_plugins['delivery'])) if len(r) == 0 or len(c) == 0: raise KeyError @@ -312,8 +318,13 @@ async def put_notification(request): url = str(request.url) url_parts = url.split("/foglamp/notification") url = '{}/foglamp/notification/plugin'.format(url_parts[0]) - list_plugins = json.loads(await _hit_get_url(url)) + try: + # When authentication is mandatory we need to pass token in request header + auth_token = request.token + except AttributeError: + auth_token = None + list_plugins = json.loads(await _hit_get_url(url, auth_token)) search_rule = rule if rule_changed else current_config['rule']['value'] r = list(filter(lambda rules: rules['name'] == search_rule, list_plugins['rules'])) if len(r) == 0: raise KeyError @@ -412,10 +423,11 @@ async def delete_notification(request): return web.json_response({'result': 'Notification {} deleted successfully.'.format(notif)}) -async def _hit_get_url(get_url): +async def _hit_get_url(get_url, token=None): + headers = {"Authorization": token} if token else None try: async with aiohttp.ClientSession() as session: - async with session.get(get_url) as resp: + async with session.get(get_url, headers=headers) as resp: status_code = resp.status jdoc = await resp.text() if status_code not in range(200, 209): From e9230c3a69666bbdb639c4c551f4e3524d145ebd Mon Sep 17 00:00:00 2001 From: stefano Date: Mon, 8 Apr 2019 16:11:42 +0200 Subject: [PATCH 012/161] FOGL-2340: fixed tests id 88/90 --- tests/unit/C/services/storage/sqlite/expected_ETC_UTC/80 | 2 +- tests/unit/C/services/storage/sqlite/expected_ETC_UTC/88 | 2 +- tests/unit/C/services/storage/sqlite/expected_ETC_UTC/89 | 2 +- tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/80 | 2 +- tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/88 | 2 +- 
tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/89 | 2 +- tests/unit/C/services/storage/sqlite/testset | 4 ++-- 7 files changed, 8 insertions(+), 8 deletions(-) diff --git a/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/80 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/80 index 29146ee48a..a8848cbc86 100644 --- a/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/80 +++ b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/80 @@ -1 +1 @@ -{ "response" : "deleted", "rows_affected" : 1 } \ No newline at end of file +{ "response" : "updated", "rows_affected" : 1 } \ No newline at end of file diff --git a/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/88 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/88 index a8848cbc86..93af3344da 100644 --- a/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/88 +++ b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/88 @@ -1 +1 @@ -{ "response" : "updated", "rows_affected" : 1 } \ No newline at end of file +{"count":1,"rows":[{"description":"added'some'ch'''ars'"}]} \ No newline at end of file diff --git a/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/89 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/89 index 93af3344da..29146ee48a 100644 --- a/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/89 +++ b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/89 @@ -1 +1 @@ -{"count":1,"rows":[{"description":"added'some'ch'''ars'"}]} \ No newline at end of file +{ "response" : "deleted", "rows_affected" : 1 } \ No newline at end of file diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/80 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/80 index 29146ee48a..a8848cbc86 100644 --- a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/80 +++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/80 @@ -1 +1 @@ -{ "response" : "deleted", "rows_affected" : 1 } \ No newline at end of file +{ "response" : "updated", 
"rows_affected" : 1 } \ No newline at end of file diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/88 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/88 index a8848cbc86..93af3344da 100644 --- a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/88 +++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/88 @@ -1 +1 @@ -{ "response" : "updated", "rows_affected" : 1 } \ No newline at end of file +{"count":1,"rows":[{"description":"added'some'ch'''ars'"}]} \ No newline at end of file diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/89 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/89 index 93af3344da..29146ee48a 100644 --- a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/89 +++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/89 @@ -1 +1 @@ -{"count":1,"rows":[{"description":"added'some'ch'''ars'"}]} \ No newline at end of file +{ "response" : "deleted", "rows_affected" : 1 } \ No newline at end of file diff --git a/tests/unit/C/services/storage/sqlite/testset b/tests/unit/C/services/storage/sqlite/testset index f1441991da..52f39b8ce9 100644 --- a/tests/unit/C/services/storage/sqlite/testset +++ b/tests/unit/C/services/storage/sqlite/testset @@ -77,7 +77,7 @@ Newer Bad,PUT,http://localhost:8080/storage/table/test2/query,newerBad.json Like,PUT,http://localhost:8080/storage/table/test2/query,where_like.json Group Time,PUT,http://localhost:8080/storage/reading/query,group_time.json Jira FOGL-690,POST,http://localhost:8080/storage/table/configuration,error-fogl690.json -Jira FOGL-690 cleanup,DELETE,http://localhost:8080/storage/table/configuration,delete.json +Set-FOGL-983,PUT,http://localhost:8080/storage/table/configuration,FOGL-983.json Add bad Readings,POST,http://localhost:8080/storage/reading,badreadings.json Query Readings Timebucket Bad,PUT,http://localhost:8080/storage/reading/query,query_readings_timebucket_bad.json Reading Rate 
Array,PUT,http://localhost:8080/storage/reading/query,reading_property_array.json @@ -85,8 +85,8 @@ Common Read limit max_int,PUT,http://localhost:8080/storage/table/test/query,lim Common Read skip max_int,PUT,http://localhost:8080/storage/table/test/query,skip_max_int.json Timezone,PUT,http://localhost:8080/storage/table/test2/query,timezone.json Bad Timezone,PUT,http://localhost:8080/storage/table/test2/query,timezone_bad.json -Set-FOGL-983,PUT,http://localhost:8080/storage/table/configuration,FOGL-983.json Get-FOGL-983,PUT,http://localhost:8080/storage/table/configuration/query,get-FOGL-983.json +Jira FOGL-690 cleanup,DELETE,http://localhost:8080/storage/table/configuration,delete.json Update now,PUT,http://localhost:8080/storage/table/test2,updatenow.json Get Reading series group by minutes,PUT,http://localhost:8080/storage/reading/query,series_group_by_minutes.json Get Reading series (seconds),PUT,http://localhost:8080/storage/reading/query,series_seconds.json From bbfe05bcf87a90ae646a55b855b90f743aac93ce Mon Sep 17 00:00:00 2001 From: pintomax Date: Tue, 9 Apr 2019 17:31:34 +0200 Subject: [PATCH 013/161] FOGL-2710: added POST, PUT. DELETE snapshots for Postgres storage plugin (#1516) FOGL-2710: added POST, PUT. 
DELETE snapshots for storage plugins --- tests/unit/C/services/storage/postgres/expected_ETC_UTC/111 | 1 + tests/unit/C/services/storage/postgres/expected_ETC_UTC/112 | 1 + tests/unit/C/services/storage/postgres/expected_ETC_UTC/113 | 1 + .../unit/C/services/storage/postgres/expected_EUROPE_ROME/111 | 1 + .../unit/C/services/storage/postgres/expected_EUROPE_ROME/112 | 1 + .../unit/C/services/storage/postgres/expected_EUROPE_ROME/113 | 1 + .../C/services/storage/postgres/payloads/add_snapshot.json | 1 + tests/unit/C/services/storage/postgres/testset | 3 +++ tests/unit/C/services/storage/sqlite/expected_ETC_UTC/112 | 1 + tests/unit/C/services/storage/sqlite/expected_ETC_UTC/113 | 1 + tests/unit/C/services/storage/sqlite/expected_ETC_UTC/114 | 1 + tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/112 | 1 + tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/113 | 1 + tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/114 | 1 + .../unit/C/services/storage/sqlite/payloads/add_snapshot.json | 1 + tests/unit/C/services/storage/sqlite/testset | 3 +++ 16 files changed, 20 insertions(+) create mode 100644 tests/unit/C/services/storage/postgres/expected_ETC_UTC/111 create mode 100644 tests/unit/C/services/storage/postgres/expected_ETC_UTC/112 create mode 100644 tests/unit/C/services/storage/postgres/expected_ETC_UTC/113 create mode 100644 tests/unit/C/services/storage/postgres/expected_EUROPE_ROME/111 create mode 100644 tests/unit/C/services/storage/postgres/expected_EUROPE_ROME/112 create mode 100644 tests/unit/C/services/storage/postgres/expected_EUROPE_ROME/113 create mode 100644 tests/unit/C/services/storage/postgres/payloads/add_snapshot.json create mode 100644 tests/unit/C/services/storage/sqlite/expected_ETC_UTC/112 create mode 100644 tests/unit/C/services/storage/sqlite/expected_ETC_UTC/113 create mode 100644 tests/unit/C/services/storage/sqlite/expected_ETC_UTC/114 create mode 100644 tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/112 
create mode 100644 tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/113 create mode 100644 tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/114 create mode 100644 tests/unit/C/services/storage/sqlite/payloads/add_snapshot.json diff --git a/tests/unit/C/services/storage/postgres/expected_ETC_UTC/111 b/tests/unit/C/services/storage/postgres/expected_ETC_UTC/111 new file mode 100644 index 0000000000..234f02926b --- /dev/null +++ b/tests/unit/C/services/storage/postgres/expected_ETC_UTC/111 @@ -0,0 +1 @@ +{"created": {"id": "99", "table": "test2"} } \ No newline at end of file diff --git a/tests/unit/C/services/storage/postgres/expected_ETC_UTC/112 b/tests/unit/C/services/storage/postgres/expected_ETC_UTC/112 new file mode 100644 index 0000000000..718ce95693 --- /dev/null +++ b/tests/unit/C/services/storage/postgres/expected_ETC_UTC/112 @@ -0,0 +1 @@ +{"loaded": {"id": "99", "table": "test2"} } \ No newline at end of file diff --git a/tests/unit/C/services/storage/postgres/expected_ETC_UTC/113 b/tests/unit/C/services/storage/postgres/expected_ETC_UTC/113 new file mode 100644 index 0000000000..c808c6c192 --- /dev/null +++ b/tests/unit/C/services/storage/postgres/expected_ETC_UTC/113 @@ -0,0 +1 @@ +{"deleted": {"id": "99", "table": "test2"} } \ No newline at end of file diff --git a/tests/unit/C/services/storage/postgres/expected_EUROPE_ROME/111 b/tests/unit/C/services/storage/postgres/expected_EUROPE_ROME/111 new file mode 100644 index 0000000000..234f02926b --- /dev/null +++ b/tests/unit/C/services/storage/postgres/expected_EUROPE_ROME/111 @@ -0,0 +1 @@ +{"created": {"id": "99", "table": "test2"} } \ No newline at end of file diff --git a/tests/unit/C/services/storage/postgres/expected_EUROPE_ROME/112 b/tests/unit/C/services/storage/postgres/expected_EUROPE_ROME/112 new file mode 100644 index 0000000000..718ce95693 --- /dev/null +++ b/tests/unit/C/services/storage/postgres/expected_EUROPE_ROME/112 @@ -0,0 +1 @@ +{"loaded": {"id": "99", "table": 
"test2"} } \ No newline at end of file diff --git a/tests/unit/C/services/storage/postgres/expected_EUROPE_ROME/113 b/tests/unit/C/services/storage/postgres/expected_EUROPE_ROME/113 new file mode 100644 index 0000000000..c808c6c192 --- /dev/null +++ b/tests/unit/C/services/storage/postgres/expected_EUROPE_ROME/113 @@ -0,0 +1 @@ +{"deleted": {"id": "99", "table": "test2"} } \ No newline at end of file diff --git a/tests/unit/C/services/storage/postgres/payloads/add_snapshot.json b/tests/unit/C/services/storage/postgres/payloads/add_snapshot.json new file mode 100644 index 0000000000..860b259193 --- /dev/null +++ b/tests/unit/C/services/storage/postgres/payloads/add_snapshot.json @@ -0,0 +1 @@ +{ "id" : "99" } diff --git a/tests/unit/C/services/storage/postgres/testset b/tests/unit/C/services/storage/postgres/testset index 29500f50b7..7672059ea5 100644 --- a/tests/unit/C/services/storage/postgres/testset +++ b/tests/unit/C/services/storage/postgres/testset @@ -108,4 +108,7 @@ microseconds - Query asset aggregate min array,PUT,http://localhost:8080/storage Update JSON value as function,PUT,http://localhost:8080/storage/table/test,put_function_in_JSON.json Update JSON value in JSON value,PUT,http://localhost:8080/storage/table/test,put_json_in_JSON.json Get updated complex JSON value,PUT,http://localhost:8080/storage/table/test/query,get_updated_complex_JSON.json +Add table snapshot,POST,http://localhost:8080/storage/table/test2/snapshot,add_snapshot.json +Load table snapshot,PUT,http://localhost:8080/storage/table/test2/snapshot/99, +Delete table snapshot,DELETE,http://localhost:8080/storage/table/test2/snapshot/99, Shutdown,POST,http://localhost:1081/foglamp/service/shutdown,,checkstate diff --git a/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/112 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/112 new file mode 100644 index 0000000000..234f02926b --- /dev/null +++ b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/112 @@ -0,0 +1 @@ +{"created": 
{"id": "99", "table": "test2"} } \ No newline at end of file diff --git a/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/113 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/113 new file mode 100644 index 0000000000..718ce95693 --- /dev/null +++ b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/113 @@ -0,0 +1 @@ +{"loaded": {"id": "99", "table": "test2"} } \ No newline at end of file diff --git a/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/114 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/114 new file mode 100644 index 0000000000..c808c6c192 --- /dev/null +++ b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/114 @@ -0,0 +1 @@ +{"deleted": {"id": "99", "table": "test2"} } \ No newline at end of file diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/112 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/112 new file mode 100644 index 0000000000..234f02926b --- /dev/null +++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/112 @@ -0,0 +1 @@ +{"created": {"id": "99", "table": "test2"} } \ No newline at end of file diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/113 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/113 new file mode 100644 index 0000000000..718ce95693 --- /dev/null +++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/113 @@ -0,0 +1 @@ +{"loaded": {"id": "99", "table": "test2"} } \ No newline at end of file diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/114 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/114 new file mode 100644 index 0000000000..c808c6c192 --- /dev/null +++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/114 @@ -0,0 +1 @@ +{"deleted": {"id": "99", "table": "test2"} } \ No newline at end of file diff --git a/tests/unit/C/services/storage/sqlite/payloads/add_snapshot.json b/tests/unit/C/services/storage/sqlite/payloads/add_snapshot.json new file mode 100644 index 
0000000000..860b259193 --- /dev/null +++ b/tests/unit/C/services/storage/sqlite/payloads/add_snapshot.json @@ -0,0 +1 @@ +{ "id" : "99" } diff --git a/tests/unit/C/services/storage/sqlite/testset b/tests/unit/C/services/storage/sqlite/testset index 52f39b8ce9..0cf0d71cac 100644 --- a/tests/unit/C/services/storage/sqlite/testset +++ b/tests/unit/C/services/storage/sqlite/testset @@ -109,4 +109,7 @@ microseconds - Query asset aggregate min array,PUT,http://localhost:8080/storage update - Update multi rows,PUT,http://localhost:8080/storage/table/test2,update_multi_rows.json insert - Insert 1 row,POST,http://localhost:8080/storage/table/test2,insert_1row.json insert - Insert multi rows,POST,http://localhost:8080/storage/table/test2,insert_multi_rows.json +Add table snapshot,POST,http://localhost:8080/storage/table/test2/snapshot,add_snapshot.json +Load table snapshot,PUT,http://localhost:8080/storage/table/test2/snapshot/99, +Delete table snapshot,DELETE,http://localhost:8080/storage/table/test2/snapshot/99, Shutdown,POST,http://localhost:1081/foglamp/service/shutdown,,checkstate From 5b6e1d63853876a04ff4d6f33181018ab8afb2ef Mon Sep 17 00:00:00 2001 From: pintomax Date: Tue, 9 Apr 2019 17:39:43 +0200 Subject: [PATCH 014/161] FOGL-2698: addition of GET table snapshots endpoiont (#1513) FOGL-2698: GET endpoint for snapshots --- C/plugins/storage/postgres/connection.cpp | 61 +++++++++++++-- .../storage/postgres/include/connection.h | 2 + C/plugins/storage/postgres/plugin.cpp | 23 +++++- .../storage/sqlite/common/connection.cpp | 78 +++++++++++++++++-- .../sqlite/common/include/connection.h | 1 + C/plugins/storage/sqlite/plugin.cpp | 23 +++++- C/services/storage/include/storage_api.h | 4 +- C/services/storage/include/storage_plugin.h | 2 + C/services/storage/storage_api.cpp | 45 +++++++++++ C/services/storage/storage_plugin.cpp | 11 +++ 10 files changed, 231 insertions(+), 19 deletions(-) diff --git a/C/plugins/storage/postgres/connection.cpp 
b/C/plugins/storage/postgres/connection.cpp index 82a2f6a121..0f4ca54cf3 100644 --- a/C/plugins/storage/postgres/connection.cpp +++ b/C/plugins/storage/postgres/connection.cpp @@ -2555,12 +2555,12 @@ void Connection::logSQL(const char *tag, const char *stmt) * @return -1 on error, >= 0 on success * * The new created table name has the name: - * table_id + * $table_snap$id */ int Connection::create_table_snapshot(const string& table, const string& id) { string query = "SELECT * INTO TABLE foglamp."; - query += table + "_" + id + " FROM foglamp." + table; + query += table + "_snap" + id + " FROM foglamp." + table; logSQL("CreateTableSnapshot", query.c_str()); @@ -2589,7 +2589,7 @@ int Connection::load_table_snapshot(const string& table, const string& id) string purgeQuery = "DELETE FROM foglamp." + table; string query = "START TRANSACTION; " + purgeQuery; query += "; INSERT INTO foglamp." + table; - query += " SELECT * FROM foglamp." + table + "_" + id; + query += " SELECT * FROM foglamp." + table + "_snap" + id; query += "; COMMIT;"; logSQL("LoadTableSnapshot", query.c_str()); @@ -2617,18 +2617,15 @@ int Connection::load_table_snapshot(const string& table, const string& id) } /** - * Create snapshot of a common table + * Delete a snapshot of a common table * * @param table The table to snapshot * @param id The snapshot id * @return -1 on error, >= 0 on success - * - * The new created table name has the name: - * table_id */ int Connection::delete_table_snapshot(const string& table, const string& id) { - string query = "DROP TABLE foglamp." + table + "_" + id; + string query = "DROP TABLE foglamp." 
+ table + "_snap" + id; logSQL("DeleteTableSnapshot", query.c_str()); @@ -2643,3 +2640,51 @@ int Connection::delete_table_snapshot(const string& table, const string& id) PQclear(res); return -1; } + +/** + * Get list of snapshots for a given common table + * + * @param table The given table name + * @param resultSet Output data buffer + * @return True on success, false on database errors + */ +bool Connection::get_table_snapshots(const string& table, + string& resultSet) +{ +SQLBuffer sql; + try + { + sql.append("SELECT REPLACE(table_name, '"); + sql.append(table); + sql.append("_snap', '') AS id FROM information_schema.tables "); + sql.append("WHERE table_schema = 'foglamp' AND table_name LIKE '"); + sql.append(table); + sql.append("_snap%';"); + + const char *query = sql.coalesce(); + logSQL("GetTableSnapshots", query); + + PGresult *res = PQexec(dbConnection, query); + delete[] query; + if (PQresultStatus(res) == PGRES_TUPLES_OK) + { + mapResultSet(res, resultSet); + PQclear(res); + + return true; + } + char *SQLState = PQresultErrorField(res, PG_DIAG_SQLSTATE); + if (!strcmp(SQLState, "22P02")) // Conversion error + { + raiseError("get_table_snapshots", "Unable to convert data to the required type"); + } + else + { + raiseError("get_table_snapshots", PQerrorMessage(dbConnection)); + } + PQclear(res); + return false; + } catch (exception e) { + raiseError("get_table_snapshots", "Internal error: %s", e.what()); + } +} diff --git a/C/plugins/storage/postgres/include/connection.h b/C/plugins/storage/postgres/include/connection.h index 1887827055..fee678a95e 100644 --- a/C/plugins/storage/postgres/include/connection.h +++ b/C/plugins/storage/postgres/include/connection.h @@ -34,6 +34,8 @@ class Connection { int create_table_snapshot(const std::string& table, const std::string& id); int load_table_snapshot(const std::string& table, const std::string& id); int delete_table_snapshot(const std::string& table, const std::string& id); + bool get_table_snapshots(const 
std::string& table, + std::string& resultSet); private: bool m_logSQL; diff --git a/C/plugins/storage/postgres/plugin.cpp b/C/plugins/storage/postgres/plugin.cpp index 75c9098430..162627be3e 100644 --- a/C/plugins/storage/postgres/plugin.cpp +++ b/C/plugins/storage/postgres/plugin.cpp @@ -39,7 +39,7 @@ static PLUGIN_INFORMATION info = { "1.0.0", // Version SP_COMMON|SP_READINGS, // Flags PLUGIN_TYPE_STORAGE, // Type - "1.1.0" // Interface version + "1.2.0" // Interface version }; /** @@ -286,6 +286,25 @@ Connection *connection = manager->allocate(); manager->release(connection); return result; } +/** + * Get all snapshots of a given common table + * + * @param handle The plugin handle + * @param table The table name + * @return List of snapshots (even empty list) or NULL for errors + * + */ +const char* plugin_get_table_snapshots(PLUGIN_HANDLE handle, + char *table) +{ +ConnectionManager *manager = (ConnectionManager *)handle; +Connection *connection = manager->allocate(); +std::string results; -}; + bool rval = connection->get_table_snapshots(std::string(table), results); + manager->release(connection); + return rval ? strdup(results.c_str()) : NULL; +} + +}; diff --git a/C/plugins/storage/sqlite/common/connection.cpp b/C/plugins/storage/sqlite/common/connection.cpp index 80c69c0012..f91ffe72fc 100644 --- a/C/plugins/storage/sqlite/common/connection.cpp +++ b/C/plugins/storage/sqlite/common/connection.cpp @@ -2840,12 +2840,12 @@ SQLBuffer sql; * @return -1 on error, >= 0 on success * * The new created table name has the name: - * table_id + * $table_snap$id */ int Connection::create_table_snapshot(const string& table, const string& id) { string query = "CREATE TABLE foglamp."; - query += table + "_" + id + " AS SELECT * FROM foglamp." + table; + query += table + "_snap" + id + " AS SELECT * FROM foglamp." 
+ table; logSQL("CreateTableSnapshot", query.c_str()); @@ -2882,7 +2882,7 @@ int Connection::load_table_snapshot(const string& table, const string& id) string purgeQuery = "DELETE FROM foglamp." + table; string query = "BEGIN TRANSACTION; "; query += purgeQuery +"; INSERT INTO foglamp." + table; - query += " SELECT * FROM foglamp." + table + "_" + id; + query += " SELECT * FROM foglamp." + table + "_snap" + id; query += "; COMMIT TRANSACTION;"; logSQL("LoadTableSnapshot", query.c_str()); @@ -2923,18 +2923,16 @@ int Connection::load_table_snapshot(const string& table, const string& id) } /** - * Create snapshot of a common table + * Delete a snapshot of a common table * * @param table The table to snapshot * @param id The snapshot id * @return -1 on error, >= 0 on success * - * The new created table name has the name: - * table_id */ int Connection::delete_table_snapshot(const string& table, const string& id) { - string query = "DROP TABLE foglamp." + table + "_" + id; + string query = "DROP TABLE foglamp." 
+ table + "_snap" + id; logSQL("DeleteTableSnapshot", query.c_str()); @@ -2957,4 +2955,70 @@ int Connection::delete_table_snapshot(const string& table, const string& id) return -1; } } + +/** + * Get list of snapshots for a given common table + * + * @param table The given table name + */ +bool Connection::get_table_snapshots(const string& table, + string& resultSet) +{ +SQLBuffer sql; + try { + if (dbHandle == NULL) + { + raiseError("retrieve", "No SQLite 3 db connection available"); + return false; + } + sql.append("SELECT REPLACE(name, '"); + sql.append(table); + sql.append("_snap', '') AS id FROM sqlite_master WHERE type='table' AND name LIKE '"); + sql.append(table); + sql.append("_snap%';"); + + const char *query = sql.coalesce(); + char *zErrMsg = NULL; + int rc; + sqlite3_stmt *stmt; + + logSQL("GetTableSnapshots", query); + + // Prepare the SQL statement and get the result set + rc = sqlite3_prepare_v2(dbHandle, query, -1, &stmt, NULL); + + if (rc != SQLITE_OK) + { + raiseError("get_table_snapshots", sqlite3_errmsg(dbHandle)); + Logger::getLogger()->error("SQL statement: %s", query); + delete[] query; + return false; + } + + // Call result set mapping + rc = mapResultSet(stmt, resultSet); + + // Delete result set + sqlite3_finalize(stmt); + + // Check result set mapping errors + if (rc != SQLITE_DONE) + { + raiseError("get_table_snapshots", sqlite3_errmsg(dbHandle)); + Logger::getLogger()->error("SQL statement: %s", query); + delete[] query; + // Failure + return false; + } + + // Release memory for 'query' var + delete[] query; + // Success + return true; + } catch (exception e) { + raiseError("get_table_snapshots", "Internal error: %s", e.what()); + // Failure + return false; + } +} #endif diff --git a/C/plugins/storage/sqlite/common/include/connection.h b/C/plugins/storage/sqlite/common/include/connection.h index 96be6b929d..00ec9a1847 100644 --- a/C/plugins/storage/sqlite/common/include/connection.h +++ 
b/C/plugins/storage/sqlite/common/include/connection.h @@ -77,6 +77,7 @@ class Connection { int create_table_snapshot(const std::string& table, const std::string& id); int load_table_snapshot(const std::string& table, const std::string& id); int delete_table_snapshot(const std::string& table, const std::string& id); + bool get_table_snapshots(const std::string& table, std::string& resultSet); #endif int appendReadings(const char *readings); bool fetchReadings(unsigned long id, unsigned int blksize, diff --git a/C/plugins/storage/sqlite/plugin.cpp b/C/plugins/storage/sqlite/plugin.cpp index cf2af154fe..eabe863287 100644 --- a/C/plugins/storage/sqlite/plugin.cpp +++ b/C/plugins/storage/sqlite/plugin.cpp @@ -39,7 +39,7 @@ static PLUGIN_INFORMATION info = { "1.0.0", // Version SP_COMMON|SP_READINGS, // Flags PLUGIN_TYPE_STORAGE, // Type - "1.0.0" // Interface version + "1.2.0" // Interface version }; /** @@ -286,5 +286,26 @@ Connection *connection = manager->allocate(); return result; } +/** + * Get all snapshots of a given common table + * + * @param handle The plugin handle + * @param table The table name + * @return List of snapshots (even empty list) or NULL for errors + * + */ +const char* plugin_get_table_snapshots(PLUGIN_HANDLE handle, + char *table) +{ +ConnectionManager *manager = (ConnectionManager *)handle; +Connection *connection = manager->allocate(); +std::string results; + + bool rval = connection->get_table_snapshots(std::string(table), results); + manager->release(connection); + + return rval ? 
strdup(results.c_str()) : NULL; +} + }; diff --git a/C/services/storage/include/storage_api.h b/C/services/storage/include/storage_api.h index c60c6c99c9..4e31548f55 100644 --- a/C/services/storage/include/storage_api.h +++ b/C/services/storage/include/storage_api.h @@ -27,7 +27,8 @@ using HttpServer = SimpleWeb::Server; #define READING_QUERY "^/storage/reading/query" #define READING_PURGE "^/storage/reading/purge" #define READING_INTEREST "^/storage/reading/interest/([A-Za-z\\*][a-zA-Z0-9_]*)$" -#define CREATE_TABLE_SNAPSHOT "^/storage/table/([A-Za-z][a-zA-Z_0-9_]*)/snapshot$" +#define GET_TABLE_SNAPSHOTS "^/storage/table/([A-Za-z][a-zA-Z_0-9_]*)/snapshot$" +#define CREATE_TABLE_SNAPSHOT GET_TABLE_SNAPSHOTS #define LOAD_TABLE_SNAPSHOT "^/storage/table/([A-Za-z][a-zA-Z_0-9_]*)/snapshot/([a-zA-Z_0-9_]*)$" #define DELETE_TABLE_SNAPSHOT LOAD_TABLE_SNAPSHOT @@ -71,6 +72,7 @@ class StorageApi { void createTableSnapshot(shared_ptr response, shared_ptr request); void loadTableSnapshot(shared_ptr response, shared_ptr request); void deleteTableSnapshot(shared_ptr response, shared_ptr request); + void getTableSnapshots(shared_ptr response, shared_ptr request); void printList(); public: diff --git a/C/services/storage/include/storage_plugin.h b/C/services/storage/include/storage_plugin.h index bbda989d5e..818c334f8e 100644 --- a/C/services/storage/include/storage_plugin.h +++ b/C/services/storage/include/storage_plugin.h @@ -47,6 +47,7 @@ class StoragePlugin : public Plugin { int createTableSnapshot(const std::string& table, const std::string& id); int loadTableSnapshot(const std::string& table, const std::string& id); int deleteTableSnapshot(const std::string& table, const std::string& id); + char *getTableSnapshots(const std::string& table); PLUGIN_ERROR *lastError(); private: @@ -63,6 +64,7 @@ class StoragePlugin : public Plugin { int (*createTableSnapshotPtr)(PLUGIN_HANDLE, const char *, const char *); int (*loadTableSnapshotPtr)(PLUGIN_HANDLE, const char *, const char 
*); int (*deleteTableSnapshotPtr)(PLUGIN_HANDLE, const char *, const char *); + char *(*getTableSnapshotsPtr)(PLUGIN_HANDLE, const char *); PLUGIN_ERROR *(*lastErrorPtr)(PLUGIN_HANDLE); }; diff --git a/C/services/storage/storage_api.cpp b/C/services/storage/storage_api.cpp index 8149ab2249..5b6c11e4a5 100644 --- a/C/services/storage/storage_api.cpp +++ b/C/services/storage/storage_api.cpp @@ -283,6 +283,15 @@ void deleteTableSnapshotWrapper(shared_ptr response, api->deleteTableSnapshot(response, request); } +/** + * Wrapper function for the delete snapshot API call. + */ +void getTableSnapshotsWrapper(shared_ptr response, + shared_ptr request) +{ + StorageApi *api = StorageApi::getInstance(); + api->getTableSnapshots(response, request); +} /** * Construct the singleton Storage API @@ -343,6 +352,7 @@ void StorageApi::initResources() m_server->resource[CREATE_TABLE_SNAPSHOT]["POST"] = createTableSnapshotWrapper; m_server->resource[LOAD_TABLE_SNAPSHOT]["PUT"] = loadTableSnapshotWrapper; m_server->resource[DELETE_TABLE_SNAPSHOT]["DELETE"] = deleteTableSnapshotWrapper; + m_server->resource[GET_TABLE_SNAPSHOTS]["GET"] = getTableSnapshotsWrapper; m_server->resource[READING_ACCESS]["POST"] = readingAppendWrapper; m_server->resource[READING_ACCESS]["GET"] = readingFetchWrapper; @@ -1144,3 +1154,38 @@ string payload; respond(response, responsePayload); } } + +/** + * Get list of a table snapshots + */ +void StorageApi::getTableSnapshots(shared_ptr response, + shared_ptr request) +{ +string sTable; +string payload; + + try + { + payload = request->content.string(); + sTable = request->path_match[TABLE_NAME_COMPONENT]; + + // Get plugin data + char* pluginResult = plugin->getTableSnapshots(sTable); + if (pluginResult) + { + string res = pluginResult; + respond(response, res); + free(pluginResult); + } + else + { + string responsePayload; + mapError(responsePayload, plugin->lastError()); + respond(response, + SimpleWeb::StatusCode::client_error_bad_request, + responsePayload); 
+ } + } catch (exception ex) { + internalError(response, ex); + } +} diff --git a/C/services/storage/storage_plugin.cpp b/C/services/storage/storage_plugin.cpp index 5a3803c480..cf2d2cc968 100644 --- a/C/services/storage/storage_plugin.cpp +++ b/C/services/storage/storage_plugin.cpp @@ -57,6 +57,9 @@ StoragePlugin::StoragePlugin(PLUGIN_HANDLE handle) : Plugin(handle) deleteTableSnapshotPtr = (int (*)(PLUGIN_HANDLE, const char*, const char*)) manager->resolveSymbol(handle, "plugin_delete_table_snapshot"); + getTableSnapshotsPtr = + (char * (*)(PLUGIN_HANDLE, const char*)) + manager->resolveSymbol(handle, "plugin_get_table_snapshots"); } /** @@ -162,3 +165,11 @@ int StoragePlugin::deleteTableSnapshot(const string& table, const string& id) { return this->deleteTableSnapshotPtr(instance, table.c_str(), id.c_str()); } + +/** + * Call the get table snaphot method in the plugin + */ +char *StoragePlugin::getTableSnapshots(const string& table) +{ + return this->getTableSnapshotsPtr(instance, table.c_str()); +} From 1b9972876da82644149e365f4985807b88c86e2a Mon Sep 17 00:00:00 2001 From: stefano Date: Wed, 10 Apr 2019 09:34:54 +0200 Subject: [PATCH 015/161] FOGL-2510: fixed test id 13: error message changed the format --- tests/unit/C/services/storage/postgres/expected_ETC_UTC/13 | 5 +---- .../unit/C/services/storage/postgres/expected_EUROPE_ROME/13 | 5 +---- 2 files changed, 2 insertions(+), 8 deletions(-) diff --git a/tests/unit/C/services/storage/postgres/expected_ETC_UTC/13 b/tests/unit/C/services/storage/postgres/expected_ETC_UTC/13 index d3c0b8f14c..f4deef91e7 100644 --- a/tests/unit/C/services/storage/postgres/expected_ETC_UTC/13 +++ b/tests/unit/C/services/storage/postgres/expected_ETC_UTC/13 @@ -1,4 +1 @@ -{ "entryPoint" : "insert", "message" : "ERROR: column \"Nonexistant\" of relation \"test\" does not exist -LINE 1: ...foglamp.test (\"id\", \"key\", \"description\", \"data\", \"Nonexista... 
- ^ -", "retryable" : false} \ No newline at end of file +{ "entryPoint" : "insert", "message" : "ERROR: column \"nonexistant\" of relation \"test\" does not existLINE 1: ...RT INTO foglamp.test (id, key, description, data, Nonexistan... ^", "retryable" : false} \ No newline at end of file diff --git a/tests/unit/C/services/storage/postgres/expected_EUROPE_ROME/13 b/tests/unit/C/services/storage/postgres/expected_EUROPE_ROME/13 index d3c0b8f14c..f4deef91e7 100644 --- a/tests/unit/C/services/storage/postgres/expected_EUROPE_ROME/13 +++ b/tests/unit/C/services/storage/postgres/expected_EUROPE_ROME/13 @@ -1,4 +1 @@ -{ "entryPoint" : "insert", "message" : "ERROR: column \"Nonexistant\" of relation \"test\" does not exist -LINE 1: ...foglamp.test (\"id\", \"key\", \"description\", \"data\", \"Nonexista... - ^ -", "retryable" : false} \ No newline at end of file +{ "entryPoint" : "insert", "message" : "ERROR: column \"nonexistant\" of relation \"test\" does not existLINE 1: ...RT INTO foglamp.test (id, key, description, data, Nonexistan... 
^", "retryable" : false} \ No newline at end of file From 2170d9007501b2df1cbb03f606c9257dcf2d618c Mon Sep 17 00:00:00 2001 From: stefano Date: Wed, 10 Apr 2019 09:41:29 +0200 Subject: [PATCH 016/161] FOGL-2510: fixed test id 14: error message changed the format --- tests/unit/C/services/storage/postgres/expected_ETC_UTC/14 | 3 +-- tests/unit/C/services/storage/postgres/expected_EUROPE_ROME/14 | 3 +-- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/tests/unit/C/services/storage/postgres/expected_ETC_UTC/14 b/tests/unit/C/services/storage/postgres/expected_ETC_UTC/14 index 95390e2964..fe8d3870cf 100644 --- a/tests/unit/C/services/storage/postgres/expected_ETC_UTC/14 +++ b/tests/unit/C/services/storage/postgres/expected_ETC_UTC/14 @@ -1,2 +1 @@ -{ "entryPoint" : "insert", "message" : "Failed to parse JSON payload -", "retryable" : false} \ No newline at end of file +{ "entryPoint" : "insert", "message" : "Failed to parse JSON payload", "retryable" : false} \ No newline at end of file diff --git a/tests/unit/C/services/storage/postgres/expected_EUROPE_ROME/14 b/tests/unit/C/services/storage/postgres/expected_EUROPE_ROME/14 index 95390e2964..fe8d3870cf 100644 --- a/tests/unit/C/services/storage/postgres/expected_EUROPE_ROME/14 +++ b/tests/unit/C/services/storage/postgres/expected_EUROPE_ROME/14 @@ -1,2 +1 @@ -{ "entryPoint" : "insert", "message" : "Failed to parse JSON payload -", "retryable" : false} \ No newline at end of file +{ "entryPoint" : "insert", "message" : "Failed to parse JSON payload", "retryable" : false} \ No newline at end of file From 6682a257d73aeb463fd6bdf765ddfd2123cbcb75 Mon Sep 17 00:00:00 2001 From: stefano Date: Wed, 10 Apr 2019 09:46:05 +0200 Subject: [PATCH 017/161] FOGL-2510: fixed test id 34,35: error message changed the format --- tests/unit/C/services/storage/postgres/expected_ETC_UTC/34 | 5 +---- tests/unit/C/services/storage/postgres/expected_ETC_UTC/35 | 5 +---- .../unit/C/services/storage/postgres/expected_EUROPE_ROME/34 
| 5 +---- .../unit/C/services/storage/postgres/expected_EUROPE_ROME/35 | 5 +---- 4 files changed, 4 insertions(+), 16 deletions(-) diff --git a/tests/unit/C/services/storage/postgres/expected_ETC_UTC/34 b/tests/unit/C/services/storage/postgres/expected_ETC_UTC/34 index c67f744585..7ce5660a31 100644 --- a/tests/unit/C/services/storage/postgres/expected_ETC_UTC/34 +++ b/tests/unit/C/services/storage/postgres/expected_ETC_UTC/34 @@ -1,4 +1 @@ -{ "entryPoint" : "retrieve", "message" : "ERROR: relation \"foglamp.doesntexist\" does not exist -LINE 1: SELECT * FROM foglamp.doesntexist; - ^ -", "retryable" : false} \ No newline at end of file +{ "entryPoint" : "retrieve", "message" : "ERROR: relation \"foglamp.doesntexist\" does not existLINE 1: SELECT * FROM foglamp.doesntexist; ^", "retryable" : false} \ No newline at end of file diff --git a/tests/unit/C/services/storage/postgres/expected_ETC_UTC/35 b/tests/unit/C/services/storage/postgres/expected_ETC_UTC/35 index 55b1201442..b01cbb736b 100644 --- a/tests/unit/C/services/storage/postgres/expected_ETC_UTC/35 +++ b/tests/unit/C/services/storage/postgres/expected_ETC_UTC/35 @@ -1,4 +1 @@ -{ "entryPoint" : "retrieve", "message" : "ERROR: column \"doesntexist\" does not exist -LINE 1: SELECT * FROM foglamp.test WHERE \"doesntexist\" = '9'; - ^ -", "retryable" : false} \ No newline at end of file +{ "entryPoint" : "retrieve", "message" : "ERROR: column \"doesntexist\" does not existLINE 1: SELECT * FROM foglamp.test WHERE \"doesntexist\" = '9'; ^", "retryable" : false} \ No newline at end of file diff --git a/tests/unit/C/services/storage/postgres/expected_EUROPE_ROME/34 b/tests/unit/C/services/storage/postgres/expected_EUROPE_ROME/34 index c67f744585..7ce5660a31 100644 --- a/tests/unit/C/services/storage/postgres/expected_EUROPE_ROME/34 +++ b/tests/unit/C/services/storage/postgres/expected_EUROPE_ROME/34 @@ -1,4 +1 @@ -{ "entryPoint" : "retrieve", "message" : "ERROR: relation \"foglamp.doesntexist\" does not exist -LINE 1: 
SELECT * FROM foglamp.doesntexist; - ^ -", "retryable" : false} \ No newline at end of file +{ "entryPoint" : "retrieve", "message" : "ERROR: relation \"foglamp.doesntexist\" does not existLINE 1: SELECT * FROM foglamp.doesntexist; ^", "retryable" : false} \ No newline at end of file diff --git a/tests/unit/C/services/storage/postgres/expected_EUROPE_ROME/35 b/tests/unit/C/services/storage/postgres/expected_EUROPE_ROME/35 index 55b1201442..b01cbb736b 100644 --- a/tests/unit/C/services/storage/postgres/expected_EUROPE_ROME/35 +++ b/tests/unit/C/services/storage/postgres/expected_EUROPE_ROME/35 @@ -1,4 +1 @@ -{ "entryPoint" : "retrieve", "message" : "ERROR: column \"doesntexist\" does not exist -LINE 1: SELECT * FROM foglamp.test WHERE \"doesntexist\" = '9'; - ^ -", "retryable" : false} \ No newline at end of file +{ "entryPoint" : "retrieve", "message" : "ERROR: column \"doesntexist\" does not existLINE 1: SELECT * FROM foglamp.test WHERE \"doesntexist\" = '9'; ^", "retryable" : false} \ No newline at end of file From f0abf22aead93a69c2af2063a368832f835fb7c4 Mon Sep 17 00:00:00 2001 From: stefano Date: Wed, 10 Apr 2019 14:57:30 +0200 Subject: [PATCH 018/161] FOGL-2510: fixed PG/SQLITE unit tests --- tests/unit/C/services/storage/postgres/expected_ETC_UTC/68 | 0 tests/unit/C/services/storage/postgres/expected_ETC_UTC/84 | 2 +- tests/unit/C/services/storage/postgres/expected_ETC_UTC/90 | 5 +---- tests/unit/C/services/storage/postgres/expected_ETC_UTC/91 | 3 +-- tests/unit/C/services/storage/postgres/expected_ETC_UTC/92 | 2 +- tests/unit/C/services/storage/postgres/expected_ETC_UTC/93 | 2 +- .../C/services/storage/postgres/expected_EUROPE_ROME/68 | 0 .../C/services/storage/postgres/expected_EUROPE_ROME/84 | 2 +- .../C/services/storage/postgres/expected_EUROPE_ROME/90 | 5 +---- .../C/services/storage/postgres/expected_EUROPE_ROME/91 | 3 +-- .../C/services/storage/postgres/expected_EUROPE_ROME/92 | 2 +- .../C/services/storage/postgres/expected_EUROPE_ROME/93 | 2 +- 
.../unit/C/services/storage/postgres/payloads/delete.json | 2 +- .../payloads/{error-fogl690.json => fogl690-error.json} | 0 .../payloads/fogl690-ok.json} | 2 +- tests/unit/C/services/storage/postgres/testRunner.sh | 2 ++ tests/unit/C/services/storage/postgres/testset | 7 ++++--- .../C/services/storage/sqlite/payloads/fogl690-error.json | 1 + .../C/services/storage/sqlite/payloads/fogl690-ok.json | 1 + tests/unit/C/services/storage/sqlite/testRunner.sh | 2 ++ tests/unit/C/services/storage/sqlite/testset | 3 ++- 21 files changed, 24 insertions(+), 24 deletions(-) create mode 100644 tests/unit/C/services/storage/postgres/expected_ETC_UTC/68 create mode 100644 tests/unit/C/services/storage/postgres/expected_EUROPE_ROME/68 rename tests/unit/C/services/storage/postgres/payloads/{error-fogl690.json => fogl690-error.json} (100%) rename tests/unit/C/services/storage/{sqlite/payloads/error-fogl690.json => postgres/payloads/fogl690-ok.json} (95%) create mode 100644 tests/unit/C/services/storage/sqlite/payloads/fogl690-error.json create mode 100644 tests/unit/C/services/storage/sqlite/payloads/fogl690-ok.json diff --git a/tests/unit/C/services/storage/postgres/expected_ETC_UTC/68 b/tests/unit/C/services/storage/postgres/expected_ETC_UTC/68 new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/unit/C/services/storage/postgres/expected_ETC_UTC/84 b/tests/unit/C/services/storage/postgres/expected_ETC_UTC/84 index 29146ee48a..a8848cbc86 100644 --- a/tests/unit/C/services/storage/postgres/expected_ETC_UTC/84 +++ b/tests/unit/C/services/storage/postgres/expected_ETC_UTC/84 @@ -1 +1 @@ -{ "response" : "deleted", "rows_affected" : 1 } \ No newline at end of file +{ "response" : "updated", "rows_affected" : 1 } \ No newline at end of file diff --git a/tests/unit/C/services/storage/postgres/expected_ETC_UTC/90 b/tests/unit/C/services/storage/postgres/expected_ETC_UTC/90 index 6badb7fb7e..14221b84df 100644 --- a/tests/unit/C/services/storage/postgres/expected_ETC_UTC/90 
+++ b/tests/unit/C/services/storage/postgres/expected_ETC_UTC/90 @@ -1,4 +1 @@ -{ "entryPoint" : "retrieve", "message" : "ERROR: syntax error at or near \"\\" -LINE 1: ...FROM foglamp.test2 WHERE \"id\" >= '2' ORDER BY \"id\\">\\"\" ASC; - ^ -", "retryable" : false} \ No newline at end of file +{ "entryPoint" : "retrieve", "message" : "ERROR: syntax error at or near \"\\"LINE 1: ...FROM foglamp.test2 WHERE \"id\" >= '2' ORDER BY \"id\\">\\"\" ASC; ^", "retryable" : false} \ No newline at end of file diff --git a/tests/unit/C/services/storage/postgres/expected_ETC_UTC/91 b/tests/unit/C/services/storage/postgres/expected_ETC_UTC/91 index 89f081d1e5..7de8e21f82 100644 --- a/tests/unit/C/services/storage/postgres/expected_ETC_UTC/91 +++ b/tests/unit/C/services/storage/postgres/expected_ETC_UTC/91 @@ -1,2 +1 @@ -{ "entryPoint" : "retrieve", "message" : "ERROR: time zone \"bad\" not recognized -", "retryable" : false} \ No newline at end of file +{ "entryPoint" : "retrieve", "message" : "ERROR: time zone \"bad\" not recognized", "retryable" : false} \ No newline at end of file diff --git a/tests/unit/C/services/storage/postgres/expected_ETC_UTC/92 b/tests/unit/C/services/storage/postgres/expected_ETC_UTC/92 index a8848cbc86..93af3344da 100644 --- a/tests/unit/C/services/storage/postgres/expected_ETC_UTC/92 +++ b/tests/unit/C/services/storage/postgres/expected_ETC_UTC/92 @@ -1 +1 @@ -{ "response" : "updated", "rows_affected" : 1 } \ No newline at end of file +{"count":1,"rows":[{"description":"added'some'ch'''ars'"}]} \ No newline at end of file diff --git a/tests/unit/C/services/storage/postgres/expected_ETC_UTC/93 b/tests/unit/C/services/storage/postgres/expected_ETC_UTC/93 index 93af3344da..29146ee48a 100644 --- a/tests/unit/C/services/storage/postgres/expected_ETC_UTC/93 +++ b/tests/unit/C/services/storage/postgres/expected_ETC_UTC/93 @@ -1 +1 @@ -{"count":1,"rows":[{"description":"added'some'ch'''ars'"}]} \ No newline at end of file +{ "response" : "deleted", 
"rows_affected" : 1 } \ No newline at end of file diff --git a/tests/unit/C/services/storage/postgres/expected_EUROPE_ROME/68 b/tests/unit/C/services/storage/postgres/expected_EUROPE_ROME/68 new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/unit/C/services/storage/postgres/expected_EUROPE_ROME/84 b/tests/unit/C/services/storage/postgres/expected_EUROPE_ROME/84 index 29146ee48a..a8848cbc86 100644 --- a/tests/unit/C/services/storage/postgres/expected_EUROPE_ROME/84 +++ b/tests/unit/C/services/storage/postgres/expected_EUROPE_ROME/84 @@ -1 +1 @@ -{ "response" : "deleted", "rows_affected" : 1 } \ No newline at end of file +{ "response" : "updated", "rows_affected" : 1 } \ No newline at end of file diff --git a/tests/unit/C/services/storage/postgres/expected_EUROPE_ROME/90 b/tests/unit/C/services/storage/postgres/expected_EUROPE_ROME/90 index 6badb7fb7e..14221b84df 100644 --- a/tests/unit/C/services/storage/postgres/expected_EUROPE_ROME/90 +++ b/tests/unit/C/services/storage/postgres/expected_EUROPE_ROME/90 @@ -1,4 +1 @@ -{ "entryPoint" : "retrieve", "message" : "ERROR: syntax error at or near \"\\" -LINE 1: ...FROM foglamp.test2 WHERE \"id\" >= '2' ORDER BY \"id\\">\\"\" ASC; - ^ -", "retryable" : false} \ No newline at end of file +{ "entryPoint" : "retrieve", "message" : "ERROR: syntax error at or near \"\\"LINE 1: ...FROM foglamp.test2 WHERE \"id\" >= '2' ORDER BY \"id\\">\\"\" ASC; ^", "retryable" : false} \ No newline at end of file diff --git a/tests/unit/C/services/storage/postgres/expected_EUROPE_ROME/91 b/tests/unit/C/services/storage/postgres/expected_EUROPE_ROME/91 index 89f081d1e5..7de8e21f82 100644 --- a/tests/unit/C/services/storage/postgres/expected_EUROPE_ROME/91 +++ b/tests/unit/C/services/storage/postgres/expected_EUROPE_ROME/91 @@ -1,2 +1 @@ -{ "entryPoint" : "retrieve", "message" : "ERROR: time zone \"bad\" not recognized -", "retryable" : false} \ No newline at end of file +{ "entryPoint" : "retrieve", "message" : "ERROR: time 
zone \"bad\" not recognized", "retryable" : false} \ No newline at end of file diff --git a/tests/unit/C/services/storage/postgres/expected_EUROPE_ROME/92 b/tests/unit/C/services/storage/postgres/expected_EUROPE_ROME/92 index a8848cbc86..93af3344da 100644 --- a/tests/unit/C/services/storage/postgres/expected_EUROPE_ROME/92 +++ b/tests/unit/C/services/storage/postgres/expected_EUROPE_ROME/92 @@ -1 +1 @@ -{ "response" : "updated", "rows_affected" : 1 } \ No newline at end of file +{"count":1,"rows":[{"description":"added'some'ch'''ars'"}]} \ No newline at end of file diff --git a/tests/unit/C/services/storage/postgres/expected_EUROPE_ROME/93 b/tests/unit/C/services/storage/postgres/expected_EUROPE_ROME/93 index 93af3344da..29146ee48a 100644 --- a/tests/unit/C/services/storage/postgres/expected_EUROPE_ROME/93 +++ b/tests/unit/C/services/storage/postgres/expected_EUROPE_ROME/93 @@ -1 +1 @@ -{"count":1,"rows":[{"description":"added'some'ch'''ars'"}]} \ No newline at end of file +{ "response" : "deleted", "rows_affected" : 1 } \ No newline at end of file diff --git a/tests/unit/C/services/storage/postgres/payloads/delete.json b/tests/unit/C/services/storage/postgres/payloads/delete.json index ffac71cc45..a203817641 100644 --- a/tests/unit/C/services/storage/postgres/payloads/delete.json +++ b/tests/unit/C/services/storage/postgres/payloads/delete.json @@ -2,6 +2,6 @@ "where" : { "column" : "key", "condition" : "=", - "value" : "DEVICE " + "value" : "DEVICE" } } diff --git a/tests/unit/C/services/storage/postgres/payloads/error-fogl690.json b/tests/unit/C/services/storage/postgres/payloads/fogl690-error.json similarity index 100% rename from tests/unit/C/services/storage/postgres/payloads/error-fogl690.json rename to tests/unit/C/services/storage/postgres/payloads/fogl690-error.json diff --git a/tests/unit/C/services/storage/sqlite/payloads/error-fogl690.json b/tests/unit/C/services/storage/postgres/payloads/fogl690-ok.json similarity index 95% rename from 
tests/unit/C/services/storage/sqlite/payloads/error-fogl690.json rename to tests/unit/C/services/storage/postgres/payloads/fogl690-ok.json index 4780a36fb3..792e273f27 100644 --- a/tests/unit/C/services/storage/sqlite/payloads/error-fogl690.json +++ b/tests/unit/C/services/storage/postgres/payloads/fogl690-ok.json @@ -1 +1 @@ -{"key": "DEVICE", "display_name": "DEVICE", "value": {"readings_insert_batch_size": {"type": "integer", "default": "100", "value": "100", "description": "The maximum number of readings in a batch of inserts"}, "max_concurrent_readings_inserts": {"type": "integer", "default": "5", "value": "5", "description": "The maximum number of concurrent processes that send batches of readings to storage"}, "readings_insert_batch_timeout_seconds": {"type": "integer", "default": "1", "value": "1", "description": "The number of seconds to wait for a readings list to reach the minimum batch size"}, "max_readings_insert_batch_connection_idle_seconds": {"type": "integer", "default": "60", "value": "60", "description": "Close storage connections used to insert readings when idle for this number of seconds"}, "readings_buffer_size": {"type": "integer", "default": "500", "value": "500", "description": "The maximum number of readings to buffer in memory"}, "write_statistics_frequency_seconds": {"type": "integer", "default": "5", "value": "5", "description": "The number of seconds to wait before writing readings-related statistics to storage"}, "max_readings_insert_batch_reconnect_wait_seconds": {"type": "integer", "default": "10", "value": "10", "description": "The maximum number of seconds to wait before reconnecting to storage when inserting readings"}}, "description": "Device server configuration"} +{"key": "DEVICE", "display_name": "DEVICE", "value": {"readings_insert_batch_size": {"type": "integer", "default": "100", "value": "100", "description": "The maximum number of readings in a batch of inserts"}, "max_concurrent_readings_inserts": {"type": "integer", 
"default": "5", "value": "5", "description": "The maximum number of concurrent processes that send batches of readings to storage"}, "readings_insert_batch_timeout_seconds": {"type": "integer", "default": "1", "value": "1", "description": "The number of seconds to wait for a readings list to reach the minimum batch size"}, "max_readings_insert_batch_connection_idle_seconds": {"type": "integer", "default": "60", "value": "60", "description": "Close storage connections used to insert readings when idle for this number of seconds"}, "readings_buffer_size": {"type": "integer", "default": "500", "value": "500", "description": "The maximum number of readings to buffer in memory"}, "write_statistics_frequency_seconds": {"type": "integer", "default": "5", "value": "5", "description": "The number of seconds to wait before writing readings-related statistics to storage"}, "max_readings_insert_batch_reconnect_wait_seconds": {"type": "integer", "default": "10", "value": "10", "description": "The maximum number of seconds to wait before reconnecting to storage when inserting readings"}}, "description": "Device server configuration"} \ No newline at end of file diff --git a/tests/unit/C/services/storage/postgres/testRunner.sh b/tests/unit/C/services/storage/postgres/testRunner.sh index 8e18aace23..20f2e74eba 100755 --- a/tests/unit/C/services/storage/postgres/testRunner.sh +++ b/tests/unit/C/services/storage/postgres/testRunner.sh @@ -7,6 +7,8 @@ export TZ='Etc/UTC' show_configuration () { + echo "FogLAMP unit tests for the PostgreSQL plugin" + echo "Starting storage layer :$storage_exec:" echo "timezone :$tz_exec:" echo "expected dir :$expected_dir:" diff --git a/tests/unit/C/services/storage/postgres/testset b/tests/unit/C/services/storage/postgres/testset index 29500f50b7..0dd033162a 100644 --- a/tests/unit/C/services/storage/postgres/testset +++ b/tests/unit/C/services/storage/postgres/testset @@ -80,8 +80,8 @@ 
Older,PUT,http://localhost:8080/storage/table/test2/query,older.json Newer Bad,PUT,http://localhost:8080/storage/table/test2/query,newerBad.json Like,PUT,http://localhost:8080/storage/table/test2/query,where_like.json Group Time,PUT,http://localhost:8080/storage/reading/query,group_time.json -Jira FOGL-690,POST,http://localhost:8080/storage/table/configuration,error-fogl690.json -Jira FOGL-690 cleanup,DELETE,http://localhost:8080/storage/table/configuration,delete.json +Jira FOGL-690,POST,http://localhost:8080/storage/table/configuration,fogl690-ok.json +Set-FOGL-983,PUT,http://localhost:8080/storage/table/configuration,FOGL-983.json Add bad Readings,POST,http://localhost:8080/storage/reading,badreadings.json Query Readings Timebucket Bad,PUT,http://localhost:8080/storage/reading/query,query_readings_timebucket_bad.json Reading Rate Array,PUT,http://localhost:8080/storage/reading/query,reading_property_array.json @@ -89,8 +89,8 @@ Common Read limit max_int,PUT,http://localhost:8080/storage/table/test/query,lim Common Read skip max_int,PUT,http://localhost:8080/storage/table/test/query,skip_max_int.json Timezone,PUT,http://localhost:8080/storage/table/test2/query,timezone.json Bad Timezone,PUT,http://localhost:8080/storage/table/test2/query,timezone_bad.json -Set-FOGL-983,PUT,http://localhost:8080/storage/table/configuration,FOGL-983.json Get-FOGL-983,PUT,http://localhost:8080/storage/table/configuration/query,get-FOGL-983.json +Jira FOGL-690 cleanup,DELETE,http://localhost:8080/storage/table/configuration,delete.json Numeric Column Name,PUT,http://localhost:8080/storage/table/test/query,where_numeric_column.json, Common table IN operator,PUT,http://localhost:8080/storage/table/test2/query,where_in.json Common table NOT IN operator,PUT,http://localhost:8080/storage/table/test2/query,where_not_in.json @@ -108,4 +108,5 @@ microseconds - Query asset aggregate min array,PUT,http://localhost:8080/storage Update JSON value as 
function,PUT,http://localhost:8080/storage/table/test,put_function_in_JSON.json Update JSON value in JSON value,PUT,http://localhost:8080/storage/table/test,put_json_in_JSON.json Get updated complex JSON value,PUT,http://localhost:8080/storage/table/test/query,get_updated_complex_JSON.json +Jira FOGL-690,POST,http://localhost:8080/storage/table/configuration,fogl690-error.json Shutdown,POST,http://localhost:1081/foglamp/service/shutdown,,checkstate diff --git a/tests/unit/C/services/storage/sqlite/payloads/fogl690-error.json b/tests/unit/C/services/storage/sqlite/payloads/fogl690-error.json new file mode 100644 index 0000000000..4f4c40e8f4 --- /dev/null +++ b/tests/unit/C/services/storage/sqlite/payloads/fogl690-error.json @@ -0,0 +1 @@ +{"key": "DEVICE", "value": {"readings_insert_batch_size": {"type": "integer", "default": "100", "value": "100", "description": "The maximum number of readings in a batch of inserts"}, "max_concurrent_readings_inserts": {"type": "integer", "default": "5", "value": "5", "description": "The maximum number of concurrent processes that send batches of readings to storage"}, "readings_insert_batch_timeout_seconds": {"type": "integer", "default": "1", "value": "1", "description": "The number of seconds to wait for a readings list to reach the minimum batch size"}, "max_readings_insert_batch_connection_idle_seconds": {"type": "integer", "default": "60", "value": "60", "description": "Close storage connections used to insert readings when idle for this number of seconds"}, "readings_buffer_size": {"type": "integer", "default": "500", "value": "500", "description": "The maximum number of readings to buffer in memory"}, "write_statistics_frequency_seconds": {"type": "integer", "default": "5", "value": "5", "description": "The number of seconds to wait before writing readings-related statistics to storage"}, "max_readings_insert_batch_reconnect_wait_seconds": {"type": "integer", "default": "10", "value": "10", "description": "The maximum 
number of seconds to wait before reconnecting to storage when inserting readings"}}, "description": "Device server configuration"} diff --git a/tests/unit/C/services/storage/sqlite/payloads/fogl690-ok.json b/tests/unit/C/services/storage/sqlite/payloads/fogl690-ok.json new file mode 100644 index 0000000000..792e273f27 --- /dev/null +++ b/tests/unit/C/services/storage/sqlite/payloads/fogl690-ok.json @@ -0,0 +1 @@ +{"key": "DEVICE", "display_name": "DEVICE", "value": {"readings_insert_batch_size": {"type": "integer", "default": "100", "value": "100", "description": "The maximum number of readings in a batch of inserts"}, "max_concurrent_readings_inserts": {"type": "integer", "default": "5", "value": "5", "description": "The maximum number of concurrent processes that send batches of readings to storage"}, "readings_insert_batch_timeout_seconds": {"type": "integer", "default": "1", "value": "1", "description": "The number of seconds to wait for a readings list to reach the minimum batch size"}, "max_readings_insert_batch_connection_idle_seconds": {"type": "integer", "default": "60", "value": "60", "description": "Close storage connections used to insert readings when idle for this number of seconds"}, "readings_buffer_size": {"type": "integer", "default": "500", "value": "500", "description": "The maximum number of readings to buffer in memory"}, "write_statistics_frequency_seconds": {"type": "integer", "default": "5", "value": "5", "description": "The number of seconds to wait before writing readings-related statistics to storage"}, "max_readings_insert_batch_reconnect_wait_seconds": {"type": "integer", "default": "10", "value": "10", "description": "The maximum number of seconds to wait before reconnecting to storage when inserting readings"}}, "description": "Device server configuration"} \ No newline at end of file diff --git a/tests/unit/C/services/storage/sqlite/testRunner.sh b/tests/unit/C/services/storage/sqlite/testRunner.sh index 359679de95..ddc8a6f2f2 
100755 --- a/tests/unit/C/services/storage/sqlite/testRunner.sh +++ b/tests/unit/C/services/storage/sqlite/testRunner.sh @@ -7,6 +7,8 @@ export TZ='Etc/UTC' show_configuration () { + echo "FogLAMP unit tests for the SQLite plugin" + echo "Starting storage layer :$storage_exec:" echo "timezone :$TZ:" echo "expected dir :$expected_dir:" diff --git a/tests/unit/C/services/storage/sqlite/testset b/tests/unit/C/services/storage/sqlite/testset index 52f39b8ce9..f31ba4bb86 100644 --- a/tests/unit/C/services/storage/sqlite/testset +++ b/tests/unit/C/services/storage/sqlite/testset @@ -76,7 +76,7 @@ Older,PUT,http://localhost:8080/storage/table/test2/query,older.json Newer Bad,PUT,http://localhost:8080/storage/table/test2/query,newerBad.json Like,PUT,http://localhost:8080/storage/table/test2/query,where_like.json Group Time,PUT,http://localhost:8080/storage/reading/query,group_time.json -Jira FOGL-690,POST,http://localhost:8080/storage/table/configuration,error-fogl690.json +Jira FOGL-690,POST,http://localhost:8080/storage/table/configuration,fogl690-ok.json Set-FOGL-983,PUT,http://localhost:8080/storage/table/configuration,FOGL-983.json Add bad Readings,POST,http://localhost:8080/storage/reading,badreadings.json Query Readings Timebucket Bad,PUT,http://localhost:8080/storage/reading/query,query_readings_timebucket_bad.json @@ -109,4 +109,5 @@ microseconds - Query asset aggregate min array,PUT,http://localhost:8080/storage update - Update multi rows,PUT,http://localhost:8080/storage/table/test2,update_multi_rows.json insert - Insert 1 row,POST,http://localhost:8080/storage/table/test2,insert_1row.json insert - Insert multi rows,POST,http://localhost:8080/storage/table/test2,insert_multi_rows.json +Jira FOGL-690,POST,http://localhost:8080/storage/table/configuration,fogl690-error.json Shutdown,POST,http://localhost:1081/foglamp/service/shutdown,,checkstate From 2ba5d7ce47c1afc20a55f8544de177f4036d37ab Mon Sep 17 00:00:00 2001 From: stefano Date: Wed, 10 Apr 2019 16:02:08 
+0200 Subject: [PATCH 019/161] FOGL-2510: fixed PG unit tests ID 68 --- .../storage/postgres/payloads/reading_property_bad.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/unit/C/services/storage/postgres/payloads/reading_property_bad.json b/tests/unit/C/services/storage/postgres/payloads/reading_property_bad.json index fcf683f532..db333237dc 100644 --- a/tests/unit/C/services/storage/postgres/payloads/reading_property_bad.json +++ b/tests/unit/C/services/storage/postgres/payloads/reading_property_bad.json @@ -1,7 +1,7 @@ { "return" : [ { - "column": "user_ts", + "column": "user_ts_X", "format": "YYYY-MM-DD HH24:MI:SS.MS", "alias" : "timestamp"}, { From 6c3cda5e8d1eb2279eb32d7dc35bbffabbdef3fc Mon Sep 17 00:00:00 2001 From: Praveen Garg Date: Wed, 10 Apr 2019 19:33:04 +0530 Subject: [PATCH 020/161] fixed round off values comparision to and from PI --- tests/system/python/e2e/test_e2e_pi_scaleset.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/system/python/e2e/test_e2e_pi_scaleset.py b/tests/system/python/e2e/test_e2e_pi_scaleset.py index 2886d8b2c9..c1741cfad8 100644 --- a/tests/system/python/e2e/test_e2e_pi_scaleset.py +++ b/tests/system/python/e2e/test_e2e_pi_scaleset.py @@ -163,4 +163,4 @@ def _verify_egress(self, read_data_from_pi, pi_host, pi_admin, pi_passwd, pi_db, assert READ_KEY in data_from_pi assert isinstance(data_from_pi[READ_KEY], list) - assert OUTPUT in data_from_pi[READ_KEY] + assert round(OUTPUT, 1) in [round(n, 1) for n in data_from_pi[READ_KEY]] From dc2b9b5b90356345a6a45fad9033cf59760fbe1a Mon Sep 17 00:00:00 2001 From: stefano Date: Thu, 11 Apr 2019 11:50:23 +0200 Subject: [PATCH 021/161] FOGL-2510: fixed unit test ID 115 --- tests/unit/C/services/storage/sqlite/expected_ETC_UTC/115 | 1 + tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/115 | 1 + 2 files changed, 2 insertions(+) create mode 100644 tests/unit/C/services/storage/sqlite/expected_ETC_UTC/115 create mode 100644 
tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/115 diff --git a/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/115 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/115 new file mode 100644 index 0000000000..3f44373803 --- /dev/null +++ b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/115 @@ -0,0 +1 @@ +{ "entryPoint" : "insert", "message" : "NOT NULL constraint failed: configuration.display_name", "retryable" : false} \ No newline at end of file diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/115 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/115 new file mode 100644 index 0000000000..3f44373803 --- /dev/null +++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/115 @@ -0,0 +1 @@ +{ "entryPoint" : "insert", "message" : "NOT NULL constraint failed: configuration.display_name", "retryable" : false} \ No newline at end of file From 2c91f2388cf4078f9b105925a6511f6c47e93221 Mon Sep 17 00:00:00 2001 From: ashish-jabble Date: Fri, 12 Apr 2019 14:42:22 +0530 Subject: [PATCH 022/161] _FOGLAMP_DATA handling and other feedback fixes --- .../services/core/api/plugins/install.py | 84 ++++++++----------- python/foglamp/services/core/routes.py | 4 +- 2 files changed, 38 insertions(+), 50 deletions(-) diff --git a/python/foglamp/services/core/api/plugins/install.py b/python/foglamp/services/core/api/plugins/install.py index 4875d0d61a..d1213b5a98 100644 --- a/python/foglamp/services/core/api/plugins/install.py +++ b/python/foglamp/services/core/api/plugins/install.py @@ -16,7 +16,7 @@ import hashlib from foglamp.common import logger -from foglamp.common.common import _FOGLAMP_ROOT +from foglamp.common.common import _FOGLAMP_ROOT, _FOGLAMP_DATA __author__ = "Ashish Jabble" @@ -30,12 +30,11 @@ | POST | /foglamp/plugins | ------------------------------------------------------------------------------- """ -TIME_OUT = 120 -CHUNK_SIZE = 1024 +_TIME_OUT = 120 +_CHUNK_SIZE = 1024 +_PATH = _FOGLAMP_DATA + 
'/plugins/' if _FOGLAMP_DATA else _FOGLAMP_ROOT + '/data/plugins/' _LOGGER = logger.setup(__name__, level=logging.INFO) -_PATH = _FOGLAMP_ROOT + '/data/plugins/' - # TODO: Add unit tests async def add_plugin(request: web.Request) -> web.Response: @@ -54,13 +53,12 @@ async def add_plugin(request: web.Request) -> web.Response: url = data.get('url', None) file_format = data.get('format', None) compressed = data.get('compressed', None) - # TODO: Better if we add plugin type (mandatory) - _type = data.get('type', None) + plugin_type = data.get('type', None) checksum = data.get('checksum', None) - if not url or not file_format or not _type or not checksum: - raise TypeError('URL, checksum, plugin type and format are mandatory.') + if not url or not file_format or not plugin_type or not checksum: + raise TypeError('URL, checksum, plugin type and format post params are mandatory.') # TODO: add later for notification delivery and rules plugins - if _type not in ['filter', 'north', 'south']: + if plugin_type not in ['filter', 'north', 'south']: raise ValueError("Invalid type you have supplied; Accepted types are filter, north, south") if file_format not in ["tar", "deb"]: raise ValueError("Invalid format you supplied; Accepted format are tar and deb") @@ -75,39 +73,37 @@ async def add_plugin(request: web.Request) -> web.Response: os.makedirs(_PATH) result = await download([url]) - # _LOGGER.info("Result {} ".format(result)) - file_name = result[0].split('Successfully downloaded ')[-1] - # _LOGGER.info("file_name {} {}".format(file_name, checksum)) + file_name = result[0] - # TODO: only validate with MD5sum. Do we need to validate with SHA1, SHA256, SHA512? 
- # if yes then we need to add checksum type attribute in request + # validate checksum with MD5sum if validate_checksum(checksum, file_name) is False: raise ValueError("Checksum is failed.") - # TODO: tar this downloaded file under _PATH and install plugin FOGLAMP_ROOT _LOGGER.info("Found {} format with compressed {}".format(file_format, is_compressed)) - _files = extract_file(file_name, is_compressed) - _LOGGER.info("Files {} {}".format(_files, type(_files))) if file_format == 'tar': - copy_file_install_requirement(_files, _type) + files = extract_file(file_name, is_compressed) + _LOGGER.info("Files {} {}".format(files, type(files))) + copy_file_install_requirement(files, plugin_type) else: - install_debian(_files[0]) + install_debian(file_name) + except FileNotFoundError as ex: + raise web.HTTPNotFound(reason=str(ex)) except (TypeError, ValueError) as ex: raise web.HTTPBadRequest(reason=str(ex)) except Exception as ex: raise web.HTTPException(reason=str(ex)) else: - return web.json_response({"message": "{} and installed".format(result)}) + return web.json_response({"message": "{} is successfully downloaded and installed".format(file_name)}) async def get_url(url: str, session: aiohttp.ClientSession) -> str: file_name = str(url.split("/")[-1]) - async with async_timeout.timeout(TIME_OUT): + async with async_timeout.timeout(_TIME_OUT): async with session.get(url) as response: with open(_PATH + file_name, 'wb') as fd: - async for data in response.content.iter_chunked(CHUNK_SIZE): + async for data in response.content.iter_chunked(_CHUNK_SIZE): fd.write(data) - return 'Successfully downloaded ' + file_name + return file_name async def download(urls: list) -> asyncio.gather: @@ -127,7 +123,6 @@ def extract_file(file_name: str, is_compressed: bool) -> list: _LOGGER.info("Extracted to {}".format(_PATH)) tar.extractall(_PATH) _LOGGER.info("Extraction Done!!") - # _LOGGER.info(tar.getnames()) return tar.getnames() @@ -139,41 +134,34 @@ def install_debian(file_name: str): 
def copy_file_install_requirement(_file: list, _type: str): - _py = any(f.endswith(".py") for f in _file) - _so = any(f.endswith(".so") for f in _file) - _so_1 = any(f.endswith(".so.1") for f in _file) - - if _py: - _LOGGER.exception("Python plugin found") + py_file = any(f.endswith(".py") for f in _file) + so_1_file = any(f.endswith(".so.1") for f in _file) # regular file + so_file = any(f.endswith(".so") for f in _file) # symlink file - if _so: - if _so_1: - _LOGGER.exception("C plugin found") - else: - _LOGGER.exception("Not a valid C plugin") - - if not _py and not _so: - _LOGGER.exception("Invalid plugin found") - raise ValueError + if not py_file and not so_file: + raise FileNotFoundError("Invalid plugin directory structure found, please check the contents of your tar file.") + if so_1_file: + if not so_file: + _LOGGER.error("Symlink file is missing") + raise FileNotFoundError("Symlink file is missing") dir = [] for s in _file: - print(s) dir.append(s.split("/")[-1]) assert len(dir), "No data found" plugin_name = dir[0] - _LOGGER.exception("plugin name {} dir {} ".format(plugin_name, dir)) - - _plugin_path = "python/foglamp/plugins" if _py else "plugins" - dest_path = "{}/{}/{}/".format(_FOGLAMP_ROOT, _plugin_path, _type) - _LOGGER.exception("dest {}".format(dest_path)) + _LOGGER.info("Plugin name {} and Dir {} ".format(plugin_name, dir)) + plugin_path = "python/foglamp/plugins" if py_file else "plugins" + dest_path = "{}/{}/{}/".format(_FOGLAMP_ROOT, plugin_path, _type) + _LOGGER.info("Destination Path {}".format(dest_path)) + # FIXME: shutil with sudo permissions (bypass) if os.path.exists(dest_path + plugin_name) and os.path.isdir(dest_path + plugin_name): shutil.rmtree(dest_path + plugin_name) shutil.copytree(_PATH + plugin_name, dest_path + plugin_name) - _LOGGER.exception("file copied....to {}".format(dest_path)) + _LOGGER.info("File copied to {}".format(dest_path)) if "requirements.sh" in dir: - _LOGGER.exception("INSTALL PIP Dep.... 
{}".format(dest_path + plugin_name + "/" + "requirements.sh")) + _LOGGER.info("Installing external deps required for plugins.... {}".format(dest_path + plugin_name + "/" + "requirements.sh")) subprocess.run(["sh {}".format(dest_path + plugin_name + "/" + "requirements.sh")], shell=True) diff --git a/python/foglamp/services/core/routes.py b/python/foglamp/services/core/routes.py index b38c9c818c..e00cc2915a 100644 --- a/python/foglamp/services/core/routes.py +++ b/python/foglamp/services/core/routes.py @@ -23,7 +23,7 @@ from foglamp.services.core.api import north from foglamp.services.core.api import filters from foglamp.services.core.api import notification -from foglamp.services.core.api.plugins import install +from foglamp.services.core.api.plugins import install as plugins_install __author__ = "Ashish Jabble, Praveen Garg, Massimiliano Pinto" __copyright__ = "Copyright (c) 2017-2018 OSIsoft, LLC" @@ -156,7 +156,7 @@ def setup(app): # Plugins (install, discovery) app.router.add_route('GET', '/foglamp/plugins/installed', plugin_discovery.get_plugins_installed) - app.router.add_route('POST', '/foglamp/plugins', install.add_plugin) + app.router.add_route('POST', '/foglamp/plugins', plugins_install.add_plugin) # Filters app.router.add_route('POST', '/foglamp/filter', filters.create_filter) From 781158f50cfa3597f540fff330fc5d7ce1279ee7 Mon Sep 17 00:00:00 2001 From: ashish-jabble Date: Fri, 12 Apr 2019 16:51:05 +0530 Subject: [PATCH 023/161] allowed notify delivery and plugins to install via this route --- .../services/core/api/plugins/install.py | 39 ++++++++++--------- 1 file changed, 20 insertions(+), 19 deletions(-) diff --git a/python/foglamp/services/core/api/plugins/install.py b/python/foglamp/services/core/api/plugins/install.py index d1213b5a98..920f1df82f 100644 --- a/python/foglamp/services/core/api/plugins/install.py +++ b/python/foglamp/services/core/api/plugins/install.py @@ -8,13 +8,14 @@ import subprocess import logging import asyncio -from aiohttp 
import web -import aiohttp -import async_timeout import tarfile import shutil import hashlib +from aiohttp import web +import aiohttp +import async_timeout + from foglamp.common import logger from foglamp.common.common import _FOGLAMP_ROOT, _FOGLAMP_DATA @@ -57,11 +58,11 @@ async def add_plugin(request: web.Request) -> web.Response: checksum = data.get('checksum', None) if not url or not file_format or not plugin_type or not checksum: raise TypeError('URL, checksum, plugin type and format post params are mandatory.') - # TODO: add later for notification delivery and rules plugins - if plugin_type not in ['filter', 'north', 'south']: - raise ValueError("Invalid type you have supplied; Accepted types are filter, north, south") + if plugin_type not in ['south', 'north', 'filter', 'notificationDelivery', 'notificationRule']: + raise ValueError("Invalid plugin type. Must be 'north' or 'south' or 'filter' " + "or 'notificationDelivery' or 'notificationRule'") if file_format not in ["tar", "deb"]: - raise ValueError("Invalid format you supplied; Accepted format are tar and deb") + raise ValueError("Invalid format. 
Must be 'tar' or 'deb'") if compressed: if compressed not in ['true', 'false', True, False]: raise ValueError('Only "true", "false", true, false are allowed for value of compressed.') @@ -133,10 +134,10 @@ def install_debian(file_name: str): pass -def copy_file_install_requirement(_file: list, _type: str): - py_file = any(f.endswith(".py") for f in _file) - so_1_file = any(f.endswith(".so.1") for f in _file) # regular file - so_file = any(f.endswith(".so") for f in _file) # symlink file +def copy_file_install_requirement(dir_files: list, plugin_type: str): + py_file = any(f.endswith(".py") for f in dir_files) + so_1_file = any(f.endswith(".so.1") for f in dir_files) # regular file + so_file = any(f.endswith(".so") for f in dir_files) # symlink file if not py_file and not so_file: raise FileNotFoundError("Invalid plugin directory structure found, please check the contents of your tar file.") @@ -145,15 +146,15 @@ def copy_file_install_requirement(_file: list, _type: str): if not so_file: _LOGGER.error("Symlink file is missing") raise FileNotFoundError("Symlink file is missing") - dir = [] - for s in _file: - dir.append(s.split("/")[-1]) + _dir = [] + for s in dir_files: + _dir.append(s.split("/")[-1]) - assert len(dir), "No data found" - plugin_name = dir[0] - _LOGGER.info("Plugin name {} and Dir {} ".format(plugin_name, dir)) + assert len(_dir), "No data found" + plugin_name = _dir[0] + _LOGGER.info("Plugin name {} and Dir {} ".format(plugin_name, _dir)) plugin_path = "python/foglamp/plugins" if py_file else "plugins" - dest_path = "{}/{}/{}/".format(_FOGLAMP_ROOT, plugin_path, _type) + dest_path = "{}/{}/{}/".format(_FOGLAMP_ROOT, plugin_path, plugin_type) _LOGGER.info("Destination Path {}".format(dest_path)) # FIXME: shutil with sudo permissions (bypass) @@ -162,6 +163,6 @@ def copy_file_install_requirement(_file: list, _type: str): shutil.copytree(_PATH + plugin_name, dest_path + plugin_name) _LOGGER.info("File copied to {}".format(dest_path)) - if 
"requirements.sh" in dir: + if "requirements.sh" in _dir: _LOGGER.info("Installing external deps required for plugins.... {}".format(dest_path + plugin_name + "/" + "requirements.sh")) subprocess.run(["sh {}".format(dest_path + plugin_name + "/" + "requirements.sh")], shell=True) From 1c6394bffe928af500f7aef89efc91c857cd270b Mon Sep 17 00:00:00 2001 From: ashish-jabble Date: Fri, 12 Apr 2019 16:51:30 +0530 Subject: [PATCH 024/161] unit tests added --- .../services/core/api/plugins/test_install.py | 146 ++++++++++++++++++ 1 file changed, 146 insertions(+) create mode 100644 tests/unit/python/foglamp/services/core/api/plugins/test_install.py diff --git a/tests/unit/python/foglamp/services/core/api/plugins/test_install.py b/tests/unit/python/foglamp/services/core/api/plugins/test_install.py new file mode 100644 index 0000000000..79c38252c4 --- /dev/null +++ b/tests/unit/python/foglamp/services/core/api/plugins/test_install.py @@ -0,0 +1,146 @@ +# -*- coding: utf-8 -*- + +# FOGLAMP_BEGIN +# See: http://foglamp.readthedocs.io/ +# FOGLAMP_END + +import json +from unittest.mock import patch +import pytest + +from aiohttp import web + +from foglamp.services.core import routes +from foglamp.services.core.api.plugins import install as plugins_install + + +__author__ = "Ashish Jabble" +__copyright__ = "Copyright (c) 2019 Dianomic Systems" +__license__ = "Apache 2.0" +__version__ = "${VERSION}" + + +@pytest.allure.feature("unit") +@pytest.allure.story("api", "plugins", "install") +class TestPluginInstall: + @pytest.fixture + def client(self, loop, test_client): + app = web.Application(loop=loop) + # fill the routes table + routes.setup(app) + return loop.run_until_complete(test_client(app)) + + @pytest.mark.parametrize("param, message", [ + ({"create": "blah"}, "URL, checksum, plugin type and format post params are mandatory."), + ({"url": "http://blah.co.in"}, "URL, checksum, plugin type and format post params are mandatory."), + ({"format": "tar"}, "URL, checksum, plugin 
type and format post params are mandatory."), + ({"compressed": "false"}, "URL, checksum, plugin type and format post params are mandatory."), + ({"type": "north"}, "URL, checksum, plugin type and format post params are mandatory."), + ({"checksum": "4015c2dea1cc71dbf70a23f6a203eeb6"}, + "URL, checksum, plugin type and format post params are mandatory."), + ({"url": "http://blah.co.in", "checksum": "4015c2dea1cc71dbf70a23f6a203eeb6"}, + "URL, checksum, plugin type and format post params are mandatory."), + ({"format": "tar", "checksum": "4015c2dea1cc71dbf70a23f6a203eeb6"}, + "URL, checksum, plugin type and format post params are mandatory."), + ({"format": "tar", "compressed": "false", "checksum": "4015c2dea1cc71dbf70a23f6a203eeb6"}, + "URL, checksum, plugin type and format post params are mandatory."), + ({"type": "north", "checksum": "4015c2dea1cc71dbf70a23f6a203eeb6"}, + "URL, checksum, plugin type and format post params are mandatory."), + ({"url": "http://blah.co.in", "format": "deb", "checksum": "4015c2dea1cc71dbf70a23f6a203eeb6"}, + "URL, checksum, plugin type and format post params are mandatory."), + ({"url": "http://blah.co.in", "format": "deb", "type": "blah", "checksum": "4015c2dea1cc71dbf70a23f6a203eeb6"}, + "Invalid plugin type. Must be 'north' or 'south' or 'filter' or 'notificationDelivery' or 'notificationRule'"), + ({"url": "http://blah.co.in", "format": "blah", "type": "filter", "checksum": "4015c2dea1cc71dbf70a23f6a203ee"}, + "Invalid format. 
Must be 'tar' or 'deb'"), + ({"url": "http://blah.co.in", "format": "tar", "type": "south", "checksum": "4015c2dea1cc71dbf70a23f6a203eeb6", + "compressed": "blah"}, 'Only "true", "false", true, false are allowed for value of compressed.') + ]) + async def test_bad_post_plugins_install(self, client, param, message): + resp = await client.post('/foglamp/plugins', data=json.dumps(param)) + assert 400 == resp.status + assert message == resp.reason + + async def test_bad_checksum_post_plugins_install(self, client): + async def async_mock(): + return [tar_file_name] + + tar_file_name = 'Benchmark.tar' + checksum_value = "4015c2dea1cc71dbf70a23f6a203eeb6" + url_value = "http://10.2.5.26:5000//download/c/{}".format(tar_file_name) + param = {"url": url_value, "format": "tar", "type": "south", "checksum": checksum_value, "compressed": "true"} + with patch.object(plugins_install, 'download', return_value=async_mock()) as download_patch: + with patch.object(plugins_install, 'validate_checksum', return_value=False) as checksum_patch: + resp = await client.post('/foglamp/plugins', data=json.dumps(param)) + assert 400 == resp.status + assert 'Checksum is failed.' 
== resp.reason + checksum_patch.assert_called_once_with(checksum_value, tar_file_name) + download_patch.assert_called_once_with([url_value]) + + async def test_post_plugins_install_with_tar(self, client): + async def async_mock(ret_val): + return ret_val + + plugin_name = 'coap' + tar_file_name = 'foglamp-south-coap-1.5.2.tar' + files = [plugin_name, '{}/__init__.py'.format(plugin_name), '{}/README.rst'.format(plugin_name), + '{}/{}.py'.format(plugin_name, plugin_name), '{}/requirements.sh'.format(plugin_name)] + checksum_value = "4015c2dea1cc71dbf70a23f6a203eeb6" + url_value = "http://10.2.5.26:5000/download/{}".format(tar_file_name) + param = {"url": url_value, "format": "tar", "type": "south", "checksum": checksum_value} + with patch.object(plugins_install, 'download', return_value=async_mock([tar_file_name])) as download_patch: + with patch.object(plugins_install, 'validate_checksum', return_value=True) as checksum_patch: + with patch.object(plugins_install, 'extract_file', return_value=async_mock(files)) as extract_patch: + with patch.object(plugins_install, 'copy_file_install_requirement') \ + as copy_file_install_requirement_patch: + resp = await client.post('/foglamp/plugins', data=json.dumps(param)) + assert 200 == resp.status + r = await resp.text() + output = json.loads(r) + assert '{} is successfully downloaded and installed'.format(tar_file_name) == output['message'] + assert copy_file_install_requirement_patch.called + extract_patch.assert_called_once_with(tar_file_name, False) + checksum_patch.assert_called_once_with(checksum_value, tar_file_name) + download_patch.assert_called_once_with([url_value]) + + async def test_post_plugins_install_with_compressed_tar(self, client): + async def async_mock(ret_val): + return ret_val + + plugin_name = 'rms' + tar_file_name = 'foglamp-filter-rms-1.5.2.tar.gz' + files = [plugin_name, '{}/lib{}.so.1'.format(plugin_name, plugin_name), + '{}/lib{}.so'.format(plugin_name, plugin_name)] + checksum_value = 
"2019c2dea1cc71dbf70a23f6a203fdgh" + url_value = "http://10.2.5.26:5000/filter/download/{}".format(tar_file_name) + param = {"url": url_value, "format": "tar", "type": "filter", "checksum": checksum_value, "compressed": "true"} + with patch.object(plugins_install, 'download', return_value=async_mock([tar_file_name])) as download_patch: + with patch.object(plugins_install, 'validate_checksum', return_value=True) as checksum_patch: + with patch.object(plugins_install, 'extract_file', return_value=async_mock(files)) as extract_patch: + with patch.object(plugins_install, 'copy_file_install_requirement') \ + as copy_file_install_requirement_patch: + resp = await client.post('/foglamp/plugins', data=json.dumps(param)) + assert 200 == resp.status + r = await resp.text() + output = json.loads(r) + assert '{} is successfully downloaded and installed'.format(tar_file_name) == output['message'] + assert copy_file_install_requirement_patch.called + extract_patch.assert_called_once_with(tar_file_name, True) + checksum_patch.assert_called_once_with(checksum_value, tar_file_name) + download_patch.assert_called_once_with([url_value]) + + async def test_post_plugins_install_with_debian(self, client): + async def async_mock(): + return [plugin_name, '{}/__init__.py'.format(plugin_name), '{}/README.rst'.format(plugin_name), + '{}/{}.py'.format(plugin_name, plugin_name), '{}/requirements.sh'.format(plugin_name)] + + plugin_name = 'coap' + checksum_value = "4015c2dea1cc71dbf70a23f6a203eeb6" + url_value = "http://10.2.5.26:5000/download/foglamp-south-coap-1.5.2.deb" + param = {"url": url_value, "format": "deb", "type": "south", "checksum": checksum_value} + with patch.object(plugins_install, 'download', return_value=async_mock()) as download_patch: + with patch.object(plugins_install, 'validate_checksum', return_value=True) as checksum_patch: + resp = await client.post('/foglamp/plugins', data=json.dumps(param)) + assert 200 == resp.status + # FIXME: Right now pass as no handling for 
debian case yet + checksum_patch.assert_called_once_with(checksum_value, plugin_name) + download_patch.assert_called_once_with([url_value]) From 115e04e677a7ff399875cbdf6befaf9080d85c15 Mon Sep 17 00:00:00 2001 From: Praveen Garg Date: Fri, 12 Apr 2019 18:14:32 +0530 Subject: [PATCH 025/161] added basic test for ephat for RPi --- tests/system/python/e2e/test_e2e_rpi_ephat.py | 157 ++++++++++++++++++ 1 file changed, 157 insertions(+) create mode 100644 tests/system/python/e2e/test_e2e_rpi_ephat.py diff --git a/tests/system/python/e2e/test_e2e_rpi_ephat.py b/tests/system/python/e2e/test_e2e_rpi_ephat.py new file mode 100644 index 0000000000..1c40093f81 --- /dev/null +++ b/tests/system/python/e2e/test_e2e_rpi_ephat.py @@ -0,0 +1,157 @@ +# -*- coding: utf-8 -*- + +# FOGLAMP_BEGIN +# See: http://foglamp.readthedocs.io/ +# FOGLAMP_END + +""" Test end to end flow with: + Ingress: ePhat south plugin + Egress: PI Server (C) plugin +""" + +import platform +import http.client +import json +import time +import pytest +import utils +from urllib.parse import quote +from collections import Counter + + +__author__ = "Praveen Garg" +__copyright__ = "Copyright (c) 2019 Dianomic Systems" +__license__ = "Apache 2.0" +__version__ = "${VERSION}" + + +SOUTH_PLUGIN = "envirophat" +SVC_NAME = "Room-1" +ASSET_PREFIX = "envirophat/" # default for envirophat South plugin +ASSET_NAME = "weather" +SENSOR_READ_KEY = "temperature" + +TASK_NAME = "North v2 PI" + + +@pytest.mark.skipif(platform.platform().find("arm") == -1, reason="RPi only (ePhat) test") +# RPi Linux-4.14.98-v7+-armv7l-with-debian-9.8 +class TestE2eRPiEphatEgress: + + # def test_Rpi(self): + # assert -1 != platform.platform().find("arm"), "ePhat tests are expected to be run on RPi only!" 
+ + def get_ping_status(self, foglamp_url): + conn = http.client.HTTPConnection(foglamp_url) + conn.request("GET", '/foglamp/ping') + r = conn.getresponse() + assert 200 == r.status + r = r.read().decode() + jdoc = json.loads(r) + return jdoc + + def get_statistics_map(self, foglamp_url): + conn = http.client.HTTPConnection(foglamp_url) + conn.request("GET", '/foglamp/statistics') + r = conn.getresponse() + assert 200 == r.status + r = r.read().decode() + jdoc = json.loads(r) + return utils.serialize_stats_map(jdoc) + + @pytest.fixture + def start_south_north(self, reset_and_start_foglamp, add_south, south_branch, + remove_data_file, remove_directories, enable_schedule, foglamp_url, + start_north_pi_server_c, pi_host, pi_port, pi_token, wait_time): + """ This fixture clones given south & filter plugin repo, and starts south and PI north C instance with filter + + """ + + add_south(SOUTH_PLUGIN, south_branch, foglamp_url, service_name=SVC_NAME) + + start_north_pi_server_c(foglamp_url, pi_host, pi_port, pi_token, taskname=TASK_NAME, start_task=False) + + # let the readings ingress + time.sleep(wait_time) + + enable_schedule(foglamp_url, TASK_NAME) + + yield self.start_south_north + + remove_directories("/tmp/foglamp-south-{}".format(SOUTH_PLUGIN)) + + def test_end_to_end(self, start_south_north, read_data_from_pi, foglamp_url, pi_host, pi_admin, + pi_passwd, pi_db, wait_time, retries, skip_verify_north_interface): + + self._verify_ping_and_statistics(foglamp_url) + + self._verify_ingest(foglamp_url) + + # if not skip_verify_north_interface: + # self._verify_egress(read_data_from_pi, pi_host, pi_admin, pi_passwd, pi_db, wait_time, retries) + + def _verify_ping_and_statistics(self, foglamp_url): + ping_response = self.get_ping_status(foglamp_url) + assert ping_response["dataRead"] + # assert ping_response["dataSent"] + + actual_stats_map = self.get_statistics_map(foglamp_url) + key_asset_name_with_prefix = "{}{}".format(ASSET_PREFIX.upper(), ASSET_NAME.upper()) + 
assert actual_stats_map[key_asset_name_with_prefix] + assert actual_stats_map['READINGS'] + # assert actual_stats_map[TASK_NAME] + # assert actual_stats_map['Readings Sent'] + + def _verify_ingest(self, foglamp_url): + asset_name_with_prefix = "{}{}".format(ASSET_PREFIX, ASSET_NAME) + conn = http.client.HTTPConnection(foglamp_url) + + conn.request("GET", '/foglamp/asset') + r = conn.getresponse() + assert 200 == r.status + r = r.read().decode() + jdoc = json.loads(r) + assert len(jdoc), "No asset found" + actual_assets = [i["assetCode"] for i in jdoc] + assert asset_name_with_prefix in actual_assets + assert jdoc[0]["count"] + expected_assets = Counter(["envirophat/magnetometer", "envirophat/rgb", "envirophat/accelerometer", "envirophat/weather"]) + assert Counter(actual_assets) == expected_assets + + # foglamp/asset/envirophat%2Fweather + conn.request("GET", '/foglamp/asset/{}'.format(quote(asset_name_with_prefix, safe=''))) + r = conn.getresponse() + assert 200 == r.status + r = r.read().decode() + jdoc = json.loads(r) + assert len(jdoc), "No asset found" + assert jdoc[0]["reading"][SENSOR_READ_KEY] + + weather_sensors = ["temperature", "altitude", "pressure"] + for s in weather_sensors: + conn.request("GET", '/foglamp/asset/{}/{}'.format(quote(asset_name_with_prefix, safe=''), s)) + r = conn.getresponse() + assert 200 == r.status + r = r.read().decode() + jdoc = json.loads(r) + assert len(jdoc), "No asset found" + + # verify summary (avg|min|max) + # foglamp/asset/envirophat%2Fweather/temperature/summary + + # def _verify_egress(self, read_data_from_pi, pi_host, pi_admin, pi_passwd, pi_db, wait_time, retries): + # retry_count = 0 + # data_from_pi = None + # while (data_from_pi is None or data_from_pi == []) and retry_count < retries: + # asset_name_with_prefix = "{}{}".format(ASSET_PREFIX, ASSET_NAME) + # data_from_pi = read_data_from_pi(pi_host, pi_admin, pi_passwd, pi_db, asset_name_with_prefix, {READ_KEY}) + # retry_count += 1 + # time.sleep(wait_time * 2) + 
# + # if data_from_pi is None or retry_count == retries: + # assert False, "Failed to read data from PI" + # + # assert SENSOR_READ_KEY in data_from_pi + # assert isinstance(data_from_pi[SENSOR_READ_KEY], list) + # for n in data_from_pi[SENSOR_READ_KEY]]: + # assert round(n, 1) > 0.0 From 111dd6b6934004cc4246cd516838b84ac6cc14f2 Mon Sep 17 00:00:00 2001 From: stefano Date: Fri, 12 Apr 2019 14:52:27 +0200 Subject: [PATCH 026/161] FOGL-2634: fix the problem of comparing a string to a dict --- python/foglamp/common/configuration_manager.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/python/foglamp/common/configuration_manager.py b/python/foglamp/common/configuration_manager.py index 6de3199abd..c636c25694 100644 --- a/python/foglamp/common/configuration_manager.py +++ b/python/foglamp/common/configuration_manager.py @@ -14,6 +14,8 @@ import datetime import os from math import * +import collections +import ast from foglamp.common.storage_client.payload_builder import PayloadBuilder from foglamp.common.storage_client.storage_client import StorageClientAsync @@ -493,7 +495,13 @@ async def update_configuration_item_bulk(self, category_name, config_item_list): old_value = cat_info[item_name]['value'] new_val = self._clean(cat_info[item_name]['type'], new_val) - if old_value != new_val: + # it converts .old so both .new and .old are dicts + # it uses OrderedDict to preserve the sequence of the keys + old_value_dict = ast.literal_eval(old_value) + old_value_ord = collections.OrderedDict(old_value_dict) + new_val_ord = collections.OrderedDict(new_val) + + if old_value_ord != new_val_ord: payload_item = PayloadBuilder().SELECT("key", "description", "ts", "value") \ .JSON_PROPERTY(("value", [item_name, "value"], new_val)) \ .FORMAT("return", ("ts", "YYYY-MM-DD HH24:MI:SS.MS")) \ From ac94cec3d1a3f3974b6916ec128adcdd1ae4baf4 Mon Sep 17 00:00:00 2001 From: stefano Date: Fri, 12 Apr 2019 16:43:46 +0200 Subject: [PATCH 027/161] FOGL-2634: fix the 
problem of comparing a string to a dict --- .../foglamp/common/configuration_manager.py | 21 ++++++--- .../common/test_configuration_manager.py | 47 ++++++++++++++++++- 2 files changed, 60 insertions(+), 8 deletions(-) diff --git a/python/foglamp/common/configuration_manager.py b/python/foglamp/common/configuration_manager.py index c636c25694..96f34d1b35 100644 --- a/python/foglamp/common/configuration_manager.py +++ b/python/foglamp/common/configuration_manager.py @@ -495,13 +495,20 @@ async def update_configuration_item_bulk(self, category_name, config_item_list): old_value = cat_info[item_name]['value'] new_val = self._clean(cat_info[item_name]['type'], new_val) - # it converts .old so both .new and .old are dicts - # it uses OrderedDict to preserve the sequence of the keys - old_value_dict = ast.literal_eval(old_value) - old_value_ord = collections.OrderedDict(old_value_dict) - new_val_ord = collections.OrderedDict(new_val) - - if old_value_ord != new_val_ord: + old_value_for_check = old_value + new_val_for_check = new_val + if type(new_val) == dict: + # it converts .old so both .new and .old are dicts + # it uses OrderedDict to preserve the sequence of the keys + try: + old_value_dict = ast.literal_eval(old_value) + old_value_for_check = collections.OrderedDict(old_value_dict) + new_val_for_check = collections.OrderedDict(new_val) + except: + old_value_for_check = old_value + new_val_for_check = new_val + + if old_value_for_check != new_val_for_check: payload_item = PayloadBuilder().SELECT("key", "description", "ts", "value") \ .JSON_PROPERTY(("value", [item_name, "value"], new_val)) \ .FORMAT("return", ("ts", "YYYY-MM-DD HH24:MI:SS.MS")) \ diff --git a/tests/unit/python/foglamp/common/test_configuration_manager.py b/tests/unit/python/foglamp/common/test_configuration_manager.py index afd8ce3d24..25a4cbd30e 100644 --- a/tests/unit/python/foglamp/common/test_configuration_manager.py +++ b/tests/unit/python/foglamp/common/test_configuration_manager.py @@ -21,7 
+21,6 @@ CAT_NAME = 'test' ITEM_NAME = "test_item_name" - @pytest.allure.feature("unit") @pytest.allure.story("common", "configuration_manager") class TestConfigurationManager: @@ -2511,6 +2510,7 @@ async def async_mock(return_value): assert 1 == patch_log_exc.call_count patch_get_all_items.assert_called_once_with(category_name) + async def test_update_configuration_item_bulk(self, category_name='rest_api'): async def async_mock(return_value): return return_value @@ -2561,6 +2561,51 @@ async def async_mock(return_value): patch_update.assert_not_called() patch_get_all_items.assert_called_once_with(category_name) + async def test_update_configuration_item_bulk_dict_no_change(self, category_name='rest_api'): + async def async_mock(return_value): + return return_value + + cat_info = {'providers': {'default': '{"providers": ["username", "ldap"] }', 'description': 'descr', 'type': 'JSON', 'value':'{"providers": ["username", "ldap"] }' }} + config_item_list = {"providers": {"providers": ["username", "ldap"] }} + storage_client_mock = MagicMock(spec=StorageClientAsync) + c_mgr = ConfigurationManager(storage_client_mock) + with patch.object(c_mgr, 'get_category_all_items', return_value=async_mock(cat_info)) as patch_get_all_items: + with patch.object(c_mgr._storage, 'update_tbl') as patch_update: + with patch.object(AuditLogger, 'information') as patch_audit: + with patch.object(ConfigurationManager, '_run_callbacks') as patch_callback: + result = await c_mgr.update_configuration_item_bulk(category_name, config_item_list) + assert result is None + patch_callback.assert_not_called() + patch_audit.assert_not_called() + patch_update.assert_not_called() + patch_get_all_items.assert_called_once_with(category_name) + + async def test_update_configuration_item_bulk_dict_change(self, category_name='rest_api'): + async def async_mock(return_value): + return return_value + + cat_info = {'providers': {'default': '{"providers": ["username", "ldap"] }', 'description': 'descr', 'type': 
'JSON', 'value':'{"providers": ["username", "ldap"] }' }} + config_item_list = {"providers": {"providers": ["username", "ldap_new"] }} + + update_result = {"response": "updated", "rows_affected": 1} + read_val = {'allowPing': {'default': 'true', 'description': 'Allow access to ping', 'value': 'true', 'type': 'boolean'}, + 'enableHttp': {'default': 'true', 'description': 'Enable HTTP', 'value': 'false', 'type': 'boolean'}} + payload = {'updates': [{'json_properties': [{'path': ['enableHttp', 'value'], 'column': 'value', 'value': 'false'}], + 'return': ['key', 'description', {'format': 'YYYY-MM-DD HH24:MI:SS.MS', 'column': 'ts'}, 'value'], + 'where': {'value': 'rest_api', 'column': 'key', 'condition': '='}}]} + audit_details = {'items': {'enableHttp': {'oldValue': 'true', 'newValue': 'false'}}, 'category': category_name} + storage_client_mock = MagicMock(spec=StorageClientAsync) + c_mgr = ConfigurationManager(storage_client_mock) + with patch.object(c_mgr, 'get_category_all_items', return_value=async_mock(cat_info)) as patch_get_all_items: + with patch.object(c_mgr._storage, 'update_tbl', return_value=async_mock(update_result)) as patch_update: + with patch.object(c_mgr, '_read_category_val', return_value=async_mock(read_val)) as patch_read_val: + with patch.object(AuditLogger, '__init__', return_value=None): + with patch.object(AuditLogger, 'information', return_value=async_mock(None)) as patch_audit: + with patch.object(ConfigurationManager, '_run_callbacks', return_value=async_mock(None)) \ + as patch_callback: + await c_mgr.update_configuration_item_bulk(category_name, config_item_list) + assert 1 == patch_update.call_count + @pytest.mark.parametrize("config_item_list", [ {'info': "2"}, {'info': "2", "info1": "9"}, From 7319e7c380806f9bf1c02006321ebfc0e520ee2a Mon Sep 17 00:00:00 2001 From: Amarendra Date: Fri, 12 Apr 2019 23:26:12 +0530 Subject: [PATCH 028/161] FOGL-2574 - fix failing e2e notification system tests (#1479) * FOGL-2574 - fix failing e2e 
notification tests * Update test_e2e_notification_service_with_plugins.py * Fixed wait_time, import time * minor refactoring and root cause finding for NTFSN audit failure (#1509) * notify35 script to be uploaded to delivery plugin as test data; with sys logger * stdout to dev/null for subprocess run (within the test) * FOGL-2709 is fixed --- tests/system/python/data/notify35.py | 12 ++ ...t_e2e_notification_service_with_plugins.py | 121 +++++++++++------- 2 files changed, 89 insertions(+), 44 deletions(-) create mode 100644 tests/system/python/data/notify35.py diff --git a/tests/system/python/data/notify35.py b/tests/system/python/data/notify35.py new file mode 100644 index 0000000000..f8c57e21d5 --- /dev/null +++ b/tests/system/python/data/notify35.py @@ -0,0 +1,12 @@ +import logging +from logging.handlers import SysLogHandler + + +def notify35(message): + logger = logging.getLogger(__name__) + logger.setLevel(level=logging.INFO) + handler = SysLogHandler(address='/dev/log') + logger.addHandler(handler) + + logger.info("notify35 called with {}".format(message)) + print("Notification alert: " + str(message)) diff --git a/tests/system/python/e2e/test_e2e_notification_service_with_plugins.py b/tests/system/python/e2e/test_e2e_notification_service_with_plugins.py index d81d5fc4b1..2bdb0cb0e0 100644 --- a/tests/system/python/e2e/test_e2e_notification_service_with_plugins.py +++ b/tests/system/python/e2e/test_e2e_notification_service_with_plugins.py @@ -5,8 +5,9 @@ # FOGLAMP_END """ Test end to end flow with: - Notification service And OverMaxRule in built rule plugin - notify-python35 delivery plugin + Notification service with + OverMaxRule & UnderMinRule in-built rules plugin + notify-python35 delivery channel plugin """ import os @@ -14,6 +15,9 @@ import subprocess import http.client import json +from threading import Event +import urllib.parse + import pytest @@ -26,13 +30,13 @@ SERVICE = "notification" SERVICE_NAME = "NotificationServer #1" NOTIFY_PLUGIN = 
"python35" -NOTIFY_RULE = "OverMaxRule" +NOTIFY_INBUILT_RULES = ["OverMaxRule", "UnderMinRule"] def _configure_and_start_service(service_branch, foglamp_url, remove_directories): try: subprocess.run(["$FOGLAMP_ROOT/tests/system/python/scripts/install_c_service {} {}" - .format(service_branch, SERVICE)], shell=True, check=True) + .format(service_branch, SERVICE)], shell=True, check=True, stdout=subprocess.DEVNULL) except subprocess.CalledProcessError: assert False, "{} installation failed".format(SERVICE) finally: @@ -56,7 +60,7 @@ def _configure_and_start_service(service_branch, foglamp_url, remove_directories def _install_notify_plugin(notify_branch, plugin_name, remove_directories): try: subprocess.run(["$FOGLAMP_ROOT/tests/system/python/scripts/install_c_plugin {} notify {}".format( - notify_branch, plugin_name)], shell=True, check=True) + notify_branch, plugin_name)], shell=True, check=True, stdout=subprocess.DEVNULL) except subprocess.CalledProcessError: assert False, "{} installation failed".format(plugin_name) finally: @@ -75,16 +79,18 @@ def _get_result(foglamp_url, path): def _verify_service(foglamp_url, status): jdoc = _get_result(foglamp_url, '/foglamp/service') - svc = jdoc['services'][2] - assert SERVICE_NAME == svc['name'] + srvc = [s for s in jdoc['services'] if s['name'] == SERVICE_NAME] + assert 1 == len(srvc) + svc = srvc[0] assert SERVICE.capitalize() == svc['type'] assert status == svc['status'] -def _verify_audit_log_entry(foglamp_url, path, name, severity='INFORMATION'): +def _verify_audit_log_entry(foglamp_url, path, name, severity='INFORMATION', count=1): jdoc = _get_result(foglamp_url, path) + assert len(jdoc['audit']) + assert count == jdoc['totalCount'] audit_detail = jdoc['audit'][0] - assert 1 == jdoc['totalCount'] assert severity == audit_detail['severity'] assert name == audit_detail['details']['name'] @@ -99,8 +105,13 @@ def _add_notification_instance(foglamp_url, payload): assert "Notification {} created 
successfully".format(payload['name']) == jdoc['result'] -class TestNotificationService: +def pause_for_x_seconds(x=1): + wait_e = Event() + wait_e.clear() + wait_e.wait(timeout=x) + +class TestNotificationService: def test_service(self, reset_and_start_foglamp, service_branch, foglamp_url, wait_time, retries, remove_directories): _configure_and_start_service(service_branch, foglamp_url, remove_directories) @@ -112,13 +123,13 @@ def test_service(self, reset_and_start_foglamp, service_branch, foglamp_url, wai svc = _get_result(foglamp_url, '/foglamp/service') service_registry = svc['services'] retry_count += 1 - time.sleep(wait_time * 2) + + pause_for_x_seconds(x=wait_time * 2) if len(service_registry) == default_registry_count: assert False, "Failed to start the {} service".format(SERVICE) _verify_service(foglamp_url, status='running') - _verify_audit_log_entry(foglamp_url, '/foglamp/audit?source=NTFST', name=SERVICE_NAME) def test_get_default_notification_plugins(self, foglamp_url, remove_directories): @@ -126,26 +137,25 @@ def test_get_default_notification_plugins(self, foglamp_url, remove_directories) remove_directories(os.environ['FOGLAMP_ROOT'] + 'cmake_build/C/plugins/notificationDelivery') jdoc = _get_result(foglamp_url, '/foglamp/notification/plugin') assert [] == jdoc['delivery'] - assert 1 == len(jdoc['rules']) - assert NOTIFY_RULE == jdoc['rules'][0]['name'] + assert 2 == len(jdoc['rules']) + assert NOTIFY_INBUILT_RULES[0] == jdoc['rules'][0]['name'] + assert NOTIFY_INBUILT_RULES[1] == jdoc['rules'][1]['name'] class TestNotificationCRUD: - # FIXME: FOGL-2434 Add name with some special character @pytest.mark.parametrize("data", [ - {"name": "Test1", "description": "Test 1 notification", "rule": NOTIFY_RULE, + {"name": "Test 1", "description": "Test 1 notification", "rule": NOTIFY_INBUILT_RULES[0], "channel": NOTIFY_PLUGIN, "enabled": "false", "notification_type": "retriggered"}, - {"name": "Test2", "description": "Test 2 notification", "rule": NOTIFY_RULE, 
+ {"name": "Test2", "description": "Test 2 notification", "rule": NOTIFY_INBUILT_RULES[0], "channel": NOTIFY_PLUGIN, "enabled": "false", "notification_type": "toggled"}, - {"name": "Test3", "description": "Test 3 notification", "rule": NOTIFY_RULE, + {"name": "Test #3", "description": "Test 3 notification", "rule": NOTIFY_INBUILT_RULES[0], "channel": NOTIFY_PLUGIN, "enabled": "false", "notification_type": "one shot"} ]) def test_create_notification_instances_with_default_rule_and_channel_python35(self, foglamp_url, notify_branch, data, remove_directories): - # FIXME: Handle in a better way; we need below code once for a test - if data['name'] == 'Test1': + if data['name'] == 'Test 1': _install_notify_plugin(notify_branch, NOTIFY_PLUGIN, remove_directories) _add_notification_instance(foglamp_url, data) @@ -153,20 +163,23 @@ def test_inbuilt_rule_plugin_and_notify_python35_delivery(self, foglamp_url): jdoc = _get_result(foglamp_url, '/foglamp/notification/plugin') assert 1 == len(jdoc['delivery']) assert NOTIFY_PLUGIN == jdoc['delivery'][0]['name'] - assert 1 == len(jdoc['rules']) - assert NOTIFY_RULE == jdoc['rules'][0]['name'] + assert 2 == len(jdoc['rules']) + assert NOTIFY_INBUILT_RULES[0] == jdoc['rules'][0]['name'] + assert NOTIFY_INBUILT_RULES[1] == jdoc['rules'][1]['name'] def test_get_notifications_and_audit_entry(self, foglamp_url): jdoc = _get_result(foglamp_url, '/foglamp/notification') assert 3 == len(jdoc['notifications']) + # Test 1, Test2 and Test #3 jdoc = _get_result(foglamp_url, '/foglamp/audit?source=NTFAD') assert 3 == jdoc['totalCount'] - def test_update_notification(self, foglamp_url, name="Test1"): + def test_update_notification(self, foglamp_url, name="Test 1"): conn = http.client.HTTPConnection(foglamp_url) data = {"notification_type": "toggled"} - conn.request("PUT", '/foglamp/notification/{}'.format(name), json.dumps(data)) + conn.request("PUT", '/foglamp/notification/{}'.format(urllib.parse.quote(name)) + , json.dumps(data)) r = 
conn.getresponse() assert 200 == r.status r = r.read().decode() @@ -174,12 +187,12 @@ def test_update_notification(self, foglamp_url, name="Test1"): assert "Notification {} updated successfully".format(name) == jdoc["result"] # Verify updated notification info - jdoc = _get_result(foglamp_url, '/foglamp/notification/{}'.format(name)) + jdoc = _get_result(foglamp_url, '/foglamp/notification/{}'.format(urllib.parse.quote(name))) assert "toggled" == jdoc['notification']['notificationType'] - def test_delete_notification(self, foglamp_url, name="Test3"): + def test_delete_notification(self, foglamp_url, name="Test #3"): conn = http.client.HTTPConnection(foglamp_url) - conn.request("DELETE", '/foglamp/notification/{}'.format(name)) + conn.request("DELETE", '/foglamp/notification/{}'.format(urllib.parse.quote(name))) r = conn.getresponse() assert 200 == r.status r = r.read().decode() @@ -190,8 +203,11 @@ def test_delete_notification(self, foglamp_url, name="Test3"): jdoc = _get_result(foglamp_url, '/foglamp/notification') notifications = jdoc['notifications'] assert 2 == len(notifications) - assert "Notification Test1" == notifications[0]['name'] - assert "Notification Test2" == notifications[1]['name'] + assert "Test 1" == notifications[0]['name'] + assert "Test2" == notifications[1]['name'] + + jdoc = _get_result(foglamp_url, '/foglamp/audit?source=NTFDL') + assert 1 == jdoc['totalCount'] class TestSentAndReceiveNotification: @@ -201,7 +217,7 @@ class TestSentAndReceiveNotification: ASSET_NAME = "{}".format(SOUTH_PLUGIN_NAME) @pytest.fixture - def start_south(self, add_south, remove_data_file, remove_directories, south_branch, foglamp_url, wait_time): + def start_south(self, add_south, remove_data_file, remove_directories, south_branch, foglamp_url): """ This fixture clone a south repo and starts south instance add_south: Fixture that starts any south service with given configuration remove_data_file: Fixture that remove data file created during the tests @@ -231,10 
+247,13 @@ def prepare_template_reading_from_fogbench(self): return fogbench_template_path def ingest_readings_from_fogbench(self, foglamp_url, wait_time): + pause_for_x_seconds(x=wait_time*3) conn = http.client.HTTPConnection(foglamp_url) subprocess.run(["cd $FOGLAMP_ROOT/extras/python; python3 -m fogbench -t ../../data/{}; cd -" - .format(self.FOGBENCH_TEMPLATE)], shell=True, check=True) - time.sleep(wait_time) + .format(self.FOGBENCH_TEMPLATE)], shell=True, check=True, stdout=subprocess.DEVNULL) + + pause_for_x_seconds(x=wait_time) + conn.request("GET", '/foglamp/asset') r = conn.getresponse() assert 200 == r.status @@ -257,7 +276,7 @@ def configure_rule_with_latest_eval_type(self, foglamp_url, cat_name): data = {"asset": self.ASSET_NAME, "datapoint": "sensor", "evaluation_type": "latest", - "trigger_value": str(self.SENSOR_VALUE), + "trigger_value": str(self.SENSOR_VALUE + 1), } conn.request("PUT", '/foglamp/category/rule{}'.format(cat_name), json.dumps(data)) r = conn.getresponse() @@ -272,30 +291,44 @@ def enable_notification(self, foglamp_url, cat_name, is_enabled=True): assert 200 == r.status def test_sent_and_receive_notification(self, foglamp_url, start_south, wait_time): - data = {"name": "Test4", "description": "Test4_Notification", "rule": NOTIFY_RULE, "channel": NOTIFY_PLUGIN, - "enabled": False, "notification_type": "retriggered"} + data = {"name": "Test4", + "description": "Test4_Notification", + "rule": NOTIFY_INBUILT_RULES[1], + "channel": NOTIFY_PLUGIN, + "enabled": True, + "notification_type": "one shot" + } name = data['name'] _add_notification_instance(foglamp_url, data) self.configure_rule_with_latest_eval_type(foglamp_url, name) - self.enable_notification(foglamp_url, name) - time.sleep(wait_time) + # upload script NotifyPython35::configure() -> lowercase(categoryName) + _script_ + method_name + ".py" + cat_name = "delivery{}".format(name) + script_path = '$FOGLAMP_ROOT/tests/system/python/data/notify35.py' + url = 'http://' + foglamp_url + 
'/foglamp/category/' + cat_name + '/script/upload' + upload_script = 'curl -F "script=@{}" {}'.format(script_path, url) + subprocess.run(upload_script, shell=True, check=True, stdout=subprocess.DEVNULL) + + # enable notification delivery (it was getting disabled, as no script file was available) + self.enable_notification(foglamp_url, "delivery" + name) + self.ingest_readings_from_fogbench(foglamp_url, wait_time) + time.sleep(wait_time) _verify_audit_log_entry(foglamp_url, '/foglamp/audit?source=NTFSN', name=name) class TestStartStopNotificationService: - def test_shutdown_service_with_schedule_disable(self, foglamp_url, disable_schedule, wait_time): disable_schedule(foglamp_url, SERVICE_NAME) - _verify_service(foglamp_url, status='shutdown') - time.sleep(wait_time) - _verify_audit_log_entry(foglamp_url, '/foglamp/audit?source=NTFSD', name=SERVICE_NAME) + pause_for_x_seconds(x=wait_time) + # After shutdown there should be 1 entry for NTFSD (shutdown) + _verify_audit_log_entry(foglamp_url, '/foglamp/audit?source=NTFSD', name=SERVICE_NAME, count=1) - def test_restart_notification_service(self, foglamp_url, enable_schedule): + def test_restart_notification_service(self, foglamp_url, enable_schedule, wait_time): enable_schedule(foglamp_url, SERVICE_NAME) - + pause_for_x_seconds(x=wait_time) _verify_service(foglamp_url, status='running') - _verify_audit_log_entry(foglamp_url, '/foglamp/audit?source=NTFST', name=SERVICE_NAME) + # After restart there should be 2 entries for NTFST (start) + _verify_audit_log_entry(foglamp_url, '/foglamp/audit?source=NTFST', name=SERVICE_NAME, count=2) From e532e353cb94638ca8c2bd2d7edb661139cf8aec Mon Sep 17 00:00:00 2001 From: Amarendra Date: Mon, 15 Apr 2019 16:28:11 +0530 Subject: [PATCH 029/161] FOGL-2652, 2654 - configuration, schedules tables snapshot API (#1499) * FOGL-2652 - Configuration table snapshot API * FOGL-2654 scope added * StorageServerError message sanitized * Error messages and return messages refactored * Only admin 
user should use snapshot api when auth is mandatory. * Int check for snapshot_id added --- .../common/storage_client/storage_client.py | 92 ++++++++++ .../services/core/api/snapshot/__init__.py | 0 .../services/core/api/snapshot/table.py | 162 ++++++++++++++++++ python/foglamp/services/core/routes.py | 13 +- 4 files changed, 266 insertions(+), 1 deletion(-) create mode 100644 python/foglamp/services/core/api/snapshot/__init__.py create mode 100644 python/foglamp/services/core/api/snapshot/table.py diff --git a/python/foglamp/common/storage_client/storage_client.py b/python/foglamp/common/storage_client/storage_client.py index 2523c9000f..9e7cbfdb18 100644 --- a/python/foglamp/common/storage_client/storage_client.py +++ b/python/foglamp/common/storage_client/storage_client.py @@ -15,6 +15,7 @@ import aiohttp import http.client import json +import time from abc import ABC, abstractmethod from foglamp.common import logger @@ -311,6 +312,97 @@ async def query_tbl_with_payload(self, tbl_name, query_payload): return jdoc + async def post_snapshot(self, tbl_name): + """Create a table snapshot + + :param tbl_name: + :return: + + :Example: + curl -X POST http://0.0.0.0:8080/storage/table/configuration/snapshot + """ + post_url = '/storage/table/{tbl_name}/snapshot'.format(tbl_name=tbl_name) + data = {"id": str(int(time.time()))} + + url = 'http://' + self.base_url + post_url + async with aiohttp.ClientSession() as session: + async with session.post(url, data=json.dumps(data)) as resp: + status_code = resp.status + jdoc = await resp.text() + if status_code not in range(200, 209): + _LOGGER.info("POST %s", post_url) + _LOGGER.error("Error code: %d, reason: %s, details: %s", resp.status, resp.reason, jdoc) + raise StorageServerError(code=resp.status, reason=resp.reason, error=jdoc) + return json.loads(jdoc) + + async def put_snapshot(self, tbl_name, snapshot_id): + """Restore a table snapshot + + :param tbl_name: + :param snapshot_id: + :return: + + :Example: + curl -X PUT 
http://0.0.0.0:8080/storage/table/configuration/snapshot/cea17db8-6ccc-11e7-907b-a6006ad3dba0 + """ + put_url = '/storage/table/{tbl_name}/snapshot/{id}'.format(tbl_name=tbl_name, id=snapshot_id) + + url = 'http://' + self.base_url + put_url + async with aiohttp.ClientSession() as session: + async with session.put(url) as resp: + status_code = resp.status + jdoc = await resp.text() + if status_code not in range(200, 209): + _LOGGER.info("PUT %s", put_url) + _LOGGER.error("Error code: %d, reason: %s, details: %s", resp.status, resp.reason, jdoc) + raise StorageServerError(code=resp.status, reason=resp.reason, error=jdoc) + return json.loads(jdoc) + + async def delete_snapshot(self, tbl_name, snapshot_id): + """Delete a table snapshot + + :param tbl_name: + :param snapshot_id: + :return: + + :Example: + curl -X DELETE http://0.0.0.0:8080/storage/table/configuration/snapshot/cea17db8-6ccc-11e7-907b-a6006ad3dba0 + """ + delete_url = '/storage/table/{tbl_name}/snapshot/{id}'.format(tbl_name=tbl_name, id=snapshot_id) + + url = 'http://' + self.base_url + delete_url + async with aiohttp.ClientSession() as session: + async with session.delete(url) as resp: + status_code = resp.status + jdoc = await resp.text() + if status_code not in range(200, 209): + _LOGGER.info("DELETE %s", delete_url) + _LOGGER.error("Error code: %d, reason: %s, details: %s", resp.status, resp.reason, jdoc) + raise StorageServerError(code=resp.status, reason=resp.reason, error=jdoc) + return json.loads(jdoc) + + async def get_snapshot(self, tbl_name): + """Get a table snapshot + + :param tbl_name: + :return: + + :Example: + curl -X GET http://0.0.0.0:8080/storage/table/configuration/snapshot + """ + get_url = '/storage/table/{tbl_name}/snapshot'.format(tbl_name=tbl_name) + + url = 'http://' + self.base_url + get_url + async with aiohttp.ClientSession() as session: + async with session.get(url) as resp: + status_code = resp.status + jdoc = await resp.text() + if status_code not in range(200, 209): + 
_LOGGER.info("GET %s", get_url) + _LOGGER.error("Error code: %d, reason: %s, details: %s", resp.status, resp.reason, jdoc) + raise StorageServerError(code=resp.status, reason=resp.reason, error=jdoc) + return json.loads(jdoc) + class ReadingsStorageClientAsync(StorageClientAsync): """ Readings table operations """ diff --git a/python/foglamp/services/core/api/snapshot/__init__.py b/python/foglamp/services/core/api/snapshot/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/foglamp/services/core/api/snapshot/table.py b/python/foglamp/services/core/api/snapshot/table.py new file mode 100644 index 0000000000..7172d5b830 --- /dev/null +++ b/python/foglamp/services/core/api/snapshot/table.py @@ -0,0 +1,162 @@ +# -*- coding: utf-8 -*- + +# FOGLAMP_BEGIN +# See: http://foglamp.readthedocs.io/ +# FOGLAMP_END + + +import json +from aiohttp import web + +from foglamp.services.core.connect import * +from foglamp.common.storage_client.exceptions import StorageServerError +from foglamp.common.web.middleware import has_permission + + +__author__ = "Amarendra K Sinha" +__copyright__ = "Copyright (c) 2019 Dianomic Systems" +__license__ = "Apache 2.0" +__version__ = "${VERSION}" + + +_help = """ + ------------------------------------------------------------------------------- + | GET POST | /foglamp/snapshot/category | + | PUT DELETE | /foglamp/snapshot/category/{id} | + | GET POST | /foglamp/snapshot/schedule | + | PUT DELETE | /foglamp/snapshot/schedule/{id} | + ------------------------------------------------------------------------------- +""" + +_tables = { + "category": "configuration", + "schedule": "schedules" +} + + +@has_permission("admin") +async def get_snapshot(request): + """ get list of available snapshots + + :Example: + curl -X GET http://localhost:8081/foglamp/snapshot/category + curl -X GET http://localhost:8081/foglamp/snapshot/schedule + + When auth is mandatory: + curl -X GET http://localhost:8081/foglamp/snapshot/category -H 
"authorization: " + curl -X GET http://localhost:8081/foglamp/snapshot/schedule -H "authorization: " + """ + try: + r_path = request.path.split('/foglamp/snapshot/') + table = _tables[r_path[1]] + + _storage = get_storage_async() # from foglamp.services.core.connect + retval = await _storage.get_snapshot(table) + newlist = sorted(retval["rows"], key=lambda k: k['id'], reverse=True) + except (StorageServerError, Exception) as ex: + raise web.HTTPInternalServerError(reason='{} table snapshots could not be fetched. {}'.format(table, str(ex))) + else: + return web.json_response({"snapshots": newlist}) + + +@has_permission("admin") +async def post_snapshot(request): + """ Create a snapshot + + :Example: + curl -X POST http://localhost:8081/foglamp/snapshot/category + curl -X POST http://localhost:8081/foglamp/snapshot/schedule + + When auth is mandatory: + curl -X POST http://localhost:8081/foglamp/snapshot/category -H "authorization: " + curl -X POST http://localhost:8081/foglamp/snapshot/schedule -H "authorization: " + """ + try: + r_path = request.path.split('/foglamp/snapshot/') + table = _tables[r_path[1]] + + _storage = get_storage_async() # from foglamp.services.core.connect + retval = await _storage.post_snapshot(table) + except (StorageServerError, Exception) as ex: + raise web.HTTPInternalServerError(reason='{} table snapshot could not be created. 
{}'.format(table, str(ex))) + else: + return web.json_response(retval["created"]) + + +@has_permission("admin") +async def put_snapshot(request): + """restore a snapshot + + :Example: + curl -X PUT http://localhost:8081/foglamp/snapshot/category/1554202741 + curl -X PUT http://localhost:8081/foglamp/snapshot/schedule/1554202742 + + When auth is mandatory: + curl -X PUT http://localhost:8081/foglamp/snapshot/category/1554202741 -H "authorization: " + curl -X PUT http://localhost:8081/foglamp/snapshot/schedule/1554202742 -H "authorization: " + """ + try: + r_path = request.path.split('/foglamp/snapshot/') + table = _tables[r_path[1].split('/')[0]] + + snapshot_id = request.match_info.get('id', None) + + try: + snapshot_id = int(snapshot_id) + except: + raise ValueError('Invalid snapshot id: {}'.format(snapshot_id)) + + _storage = get_storage_async() # from foglamp.services.core.connect + retval = await _storage.put_snapshot(table, snapshot_id) + except StorageServerError as ex: + if int(ex.code) in range(400, 500): + raise web.HTTPBadRequest( + reason='{} table snapshot could not be restored. {}'.format(table, json.loads(ex.error)['message'])) + else: + raise web.HTTPInternalServerError( + reason='{} table snapshot could not be restored. {}'.format(table, json.loads(ex.error)['message'])) + except ValueError as ex: + raise web.HTTPBadRequest(reason=str(ex)) + except Exception as ex: + raise web.HTTPInternalServerError(reason='{} table snapshot could not be restored. 
{}'.format(table, str(ex))) + else: + return web.json_response(retval["loaded"]) + + +@has_permission("admin") +async def delete_snapshot(request): + """delete a snapshot + + :Example: + curl -X DELETE http://localhost:8081/foglamp/snapshot/category/1554202741 + curl -X DELETE http://localhost:8081/foglamp/snapshot/schedule/1554202742 + + When auth is mandatory: + curl -X DELETE http://localhost:8081/foglamp/snapshot/category/1554202741 -H "authorization: " + curl -X DELETE http://localhost:8081/foglamp/snapshot/schedule/1554202742 -H "authorization: " + """ + try: + r_path = request.path.split('/foglamp/snapshot/') + table = _tables[r_path[1].split('/')[0]] + + snapshot_id = request.match_info.get('id', None) + try: + snapshot_id = int(snapshot_id) + except: + raise ValueError('Invalid snapshot id: {}'.format(snapshot_id)) + + _storage = get_storage_async() # from foglamp.services.core.connect + retval = await _storage.delete_snapshot(table, snapshot_id) + except StorageServerError as ex: + if int(ex.code) in range(400, 500): + raise web.HTTPBadRequest( + reason='{} table snapshot could not be deleted. {}'.format(table, json.loads(ex.error)['message'])) + else: + raise web.HTTPInternalServerError( + reason='{} table snapshot could not be deleted. {}'.format(table, json.loads(ex.error)['message'])) + except ValueError as ex: + raise web.HTTPBadRequest(reason=str(ex)) + except Exception as ex: + raise web.HTTPInternalServerError(reason='{} table snapshot could not be deleted. 
{}'.format(table, str(ex))) + else: + return web.json_response(retval["deleted"]) diff --git a/python/foglamp/services/core/routes.py b/python/foglamp/services/core/routes.py index 4c021f078b..9d06d46328 100644 --- a/python/foglamp/services/core/routes.py +++ b/python/foglamp/services/core/routes.py @@ -23,9 +23,10 @@ from foglamp.services.core.api import north from foglamp.services.core.api import filters from foglamp.services.core.api import notification +from foglamp.services.core.api.snapshot import table as snapshot_table -__author__ = "Ashish Jabble, Praveen Garg, Massimiliano Pinto" +__author__ = "Ashish Jabble, Praveen Garg, Massimiliano Pinto, Amarendra K Sinha" __copyright__ = "Copyright (c) 2017-2018 OSIsoft, LLC" __license__ = "Apache 2.0" __version__ = "${VERSION}" @@ -175,6 +176,16 @@ def setup(app): app.router.add_route('PUT', '/foglamp/notification/{notification_name}', notification.put_notification) app.router.add_route('DELETE', '/foglamp/notification/{notification_name}', notification.delete_notification) + # Snapshot config + app.router.add_route('GET', '/foglamp/snapshot/category', snapshot_table.get_snapshot) + app.router.add_route('POST', '/foglamp/snapshot/category', snapshot_table.post_snapshot) + app.router.add_route('PUT', '/foglamp/snapshot/category/{id}', snapshot_table.put_snapshot) + app.router.add_route('DELETE', '/foglamp/snapshot/category/{id}', snapshot_table.delete_snapshot) + app.router.add_route('GET', '/foglamp/snapshot/schedule', snapshot_table.get_snapshot) + app.router.add_route('POST', '/foglamp/snapshot/schedule', snapshot_table.post_snapshot) + app.router.add_route('PUT', '/foglamp/snapshot/schedule/{id}', snapshot_table.put_snapshot) + app.router.add_route('DELETE', '/foglamp/snapshot/schedule/{id}', snapshot_table.delete_snapshot) + # enable cors support enable_cors(app) From c43eeb34c307a3457ed84062a1443b412ae38e95 Mon Sep 17 00:00:00 2001 From: Amarendra Date: Mon, 15 Apr 2019 16:59:01 +0530 Subject: [PATCH 030/161] 
Missing auth_certificates script installation added (#1523) --- Makefile | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/Makefile b/Makefile index 4788680341..53b8f8ad64 100644 --- a/Makefile +++ b/Makefile @@ -96,6 +96,7 @@ BACKUP_SRC := scripts/tasks/backup RESTORE_SRC := scripts/tasks/restore CHECK_CERTS_TASK_SCRIPT_SRC := scripts/tasks/check_certs CERTIFICATES_SCRIPT_SRC := scripts/certificates +AUTH_CERTIFICATES_SCRIPT_SRC := scripts/auth_certificates PACKAGE_UPDATE_SCRIPT_SRC := scripts/package # EXTRA SCRIPTS @@ -289,6 +290,7 @@ scripts_install : $(SCRIPTS_INSTALL_DIR) \ install_restore_script \ install_check_certificates_script \ install_certificates_script \ + install_auth_certificates_script \ install_package_update_script # create scripts install dir @@ -352,6 +354,9 @@ install_storage_script : $(SCRIPT_INSTALL_DIR) $(STORAGE_SCRIPT_SRC) install_certificates_script : $(SCRIPT_INSTALL_DIR) $(CERTIFICATES_SCRIPT_SRC) $(CP) $(CERTIFICATES_SCRIPT_SRC) $(SCRIPTS_INSTALL_DIR) +install_auth_certificates_script : $(SCRIPT_INSTALL_DIR) $(AUTH_CERTIFICATES_SCRIPT_SRC) + $(CP) $(AUTH_CERTIFICATES_SCRIPT_SRC) $(SCRIPTS_INSTALL_DIR) + install_package_update_script : $(SCRIPT_INSTALL_DIR) $(PACKAGE_UPDATE_SCRIPT_SRC) $(CP_DIR) $(PACKAGE_UPDATE_SCRIPT_SRC) $(SCRIPTS_INSTALL_DIR) chmod -R a-w $(SCRIPTS_INSTALL_DIR)/package From e85479050cbdb93c8d4f330390c4012006387039 Mon Sep 17 00:00:00 2001 From: ashish-jabble Date: Tue, 16 Apr 2019 15:31:20 +0530 Subject: [PATCH 031/161] optional config items set support added with an existing ENDPOINT PUT category with different payload --- .../foglamp/common/configuration_manager.py | 69 ++++++++++++++++++- .../services/core/api/configuration.py | 30 +++++--- .../common/test_configuration_manager.py | 54 ++++++++++++++- .../services/core/api/test_configuration.py | 35 ++++++++++ 4 files changed, 175 insertions(+), 13 deletions(-) diff --git a/python/foglamp/common/configuration_manager.py 
b/python/foglamp/common/configuration_manager.py index 96f34d1b35..5c939661cb 100644 --- a/python/foglamp/common/configuration_manager.py +++ b/python/foglamp/common/configuration_manager.py @@ -35,6 +35,7 @@ # MAKE UPPER_CASE _valid_type_strings = sorted(['boolean', 'integer', 'float', 'string', 'IPv4', 'IPv6', 'X509 certificate', 'password', 'JSON', 'URL', 'enumeration', 'script']) +_optional_items = sorted(['readonly', 'order', 'length', 'maximum', 'minimum', 'rule', 'deprecated', 'displayName']) RESERVED_CATG = ['South', 'North', 'General', 'Advanced', 'Utilities', 'rest_api', 'Security', 'service', 'SCHEDULER', @@ -283,7 +284,6 @@ def get_entry_val(k): d = {entry_name: entry_val} expected_item_entries.update(d) - num_entries = expected_item_entries.get(entry_name) if set_value_val_from_default_val and entry_name == 'value': raise ValueError('Specifying value_name and value_val for item_name {} is not allowed if ' @@ -312,7 +312,6 @@ def get_entry_val(k): if set_value_val_from_default_val: item_val['default'] = self._clean(item_val['type'], item_val['default']) item_val['value'] = item_val['default'] - return category_val_copy async def _create_new_category(self, category_name, category_val, category_description, display_name=None): @@ -745,6 +744,72 @@ async def set_category_item_value_entry(self, category_name, item_name, new_valu 'Unable to run callbacks for category_name %s', category_name) raise + async def set_optional_value_entry(self, category_name, item_name, optional_entry_name, new_value_entry): + try: + storage_value_entry = None + if category_name in self._cacheManager: + if item_name not in self._cacheManager.cache[category_name]['value']: + raise ValueError("No detail found for the category_name: {} and item_name: {}" + .format(category_name, item_name)) + storage_value_entry = self._cacheManager.cache[category_name]['value'][item_name] + if optional_entry_name not in storage_value_entry: + raise KeyError("{} does not 
exist".format(optional_entry_name)) + if storage_value_entry[optional_entry_name] == new_value_entry: + return + else: + # get storage_value_entry and compare against new_value_value with its type, update if different + storage_value_entry = await self._read_item_val(category_name, item_name) + # check for category_name and item_name combination existence in storage + if storage_value_entry is None: + raise ValueError("No detail found for the category_name: {} and item_name: {}" + .format(category_name, item_name)) + if storage_value_entry[optional_entry_name] == new_value_entry: + return + # Validate optional types + if optional_entry_name == 'readonly' or optional_entry_name == 'deprecated': + if self._validate_type_value('boolean', new_value_entry) is False: + raise ValueError('For {} category, entry value must be boolean for optional item name {}; got {}' + .format(category_name, optional_entry_name, type(new_value_entry))) + elif optional_entry_name == 'minimum' or optional_entry_name == 'maximum': + if (self._validate_type_value('integer', new_value_entry) or self._validate_type_value('float', new_value_entry)) is False: + raise ValueError('For {} category, entry value must be an integer or float for optional item ' + '{}; got {}'.format(category_name, optional_entry_name, type(new_value_entry))) + elif optional_entry_name == 'rule' or optional_entry_name == 'displayName': + if not isinstance(new_value_entry, str): + raise ValueError('For {} category, entry value must be string for optional item {}; got {}' + .format(category_name, optional_entry_name, type(new_value_entry))) + else: + if self._validate_type_value('integer', new_value_entry) is False: + raise ValueError('For {} category, entry value must be an integer for optional item {}; got {}' + .format(category_name, optional_entry_name, type(new_value_entry))) + + # Validation is fairly minimal, minimum, maximum like maximum should be greater than minimum or vice-versa + # And no link between minimum, 
maximum and length is needed. + if optional_entry_name == 'minimum': + if new_value_entry >= storage_value_entry['maximum']: + raise ValueError('Minimum value should be less than equal to Maximum value') + + if optional_entry_name == 'maximum': + if new_value_entry <= storage_value_entry['minimum']: + raise ValueError('Maximum value should be greater than equal to Minimum value') + + payload = PayloadBuilder().SELECT("key", "description", "ts", "value") \ + .JSON_PROPERTY(("value", [item_name, optional_entry_name], new_value_entry)) \ + .FORMAT("return", ("ts", "YYYY-MM-DD HH24:MI:SS.MS")) \ + .WHERE(["key", "=", category_name]).payload() + await self._storage.update_tbl("configuration", payload) + # always get value from storage + cat_item = await self._read_item_val(category_name, item_name) + if category_name in self._cacheManager.cache: + if item_name in self._cacheManager.cache[category_name]['value']: + self._cacheManager.cache[category_name]['value'][item_name][optional_entry_name] = cat_item[optional_entry_name] + else: + self._cacheManager.cache[category_name]['value'].update({item_name: cat_item[optional_entry_name]}) + except: + _logger.exception( + 'Unable to set optional %s entry based on category_name %s and item_name %s and value_item_entry %s', optional_entry_name, category_name, item_name, new_value_entry) + raise + async def create_category(self, category_name, category_value, category_description='', keep_original_items=False, display_name=None): """Create a new category in the database. 
diff --git a/python/foglamp/services/core/api/configuration.py b/python/foglamp/services/core/api/configuration.py index 4cd0bff08d..b74696f6b0 100644 --- a/python/foglamp/services/core/api/configuration.py +++ b/python/foglamp/services/core/api/configuration.py @@ -11,7 +11,7 @@ import os from foglamp.services.core import connect -from foglamp.common.configuration_manager import ConfigurationManager +from foglamp.common.configuration_manager import ConfigurationManager, _optional_items from foglamp.common.storage_client.payload_builder import PayloadBuilder from foglamp.common.audit_logger import AuditLogger from foglamp.common.common import _FOGLAMP_ROOT, _FOGLAMP_DATA @@ -205,17 +205,17 @@ async def get_category_item(request): async def set_configuration_item(request): """ Args: - request: category_name, config_item, {"value" : ""} are required + request: category_name, config_item, [{"value" : ""} OR {"optional_key": "some value"}] are required Returns: set the configuration item value in the given category. 
:Example: curl -X PUT -H "Content-Type: application/json" -d '{"value": "" }' http://localhost:8081/foglamp/category/{category_name}/{config_item} - For {category_name}=>PURGE update value for {config_item}=>age - curl -X PUT -H "Content-Type: application/json" -d '{"value": "24"}' http://localhost:8081/foglamp/category/PURGE_READ/age + curl -X PUT -H "Content-Type: application/json" -d '{"value": "24"}' http://localhost:8081/foglamp/category/PURGE_READ/age + curl -X PUT -H "Content-Type: application/json" -d '{"displayName": "Age"}' http://localhost:8081/foglamp/category/PURGE_READ/age """ category_name = request.match_info.get('category_name', None) config_item = request.match_info.get('config_item', None) @@ -225,7 +225,9 @@ async def set_configuration_item(request): data = await request.json() cf_mgr = ConfigurationManager(connect.get_storage_async()) - + found_optional = {} + # if multiple param keys in data and if value key is found, then value update for config item will be tried first + # otherwise it will be looking for optional keys updation try: value = data['value'] if isinstance(value, dict): @@ -233,13 +235,21 @@ async def set_configuration_item(request): elif not isinstance(value, str): raise web.HTTPBadRequest(reason='{} should be a string literal, in double quotes'.format(value)) except KeyError: - raise web.HTTPBadRequest(reason='Missing required value for {}'.format(config_item)) - + for k, v in data.items(): + # if multiple optional keys are found, then it will be update only 1 whoever comes first + if k in _optional_items: + found_optional = {k: v} + break + if not found_optional: + raise web.HTTPBadRequest(reason='Missing required value for {}'.format(config_item)) try: - await cf_mgr.set_category_item_value_entry(category_name, config_item, value) + if not found_optional: + await cf_mgr.set_category_item_value_entry(category_name, config_item, value) + else: + await cf_mgr.set_optional_value_entry(category_name, config_item, 
list(found_optional.keys())[0], list(found_optional.values())[0]) except ValueError as ex: - raise web.HTTPNotFound(reason=ex) - except TypeError as ex: + raise web.HTTPNotFound(reason=ex) if not found_optional else web.HTTPBadRequest(reason=ex) + except (TypeError, KeyError) as ex: raise web.HTTPBadRequest(reason=ex) result = await cf_mgr.get_category_item(category_name, config_item) diff --git a/tests/unit/python/foglamp/common/test_configuration_manager.py b/tests/unit/python/foglamp/common/test_configuration_manager.py index 25a4cbd30e..a123b91467 100644 --- a/tests/unit/python/foglamp/common/test_configuration_manager.py +++ b/tests/unit/python/foglamp/common/test_configuration_manager.py @@ -7,7 +7,7 @@ import pytest -from foglamp.common.configuration_manager import ConfigurationManager, ConfigurationManagerSingleton, _valid_type_strings, _logger +from foglamp.common.configuration_manager import ConfigurationManager, ConfigurationManagerSingleton, _valid_type_strings, _logger, _optional_items from foglamp.common.storage_client.payload_builder import PayloadBuilder from foglamp.common.storage_client.storage_client import StorageClientAsync from foglamp.common.storage_client.exceptions import StorageServerError @@ -35,6 +35,10 @@ def test_supported_validate_type_strings(self): assert 12 == len(_valid_type_strings) assert ['IPv4', 'IPv6', 'JSON', 'URL', 'X509 certificate', 'boolean', 'enumeration', 'float', 'integer', 'password', 'script', 'string'] == _valid_type_strings + def test_supported_optional_items(self): + assert 8 == len(_optional_items) + assert ['deprecated', 'displayName', 'length', 'maximum', 'minimum', 'order', 'readonly', 'rule'] == _optional_items + def test_constructor_no_storage_client_defined_no_storage_client_passed( self, reset_singleton): # first time initializing ConfigurationManager without storage client @@ -2629,3 +2633,51 @@ async def async_mock(return_value): assert 'Proposed value for item_name info is not allowed as per rule 
defined' == str(exc_info.value) assert 1 == patch_log_exc.call_count patch_get_all_items.assert_called_once_with(category_name) + + async def test_set_optional_value_entry_good_update(self, reset_singleton): + async def async_mock(return_value): + return return_value + + storage_client_mock = MagicMock(spec=StorageClientAsync) + c_mgr = ConfigurationManager(storage_client_mock) + category_name = 'catname' + item_name = 'itemname' + new_value_entry = '25' + optional_key_name = 'maximum' + storage_value_entry = {'readonly': 'true', 'type': 'string', 'order': '4', 'description': 'Test Optional', 'minimum': '2', 'value': '13', 'maximum': '20', 'default': '13'} + payload = {"return": ["key", "description", {"column": "ts", "format": "YYYY-MM-DD HH24:MI:SS.MS"}, "value"], "json_properties": [{"column": "value", "path": [item_name, optional_key_name], "value": new_value_entry}], "where": {"column": "key", "condition": "=", "value": category_name}} + update_result = {"response": "updated", "rows_affected": 1} + c_mgr._cacheManager.update(category_name, {item_name: storage_value_entry}) + with patch.object(ConfigurationManager, '_read_item_val', return_value=async_mock(storage_value_entry)) as readpatch: + with patch.object(c_mgr._storage, 'update_tbl', return_value=async_mock(update_result)) as patch_update: + await c_mgr.set_optional_value_entry(category_name, item_name, optional_key_name, new_value_entry) + args, kwargs = patch_update.call_args + assert 'configuration' == args[0] + assert payload == json.loads(args[1]) + readpatch.assert_called_once_with(category_name, item_name) + + @pytest.mark.parametrize("optional_key_name, new_value_entry, exc_msg", [ + ('maximum', '1', 'Maximum value should be greater than equal to Minimum value'), + ('minimum', '30', 'Minimum value should be less than equal to Maximum value'), + ('readonly', '1', "For catname category, entry value must be boolean for optional item name readonly; got "), + ('deprecated', '1', "For catname category, 
entry value must be boolean for optional item name deprecated; got "), + ('rule', 2, "For catname category, entry value must be string for optional item rule; got "), + ('displayName', 123, "For catname category, entry value must be string for optional item displayName; got "), + ('length', '1a', "For catname category, entry value must be an integer for optional item length; got "), + ('maximum', 'blah', "For catname category, entry value must be an integer or float for optional item maximum; got ") + ]) + async def test_set_optional_value_entry_bad_update(self, reset_singleton, optional_key_name, new_value_entry, exc_msg): + async def async_mock(return_value): + return return_value + + storage_client_mock = MagicMock(spec=StorageClientAsync) + c_mgr = ConfigurationManager(storage_client_mock) + category_name = 'catname' + item_name = 'itemname' + storage_value_entry = {'length': '255', 'displayName': category_name, 'rule': 'value * 3 == 6', 'deprecated': 'false', 'readonly': 'true', 'type': 'string', 'order': '4', 'description': 'Test Optional', 'minimum': '2', 'value': '13', 'maximum': '20', 'default': '13'} + with patch.object(ConfigurationManager, '_read_item_val', return_value=async_mock(storage_value_entry)) as readpatch: + with pytest.raises(Exception) as excinfo: + await c_mgr.set_optional_value_entry(category_name, item_name, optional_key_name, new_value_entry) + assert excinfo.type is ValueError + assert exc_msg == str(excinfo.value) + readpatch.assert_called_once_with(category_name, item_name) diff --git a/tests/unit/python/foglamp/services/core/api/test_configuration.py b/tests/unit/python/foglamp/services/core/api/test_configuration.py index dd29922ed0..b184aadb98 100644 --- a/tests/unit/python/foglamp/services/core/api/test_configuration.py +++ b/tests/unit/python/foglamp/services/core/api/test_configuration.py @@ -276,6 +276,41 @@ async def test_set_config_item_exception(self, client, category_name='rest_api', assert resp.reason is None 
patch_set_entry.assert_called_once_with(category_name, item_name, payload['value']) + async def test_set_optional_in_config_item(self, client, category_name='rest_api', item_name='http_port'): + async def async_mock(return_value): + return return_value + + optional_key = 'readonly' + payload = {optional_key: 'false'} + result = {optional_key: 'false', 'value': '8082', 'type': 'integer', 'default': '8081', + 'description': 'The port to accept HTTP connections on'} + + storage_client_mock = MagicMock(StorageClientAsync) + c_mgr = ConfigurationManager(storage_client_mock) + with patch.object(connect, 'get_storage_async', return_value=storage_client_mock): + with patch.object(c_mgr, 'set_optional_value_entry', return_value=async_mock(None)) as patch_set_entry: + with patch.object(c_mgr, 'get_category_item', return_value=async_mock(result)) as patch_get_cat_item: + resp = await client.put('/foglamp/category/{}/{}'.format(category_name, item_name), + data=json.dumps(payload)) + assert 200 == resp.status + r = await resp.text() + json_response = json.loads(r) + assert result == json_response + patch_get_cat_item.assert_called_once_with(category_name, item_name) + patch_set_entry.assert_called_once_with(category_name, item_name, optional_key, payload[optional_key]) + + async def test_set_optional_in_config_item_exception(self, client, category_name='rest_api', item_name='http_port'): + optional_key = 'readonly' + payload = {optional_key: '8082'} + storage_client_mock = MagicMock(StorageClientAsync) + c_mgr = ConfigurationManager(storage_client_mock) + with patch.object(connect, 'get_storage_async', return_value=storage_client_mock): + with patch.object(c_mgr, 'set_optional_value_entry', side_effect=ValueError) as patch_set_entry: + resp = await client.put('/foglamp/category/{}/{}'.format(category_name, item_name), data=json.dumps(payload)) + assert 400 == resp.status + assert resp.reason is None + patch_set_entry.assert_called_once_with(category_name, item_name, 
optional_key, payload[optional_key]) + async def test_delete_config_item(self, client, category_name='rest_api', item_name='http_port'): result = {'value': '8081', 'type': 'integer', 'default': '8081', 'description': 'The port to accept HTTP connections on'} From 15c5ecb896bc674dd942f33c79e61a2021924958 Mon Sep 17 00:00:00 2001 From: stefano Date: Tue, 16 Apr 2019 17:09:21 +0200 Subject: [PATCH 032/161] FOGL-2674: working stage --- .../storage/sqlite/common/connection.cpp | 160 ++++++++++++++++-- .../sqlite/common/include/connection.h | 2 +- C/plugins/storage/sqlite/common/readings.cpp | 2 +- .../storage/sqlite/expected_EUROPE_ROME/48 | 2 +- .../storage/sqlite/expected_EUROPE_ROME/49 | 2 +- 5 files changed, 148 insertions(+), 20 deletions(-) diff --git a/C/plugins/storage/sqlite/common/connection.cpp b/C/plugins/storage/sqlite/common/connection.cpp index f91ffe72fc..eae0566c3d 100644 --- a/C/plugins/storage/sqlite/common/connection.cpp +++ b/C/plugins/storage/sqlite/common/connection.cpp @@ -238,11 +238,11 @@ bool Connection::applyColumnDateTimeFormat(sqlite3_stmt *pStmt, * using the available formats in SQLite3 * for a specific column * - * If the requested format is not availble + * If the requested format is not available * the input column is used as is. * Additionally milliseconds could be rounded * upon request. - * The routine return false if datwe format is not + * The routine return false if date format is not * found and the caller might decide to raise an error * or use the non formatted value * @@ -281,7 +281,8 @@ bool retCode; outFormat.append(colName); } - outFormat.append(", 'localtime')"); // MR TRY THIS + //# FIXME_I - remove comment + outFormat.append(" )"); // MR TRY THIS retCode = true; } else @@ -299,11 +300,11 @@ bool retCode; * using the available formats in SQLite3 * for a specific column * - * If the requested format is not availble + * If the requested format is not available * the input column is used as is. 
* Additionally milliseconds could be rounded * upon request. - * The routine return false if datwe format is not + * The routine return false if date format is not * found and the caller might decide to raise an error * or use the non formatted value * @@ -342,6 +343,7 @@ bool retCode; outFormat.append(colName); } + //# FIXME_I - remove comment outFormat.append(", 'localtime')"); // MR force localtime retCode = true; } @@ -752,6 +754,13 @@ SQLBuffer sql; // Extra constraints to add to where clause SQLBuffer jsonConstraints; + // FIXME_I: + Logger::getLogger()->setMinLevel("debug"); + Logger::getLogger()->debug( + "DBG retrieve 1.0 : table |%s| condition |%s| ", + table.c_str(), + condition.c_str()); + try { if (dbHandle == NULL) { @@ -773,13 +782,17 @@ SQLBuffer jsonConstraints; } if (document.HasMember("aggregate")) { + // FIXME_I: + Logger::getLogger()->setMinLevel("debug"); + Logger::getLogger()->debug("DBG retrieve 1.0 : aggregate"); + sql.append("SELECT "); if (document.HasMember("modifier")) { sql.append(document["modifier"].GetString()); sql.append(' '); } - if (!jsonAggregates(document, document["aggregate"], sql, jsonConstraints)) + if (!jsonAggregates(document, document["aggregate"], sql, jsonConstraints, false)) { return false; } @@ -832,11 +845,26 @@ SQLBuffer jsonConstraints; applyColumnDateFormat((*itr)["format"].GetString(), (*itr)["column"].GetString(), new_format, true); + + // FIXME_I: + string tmp_format = (*itr)["format"].GetString(); + string tmp_column = (*itr)["column"].GetString(); + Logger::getLogger()->setMinLevel("debug"); + Logger::getLogger()->debug( + "DBG retrieve 1.1 : column |%s| format |%s| new_format |%s| ", + tmp_column.c_str(), + tmp_format.c_str(), + new_format.c_str()); + // Add the formatted column or use it as is sql.append(new_format); } else if (itr->HasMember("timezone")) { + // FIXME_I: + Logger::getLogger()->setMinLevel("debug"); + Logger::getLogger()->debug("DBG retrieve 1.1 : Timezone "); + if (! 
(*itr)["timezone"].IsString()) { raiseError("rerieve", @@ -859,6 +887,11 @@ SQLBuffer jsonConstraints; } else { + // FIXME_I: + Logger::getLogger()->setMinLevel("debug"); + Logger::getLogger()->debug("DBG retrieve 1.1 : NO Timezone "); + + sql.append((*itr)["column"].GetString()); } sql.append(' '); @@ -923,7 +956,7 @@ SQLBuffer jsonConstraints; delete[] jsonBuf; } } - if (!jsonModifiers(document, sql)) + if (!jsonModifiers(document, sql, false)) { return false; } @@ -935,6 +968,11 @@ SQLBuffer jsonConstraints; int rc; sqlite3_stmt *stmt; + // FIXME_I: + Logger::getLogger()->setMinLevel("debug"); + Logger::getLogger()->debug( + "DBG retrieve 1.0 : query |%s| ", query); + logSQL("CommonRetrive", query); // Prepare the SQL statement and get the result set @@ -1664,6 +1702,10 @@ bool Connection::jsonAggregates(const Value& payload, SQLBuffer& jsonConstraint, bool isTableReading) { + // FIXME_I: + Logger::getLogger()->setMinLevel("debug"); + Logger::getLogger()->debug("DBG retrieve 1.0 : jsonAggregates"); + if (aggregates.IsObject()) { if (! aggregates.HasMember("operation")) @@ -1934,6 +1976,11 @@ bool Connection::jsonAggregates(const Value& payload, } if (payload.HasMember("group")) { + // FIXME_I: + Logger::getLogger()->setMinLevel("debug"); + Logger::getLogger()->debug("DBG retrieve 1.0 : jsonAggregates - group "); + + sql.append(", "); if (payload["group"].IsObject()) { @@ -1941,11 +1988,46 @@ bool Connection::jsonAggregates(const Value& payload, if (grp.HasMember("format")) { + // FIXME_I: + Logger::getLogger()->setMinLevel("debug"); + Logger::getLogger()->debug("DBG retrieve 1.0 : jsonAggregates - format "); + + // SQLite 3 date format. 
string new_format; - applyColumnDateFormat(grp["format"].GetString(), - grp["column"].GetString(), - new_format); + if (isTableReading) + { + applyColumnDateFormatLocaltime(grp["format"].GetString(), + grp["column"].GetString(), + new_format); + + + // FIXME_I: + string tmp_format = grp["format"].GetString(); + string tmp_column = grp["column"].GetString(); + Logger::getLogger()->setMinLevel("debug"); + Logger::getLogger()->debug( + "DBG jsonAggregates isTableReading : column |%s| format |%s| new_format |%s| ", + tmp_column.c_str(), + tmp_format.c_str(), + new_format.c_str()); + } + else + { + applyColumnDateFormat(grp["format"].GetString(), + grp["column"].GetString(), + new_format); + + // FIXME_I: + string tmp_format = grp["format"].GetString(); + string tmp_column = grp["column"].GetString(); + Logger::getLogger()->setMinLevel("debug"); + Logger::getLogger()->debug( + "DBG jsonAggregates NOT isTableReading : column |%s| format |%s| new_format |%s| ", + tmp_column.c_str(), + tmp_format.c_str(), + new_format.c_str()); + } // Add the formatted column or use it as is sql.append(new_format); } @@ -2090,10 +2172,17 @@ bool Connection::jsonAggregates(const Value& payload, } /** - * Process the modifers for limit, skip, sort and group + * Process the modifiers for limit, skip, sort and group */ -bool Connection::jsonModifiers(const Value& payload, SQLBuffer& sql) +bool Connection::jsonModifiers(const Value& payload, + SQLBuffer& sql, + bool isTableReading) { + + // FIXME_I: + Logger::getLogger()->setMinLevel("debug"); + Logger::getLogger()->debug("DBG retrieve 1.0 : jsonModifiers"); + if (payload.HasMember("timebucket") && payload.HasMember("sort")) { raiseError("query modifiers", @@ -2103,20 +2192,59 @@ bool Connection::jsonModifiers(const Value& payload, SQLBuffer& sql) if (payload.HasMember("group")) { + + // FIXME_I: + Logger::getLogger()->setMinLevel("debug"); + Logger::getLogger()->debug("DBG jsonModifiers group 1"); + sql.append(" GROUP BY "); if 
(payload["group"].IsObject()) { + // FIXME_I: + Logger::getLogger()->setMinLevel("debug"); + Logger::getLogger()->debug("DBG jsonModifiers group 2"); + const Value& grp = payload["group"]; if (grp.HasMember("format")) { /** * SQLite 3 date format is limited. - * Handle all availables formats here. + * Handle all available formats here. */ string new_format; - applyColumnDateFormat(grp["format"].GetString(), - grp["column"].GetString(), - new_format); + if (isTableReading) + { + applyColumnDateFormatLocaltime(grp["format"].GetString(), + grp["column"].GetString(), + new_format); + + // FIXME_I: + string tmp_format = grp["format"].GetString(); + string tmp_column = grp["column"].GetString(); + Logger::getLogger()->setMinLevel("debug"); + Logger::getLogger()->debug( + "DBG jsonModifiers2 isTableReading : column |%s| format |%s| new_format |%s| ", + tmp_column.c_str(), + tmp_format.c_str(), + new_format.c_str()); + } + else + { + applyColumnDateFormat(grp["format"].GetString(), + grp["column"].GetString(), + new_format); + + // FIXME_I: + string tmp_format = grp["format"].GetString(); + string tmp_column = grp["column"].GetString(); + Logger::getLogger()->setMinLevel("debug"); + Logger::getLogger()->debug( + "DBG jsonModifiers2 NOT isTableReading : column |%s| format |%s| new_format |%s| ", + tmp_column.c_str(), + tmp_format.c_str(), + new_format.c_str()); + } + // Add the formatted column or use it as is sql.append(new_format); } diff --git a/C/plugins/storage/sqlite/common/include/connection.h b/C/plugins/storage/sqlite/common/include/connection.h index 00ec9a1847..68795d391f 100644 --- a/C/plugins/storage/sqlite/common/include/connection.h +++ b/C/plugins/storage/sqlite/common/include/connection.h @@ -102,7 +102,7 @@ class Connection { sqlite3 *dbHandle; int mapResultSet(void *res, std::string& resultSet); bool jsonWhereClause(const rapidjson::Value& whereClause, SQLBuffer&, bool convertLocaltime = false); - bool jsonModifiers(const rapidjson::Value&, SQLBuffer&); + 
bool jsonModifiers(const rapidjson::Value&, SQLBuffer&, bool isTableReading = false); bool jsonAggregates(const rapidjson::Value&, const rapidjson::Value&, SQLBuffer&, diff --git a/C/plugins/storage/sqlite/common/readings.cpp b/C/plugins/storage/sqlite/common/readings.cpp index 8a89bddeaa..d935297bca 100644 --- a/C/plugins/storage/sqlite/common/readings.cpp +++ b/C/plugins/storage/sqlite/common/readings.cpp @@ -621,7 +621,7 @@ bool isAggregate = false; */ sql.append(" WHERE asset_code = asset_code"); } - if (!jsonModifiers(document, sql)) + if (!jsonModifiers(document, sql, true)) { return false; } diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/48 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/48 index 90b3a11539..7c1b23b0bf 100644 --- a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/48 +++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/48 @@ -1 +1 @@ -{"count":9,"rows":[{"key":"TEST1","description":"A test row","time":"14:14:26"},{"key":"TEST2","description":"A test row","time":"14:14:27"},{"key":"TEST3","description":"A test row","time":"13:14:28"},{"key":"TEST4","description":"A test row","time":"13:14:29"},{"key":"TEST5","description":"A test row","time":"13:15:00"},{"key":"TEST6","description":"A test row","time":"13:15:33"},{"key":"TEST7","description":"A test row","time":"13:16:20"},{"key":"TEST8","description":"A test row","time":"07:14:30"},{"key":"TEST9","description":"A test row","time":"23:14:30"}]} \ No newline at end of file +{"count":9,"rows":[{"key":"TEST1","description":"A test row","time":"12:14:26"},{"key":"TEST2","description":"A test row","time":"12:14:27"},{"key":"TEST3","description":"A test row","time":"11:14:28"},{"key":"TEST4","description":"A test row","time":"11:14:29"},{"key":"TEST5","description":"A test row","time":"11:15:00"},{"key":"TEST6","description":"A test row","time":"11:15:33"},{"key":"TEST7","description":"A test 
row","time":"11:16:20"},{"key":"TEST8","description":"A test row","time":"05:14:30"},{"key":"TEST9","description":"A test row","time":"21:14:30"}]} \ No newline at end of file diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/49 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/49 index fe9af5ecf6..ee07186753 100644 --- a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/49 +++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/49 @@ -1 +1 @@ -{"count":8,"rows":[{"key":"TEST2","description":"A test row","timestamp":"2017-10-10 14:14:27"},{"key":"TEST3","description":"A test row","timestamp":"2017-10-10 13:14:28"},{"key":"TEST4","description":"A test row","timestamp":"2017-10-10 13:14:29"},{"key":"TEST5","description":"A test row","timestamp":"2017-10-10 13:15:00"},{"key":"TEST6","description":"A test row","timestamp":"2017-10-10 13:15:33"},{"key":"TEST7","description":"A test row","timestamp":"2017-10-10 13:16:20"},{"key":"TEST8","description":"A test row","timestamp":"2017-10-10 07:14:30"},{"key":"TEST9","description":"A test row","timestamp":"2017-10-10 23:14:30"}]} \ No newline at end of file +{"count":8,"rows":[{"key":"TEST2","description":"A test row","timestamp":"2017-10-10 12:14:27"},{"key":"TEST3","description":"A test row","timestamp":"2017-10-10 11:14:28"},{"key":"TEST4","description":"A test row","timestamp":"2017-10-10 11:14:29"},{"key":"TEST5","description":"A test row","timestamp":"2017-10-10 11:15:00"},{"key":"TEST6","description":"A test row","timestamp":"2017-10-10 11:15:33"},{"key":"TEST7","description":"A test row","timestamp":"2017-10-10 11:16:20"},{"key":"TEST8","description":"A test row","timestamp":"2017-10-10 05:14:30"},{"key":"TEST9","description":"A test row","timestamp":"2017-10-10 21:14:30"}]} \ No newline at end of file From 36a218bcf77e6da597238013e30826e291dfbc1a Mon Sep 17 00:00:00 2001 From: Aman <40791522+AmandeepArora@users.noreply.github.com> Date: Wed, 17 Apr 2019 13:23:24 +0530 
Subject: [PATCH 033/161] FOGL-2731: Fix double free for query buffer in purge code (#1526) * FOGL-2731: Remove double delete for query string * remove commented-out code --- C/plugins/storage/sqlite/common/readings.cpp | 2 -- 1 file changed, 2 deletions(-) diff --git a/C/plugins/storage/sqlite/common/readings.cpp b/C/plugins/storage/sqlite/common/readings.cpp index 8a89bddeaa..962c6e459a 100644 --- a/C/plugins/storage/sqlite/common/readings.cpp +++ b/C/plugins/storage/sqlite/common/readings.cpp @@ -931,8 +931,6 @@ int blocks = 0; { raiseError("purge - phase 3", zErrMsg); sqlite3_free(zErrMsg); - // Release memory for 'query' var - delete[] query; return 0; } From e1fba092265e76aa2c3bf78f491c0b67802c8b86 Mon Sep 17 00:00:00 2001 From: stefano Date: Wed, 17 Apr 2019 11:34:01 +0200 Subject: [PATCH 034/161] FOGL-2674: unit tests fixed, code cleanup --- .../storage/sqlite/common/connection.cpp | 109 +----------------- 1 file changed, 2 insertions(+), 107 deletions(-) diff --git a/C/plugins/storage/sqlite/common/connection.cpp b/C/plugins/storage/sqlite/common/connection.cpp index eae0566c3d..d798636a0b 100644 --- a/C/plugins/storage/sqlite/common/connection.cpp +++ b/C/plugins/storage/sqlite/common/connection.cpp @@ -281,8 +281,7 @@ bool retCode; outFormat.append(colName); } - //# FIXME_I - remove comment - outFormat.append(" )"); // MR TRY THIS + outFormat.append(" )"); retCode = true; } else @@ -343,8 +342,7 @@ bool retCode; outFormat.append(colName); } - //# FIXME_I - remove comment - outFormat.append(", 'localtime')"); // MR force localtime + outFormat.append(", 'localtime')"); retCode = true; } else @@ -754,13 +752,6 @@ SQLBuffer sql; // Extra constraints to add to where clause SQLBuffer jsonConstraints; - // FIXME_I: - Logger::getLogger()->setMinLevel("debug"); - Logger::getLogger()->debug( - "DBG retrieve 1.0 : table |%s| condition |%s| ", - table.c_str(), - condition.c_str()); - try { if (dbHandle == NULL) { @@ -782,10 +773,6 @@ SQLBuffer jsonConstraints; } if 
(document.HasMember("aggregate")) { - // FIXME_I: - Logger::getLogger()->setMinLevel("debug"); - Logger::getLogger()->debug("DBG retrieve 1.0 : aggregate"); - sql.append("SELECT "); if (document.HasMember("modifier")) { @@ -846,25 +833,11 @@ SQLBuffer jsonConstraints; (*itr)["column"].GetString(), new_format, true); - // FIXME_I: - string tmp_format = (*itr)["format"].GetString(); - string tmp_column = (*itr)["column"].GetString(); - Logger::getLogger()->setMinLevel("debug"); - Logger::getLogger()->debug( - "DBG retrieve 1.1 : column |%s| format |%s| new_format |%s| ", - tmp_column.c_str(), - tmp_format.c_str(), - new_format.c_str()); - // Add the formatted column or use it as is sql.append(new_format); } else if (itr->HasMember("timezone")) { - // FIXME_I: - Logger::getLogger()->setMinLevel("debug"); - Logger::getLogger()->debug("DBG retrieve 1.1 : Timezone "); - if (! (*itr)["timezone"].IsString()) { raiseError("rerieve", @@ -887,11 +860,6 @@ SQLBuffer jsonConstraints; } else { - // FIXME_I: - Logger::getLogger()->setMinLevel("debug"); - Logger::getLogger()->debug("DBG retrieve 1.1 : NO Timezone "); - - sql.append((*itr)["column"].GetString()); } sql.append(' '); @@ -968,11 +936,6 @@ SQLBuffer jsonConstraints; int rc; sqlite3_stmt *stmt; - // FIXME_I: - Logger::getLogger()->setMinLevel("debug"); - Logger::getLogger()->debug( - "DBG retrieve 1.0 : query |%s| ", query); - logSQL("CommonRetrive", query); // Prepare the SQL statement and get the result set @@ -1702,10 +1665,6 @@ bool Connection::jsonAggregates(const Value& payload, SQLBuffer& jsonConstraint, bool isTableReading) { - // FIXME_I: - Logger::getLogger()->setMinLevel("debug"); - Logger::getLogger()->debug("DBG retrieve 1.0 : jsonAggregates"); - if (aggregates.IsObject()) { if (! 
aggregates.HasMember("operation")) @@ -1976,11 +1935,6 @@ bool Connection::jsonAggregates(const Value& payload, } if (payload.HasMember("group")) { - // FIXME_I: - Logger::getLogger()->setMinLevel("debug"); - Logger::getLogger()->debug("DBG retrieve 1.0 : jsonAggregates - group "); - - sql.append(", "); if (payload["group"].IsObject()) { @@ -1988,11 +1942,6 @@ bool Connection::jsonAggregates(const Value& payload, if (grp.HasMember("format")) { - // FIXME_I: - Logger::getLogger()->setMinLevel("debug"); - Logger::getLogger()->debug("DBG retrieve 1.0 : jsonAggregates - format "); - - // SQLite 3 date format. string new_format; if (isTableReading) @@ -2000,33 +1949,12 @@ bool Connection::jsonAggregates(const Value& payload, applyColumnDateFormatLocaltime(grp["format"].GetString(), grp["column"].GetString(), new_format); - - - // FIXME_I: - string tmp_format = grp["format"].GetString(); - string tmp_column = grp["column"].GetString(); - Logger::getLogger()->setMinLevel("debug"); - Logger::getLogger()->debug( - "DBG jsonAggregates isTableReading : column |%s| format |%s| new_format |%s| ", - tmp_column.c_str(), - tmp_format.c_str(), - new_format.c_str()); } else { applyColumnDateFormat(grp["format"].GetString(), grp["column"].GetString(), new_format); - - // FIXME_I: - string tmp_format = grp["format"].GetString(); - string tmp_column = grp["column"].GetString(); - Logger::getLogger()->setMinLevel("debug"); - Logger::getLogger()->debug( - "DBG jsonAggregates NOT isTableReading : column |%s| format |%s| new_format |%s| ", - tmp_column.c_str(), - tmp_format.c_str(), - new_format.c_str()); } // Add the formatted column or use it as is sql.append(new_format); @@ -2179,10 +2107,6 @@ bool Connection::jsonModifiers(const Value& payload, bool isTableReading) { - // FIXME_I: - Logger::getLogger()->setMinLevel("debug"); - Logger::getLogger()->debug("DBG retrieve 1.0 : jsonModifiers"); - if (payload.HasMember("timebucket") && payload.HasMember("sort")) { raiseError("query 
modifiers", @@ -2192,18 +2116,9 @@ bool Connection::jsonModifiers(const Value& payload, if (payload.HasMember("group")) { - - // FIXME_I: - Logger::getLogger()->setMinLevel("debug"); - Logger::getLogger()->debug("DBG jsonModifiers group 1"); - sql.append(" GROUP BY "); if (payload["group"].IsObject()) { - // FIXME_I: - Logger::getLogger()->setMinLevel("debug"); - Logger::getLogger()->debug("DBG jsonModifiers group 2"); - const Value& grp = payload["group"]; if (grp.HasMember("format")) { @@ -2217,32 +2132,12 @@ bool Connection::jsonModifiers(const Value& payload, applyColumnDateFormatLocaltime(grp["format"].GetString(), grp["column"].GetString(), new_format); - - // FIXME_I: - string tmp_format = grp["format"].GetString(); - string tmp_column = grp["column"].GetString(); - Logger::getLogger()->setMinLevel("debug"); - Logger::getLogger()->debug( - "DBG jsonModifiers2 isTableReading : column |%s| format |%s| new_format |%s| ", - tmp_column.c_str(), - tmp_format.c_str(), - new_format.c_str()); } else { applyColumnDateFormat(grp["format"].GetString(), grp["column"].GetString(), new_format); - - // FIXME_I: - string tmp_format = grp["format"].GetString(); - string tmp_column = grp["column"].GetString(); - Logger::getLogger()->setMinLevel("debug"); - Logger::getLogger()->debug( - "DBG jsonModifiers2 NOT isTableReading : column |%s| format |%s| new_format |%s| ", - tmp_column.c_str(), - tmp_format.c_str(), - new_format.c_str()); } // Add the formatted column or use it as is From 398c355be11ece8d2f5a47aedf42d13a931c00b4 Mon Sep 17 00:00:00 2001 From: Vaibhav Singhal Date: Wed, 17 Apr 2019 15:35:42 +0530 Subject: [PATCH 035/161] Test added for notification api --- tests/system/python/api/test_notification.py | 233 ++++++++++++++++++ ...t_e2e_notification_service_with_plugins.py | 2 + 2 files changed, 235 insertions(+) create mode 100644 tests/system/python/api/test_notification.py diff --git a/tests/system/python/api/test_notification.py 
b/tests/system/python/api/test_notification.py new file mode 100644 index 0000000000..80d4fa42bc --- /dev/null +++ b/tests/system/python/api/test_notification.py @@ -0,0 +1,233 @@ +# -*- coding: utf-8 -*- + +# FOGLAMP_BEGIN +# See: http://foglamp.readthedocs.io/ +# FOGLAMP_END + +""" Test add service using poll and async plugins for both python & C version REST API """ + +import os +import subprocess +import http.client +import json +import time +import urllib +from uuid import UUID +from collections import Counter +from urllib.parse import quote +import pytest +import apt + + +__author__ = "Vaibhav Singhal" +__copyright__ = "Copyright (c) 2019 Dianomic Systems" +__license__ = "Apache 2.0" +__version__ = "${VERSION}" + +SERVICE = "notification" +SERVICE_NAME = "Notification Server #1" +NOTIFY_PLUGIN = "slack" +NOTIFY_INBUILT_RULES = ["OverMaxRule", "UnderMinRule"] +data = {"name": "Test - 1", + "description": "Test4_Notification", + "rule": NOTIFY_INBUILT_RULES[1], + "channel": NOTIFY_PLUGIN, + "enabled": True, + "notification_type": "one shot" + } + + +class TestNotificationServiceAPI: + def test_notification_without_install(self, reset_and_start_foglamp, foglamp_url, wait_time): + # Wait for foglamp server to start + time.sleep(wait_time) + conn = http.client.HTTPConnection(foglamp_url) + conn.request("GET", '/foglamp/notification') + r = conn.getresponse() + assert 200 == r.status + r = r.read().decode() + jdoc = json.loads(r) + assert {'notifications': []} == jdoc + + conn.request("GET", '/foglamp/notification/plugin') + r = conn.getresponse() + assert 404 == r.status + r = r.read().decode() + assert "404: No Notification service available." 
== r + + conn.request("GET", '/foglamp/notification/type') + r = conn.getresponse() + assert 200 == r.status + r = r.read().decode() + jdoc = json.loads(r) + assert {"notification_type": ["one shot", "retriggered", "toggled"]} == jdoc + + conn.request("POST", '/foglamp/notification', json.dumps({})) + r = conn.getresponse() + assert 404 == r.status + r = r.read().decode() + assert "404: No Notification service available." == r + + def test_notification_service_add(self, service_branch, foglamp_url, wait_time, remove_directories): + try: + subprocess.run(["$FOGLAMP_ROOT/tests/system/python/scripts/install_c_service {} {}" + .format(service_branch, SERVICE)], shell=True, check=True) + except subprocess.CalledProcessError: + assert False, "{} installation failed".format(SERVICE) + finally: + remove_directories("/tmp/foglamp-service-{}".format(SERVICE)) + + # Start service + conn = http.client.HTTPConnection(foglamp_url) + data = {"name": SERVICE_NAME, + "type": "notification", + "enabled": "true" + } + conn.request("POST", '/foglamp/service', json.dumps(data)) + r = conn.getresponse() + assert 200 == r.status + r = r.read().decode() + jdoc = json.loads(r) + assert 2 == len(jdoc) + assert SERVICE_NAME == jdoc['name'] + + # Wait for service to get created + time.sleep(wait_time) + conn.request("GET", '/foglamp/service') + r = conn.getresponse() + assert 200 == r.status + r = r.read().decode() + jdoc = json.loads(r) + assert SERVICE_NAME == jdoc['services'][2]['name'] + + def test_install_delivery_plugin(self, notify_branch, remove_directories): + # Remove any external plugins if installed + remove_directories(os.path.expandvars('$FOGLAMP_ROOT/plugins/notificationDelivery')) + remove_directories(os.path.expandvars('$FOGLAMP_ROOT/plugins/notificationRule')) + try: + subprocess.run(["$FOGLAMP_ROOT/tests/system/python/scripts/install_c_plugin {} notify {}".format( + notify_branch, NOTIFY_PLUGIN)], shell=True, check=True) + except subprocess.CalledProcessError: + assert 
False, "{} installation failed".format(NOTIFY_PLUGIN) + finally: + remove_directories("/tmp/foglamp-notify-{}".format(NOTIFY_PLUGIN)) + + @pytest.mark.parametrize("test_input, expected_error", [ + ({"description": "Test4_Notification", "rule": NOTIFY_INBUILT_RULES[1], "channel": NOTIFY_PLUGIN, + "enabled": True, "notification_type": "one shot"}, '400: Missing name property in payload.'), + ({"name": "Test4","rule": NOTIFY_INBUILT_RULES[1], "channel": NOTIFY_PLUGIN, "enabled": True, + "notification_type": "one shot"}, '400: Missing description property in payload.'), + ({"name": "Test4", "description": "Test4_Notification", "channel": NOTIFY_PLUGIN, "enabled": True, + "notification_type": "one shot"}, '400: Missing rule property in payload.'), + ({"name": "Test4", "description": "Test4_Notification", "rule": NOTIFY_INBUILT_RULES[1], "enabled": True, + "notification_type": "one shot"}, '400: Missing channel property in payload.'), + ({"name": "Test4", "description": "Test4_Notification", "rule": NOTIFY_INBUILT_RULES[1], + "channel": NOTIFY_PLUGIN, "enabled": True}, '400: Missing notification_type property in payload.'), + ({"name": "=", "description": "Test4_Notification", "rule": NOTIFY_INBUILT_RULES[1], "channel": NOTIFY_PLUGIN, + "enabled": True, "notification_type": "one shot"}, '400: Invalid name property in payload.'), + ({"name": "Test4", "description": "Test4_Notification", "rule": "+", "channel": NOTIFY_PLUGIN, "enabled": True, + "notification_type": "one shot"}, '400: Invalid rule property in payload.'), + ({"name": "Test4", "description": "Test4_Notification", "rule": NOTIFY_INBUILT_RULES[1], "channel": ":", + "enabled": True, "notification_type": "one shot"}, '400: Invalid channel property in payload.'), + ({"name": "Test4", "description": "Test4_Notification", "rule": NOTIFY_INBUILT_RULES[1], + "channel": NOTIFY_PLUGIN, "enabled": "bla", "notification_type": "one shot"}, + '400: Only "true", "false", true, false are allowed for value of enabled.'), + 
({"name": "Test4", "description": "Test4_Notification", "rule": "InvalidRulePlugin", + "channel": "InvalidChannelPlugin", "enabled": True, "notification_type": "one shot"}, + '400: Invalid rule plugin InvalidRulePlugin and/or delivery plugin InvalidChannelPlugin supplied.') + ]) + def test_invalid_create_notification_instance(self, foglamp_url, test_input, expected_error): + conn = http.client.HTTPConnection(foglamp_url) + conn.request("POST", '/foglamp/notification', json.dumps(test_input)) + r = conn.getresponse() + assert 400 == r.status + r = r.read().decode() + assert expected_error == r + + def test_create_valid_notification_instance(self, foglamp_url): + conn = http.client.HTTPConnection(foglamp_url) + conn.request("POST", '/foglamp/notification', json.dumps(data)) + r = conn.getresponse() + assert 200 == r.status + r = r.read().decode() + jdoc = json.loads(r) + assert "Notification {} created successfully".format(data['name']) == jdoc['result'] + + conn.request("GET", '/foglamp/notification') + r = conn.getresponse() + assert 200 == r.status + r = r.read().decode() + jdoc = json.loads(r) + assert data['name'] == jdoc['notifications'][0]['name'] + assert data['channel'] == jdoc['notifications'][0]['channel'] + assert 'true' == jdoc['notifications'][0]['enable'] + assert data['notification_type'] == jdoc['notifications'][0]['notificationType'] + assert data['rule'] == jdoc['notifications'][0]['rule'] + + conn.request("GET", '/foglamp/notification/plugin') + r = conn.getresponse() + assert 200 == r.status + r = r.read().decode() + jdoc = json.loads(r) + assert 2 == len(jdoc) + assert NOTIFY_PLUGIN == jdoc['delivery'][0]['name'] + assert "notificationDelivery" == jdoc['delivery'][0]['type'] + assert 2 == len(jdoc['rules']) + + # TODO - FOGL-2738, FOGL-2673 + @pytest.mark.parametrize("test_input, expected_error", [ + # ({"name": "=", "description": "Test4_Notification", "rule": NOTIFY_INBUILT_RULES[1], "channel": NOTIFY_PLUGIN, + # "enabled": True, 
"notification_type": "one shot"}, '400: Invalid name property in payload.'), + ({"name": "Test4", "description": "Test4_Notification", "rule": "+", "channel": NOTIFY_PLUGIN, "enabled": True, + "notification_type": "one shot"}, '400: Invalid rule property in payload.'), + ({"name": "Test4", "description": "Test4_Notification", "rule": NOTIFY_INBUILT_RULES[1], "channel": ":", + "enabled": True, "notification_type": "one shot"}, '400: Invalid channel property in payload.'), + ({"name": "Test4", "description": "Test4_Notification", "rule": NOTIFY_INBUILT_RULES[1], + "channel": NOTIFY_PLUGIN, "enabled": "bla", "notification_type": "one shot"}, + '400: Only "true", "false", true, false are allowed for value of enabled.'), + ({"name": "Test4", "description": "Test4_Notification", "rule": "InvalidRulePlugin", + "channel": "InvalidChannelPlugin", "enabled": True, "notification_type": "one shot"}, + '400: Invalid rule plugin:InvalidRulePlugin and/or delivery plugin:InvalidChannelPlugin supplied.') + ]) + def test_invalid_update_notification_instance(self, foglamp_url, test_input, expected_error): + conn = http.client.HTTPConnection(foglamp_url) + conn.request("PUT", '/foglamp/notification/{}'.format(urllib.parse.quote(data['name'])), json.dumps(test_input)) + r = conn.getresponse() + assert 400 == r.status + r = r.read().decode() + assert expected_error == r + + def test_update_valid_notification_instance(self, foglamp_url): + changed_data = {"description": "changed_desc"} + conn = http.client.HTTPConnection(foglamp_url) + conn.request("PUT", '/foglamp/notification/{}'.format(urllib.parse.quote(data['name'])), json.dumps(changed_data)) + r = conn.getresponse() + assert 200 == r.status + r = r.read().decode() + jdoc = json.loads(r) + assert "Notification {} updated successfully".format(data["name"]) == jdoc['result'] + + def test_delete_service_without_notification_delete(self, foglamp_url): + conn = http.client.HTTPConnection(foglamp_url) + conn.request("DELETE", 
'/foglamp/service/{}'.format(urllib.parse.quote(SERVICE_NAME))) + r = conn.getresponse() + assert 400 == r.status + r = r.read().decode() + assert "400: Notification service `{}` can not be deleted, as ['{}'] " \ + "notification instances exist.".format(SERVICE_NAME, data['name']) == r + + def test_delete_notification_and_service(self, foglamp_url): + conn = http.client.HTTPConnection(foglamp_url) + conn.request("DELETE", '/foglamp/notification/{}'.format(urllib.parse.quote(data['name']))) + r = conn.getresponse() + assert 200 == r.status + r = r.read().decode() + jdoc = json.loads(r) + assert "Notification {} deleted successfully.".format(data['name']) == jdoc['result'] + + conn.request("DELETE", '/foglamp/service/{}'.format(urllib.parse.quote(SERVICE_NAME))) + r = conn.getresponse() + assert 200 == r.status + r = r.read().decode() + jdoc = json.loads(r) + assert "Service {} deleted successfully.".format(SERVICE_NAME) == jdoc['result'] \ No newline at end of file diff --git a/tests/system/python/e2e/test_e2e_notification_service_with_plugins.py b/tests/system/python/e2e/test_e2e_notification_service_with_plugins.py index 2bdb0cb0e0..63faddb0e5 100644 --- a/tests/system/python/e2e/test_e2e_notification_service_with_plugins.py +++ b/tests/system/python/e2e/test_e2e_notification_service_with_plugins.py @@ -134,7 +134,9 @@ def test_service(self, reset_and_start_foglamp, service_branch, foglamp_url, wai def test_get_default_notification_plugins(self, foglamp_url, remove_directories): remove_directories(os.environ['FOGLAMP_ROOT'] + '/plugins/notificationDelivery') + remove_directories(os.environ['FOGLAMP_ROOT'] + '/plugins/notificationRule') remove_directories(os.environ['FOGLAMP_ROOT'] + 'cmake_build/C/plugins/notificationDelivery') + remove_directories(os.environ['FOGLAMP_ROOT'] + 'cmake_build/C/plugins/notificationRule') jdoc = _get_result(foglamp_url, '/foglamp/notification/plugin') assert [] == jdoc['delivery'] assert 2 == len(jdoc['rules']) From 
2870693c73ab6491e048e403dde2e4871d5bf0a4 Mon Sep 17 00:00:00 2001 From: Vaibhav Singhal Date: Wed, 17 Apr 2019 15:41:51 +0530 Subject: [PATCH 036/161] removed unused imports --- tests/system/python/api/test_notification.py | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/tests/system/python/api/test_notification.py b/tests/system/python/api/test_notification.py index 80d4fa42bc..c15ddb5329 100644 --- a/tests/system/python/api/test_notification.py +++ b/tests/system/python/api/test_notification.py @@ -4,7 +4,7 @@ # See: http://foglamp.readthedocs.io/ # FOGLAMP_END -""" Test add service using poll and async plugins for both python & C version REST API """ +""" Test notification REST API """ import os import subprocess @@ -12,11 +12,7 @@ import json import time import urllib -from uuid import UUID -from collections import Counter -from urllib.parse import quote import pytest -import apt __author__ = "Vaibhav Singhal" @@ -230,4 +226,4 @@ def test_delete_notification_and_service(self, foglamp_url): assert 200 == r.status r = r.read().decode() jdoc = json.loads(r) - assert "Service {} deleted successfully.".format(SERVICE_NAME) == jdoc['result'] \ No newline at end of file + assert "Service {} deleted successfully.".format(SERVICE_NAME) == jdoc['result'] From 58a00cf4703145f767edec149707883d586f8c0f Mon Sep 17 00:00:00 2001 From: ashish-jabble Date: Wed, 17 Apr 2019 18:32:07 +0530 Subject: [PATCH 037/161] minor fixes and other doc fixes --- .../foglamp/common/configuration_manager.py | 68 +++++++++++-------- .../services/core/api/test_configuration.py | 10 ++- 2 files changed, 48 insertions(+), 30 deletions(-) diff --git a/python/foglamp/common/configuration_manager.py b/python/foglamp/common/configuration_manager.py index 5c939661cb..3f88ec4584 100644 --- a/python/foglamp/common/configuration_manager.py +++ b/python/foglamp/common/configuration_manager.py @@ -745,6 +745,18 @@ async def set_category_item_value_entry(self, category_name, 
item_name, new_valu raise async def set_optional_value_entry(self, category_name, item_name, optional_entry_name, new_value_entry): + """Set the "optional_key" entry of a given item within a given category. + Even we can reset the optional value by just passing new_value_entry="" + + Keyword Arguments: + category_name -- name of the category (required) + item_name -- name of item within the category whose "optional_key" entry needs to be changed (required) + optional_entry_name -- name of the optional attribute + new_value_entry -- new value entry to replace old value entry + + Return Values: + None + """ try: storage_value_entry = None if category_name in self._cacheManager: @@ -765,33 +777,35 @@ async def set_optional_value_entry(self, category_name, item_name, optional_entr .format(category_name, item_name)) if storage_value_entry[optional_entry_name] == new_value_entry: return - # Validate optional types - if optional_entry_name == 'readonly' or optional_entry_name == 'deprecated': - if self._validate_type_value('boolean', new_value_entry) is False: - raise ValueError('For {} category, entry value must be boolean for optional item name {}; got {}' - .format(category_name, optional_entry_name, type(new_value_entry))) - elif optional_entry_name == 'minimum' or optional_entry_name == 'maximum': - if (self._validate_type_value('integer', new_value_entry) or self._validate_type_value('float', new_value_entry)) is False: - raise ValueError('For {} category, entry value must be an integer or float for optional item ' - '{}; got {}'.format(category_name, optional_entry_name, type(new_value_entry))) - elif optional_entry_name == 'rule' or optional_entry_name == 'displayName': - if not isinstance(new_value_entry, str): - raise ValueError('For {} category, entry value must be string for optional item {}; got {}' - .format(category_name, optional_entry_name, type(new_value_entry))) - else: - if self._validate_type_value('integer', new_value_entry) is False: - raise 
ValueError('For {} category, entry value must be an integer for optional item {}; got {}' - .format(category_name, optional_entry_name, type(new_value_entry))) - - # Validation is fairly minimal, minimum, maximum like maximum should be greater than minimum or vice-versa - # And no link between minimum, maximum and length is needed. - if optional_entry_name == 'minimum': - if new_value_entry >= storage_value_entry['maximum']: - raise ValueError('Minimum value should be less than equal to Maximum value') - - if optional_entry_name == 'maximum': - if new_value_entry <= storage_value_entry['minimum']: - raise ValueError('Maximum value should be greater than equal to Minimum value') + # Validate optional types only when new_value_entry not empty; otherwise set empty value + if new_value_entry: + if optional_entry_name == 'readonly' or optional_entry_name == 'deprecated': + if self._validate_type_value('boolean', new_value_entry) is False: + raise ValueError('For {} category, entry value must be boolean for optional item name {}; got {}' + .format(category_name, optional_entry_name, type(new_value_entry))) + elif optional_entry_name == 'minimum' or optional_entry_name == 'maximum': + if (self._validate_type_value('integer', new_value_entry) or self._validate_type_value('float', new_value_entry)) is False: + raise ValueError('For {} category, entry value must be an integer or float for optional item ' + '{}; got {}'.format(category_name, optional_entry_name, type(new_value_entry))) + elif optional_entry_name == 'rule' or optional_entry_name == 'displayName': + if not isinstance(new_value_entry, str): + raise ValueError('For {} category, entry value must be string for optional item {}; got {}' + .format(category_name, optional_entry_name, type(new_value_entry))) + else: + if self._validate_type_value('integer', new_value_entry) is False: + raise ValueError('For {} category, entry value must be an integer for optional item {}; got {}' + .format(category_name, 
optional_entry_name, type(new_value_entry))) + + # Validation is fairly minimal, minimum, maximum like + # maximum should be greater than minimum or vice-versa + # And no link between minimum, maximum and length is needed. + if optional_entry_name == 'minimum': + if new_value_entry >= storage_value_entry['maximum']: + raise ValueError('Minimum value should be less than equal to Maximum value') + + if optional_entry_name == 'maximum': + if new_value_entry <= storage_value_entry['minimum']: + raise ValueError('Maximum value should be greater than equal to Minimum value') payload = PayloadBuilder().SELECT("key", "description", "ts", "value") \ .JSON_PROPERTY(("value", [item_name, optional_entry_name], new_value_entry)) \ diff --git a/tests/unit/python/foglamp/services/core/api/test_configuration.py b/tests/unit/python/foglamp/services/core/api/test_configuration.py index b184aadb98..970db2c54f 100644 --- a/tests/unit/python/foglamp/services/core/api/test_configuration.py +++ b/tests/unit/python/foglamp/services/core/api/test_configuration.py @@ -276,12 +276,16 @@ async def test_set_config_item_exception(self, client, category_name='rest_api', assert resp.reason is None patch_set_entry.assert_called_once_with(category_name, item_name, payload['value']) - async def test_set_optional_in_config_item(self, client, category_name='rest_api', item_name='http_port'): + @pytest.mark.parametrize("value", [ + '', + 'false', + 'true' + ]) + async def test_set_optional_in_config_item(self, client, value, category_name='rest_api', item_name='http_port', optional_key='readonly'): async def async_mock(return_value): return return_value - optional_key = 'readonly' - payload = {optional_key: 'false'} + payload = {optional_key: value} result = {optional_key: 'false', 'value': '8082', 'type': 'integer', 'default': '8081', 'description': 'The port to accept HTTP connections on'} From aed525cf11afb167d5092ba55b6996f01559dafe Mon Sep 17 00:00:00 2001 From: stefano Date: Wed, 17 Apr 2019 
15:51:25 +0200 Subject: [PATCH 038/161] FOGL-2674: adding unit tests --- .../storage/sqlite/expected_ETC_UTC/116 | 1 + .../storage/sqlite/expected_ETC_UTC/117 | 1 + .../storage/sqlite/expected_ETC_UTC/118 | 1 + .../storage/sqlite/expected_ETC_UTC/119 | 1 + .../storage/sqlite/expected_ETC_UTC/120 | 1 + .../storage/sqlite/expected_ETC_UTC/121 | 1 + .../storage/sqlite/expected_ETC_UTC/122 | 1 + .../storage/sqlite/expected_ETC_UTC/123 | 1 + .../storage/sqlite/expected_ETC_UTC/124 | 1 + .../storage/sqlite/expected_ETC_UTC/125 | 1 + .../storage/sqlite/expected_ETC_UTC/126 | 1 + .../storage/sqlite/expected_EUROPE_ROME/116 | 1 + .../storage/sqlite/expected_EUROPE_ROME/117 | 1 + .../storage/sqlite/expected_EUROPE_ROME/118 | 1 + .../storage/sqlite/expected_EUROPE_ROME/119 | 1 + .../storage/sqlite/expected_EUROPE_ROME/120 | 1 + .../storage/sqlite/expected_EUROPE_ROME/121 | 1 + .../storage/sqlite/expected_EUROPE_ROME/122 | 1 + .../storage/sqlite/expected_EUROPE_ROME/123 | 1 + .../storage/sqlite/expected_EUROPE_ROME/124 | 1 + .../storage/sqlite/expected_EUROPE_ROME/125 | 1 + .../storage/sqlite/expected_EUROPE_ROME/126 | 1 + .../sqlite/payloads/tz_all_insert.json | 7 ++++++ .../sqlite/payloads/tz_all_read_2.json | 12 ++++++++++ .../sqlite/payloads/tz_all_read_3.json | 17 ++++++++++++++ .../sqlite/payloads/tz_readings_insert.json | 10 +++++++++ .../sqlite/payloads/tz_readings_read_2.json | 14 ++++++++++++ .../sqlite/payloads/tz_readings_read_3.json | 17 ++++++++++++++ .../sqlite/payloads/tz_readings_read_4.json | 22 +++++++++++++++++++ tests/unit/C/services/storage/sqlite/testset | 13 ++++++++++- 30 files changed, 133 insertions(+), 1 deletion(-) create mode 100644 tests/unit/C/services/storage/sqlite/expected_ETC_UTC/116 create mode 100644 tests/unit/C/services/storage/sqlite/expected_ETC_UTC/117 create mode 100644 tests/unit/C/services/storage/sqlite/expected_ETC_UTC/118 create mode 100644 tests/unit/C/services/storage/sqlite/expected_ETC_UTC/119 create mode 100644 
tests/unit/C/services/storage/sqlite/expected_ETC_UTC/120 create mode 100644 tests/unit/C/services/storage/sqlite/expected_ETC_UTC/121 create mode 100644 tests/unit/C/services/storage/sqlite/expected_ETC_UTC/122 create mode 100644 tests/unit/C/services/storage/sqlite/expected_ETC_UTC/123 create mode 100644 tests/unit/C/services/storage/sqlite/expected_ETC_UTC/124 create mode 100644 tests/unit/C/services/storage/sqlite/expected_ETC_UTC/125 create mode 100644 tests/unit/C/services/storage/sqlite/expected_ETC_UTC/126 create mode 100644 tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/116 create mode 100644 tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/117 create mode 100644 tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/118 create mode 100644 tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/119 create mode 100644 tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/120 create mode 100644 tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/121 create mode 100644 tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/122 create mode 100644 tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/123 create mode 100644 tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/124 create mode 100644 tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/125 create mode 100644 tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/126 create mode 100644 tests/unit/C/services/storage/sqlite/payloads/tz_all_insert.json create mode 100644 tests/unit/C/services/storage/sqlite/payloads/tz_all_read_2.json create mode 100644 tests/unit/C/services/storage/sqlite/payloads/tz_all_read_3.json create mode 100644 tests/unit/C/services/storage/sqlite/payloads/tz_readings_insert.json create mode 100644 tests/unit/C/services/storage/sqlite/payloads/tz_readings_read_2.json create mode 100644 tests/unit/C/services/storage/sqlite/payloads/tz_readings_read_3.json create mode 100644 
tests/unit/C/services/storage/sqlite/payloads/tz_readings_read_4.json diff --git a/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/116 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/116 new file mode 100644 index 0000000000..2e97a724e1 --- /dev/null +++ b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/116 @@ -0,0 +1 @@ +{ "response" : "deleted", "rows_affected" : 13 } \ No newline at end of file diff --git a/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/117 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/117 new file mode 100644 index 0000000000..f8091d5b43 --- /dev/null +++ b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/117 @@ -0,0 +1 @@ +{ "removed" : 12, "unsentPurged" : 12, "unsentRetained" : 0, "readings" : 0 } \ No newline at end of file diff --git a/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/118 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/118 new file mode 100644 index 0000000000..7d66483ebe --- /dev/null +++ b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/118 @@ -0,0 +1 @@ +{ "response" : "inserted", "rows_affected" : 1 } \ No newline at end of file diff --git a/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/119 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/119 new file mode 100644 index 0000000000..347dddc3fd --- /dev/null +++ b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/119 @@ -0,0 +1 @@ +{"count":1,"rows":[{"id":2000,"key":"tz_01","description":"test - timezone - all tables","data":{"test":"timezone"},"ts":"2019-04-17 14:01:02.123"}]} \ No newline at end of file diff --git a/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/120 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/120 new file mode 100644 index 0000000000..347dddc3fd --- /dev/null +++ b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/120 @@ -0,0 +1 @@ +{"count":1,"rows":[{"id":2000,"key":"tz_01","description":"test - timezone - all 
tables","data":{"test":"timezone"},"ts":"2019-04-17 14:01:02.123"}]} \ No newline at end of file diff --git a/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/121 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/121 new file mode 100644 index 0000000000..84a254fe37 --- /dev/null +++ b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/121 @@ -0,0 +1 @@ +{"count":1,"rows":[{"user_ts_max":"2019-04-17 14:01:02.123456+00:00","ts_timestamp":"2019-04-17 14:01:02.123"}]} \ No newline at end of file diff --git a/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/122 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/122 new file mode 100644 index 0000000000..2eeb09a60b --- /dev/null +++ b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/122 @@ -0,0 +1 @@ +{ "response" : "appended", "readings_added" : 1 } \ No newline at end of file diff --git a/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/123 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/123 new file mode 100644 index 0000000000..b44c403bb0 --- /dev/null +++ b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/123 @@ -0,0 +1 @@ +{"count":1,"rows":[{"id":941,"asset_code":"tz_02","read_key":"f1cfff7a-3769-4f47-9ded-100000000001","reading":{"test":"2"},"user_ts":"2019-04-17 14:01:02.123456","ts":"2019-04-17 13:40:21.594"}]} \ No newline at end of file diff --git a/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/124 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/124 new file mode 100644 index 0000000000..16bfa77153 --- /dev/null +++ b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/124 @@ -0,0 +1 @@ +{"count":1,"rows":[{"asset_code":"tz_02","read_key":"f1cfff7a-3769-4f47-9ded-100000000001","reading":{"test":"2"},"user_ts":"2019-04-17 14:01:02.123456"}]} \ No newline at end of file diff --git a/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/125 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/125 new file mode 100644 index 
0000000000..b4cf8222bb --- /dev/null +++ b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/125 @@ -0,0 +1 @@ +{"count":1,"rows":[{"user_ts_max":"2019-04-17 14:01:02.123456","ts_timestamp":"2019-04-17 14:01:02.123"}]} \ No newline at end of file diff --git a/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/126 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/126 new file mode 100644 index 0000000000..64cfa896e4 --- /dev/null +++ b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/126 @@ -0,0 +1 @@ +{"count":1,"rows":[{"test_min":"2","ts_timestamp":"2019-04-17 14:01:02.123"}]} \ No newline at end of file diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/116 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/116 new file mode 100644 index 0000000000..2e97a724e1 --- /dev/null +++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/116 @@ -0,0 +1 @@ +{ "response" : "deleted", "rows_affected" : 13 } \ No newline at end of file diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/117 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/117 new file mode 100644 index 0000000000..f8091d5b43 --- /dev/null +++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/117 @@ -0,0 +1 @@ +{ "removed" : 12, "unsentPurged" : 12, "unsentRetained" : 0, "readings" : 0 } \ No newline at end of file diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/118 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/118 new file mode 100644 index 0000000000..7d66483ebe --- /dev/null +++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/118 @@ -0,0 +1 @@ +{ "response" : "inserted", "rows_affected" : 1 } \ No newline at end of file diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/119 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/119 new file mode 100644 index 0000000000..347dddc3fd --- /dev/null +++ 
b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/119 @@ -0,0 +1 @@ +{"count":1,"rows":[{"id":2000,"key":"tz_01","description":"test - timezone - all tables","data":{"test":"timezone"},"ts":"2019-04-17 14:01:02.123"}]} \ No newline at end of file diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/120 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/120 new file mode 100644 index 0000000000..347dddc3fd --- /dev/null +++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/120 @@ -0,0 +1 @@ +{"count":1,"rows":[{"id":2000,"key":"tz_01","description":"test - timezone - all tables","data":{"test":"timezone"},"ts":"2019-04-17 14:01:02.123"}]} \ No newline at end of file diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/121 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/121 new file mode 100644 index 0000000000..84a254fe37 --- /dev/null +++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/121 @@ -0,0 +1 @@ +{"count":1,"rows":[{"user_ts_max":"2019-04-17 14:01:02.123456+00:00","ts_timestamp":"2019-04-17 14:01:02.123"}]} \ No newline at end of file diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/122 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/122 new file mode 100644 index 0000000000..2eeb09a60b --- /dev/null +++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/122 @@ -0,0 +1 @@ +{ "response" : "appended", "readings_added" : 1 } \ No newline at end of file diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/123 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/123 new file mode 100644 index 0000000000..b44c403bb0 --- /dev/null +++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/123 @@ -0,0 +1 @@ +{"count":1,"rows":[{"id":941,"asset_code":"tz_02","read_key":"f1cfff7a-3769-4f47-9ded-100000000001","reading":{"test":"2"},"user_ts":"2019-04-17 14:01:02.123456","ts":"2019-04-17 13:40:21.594"}]} \ No 
newline at end of file diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/124 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/124 new file mode 100644 index 0000000000..16bfa77153 --- /dev/null +++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/124 @@ -0,0 +1 @@ +{"count":1,"rows":[{"asset_code":"tz_02","read_key":"f1cfff7a-3769-4f47-9ded-100000000001","reading":{"test":"2"},"user_ts":"2019-04-17 14:01:02.123456"}]} \ No newline at end of file diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/125 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/125 new file mode 100644 index 0000000000..b4cf8222bb --- /dev/null +++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/125 @@ -0,0 +1 @@ +{"count":1,"rows":[{"user_ts_max":"2019-04-17 14:01:02.123456","ts_timestamp":"2019-04-17 14:01:02.123"}]} \ No newline at end of file diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/126 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/126 new file mode 100644 index 0000000000..64cfa896e4 --- /dev/null +++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/126 @@ -0,0 +1 @@ +{"count":1,"rows":[{"test_min":"2","ts_timestamp":"2019-04-17 14:01:02.123"}]} \ No newline at end of file diff --git a/tests/unit/C/services/storage/sqlite/payloads/tz_all_insert.json b/tests/unit/C/services/storage/sqlite/payloads/tz_all_insert.json new file mode 100644 index 0000000000..c3d55869c2 --- /dev/null +++ b/tests/unit/C/services/storage/sqlite/payloads/tz_all_insert.json @@ -0,0 +1,7 @@ +{ + "id" : 2000, + "key" : "tz_01", + "description" : "test - timezone - all tables", + "data" : { "test" : "timezone" }, + "ts" : "2019-04-17 14:01:02.123456+00:00" +} diff --git a/tests/unit/C/services/storage/sqlite/payloads/tz_all_read_2.json b/tests/unit/C/services/storage/sqlite/payloads/tz_all_read_2.json new file mode 100644 index 0000000000..b996253dd6 --- /dev/null +++ 
b/tests/unit/C/services/storage/sqlite/payloads/tz_all_read_2.json @@ -0,0 +1,12 @@ +{ + "where":{ + "column":"id", + "condition":"=", + "value":2000 + }, + "limit":1, + "sort":{ + "column":"id", + "direction":"ASC" + } +} \ No newline at end of file diff --git a/tests/unit/C/services/storage/sqlite/payloads/tz_all_read_3.json b/tests/unit/C/services/storage/sqlite/payloads/tz_all_read_3.json new file mode 100644 index 0000000000..c8f9ac5445 --- /dev/null +++ b/tests/unit/C/services/storage/sqlite/payloads/tz_all_read_3.json @@ -0,0 +1,17 @@ +{ + "aggregate":{ + "operation":"max", + "column":"ts", + "alias":"user_ts_max" + }, + "group":{ + "column":"ts", + "alias":"ts_timestamp", + "format":"YYYY-MM-DD HH24:MI:SS.MS" + }, + "where":{ + "column":"id", + "condition":"=", + "value":"2000" + } +} \ No newline at end of file diff --git a/tests/unit/C/services/storage/sqlite/payloads/tz_readings_insert.json b/tests/unit/C/services/storage/sqlite/payloads/tz_readings_insert.json new file mode 100644 index 0000000000..fc57ddce9d --- /dev/null +++ b/tests/unit/C/services/storage/sqlite/payloads/tz_readings_insert.json @@ -0,0 +1,10 @@ +{ + "readings" : [ + { + "asset_code": "tz_02", + "read_key" : "f1cfff7a-3769-4f47-9ded-100000000001", + "reading" : { "test" : "2" }, + "user_ts" : "2019-04-17 14:01:02.123456+00:00" + } + ] +} diff --git a/tests/unit/C/services/storage/sqlite/payloads/tz_readings_read_2.json b/tests/unit/C/services/storage/sqlite/payloads/tz_readings_read_2.json new file mode 100644 index 0000000000..e43e007d6d --- /dev/null +++ b/tests/unit/C/services/storage/sqlite/payloads/tz_readings_read_2.json @@ -0,0 +1,14 @@ +{ + "return" : [ + "asset_code", + "read_key", + "reading", + "user_ts" + ], + + "where":{ + "column":"asset_code", + "condition":"=", + "value":"tz_02" + } +} \ No newline at end of file diff --git a/tests/unit/C/services/storage/sqlite/payloads/tz_readings_read_3.json b/tests/unit/C/services/storage/sqlite/payloads/tz_readings_read_3.json new 
file mode 100644 index 0000000000..92f319182e --- /dev/null +++ b/tests/unit/C/services/storage/sqlite/payloads/tz_readings_read_3.json @@ -0,0 +1,17 @@ +{ + "aggregate":{ + "operation":"max", + "column":"user_ts", + "alias":"user_ts_max" + }, + "group":{ + "column":"user_ts", + "alias":"ts_timestamp", + "format":"YYYY-MM-DD HH24:MI:SS.MS" + }, + "where":{ + "column":"asset_code", + "condition":"=", + "value":"tz_02" + } +} \ No newline at end of file diff --git a/tests/unit/C/services/storage/sqlite/payloads/tz_readings_read_4.json b/tests/unit/C/services/storage/sqlite/payloads/tz_readings_read_4.json new file mode 100644 index 0000000000..2dfbaba395 --- /dev/null +++ b/tests/unit/C/services/storage/sqlite/payloads/tz_readings_read_4.json @@ -0,0 +1,22 @@ +{ + "aggregate":[ + { + "operation":"min", + "json":{ + "column":"reading", + "properties":"test" + }, + "alias":"test_min" + } + ], + "group":{ + "column":"user_ts", + "alias":"ts_timestamp", + "format":"YYYY-MM-DD HH24:MI:SS.MS" + }, + "where":{ + "column":"asset_code", + "condition":"=", + "value":"tz_02" + } +} \ No newline at end of file diff --git a/tests/unit/C/services/storage/sqlite/testset b/tests/unit/C/services/storage/sqlite/testset index b4c0ea0f74..59b51e1349 100644 --- a/tests/unit/C/services/storage/sqlite/testset +++ b/tests/unit/C/services/storage/sqlite/testset @@ -113,4 +113,15 @@ Add table snapshot,POST,http://localhost:8080/storage/table/test2/snapshot,add_s Load table snapshot,PUT,http://localhost:8080/storage/table/test2/snapshot/99, Delete table snapshot,DELETE,http://localhost:8080/storage/table/test2/snapshot/99, Jira FOGL-690,POST,http://localhost:8080/storage/table/configuration,fogl690-error.json -Shutdown,POST,http://localhost:1081/foglamp/service/shutdown,,checkstate +timezone - all tables - Delete,DELETE,http://localhost:8080/storage/table/test2, +timezone - readings - Delete,PUT,http://localhost:8080/storage/reading/purge?age=1&sent=0&flags=purge, +timezone - all tables - 
insert,POST,http://localhost:8080/storage/table/test2,tz_all_insert.json +timezone - all tables - read 1,GET,http://localhost:8080/storage/table/test2?id=2000, +timezone - all tables - read 2,PUT,http://localhost:8080/storage/table/test2/query,tz_all_read_2.json +timezone - all tables - read 3,PUT,http://localhost:8080/storage/table/test2/query,tz_all_read_3.json +timezone - readings - insert,POST,http://localhost:8080/storage/reading,tz_readings_insert.json +timezone - readings - read 1,GET,http://localhost:8080/storage/reading?id=1&count=2000,,checkstate +timezone - readings - read 2,PUT,http://localhost:8080/storage/reading/query,tz_readings_read_2.json +timezone - readings - read 3,PUT,http://localhost:8080/storage/reading/query,tz_readings_read_3.json +timezone - readings - read 4,PUT,http://localhost:8080/storage/reading/query,tz_readings_read_4.json +Shutdown,POST,http://localhost:1081/foglamp/service/shutdown,,checkstate \ No newline at end of file From 54b5f0d37bdb59491f6afe1066944c7faa9541ef Mon Sep 17 00:00:00 2001 From: stefano Date: Wed, 17 Apr 2019 16:58:29 +0200 Subject: [PATCH 039/161] FOGL-2674: unit tests fixed --- tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/124 | 2 +- tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/125 | 2 +- tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/126 | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/124 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/124 index 16bfa77153..fbab98efbe 100644 --- a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/124 +++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/124 @@ -1 +1 @@ -{"count":1,"rows":[{"asset_code":"tz_02","read_key":"f1cfff7a-3769-4f47-9ded-100000000001","reading":{"test":"2"},"user_ts":"2019-04-17 14:01:02.123456"}]} \ No newline at end of file 
+{"count":1,"rows":[{"asset_code":"tz_02","read_key":"f1cfff7a-3769-4f47-9ded-100000000001","reading":{"test":"2"},"user_ts":"2019-04-17 16:01:02.123456"}]} \ No newline at end of file diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/125 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/125 index b4cf8222bb..7deff2ddc0 100644 --- a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/125 +++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/125 @@ -1 +1 @@ -{"count":1,"rows":[{"user_ts_max":"2019-04-17 14:01:02.123456","ts_timestamp":"2019-04-17 14:01:02.123"}]} \ No newline at end of file +{"count":1,"rows":[{"user_ts_max":"2019-04-17 16:01:02.123456","ts_timestamp":"2019-04-17 16:01:02.123"}]} \ No newline at end of file diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/126 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/126 index 64cfa896e4..ac8fd3e5dc 100644 --- a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/126 +++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/126 @@ -1 +1 @@ -{"count":1,"rows":[{"test_min":"2","ts_timestamp":"2019-04-17 14:01:02.123"}]} \ No newline at end of file +{"count":1,"rows":[{"test_min":"2","ts_timestamp":"2019-04-17 16:01:02.123"}]} \ No newline at end of file From e427271047667b7b367f401bad669b48f569a176 Mon Sep 17 00:00:00 2001 From: Amarendra Date: Thu, 18 Apr 2019 10:02:36 +0530 Subject: [PATCH 040/161] Fogl 2655 - Plugins snapshot (#1494) * FOGL-2655 - Plugin snapshot * Unix timestamp being taken as id * Path for C plugins now picked with reference to execution environment * .gitignore modified * Doc examples corrected * typo fixed * /bin/tar entry removed from sudoers. A new helper script restore_plugins_snapshot added with setuid and setgid bit set to enable the script to run with sudo privileges. * Only admin user should use snapshot api when auth is mandatory. 
* Absolute path changed to relative path for files in .tar.ga archive * FOGL-2655: Utility program to extract plugin snapshot tar archive (#1524) * FOGL-2655: Utility program to extract plugin snapshot tar archive; setuid bit would be set on its executable * Makefile changes, snapshot.py changes to account for new extract_plugin_snapshot executable. * Pick extraction path from env variable FOGLAMP_ROOT and try to confirm foglamp installtion at that path * Check for make vs make install refined * File list now appears in sorted desc. Redundant update plugins script removed. * Warning log added when an old plugin tar file is deleted. * Makefile refactoring for extract_plugin_snapshot setuid setting * GET endpoint refactored * temp tar file should be deleted in case of any error in POST request --- .gitignore | 1 + C/plugins/utils/CMakeLists.txt | 3 + C/plugins/utils/extract_plugin_snapshot.cpp | 87 ++++++++++ Makefile | 19 +- .../services/core/api/snapshot/plugins.py | 164 ++++++++++++++++++ python/foglamp/services/core/routes.py | 7 + python/foglamp/services/core/snapshot.py | 125 +++++++++++++ 7 files changed, 399 insertions(+), 7 deletions(-) create mode 100644 C/plugins/utils/extract_plugin_snapshot.cpp create mode 100644 python/foglamp/services/core/api/snapshot/plugins.py create mode 100644 python/foglamp/services/core/snapshot.py diff --git a/.gitignore b/.gitignore index 50e7aec937..36c8320562 100644 --- a/.gitignore +++ b/.gitignore @@ -14,6 +14,7 @@ data/etc/storage.json data/etc/certs/* data/var data/tmp +data/snapshots # SQLite3 default db location and after migration data/*.db diff --git a/C/plugins/utils/CMakeLists.txt b/C/plugins/utils/CMakeLists.txt index b0fe554180..1fc8e48ed0 100644 --- a/C/plugins/utils/CMakeLists.txt +++ b/C/plugins/utils/CMakeLists.txt @@ -12,5 +12,8 @@ include_directories(include ../../services/common/include) add_executable(${PROJECT_NAME} get_plugin_info.cpp) target_link_libraries(${PROJECT_NAME} -ldl) 
+add_executable(extract_plugin_snapshot extract_plugin_snapshot.cpp) + # Install library install(TARGETS ${PROJECT_NAME} DESTINATION foglamp/extras/C) +install(TARGETS extract_plugin_snapshot DESTINATION foglamp/extras/C) diff --git a/C/plugins/utils/extract_plugin_snapshot.cpp b/C/plugins/utils/extract_plugin_snapshot.cpp new file mode 100644 index 0000000000..1739d1027b --- /dev/null +++ b/C/plugins/utils/extract_plugin_snapshot.cpp @@ -0,0 +1,87 @@ +/* + * Utility to extract plugin snapshot tar archive + * + * Copyright (c) 2019 Dianomic Systems + * + * Released under the Apache 2.0 Licence + * + * Author: Amandeep Singh Arora + */ + +#include +#include +#include +#include +#include +#include +#include + +extern int errno; + +/** + * Check whether file/dir exists within FOGLAMP_ROOT + * + * @param rootdir FOGLAMP_ROOT path + * @param file relative path of file or dir inside FOGLAMP_ROOT + */ +bool checkFile(char *rootdir, char *file) +{ + char path[256]; + snprintf(path, sizeof(path), "%s/%s", rootdir, file); + return (access(path, F_OK) == 0); +} + +/** + * Extract files from within plugin snapshot tar archive + * + * Usage: extract_plugin_snapshot + * + * @param argv[1] relative/absolute path of plugin snapshot archive + */ +int main(int argc, char *argv[]) +{ + if(argc < 2) + { + printf("Usage: %s \n", argv[0]); + return 1; + } + + char *rootdir = getenv("FOGLAMP_ROOT"); + if (!rootdir || rootdir[0]==0) + { + printf("Unable to find path where archive is to be extracted\n"); + return 2; + } + struct stat sb; + stat(rootdir, &sb); + if ((sb.st_mode & S_IFMT) != S_IFDIR) + { + printf("Unable to find path where archive is to be extracted\n"); + return 2; + } + + if (!checkFile(rootdir, (char *) "bin/foglamp") || + !checkFile(rootdir, (char *) "services/foglamp.services.storage") || + !checkFile(rootdir, (char *) "python/foglamp/services/core/routes.py") || + !checkFile(rootdir, (char *) "lib/libcommon-lib.so") || + !checkFile(rootdir, (char *) 
"tasks/sending_process")) + { + printf("Unable to find foglamp insallation\n"); + return 2; + } + + char *args[]={(char *) "/bin/tar", (char *) "-C", (char *) "PLACEHOLDER", (char *) "-xf", (char *) "PLACEHOLDER", NULL}; + args[2] = rootdir; + args[4] = argv[1]; + + errno = 0; + int rc = execvp(args[0], args); + if (rc != 0) + { + printf("execvp failed: errno=%s\n", strerror(errno)); + return 3; + } + + return 0; +} + diff --git a/Makefile b/Makefile index 53b8f8ad64..632761e9b4 100644 --- a/Makefile +++ b/Makefile @@ -102,7 +102,7 @@ PACKAGE_UPDATE_SCRIPT_SRC := scripts/package # EXTRA SCRIPTS EXTRAS_SCRIPTS_SRC_DIR := extras/scripts -# FOGBENCH +# FOGBENCH FOGBENCH_PYTHON_SRC_DIR := extras/python/fogbench # FogLAMP Version file @@ -185,7 +185,7 @@ install : $(INSTALL_DIR) \ scripts_install \ bin_install \ extras_install \ - data_install + data_install ############################################################################### ############################ PRE-REQUISITE SCRIPTS ############################ @@ -249,7 +249,7 @@ c_install : c_build python_build : $(PYTHON_SETUP_FILE) $(CD) $(PYTHON_SRC_DIR) ; $(PYTHON_BUILD_PACKAGE) ; $(CD) $(CURRENT_DIR) ; $(CP) $(PYTHON_REQUIREMENTS_FILE) $(PYTHON_LIB_DIR)/. 
-# install python requirements without --user +# install python requirements without --user python_requirements : $(PYTHON_REQUIREMENTS_FILE) $(PIP_INSTALL_REQUIREMENTS) $(PYTHON_REQUIREMENTS_FILE) $(NO_CACHE_DIR) @@ -300,7 +300,7 @@ $(SCRIPTS_INSTALL_DIR) : install_common_scripts : $(SCRIPT_COMMON_INSTALL_DIR) $(COMMON_SCRIPTS_SRC) $(CP) $(COMMON_SCRIPTS_SRC)/*.sh $(SCRIPT_COMMON_INSTALL_DIR) $(CP) $(COMMON_SCRIPTS_SRC)/*.py $(SCRIPT_COMMON_INSTALL_DIR) - + install_postgres_script : $(SCRIPT_PLUGINS_STORAGE_INSTALL_DIR) \ $(POSTGRES_SCHEMA_UPDATE_DIR) $(POSTGRES_SCRIPT_SRC) $(POSTGRES_SCHEMA_UPDATE_SCRIPT_SRC) $(CP) $(POSTGRES_SCRIPT_SRC) $(SCRIPT_PLUGINS_STORAGE_INSTALL_DIR) @@ -407,7 +407,7 @@ $(BIN_INSTALL_DIR) : ####################### EXTRAS INSTALL TARGETS ################################ ############################################################################### # install bin -extras_install : $(EXTRAS_INSTALL_DIR) install_python_fogbench install_extras_scripts +extras_install : $(EXTRAS_INSTALL_DIR) install_python_fogbench install_extras_scripts setuid_extract_plugin_snapshot install_python_fogbench : $(FOGBENCH_PYTHON_INSTALL_DIR) $(FOGBENCH_PYTHON_SRC_DIR) $(CP_DIR) $(FOGBENCH_PYTHON_SRC_DIR) $(FOGBENCH_PYTHON_INSTALL_DIR) @@ -447,11 +447,16 @@ endif #$(DATA_INSTALL_DIR) : # $(MKDIR_PATH) $@ +# set setuid bit of extract_plugin_snapshot +setuid_extract_plugin_snapshot: + chmod u+s $(EXTRAS_INSTALL_DIR)/C/extract_plugin_snapshot + + ############################################################################### ######################## SUPPORTING BUILD/INSTALL TARGETS ##################### ############################################################################### # create install directory -$(INSTALL_DIR) : +$(INSTALL_DIR) : $(MKDIR_PATH) $@ ############################################################################### @@ -464,4 +469,4 @@ clean : -$(RM_DIR) $(PYTHON_BUILD_DIR) -$(RM_DIR) $(DEV_SERVICES_DIR) -$(RM) $(SYMLINK_PLUGINS_DIR) - -$(RM) 
$(ASYNC_INGEST_PYMODULE) + -$(RM) $(ASYNC_INGEST_PYMODULE) \ No newline at end of file diff --git a/python/foglamp/services/core/api/snapshot/plugins.py b/python/foglamp/services/core/api/snapshot/plugins.py new file mode 100644 index 0000000000..009705063d --- /dev/null +++ b/python/foglamp/services/core/api/snapshot/plugins.py @@ -0,0 +1,164 @@ +# -*- coding: utf-8 -*- + +# FOGLAMP_BEGIN +# See: http://foglamp.readthedocs.io/ +# FOGLAMP_END + + +import os +from aiohttp import web +from foglamp.services.core.snapshot import SnapshotPluginBuilder +from foglamp.common.common import _FOGLAMP_ROOT, _FOGLAMP_DATA +from foglamp.common.web.middleware import has_permission + +__author__ = "Amarendra K Sinha" +__copyright__ = "Copyright (c) 2019 Dianomic Systems" +__license__ = "Apache 2.0" +__version__ = "${VERSION}" + +_help = """ + ------------------------------------------------------------------------- + | GET POST | /foglamp/snapshot/plugins | + | PUT DELETE | /foglamp/snapshot/plugins/{id} | + ------------------------------------------------------------------------- +""" + + +@has_permission("admin") +async def get_snapshot(request): + """ get list of available snapshots + + :Example: + curl -X GET http://localhost:8081/foglamp/snapshot/plugins + + When auth is mandatory: + curl -X GET http://localhost:8081/foglamp/snapshot/plugins -H "authorization: " + """ + # Get snapshot directory path + snapshot_dir = _get_snapshot_dir() + valid_extension = '.tar.gz' + sorted_list = [] + if os.path.isdir(snapshot_dir): + for root, dirs, files in os.walk(snapshot_dir): + valid_files = list( + filter(lambda f: f.endswith(valid_extension), files)) + list_files = list(map( + lambda x: {"id": x.split("snapshot-plugin-")[1].split(".tar.gz")[0], + "name": x}, valid_files)) + sorted_list = sorted(list_files, key=lambda k: k['id'], reverse=True) + + return web.json_response({"snapshots": sorted_list}) + + +@has_permission("admin") +async def post_snapshot(request): + """ Create a 
snapshot by name + + :Example: + curl -X POST http://localhost:8081/foglamp/snapshot/plugins + + When auth is mandatory: + curl -X POST http://localhost:8081/foglamp/snapshot/plugins -H "authorization: " + """ + try: + snapshot_dir = _get_snapshot_dir() + snapshot_id, snapshot_name = await SnapshotPluginBuilder( + snapshot_dir).build() + except Exception as ex: + raise web.HTTPInternalServerError( + reason='Snapshot could not be created. {}'.format(str(ex))) + else: + return web.json_response({ + "message": "snapshot id={}, file={} created successfully.".format( + snapshot_id, snapshot_name)}) + + +@has_permission("admin") +async def put_snapshot(request): + """extract a snapshot + + :Example: + curl -X PUT http://localhost:8081/foglamp/snapshot/plugins/1554204238 + + When auth is mandatory: + curl -X PUT http://localhost:8081/foglamp/snapshot/plugins/1554204238 -H "authorization: " + """ + try: + snapshot_id = request.match_info.get('id', None) + snapshot_name = "snapshot-plugin-{}.tar.gz".format(snapshot_id) + + try: + snapshot_id = int(snapshot_id) + except: + raise ValueError('Invalid snapshot id: {}'.format(snapshot_id)) + + if not os.path.isdir(_get_snapshot_dir()): + raise web.HTTPNotFound(reason="No snapshot found.") + + snapshot_dir = _get_snapshot_dir() + for root, dirs, files in os.walk(snapshot_dir): + if str(snapshot_name) not in files: + raise web.HTTPNotFound(reason='{} not found'.format(snapshot_name)) + + p = "{}/{}".format(snapshot_dir, snapshot_name) + SnapshotPluginBuilder(snapshot_dir).extract_files(p) + except ValueError as ex: + raise web.HTTPBadRequest(reason=str(ex)) + except Exception as ex: + raise web.HTTPInternalServerError( + reason='Snapshot {} could not be restored. 
{}'.format(snapshot_name, + str(ex))) + else: + return web.json_response( + {"message": "snapshot {} restored successfully.".format( + snapshot_name)}) + + +@has_permission("admin") +async def delete_snapshot(request): + """delete a snapshot + + :Example: + curl -X DELETE http://localhost:8081/foglamp/snapshot/plugins/1554204238 + + When auth is mandatory: + curl -X DELETE http://localhost:8081/foglamp/snapshot/plugins/1554204238 -H "authorization: " + """ + try: + snapshot_id = request.match_info.get('id', None) + snapshot_name = "snapshot-plugin-{}.tar.gz".format(snapshot_id) + + try: + snapshot_id = int(snapshot_id) + except: + raise ValueError('Invalid snapshot id: {}'.format(snapshot_id)) + + if not os.path.isdir(_get_snapshot_dir()): + raise web.HTTPNotFound(reason="No snapshot found.") + + snapshot_dir = _get_snapshot_dir() + for root, dirs, files in os.walk(_get_snapshot_dir()): + if str(snapshot_name) not in files: + raise web.HTTPNotFound(reason='{} not found'.format(snapshot_name)) + + p = "{}/{}".format(snapshot_dir, snapshot_name) + os.remove(p) + except ValueError as ex: + raise web.HTTPBadRequest(reason=str(ex)) + except Exception as ex: + raise web.HTTPInternalServerError( + reason='Snapshot {} could not be deleted. 
{}'.format(snapshot_name, + str(ex))) + else: + return web.json_response( + {"message": "snapshot {} deleted successfully.".format( + snapshot_name)}) + + +def _get_snapshot_dir(): + if _FOGLAMP_DATA: + snapshot_dir = os.path.expanduser(_FOGLAMP_DATA + '/snapshots/plugins') + else: + snapshot_dir = os.path.expanduser( + _FOGLAMP_ROOT + '/data/snapshots/plugins') + return snapshot_dir diff --git a/python/foglamp/services/core/routes.py b/python/foglamp/services/core/routes.py index 9d06d46328..be71eca15c 100644 --- a/python/foglamp/services/core/routes.py +++ b/python/foglamp/services/core/routes.py @@ -23,6 +23,7 @@ from foglamp.services.core.api import north from foglamp.services.core.api import filters from foglamp.services.core.api import notification +from foglamp.services.core.api.snapshot import plugins as snapshot_plugins from foglamp.services.core.api.snapshot import table as snapshot_table @@ -176,6 +177,12 @@ def setup(app): app.router.add_route('PUT', '/foglamp/notification/{notification_name}', notification.put_notification) app.router.add_route('DELETE', '/foglamp/notification/{notification_name}', notification.delete_notification) + # Snapshot plugins + app.router.add_route('GET', '/foglamp/snapshot/plugins', snapshot_plugins.get_snapshot) + app.router.add_route('POST', '/foglamp/snapshot/plugins', snapshot_plugins.post_snapshot) + app.router.add_route('PUT', '/foglamp/snapshot/plugins/{id}', snapshot_plugins.put_snapshot) + app.router.add_route('DELETE', '/foglamp/snapshot/plugins/{id}', snapshot_plugins.delete_snapshot) + # Snapshot config app.router.add_route('GET', '/foglamp/snapshot/category', snapshot_table.get_snapshot) app.router.add_route('POST', '/foglamp/snapshot/category', snapshot_table.post_snapshot) diff --git a/python/foglamp/services/core/snapshot.py b/python/foglamp/services/core/snapshot.py new file mode 100644 index 0000000000..c606215aa8 --- /dev/null +++ b/python/foglamp/services/core/snapshot.py @@ -0,0 +1,125 @@ +# -*- coding: 
utf-8 -*- + +# FOGLAMP_BEGIN +# See: http://foglamp.readthedocs.io/ +# FOGLAMP_END + +""" Provides utility functions to take snapshot of plugins""" + +import os +from os import path +from os.path import basename +import glob +import json +import tarfile +import fnmatch +import time + +from foglamp.common import logger +from foglamp.common.common import _FOGLAMP_ROOT + + +__author__ = "Amarendra K Sinha" +__copyright__ = "Copyright (c) 2019 Dianomic Systems" +__license__ = "Apache 2.0" +__version__ = "${VERSION}" + +_LOGGER = logger.setup(__name__) +_NO_OF_FILES_TO_RETAIN = 3 +SNAPSHOT_PREFIX = "snapshot-plugin" + +class SnapshotPluginBuilder: + + _out_file_path = None + _interim_file_path = None + + def __init__(self, snapshot_plugin_dir): + try: + if not os.path.exists(snapshot_plugin_dir): + os.makedirs(snapshot_plugin_dir) + else: + self.check_and_delete_plugins_tar_files(snapshot_plugin_dir) + + self._out_file_path = snapshot_plugin_dir + self._interim_file_path = snapshot_plugin_dir + except (OSError, Exception) as ex: + _LOGGER.error("Error in initializing SnapshotPluginBuilder class: %s ", str(ex)) + raise RuntimeError(str(ex)) + + async def build(self): + def reset(tarinfo): + tarinfo.uid = tarinfo.gid = 0 + tarinfo.uname = tarinfo.gname = "root" + return tarinfo + try: + snapshot_id = str(int(time.time())) + snapshot_filename = "{}-{}.tar.gz".format(SNAPSHOT_PREFIX, snapshot_id) + tar_file_name = "{}/{}".format(self._out_file_path, snapshot_filename) + pyz = tarfile.open(tar_file_name, "w:gz") + try: + # files are being added to tarfile with relative path and NOT with absolute path. 
+ pyz.add("{}/python/foglamp/plugins".format(_FOGLAMP_ROOT), arcname="python/foglamp/plugins", recursive=True) + # C plugins location is different with "make install" and "make" + if path.exists("{}/bin".format(_FOGLAMP_ROOT)) and path.exists("{}/bin/foglamp".format(_FOGLAMP_ROOT)): + pyz.add("{}/plugins".format(_FOGLAMP_ROOT), arcname="plugins", recursive=True, filter=reset) + else: + pyz.add("{}/C/plugins".format(_FOGLAMP_ROOT), arcname="C/plugins", recursive=True) + pyz.add("{}/plugins".format(_FOGLAMP_ROOT), arcname="plugins", recursive=True) + pyz.add("{}/cmake_build/C/plugins".format(_FOGLAMP_ROOT), arcname="cmake_build/C/plugins", recursive=True) + finally: + pyz.close() + except Exception as ex: + if os.path.isfile(tar_file_name): + os.remove(tar_file_name) + _LOGGER.error("Error in creating Snapshot .tar.gz file: %s ", str(ex)) + raise RuntimeError(str(ex)) + + self.check_and_delete_temp_files(self._interim_file_path) + self.check_and_delete_plugins_tar_files(self._out_file_path) + _LOGGER.info("Snapshot %s successfully created.", tar_file_name) + return snapshot_id, snapshot_filename + + def check_and_delete_plugins_tar_files(self, snapshot_plugin_dir): + try: + valid_extension = '.tar.gz' + for root, dirs, files in os.walk(snapshot_plugin_dir): + valid_files = list( + filter(lambda f: f.endswith(valid_extension), files)) + list_files = list(map( + lambda x: {"id": x.split("snapshot-plugin-")[1].split(".tar.gz")[0], + "name": x}, valid_files)) + sorted_list = sorted(list_files, key=lambda k: k['id'], reverse=True) + if len(sorted_list) > _NO_OF_FILES_TO_RETAIN: + for f in sorted_list[_NO_OF_FILES_TO_RETAIN:]: + _LOGGER.warning("Removing plugin snapshot file %s.", os.path.join(snapshot_plugin_dir, f['name'])) + os.remove(os.path.join(snapshot_plugin_dir, f['name'])) + except: + pass + def check_and_delete_temp_files(self, snapshot_plugin_dir): + # Delete all non *.tar.gz files + for f in os.listdir(snapshot_plugin_dir): + if not fnmatch.fnmatch(f, 
'{}*.tar.gz'.format(SNAPSHOT_PREFIX)): + os.remove(os.path.join(snapshot_plugin_dir, f)) + + def write_to_tar(self, pyz, temp_file, data): + with open(temp_file, 'w') as outfile: + json.dump(data, outfile, indent=4) + pyz.add(temp_file, arcname=basename(temp_file)) + + def extract_files(self, pyz): + # Extraction methods are different for production env and dev env + if path.exists("{}/bin".format(_FOGLAMP_ROOT)) and path.exists("{}/bin/foglamp".format(_FOGLAMP_ROOT)): + cmd = "{}/extras/C/extract_plugin_snapshot {}".format(_FOGLAMP_ROOT, pyz) + retcode = os.system(cmd) + if retcode != 0: + raise OSError('Error {}: {}'.format(retcode, cmd)) + return True + else: + try: + with tarfile.open(pyz, "r:gz") as tar: + # Since we are storing full path of the files, we need to specify "/" as the path to restore + tar.extractall(path=_FOGLAMP_ROOT, members=tar.getmembers()) + except Exception as ex: + raise RuntimeError("Extraction error for snapshot {}. {}".format(pyz, str(ex))) + else: + return True From b84c4addb53b8fdf7b75258f73753a3f76034d86 Mon Sep 17 00:00:00 2001 From: Vaibhav Singhal Date: Thu, 18 Apr 2019 13:56:05 +0530 Subject: [PATCH 041/161] Feedback fixes --- tests/system/python/api/test_notification.py | 64 +++++++++++--------- 1 file changed, 37 insertions(+), 27 deletions(-) diff --git a/tests/system/python/api/test_notification.py b/tests/system/python/api/test_notification.py index c15ddb5329..749f11aee5 100644 --- a/tests/system/python/api/test_notification.py +++ b/tests/system/python/api/test_notification.py @@ -24,7 +24,7 @@ SERVICE_NAME = "Notification Server #1" NOTIFY_PLUGIN = "slack" NOTIFY_INBUILT_RULES = ["OverMaxRule", "UnderMinRule"] -data = {"name": "Test - 1", +DATA = {"name": "Test - 1", "description": "Test4_Notification", "rule": NOTIFY_INBUILT_RULES[1], "channel": NOTIFY_PLUGIN, @@ -111,7 +111,7 @@ def test_install_delivery_plugin(self, notify_branch, remove_directories): @pytest.mark.parametrize("test_input, expected_error", [ 
({"description": "Test4_Notification", "rule": NOTIFY_INBUILT_RULES[1], "channel": NOTIFY_PLUGIN, "enabled": True, "notification_type": "one shot"}, '400: Missing name property in payload.'), - ({"name": "Test4","rule": NOTIFY_INBUILT_RULES[1], "channel": NOTIFY_PLUGIN, "enabled": True, + ({"name": "Test4", "rule": NOTIFY_INBUILT_RULES[1], "channel": NOTIFY_PLUGIN, "enabled": True, "notification_type": "one shot"}, '400: Missing description property in payload.'), ({"name": "Test4", "description": "Test4_Notification", "channel": NOTIFY_PLUGIN, "enabled": True, "notification_type": "one shot"}, '400: Missing rule property in payload.'), @@ -142,23 +142,24 @@ def test_invalid_create_notification_instance(self, foglamp_url, test_input, exp def test_create_valid_notification_instance(self, foglamp_url): conn = http.client.HTTPConnection(foglamp_url) - conn.request("POST", '/foglamp/notification', json.dumps(data)) + conn.request("POST", '/foglamp/notification', json.dumps(DATA)) r = conn.getresponse() assert 200 == r.status r = r.read().decode() jdoc = json.loads(r) - assert "Notification {} created successfully".format(data['name']) == jdoc['result'] + assert "Notification {} created successfully".format(DATA['name']) == jdoc['result'] conn.request("GET", '/foglamp/notification') r = conn.getresponse() assert 200 == r.status r = r.read().decode() jdoc = json.loads(r) - assert data['name'] == jdoc['notifications'][0]['name'] - assert data['channel'] == jdoc['notifications'][0]['channel'] - assert 'true' == jdoc['notifications'][0]['enable'] - assert data['notification_type'] == jdoc['notifications'][0]['notificationType'] - assert data['rule'] == jdoc['notifications'][0]['rule'] + actual_data = jdoc['notifications'][0] + assert DATA['name'] == actual_data['name'] + assert DATA['channel'] == actual_data['channel'] + assert 'true' == actual_data['enable'] + assert DATA['notification_type'] == actual_data['notificationType'] + assert DATA['rule'] == actual_data['rule'] 
conn.request("GET", '/foglamp/notification/plugin') r = conn.getresponse() @@ -170,38 +171,46 @@ def test_create_valid_notification_instance(self, foglamp_url): assert "notificationDelivery" == jdoc['delivery'][0]['type'] assert 2 == len(jdoc['rules']) - # TODO - FOGL-2738, FOGL-2673 @pytest.mark.parametrize("test_input, expected_error", [ - # ({"name": "=", "description": "Test4_Notification", "rule": NOTIFY_INBUILT_RULES[1], "channel": NOTIFY_PLUGIN, - # "enabled": True, "notification_type": "one shot"}, '400: Invalid name property in payload.'), - ({"name": "Test4", "description": "Test4_Notification", "rule": "+", "channel": NOTIFY_PLUGIN, "enabled": True, - "notification_type": "one shot"}, '400: Invalid rule property in payload.'), - ({"name": "Test4", "description": "Test4_Notification", "rule": NOTIFY_INBUILT_RULES[1], "channel": ":", - "enabled": True, "notification_type": "one shot"}, '400: Invalid channel property in payload.'), - ({"name": "Test4", "description": "Test4_Notification", "rule": NOTIFY_INBUILT_RULES[1], - "channel": NOTIFY_PLUGIN, "enabled": "bla", "notification_type": "one shot"}, - '400: Only "true", "false", true, false are allowed for value of enabled.'), - ({"name": "Test4", "description": "Test4_Notification", "rule": "InvalidRulePlugin", - "channel": "InvalidChannelPlugin", "enabled": True, "notification_type": "one shot"}, + pytest.param({"name": "Test8"}, '400: Name update is not allowed.', marks=pytest.mark.skip(reason="FOGL-2673")), + pytest.param({"name": "="}, '400: Invalid name property in payload.', marks=pytest.mark.skip(reason="FOGL-2673")), + ({"rule": "+"}, '400: Invalid rule property in payload.'), + ({"channel": ":"}, '400: Invalid channel property in payload.'), + ({"enabled": "bla"}, '400: Only "true", "false", true, false are allowed for value of enabled.'), + ({"rule": "InvalidRulePlugin"}, + '400: Invalid rule plugin:InvalidRulePlugin and/or delivery plugin:None supplied.'), + ({"channel": "InvalidChannelPlugin"}, 
+ '400: Invalid rule plugin:None and/or delivery plugin:InvalidChannelPlugin supplied.'), + ({"rule": "InvalidRulePlugin", "channel": "InvalidChannelPlugin"}, '400: Invalid rule plugin:InvalidRulePlugin and/or delivery plugin:InvalidChannelPlugin supplied.') ]) def test_invalid_update_notification_instance(self, foglamp_url, test_input, expected_error): conn = http.client.HTTPConnection(foglamp_url) - conn.request("PUT", '/foglamp/notification/{}'.format(urllib.parse.quote(data['name'])), json.dumps(test_input)) + conn.request("PUT", '/foglamp/notification/{}'.format(urllib.parse.quote(DATA['name'])), json.dumps(test_input)) r = conn.getresponse() assert 400 == r.status r = r.read().decode() assert expected_error == r + @pytest.mark.skip(reason="FOGL-2738") + def test_invalid_name_update_notification_instance(self, foglamp_url): + conn = http.client.HTTPConnection(foglamp_url) + changed_data = {"description": "changed_desc"} + conn.request("PUT", '/foglamp/notification/{}'.format('Invalid'), json.dumps(changed_data)) + r = conn.getresponse() + assert 400 == r.status + r = r.read().decode() + assert '400: Notification instance not found.' 
== r + def test_update_valid_notification_instance(self, foglamp_url): changed_data = {"description": "changed_desc"} conn = http.client.HTTPConnection(foglamp_url) - conn.request("PUT", '/foglamp/notification/{}'.format(urllib.parse.quote(data['name'])), json.dumps(changed_data)) + conn.request("PUT", '/foglamp/notification/{}'.format(urllib.parse.quote(DATA['name'])), json.dumps(changed_data)) r = conn.getresponse() assert 200 == r.status r = r.read().decode() jdoc = json.loads(r) - assert "Notification {} updated successfully".format(data["name"]) == jdoc['result'] + assert "Notification {} updated successfully".format(DATA["name"]) == jdoc['result'] def test_delete_service_without_notification_delete(self, foglamp_url): conn = http.client.HTTPConnection(foglamp_url) @@ -210,16 +219,16 @@ def test_delete_service_without_notification_delete(self, foglamp_url): assert 400 == r.status r = r.read().decode() assert "400: Notification service `{}` can not be deleted, as ['{}'] " \ - "notification instances exist.".format(SERVICE_NAME, data['name']) == r + "notification instances exist.".format(SERVICE_NAME, DATA['name']) == r def test_delete_notification_and_service(self, foglamp_url): conn = http.client.HTTPConnection(foglamp_url) - conn.request("DELETE", '/foglamp/notification/{}'.format(urllib.parse.quote(data['name']))) + conn.request("DELETE", '/foglamp/notification/{}'.format(urllib.parse.quote(DATA['name']))) r = conn.getresponse() assert 200 == r.status r = r.read().decode() jdoc = json.loads(r) - assert "Notification {} deleted successfully.".format(data['name']) == jdoc['result'] + assert "Notification {} deleted successfully.".format(DATA['name']) == jdoc['result'] conn.request("DELETE", '/foglamp/service/{}'.format(urllib.parse.quote(SERVICE_NAME))) r = conn.getresponse() @@ -227,3 +236,4 @@ def test_delete_notification_and_service(self, foglamp_url): r = r.read().decode() jdoc = json.loads(r) assert "Service {} deleted successfully.".format(SERVICE_NAME) 
== jdoc['result'] + assert False From c6e27650744236027665283eb3b22e201413e448 Mon Sep 17 00:00:00 2001 From: Amandeep Singh Arora Date: Thu, 18 Apr 2019 14:52:15 +0530 Subject: [PATCH 042/161] FOGL-2741: Created cmdutil to run some commands as root user using setuid --- C/plugins/utils/CMakeLists.txt | 4 +- C/plugins/utils/cmdutil.cpp | 176 ++++++++++++++++++++ C/plugins/utils/extract_plugin_snapshot.cpp | 87 ---------- Makefile | 10 +- 4 files changed, 183 insertions(+), 94 deletions(-) create mode 100644 C/plugins/utils/cmdutil.cpp delete mode 100644 C/plugins/utils/extract_plugin_snapshot.cpp diff --git a/C/plugins/utils/CMakeLists.txt b/C/plugins/utils/CMakeLists.txt index 1fc8e48ed0..40f8b4712a 100644 --- a/C/plugins/utils/CMakeLists.txt +++ b/C/plugins/utils/CMakeLists.txt @@ -12,8 +12,8 @@ include_directories(include ../../services/common/include) add_executable(${PROJECT_NAME} get_plugin_info.cpp) target_link_libraries(${PROJECT_NAME} -ldl) -add_executable(extract_plugin_snapshot extract_plugin_snapshot.cpp) +add_executable(cmdutil cmdutil.cpp) # Install library install(TARGETS ${PROJECT_NAME} DESTINATION foglamp/extras/C) -install(TARGETS extract_plugin_snapshot DESTINATION foglamp/extras/C) +install(TARGETS cmdutil DESTINATION foglamp/extras/C) diff --git a/C/plugins/utils/cmdutil.cpp b/C/plugins/utils/cmdutil.cpp new file mode 100644 index 0000000000..a96759d0d2 --- /dev/null +++ b/C/plugins/utils/cmdutil.cpp @@ -0,0 +1,176 @@ +/* + * Utility to run some commands for foglamp as root using setuid + * + * Copyright (c) 2019 Dianomic Systems + * + * Released under the Apache 2.0 Licence + * + * Author: Amandeep Singh Arora + */ + +#include +#include +#include +#include +#include +#include +#include + +extern int errno; + +/** + * Check whether file/dir exists within FOGLAMP_ROOT + * + * @param rootdir FOGLAMP_ROOT path + * @param file relative path of file or dir inside FOGLAMP_ROOT + */ +bool checkFile(char *rootdir, char *file) +{ + char path[256]; + 
snprintf(path, sizeof(path), "%s/%s", rootdir, file); + return (access(path, F_OK) == 0); +} + +#if 0 + sudo tar -C $FOGLAMP_ROOT -xf abc.tar.gz cmdutil tar-extract abc.tar.gz + sudo cp -r abc $FOGLAMP_ROOT/xyz cmdutil cp abc xyz + sudo rm -rf $FOGLAMP_ROOT/abc cmdutil rm abc + sudo apt install -y wiringpi cmdutil apt-install wiringpi + + sudo pip3 install aiocoap==0.3 --no-cache-dir cmdutil pip3-pkg aiocoap==0.3 + sudo pip3 install -Ir requirements.txt --no-cache-dir cmdutil pip3-req requirements.txt + +#endif + +const char *cmds[] = {"tar-extract", "cp", "rm", "apt-install", "pip3-pkg", "pip3-req"}; + +typedef enum { + TAR_EXTRACT, + CP, + RM, + APT_INSTALL, + PIP3_PKG, + PIP3_REQ +} cmdtype_t; + +char *argsArray[6][6] = { + {(char *) "/bin/tar", (char *) "-C", (char *) "PLACEHOLDER", (char *) "-xf", (char *) "PLACEHOLDER", NULL}, + {(char *) "/bin/cp", (char *) "-r", (char *) "PLACEHOLDER", (char *) "PLACEHOLDER", NULL, NULL}, + {(char *) "/bin/rm", (char *) "-rf", (char *) "PLACEHOLDER", NULL, NULL, NULL}, + {(char *) "apt", (char *) "install", (char *) "-y", (char *) "PLACEHOLDER", NULL, NULL}, + {(char *) "pip3", (char *) "install", (char *) "PLACEHOLDER", (char *) "--no-cache-dir", NULL, NULL}, + {(char *) "pip3", (char *) "install", (char *) "-Ir", (char *) "PLACEHOLDER", (char *) "--no-cache-dir", NULL} +}; + +int getCmdType(const char *cmd) +{ + for (int i=0; i + * + * Example command to execute Way to invoke cmdutil to do so + * -------------------------- ------------------------------- + * sudo tar -C $FOGLAMP_ROOT -xf abc.tar.gz cmdutil tar-extract abc.tar.gz + * sudo cp -r abc $FOGLAMP_ROOT/xyz cmdutil cp abc xyz + * sudo rm -rf $FOGLAMP_ROOT/abc cmdutil rm abc + * sudo apt install -y wiringpi cmdutil apt-install wiringpi + * + * sudo pip3 install aiocoap==0.3 --no-cache-dir cmdutil pip3-pkg aiocoap==0.3 + * sudo pip3 install -Ir requirements.txt --no-cache-dir cmdutil pip3-req requirements.txt + */ +int main(int argc, char *argv[]) +{ + if(argc < 2) 
+ { + printf("Incorrect usage\n"); + return 1; + } + + char *rootdir = getenv("FOGLAMP_ROOT"); + if (!rootdir || rootdir[0]==0) + { + printf("Unable to find path where archive is to be extracted\n"); + return 2; + } + struct stat sb; + stat(rootdir, &sb); + if ((sb.st_mode & S_IFMT) != S_IFDIR) + { + printf("Unable to find path where archive is to be extracted\n"); + return 2; + } + + if (!checkFile(rootdir, (char *) "bin/foglamp") || + !checkFile(rootdir, (char *) "services/foglamp.services.storage") || + !checkFile(rootdir, (char *) "python/foglamp/services/core/routes.py") || + !checkFile(rootdir, (char *) "lib/libcommon-lib.so") || + !checkFile(rootdir, (char *) "tasks/sending_process")) + { + printf("Unable to find foglamp insallation\n"); + return 2; + } + + int cmdtype = getCmdType(argv[1]); + //printf("cmdtype=%d\n", cmdtype); + if(cmdtype == -1) + { + printf("Unidentified command\n"); + return 3; + } + + char *args[6]; + for(int i=0; i<6; i++) + args[i] = argsArray[cmdtype][i]; + char buf[128]; + switch (cmdtype) + { + case TAR_EXTRACT: + args[2] = rootdir; + args[4] = argv[2]; + break; + case CP: + args[2] = argv[2]; + snprintf(buf, sizeof(buf), "%s/%s", rootdir, argv[3]); + buf[sizeof(buf)-1] = '\0'; // force null terminate + args[3] = buf; + break; + case RM: + snprintf(buf, sizeof(buf), "%s/%s", rootdir, argv[2]); + buf[sizeof(buf)-1] = '\0'; // force null terminate + args[2] = buf; + break; + case APT_INSTALL: + args[3] = argv[2]; + break; + case PIP3_PKG: + args[2] = argv[2]; + break; + case PIP3_REQ: + args[3] = argv[2]; + break; + default: + printf("Unidentified command\n"); + return 3; + } + + // printf("cmd=%s %s %s %s %s %s\n", args[0], args[1], args[2], args[3]?args[3]:"", args[4]?args[4]:"", args[5]?args[5]:""); + + errno = 0; + int rc = execvp(args[0], args); + if (rc != 0) + { + printf("execvp failed: rc=%d, errno %d=%s\n", rc, errno, strerror(errno)); + return rc; + } + + return 0; +} + diff --git 
a/C/plugins/utils/extract_plugin_snapshot.cpp b/C/plugins/utils/extract_plugin_snapshot.cpp deleted file mode 100644 index 1739d1027b..0000000000 --- a/C/plugins/utils/extract_plugin_snapshot.cpp +++ /dev/null @@ -1,87 +0,0 @@ -/* - * Utility to extract plugin snapshot tar archive - * - * Copyright (c) 2019 Dianomic Systems - * - * Released under the Apache 2.0 Licence - * - * Author: Amandeep Singh Arora - */ - -#include -#include -#include -#include -#include -#include -#include - -extern int errno; - -/** - * Check whether file/dir exists within FOGLAMP_ROOT - * - * @param rootdir FOGLAMP_ROOT path - * @param file relative path of file or dir inside FOGLAMP_ROOT - */ -bool checkFile(char *rootdir, char *file) -{ - char path[256]; - snprintf(path, sizeof(path), "%s/%s", rootdir, file); - return (access(path, F_OK) == 0); -} - -/** - * Extract files from within plugin snapshot tar archive - * - * Usage: extract_plugin_snapshot - * - * @param argv[1] relative/absolute path of plugin snapshot archive - */ -int main(int argc, char *argv[]) -{ - if(argc < 2) - { - printf("Usage: %s \n", argv[0]); - return 1; - } - - char *rootdir = getenv("FOGLAMP_ROOT"); - if (!rootdir || rootdir[0]==0) - { - printf("Unable to find path where archive is to be extracted\n"); - return 2; - } - struct stat sb; - stat(rootdir, &sb); - if ((sb.st_mode & S_IFMT) != S_IFDIR) - { - printf("Unable to find path where archive is to be extracted\n"); - return 2; - } - - if (!checkFile(rootdir, (char *) "bin/foglamp") || - !checkFile(rootdir, (char *) "services/foglamp.services.storage") || - !checkFile(rootdir, (char *) "python/foglamp/services/core/routes.py") || - !checkFile(rootdir, (char *) "lib/libcommon-lib.so") || - !checkFile(rootdir, (char *) "tasks/sending_process")) - { - printf("Unable to find foglamp insallation\n"); - return 2; - } - - char *args[]={(char *) "/bin/tar", (char *) "-C", (char *) "PLACEHOLDER", (char *) "-xf", (char *) "PLACEHOLDER", NULL}; - args[2] = rootdir; - 
args[4] = argv[1]; - - errno = 0; - int rc = execvp(args[0], args); - if (rc != 0) - { - printf("execvp failed: errno=%s\n", strerror(errno)); - return 3; - } - - return 0; -} - diff --git a/Makefile b/Makefile index 632761e9b4..8f8424e7ce 100644 --- a/Makefile +++ b/Makefile @@ -407,7 +407,7 @@ $(BIN_INSTALL_DIR) : ####################### EXTRAS INSTALL TARGETS ################################ ############################################################################### # install bin -extras_install : $(EXTRAS_INSTALL_DIR) install_python_fogbench install_extras_scripts setuid_extract_plugin_snapshot +extras_install : $(EXTRAS_INSTALL_DIR) install_python_fogbench install_extras_scripts setuid_cmdutil install_python_fogbench : $(FOGBENCH_PYTHON_INSTALL_DIR) $(FOGBENCH_PYTHON_SRC_DIR) $(CP_DIR) $(FOGBENCH_PYTHON_SRC_DIR) $(FOGBENCH_PYTHON_INSTALL_DIR) @@ -447,9 +447,9 @@ endif #$(DATA_INSTALL_DIR) : # $(MKDIR_PATH) $@ -# set setuid bit of extract_plugin_snapshot -setuid_extract_plugin_snapshot: - chmod u+s $(EXTRAS_INSTALL_DIR)/C/extract_plugin_snapshot +# set setuid bit of cmdutil +setuid_cmdutil: + chmod u+s $(EXTRAS_INSTALL_DIR)/C/cmdutil ############################################################################### @@ -469,4 +469,4 @@ clean : -$(RM_DIR) $(PYTHON_BUILD_DIR) -$(RM_DIR) $(DEV_SERVICES_DIR) -$(RM) $(SYMLINK_PLUGINS_DIR) - -$(RM) $(ASYNC_INGEST_PYMODULE) \ No newline at end of file + -$(RM) $(ASYNC_INGEST_PYMODULE) From 04cf313ca37e4c0ef221541a8b98407e3d92773b Mon Sep 17 00:00:00 2001 From: Vaibhav Singhal Date: Thu, 18 Apr 2019 15:38:30 +0530 Subject: [PATCH 043/161] Feedback fixes --- tests/system/python/api/test_notification.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/tests/system/python/api/test_notification.py b/tests/system/python/api/test_notification.py index 749f11aee5..d9834083d9 100644 --- a/tests/system/python/api/test_notification.py +++ b/tests/system/python/api/test_notification.py @@ -40,10 
+40,10 @@ def test_notification_without_install(self, reset_and_start_foglamp, foglamp_url conn = http.client.HTTPConnection(foglamp_url) conn.request("GET", '/foglamp/notification') r = conn.getresponse() - assert 200 == r.status + pytest.xfail("FOGL-2748") + assert 400 == r.status r = r.read().decode() - jdoc = json.loads(r) - assert {'notifications': []} == jdoc + assert "404: No Notification service available." == r conn.request("GET", '/foglamp/notification/plugin') r = conn.getresponse() @@ -172,8 +172,8 @@ def test_create_valid_notification_instance(self, foglamp_url): assert 2 == len(jdoc['rules']) @pytest.mark.parametrize("test_input, expected_error", [ - pytest.param({"name": "Test8"}, '400: Name update is not allowed.', marks=pytest.mark.skip(reason="FOGL-2673")), - pytest.param({"name": "="}, '400: Invalid name property in payload.', marks=pytest.mark.skip(reason="FOGL-2673")), + pytest.param({"name": "Test8"}, '400: Name update is not allowed.', marks=pytest.mark.xfail(reason="FOGL-2673")), + pytest.param({"name": "="}, '400: Invalid name property in payload.', marks=pytest.mark.xfail(reason="FOGL-2673")), ({"rule": "+"}, '400: Invalid rule property in payload.'), ({"channel": ":"}, '400: Invalid channel property in payload.'), ({"enabled": "bla"}, '400: Only "true", "false", true, false are allowed for value of enabled.'), @@ -192,7 +192,7 @@ def test_invalid_update_notification_instance(self, foglamp_url, test_input, exp r = r.read().decode() assert expected_error == r - @pytest.mark.skip(reason="FOGL-2738") + @pytest.mark.xfail(reason="FOGL-2738") def test_invalid_name_update_notification_instance(self, foglamp_url): conn = http.client.HTTPConnection(foglamp_url) changed_data = {"description": "changed_desc"} @@ -236,4 +236,4 @@ def test_delete_notification_and_service(self, foglamp_url): r = r.read().decode() jdoc = json.loads(r) assert "Service {} deleted successfully.".format(SERVICE_NAME) == jdoc['result'] - assert False + From 
895924fbd64734ac744e130c5cf0b8ac14f7ee4c Mon Sep 17 00:00:00 2001 From: Vaibhav Singhal Date: Thu, 18 Apr 2019 15:45:36 +0530 Subject: [PATCH 044/161] Feedback fixes --- tests/system/python/api/test_notification.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/system/python/api/test_notification.py b/tests/system/python/api/test_notification.py index d9834083d9..78c299ba49 100644 --- a/tests/system/python/api/test_notification.py +++ b/tests/system/python/api/test_notification.py @@ -41,7 +41,7 @@ def test_notification_without_install(self, reset_and_start_foglamp, foglamp_url conn.request("GET", '/foglamp/notification') r = conn.getresponse() pytest.xfail("FOGL-2748") - assert 400 == r.status + assert 404 == r.status r = r.read().decode() assert "404: No Notification service available." == r From dae112208b2407fcf89451ffc562ede9ce821549 Mon Sep 17 00:00:00 2001 From: Amandeep Singh Arora Date: Thu, 18 Apr 2019 15:50:23 +0530 Subject: [PATCH 045/161] Fix for cmdutil dependency during install phase --- Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Makefile b/Makefile index 8f8424e7ce..903a2a5d6f 100644 --- a/Makefile +++ b/Makefile @@ -448,7 +448,7 @@ endif # $(MKDIR_PATH) $@ # set setuid bit of cmdutil -setuid_cmdutil: +setuid_cmdutil : c_install chmod u+s $(EXTRAS_INSTALL_DIR)/C/cmdutil From ed3f4b8e780e8fa11d35335983504795ad135b0c Mon Sep 17 00:00:00 2001 From: Vaibhav Singhal Date: Thu, 18 Apr 2019 16:39:11 +0530 Subject: [PATCH 046/161] Feedback fixes --- tests/system/python/api/test_notification.py | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/tests/system/python/api/test_notification.py b/tests/system/python/api/test_notification.py index 78c299ba49..9f464bbd04 100644 --- a/tests/system/python/api/test_notification.py +++ b/tests/system/python/api/test_notification.py @@ -38,12 +38,6 @@ def test_notification_without_install(self, reset_and_start_foglamp, foglamp_url # 
Wait for foglamp server to start time.sleep(wait_time) conn = http.client.HTTPConnection(foglamp_url) - conn.request("GET", '/foglamp/notification') - r = conn.getresponse() - pytest.xfail("FOGL-2748") - assert 404 == r.status - r = r.read().decode() - assert "404: No Notification service available." == r conn.request("GET", '/foglamp/notification/plugin') r = conn.getresponse() @@ -64,6 +58,13 @@ def test_notification_without_install(self, reset_and_start_foglamp, foglamp_url r = r.read().decode() assert "404: No Notification service available." == r + pytest.xfail("FOGL-2748") + conn.request("GET", '/foglamp/notification') + r = conn.getresponse() + assert 404 == r.status + r = r.read().decode() + assert "404: No Notification service available." == r + def test_notification_service_add(self, service_branch, foglamp_url, wait_time, remove_directories): try: subprocess.run(["$FOGLAMP_ROOT/tests/system/python/scripts/install_c_service {} {}" From 4bf243c4c8da4a5fdc1ac6d05a28e79f08fb3f42 Mon Sep 17 00:00:00 2001 From: Amandeep Singh Arora Date: Thu, 18 Apr 2019 17:34:49 +0530 Subject: [PATCH 047/161] Added support for mkdir -p --- C/plugins/utils/cmdutil.cpp | 36 +++++++++++++++++------------------- 1 file changed, 17 insertions(+), 19 deletions(-) diff --git a/C/plugins/utils/cmdutil.cpp b/C/plugins/utils/cmdutil.cpp index a96759d0d2..4feacad718 100644 --- a/C/plugins/utils/cmdutil.cpp +++ b/C/plugins/utils/cmdutil.cpp @@ -31,18 +31,7 @@ bool checkFile(char *rootdir, char *file) return (access(path, F_OK) == 0); } -#if 0 - sudo tar -C $FOGLAMP_ROOT -xf abc.tar.gz cmdutil tar-extract abc.tar.gz - sudo cp -r abc $FOGLAMP_ROOT/xyz cmdutil cp abc xyz - sudo rm -rf $FOGLAMP_ROOT/abc cmdutil rm abc - sudo apt install -y wiringpi cmdutil apt-install wiringpi - - sudo pip3 install aiocoap==0.3 --no-cache-dir cmdutil pip3-pkg aiocoap==0.3 - sudo pip3 install -Ir requirements.txt --no-cache-dir cmdutil pip3-req requirements.txt - -#endif - -const char *cmds[] = 
{"tar-extract", "cp", "rm", "apt-install", "pip3-pkg", "pip3-req"}; +const char *cmds[] = {"tar-extract", "cp", "rm", "apt-install", "pip3-pkg", "pip3-req", "mkdir"}; typedef enum { TAR_EXTRACT, @@ -50,16 +39,18 @@ typedef enum { RM, APT_INSTALL, PIP3_PKG, - PIP3_REQ + PIP3_REQ, + MKDIR } cmdtype_t; -char *argsArray[6][6] = { +char *argsArray[7][6] = { {(char *) "/bin/tar", (char *) "-C", (char *) "PLACEHOLDER", (char *) "-xf", (char *) "PLACEHOLDER", NULL}, {(char *) "/bin/cp", (char *) "-r", (char *) "PLACEHOLDER", (char *) "PLACEHOLDER", NULL, NULL}, {(char *) "/bin/rm", (char *) "-rf", (char *) "PLACEHOLDER", NULL, NULL, NULL}, {(char *) "apt", (char *) "install", (char *) "-y", (char *) "PLACEHOLDER", NULL, NULL}, {(char *) "pip3", (char *) "install", (char *) "PLACEHOLDER", (char *) "--no-cache-dir", NULL, NULL}, - {(char *) "pip3", (char *) "install", (char *) "-Ir", (char *) "PLACEHOLDER", (char *) "--no-cache-dir", NULL} + {(char *) "pip3", (char *) "install", (char *) "-Ir", (char *) "PLACEHOLDER", (char *) "--no-cache-dir", NULL}, + {(char *) "mkdir", (char *) "-p", (char *) "PLACEHOLDER", NULL, NULL, NULL} }; int getCmdType(const char *cmd) @@ -76,15 +67,17 @@ int getCmdType(const char *cmd) * * Usage: cmdutil * - * Example command to execute Way to invoke cmdutil to do so - * -------------------------- ------------------------------- - * sudo tar -C $FOGLAMP_ROOT -xf abc.tar.gz cmdutil tar-extract abc.tar.gz + * Example command to execute Way to invoke cmdutil to do so + * -------------------------- ------------------------------- + * sudo tar -C $FOGLAMP_ROOT -xf abc.tar.gz cmdutil tar-extract abc.tar.gz * sudo cp -r abc $FOGLAMP_ROOT/xyz cmdutil cp abc xyz * sudo rm -rf $FOGLAMP_ROOT/abc cmdutil rm abc * sudo apt install -y wiringpi cmdutil apt-install wiringpi * * sudo pip3 install aiocoap==0.3 --no-cache-dir cmdutil pip3-pkg aiocoap==0.3 * sudo pip3 install -Ir requirements.txt --no-cache-dir cmdutil pip3-req requirements.txt + * + * sudo mkdir -p 
$FOGLAMP_ROOT/abc cmdutil mkdir abc */ int main(int argc, char *argv[]) { @@ -156,12 +149,17 @@ int main(int argc, char *argv[]) case PIP3_REQ: args[3] = argv[2]; break; + case MKDIR: + snprintf(buf, sizeof(buf), "%s/%s", rootdir, argv[2]); + buf[sizeof(buf)-1] = '\0'; // force null terminate + args[2] = buf; + break; default: printf("Unidentified command\n"); return 3; } - // printf("cmd=%s %s %s %s %s %s\n", args[0], args[1], args[2], args[3]?args[3]:"", args[4]?args[4]:"", args[5]?args[5]:""); + printf("cmd=%s %s %s %s %s %s\n", args[0], args[1], args[2], args[3]?args[3]:"", args[4]?args[4]:"", args[5]?args[5]:""); errno = 0; int rc = execvp(args[0], args); From 1dbb4767edfd4516aadbabb4e2b36e3290d1668e Mon Sep 17 00:00:00 2001 From: stefano Date: Thu, 18 Apr 2019 15:15:33 +0200 Subject: [PATCH 048/161] FOGL-2747: fixed restore, it was using an old command line parameters handling --- .../storage/sqlite/backup_restore/restore_sqlite.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/python/foglamp/plugins/storage/sqlite/backup_restore/restore_sqlite.py b/python/foglamp/plugins/storage/sqlite/backup_restore/restore_sqlite.py index d9ad70bcea..b5affd6288 100644 --- a/python/foglamp/plugins/storage/sqlite/backup_restore/restore_sqlite.py +++ b/python/foglamp/plugins/storage/sqlite/backup_restore/restore_sqlite.py @@ -50,13 +50,13 @@ import signal import sqlite3 +from foglamp.common.parser import Parser from foglamp.common.process import FoglampProcess from foglamp.common import logger import foglamp.plugins.storage.common.lib as lib import foglamp.plugins.storage.common.exceptions as exceptions - __author__ = "Stefano Simonelli" __copyright__ = "Copyright (c) 2018 OSIsoft, LLC" __license__ = "Apache 2.0" @@ -175,8 +175,9 @@ def __init__(self): # Handled Restore command line parameters try: - self._backup_id = super().get_arg_value("--backup-id") - self._file_name = super().get_arg_value("--file") + self._backup_id = Parser.get('--backup-id') 
+ self._file_name = Parser.get('--file') + except Exception as _ex: _message = _MESSAGES_LIST["e000003"].format(_ex) From d08d48071d9e2452d274461a4415279e78537fe2 Mon Sep 17 00:00:00 2001 From: ashish-jabble Date: Thu, 18 Apr 2019 18:52:48 +0530 Subject: [PATCH 049/161] fixes as per C-util provided in FOGL-2741 by Aman --- .../services/core/api/plugins/install.py | 46 +++++++++++-------- .../services/core/api/plugins/test_install.py | 26 +++++++++-- 2 files changed, 51 insertions(+), 21 deletions(-) diff --git a/python/foglamp/services/core/api/plugins/install.py b/python/foglamp/services/core/api/plugins/install.py index 920f1df82f..c806621935 100644 --- a/python/foglamp/services/core/api/plugins/install.py +++ b/python/foglamp/services/core/api/plugins/install.py @@ -9,7 +9,6 @@ import logging import asyncio import tarfile -import shutil import hashlib from aiohttp import web @@ -19,13 +18,11 @@ from foglamp.common import logger from foglamp.common.common import _FOGLAMP_ROOT, _FOGLAMP_DATA - __author__ = "Ashish Jabble" __copyright__ = "Copyright (c) 2019 Dianomic Systems" __license__ = "Apache 2.0" __version__ = "${VERSION}" - _help = """ ------------------------------------------------------------------------------- | POST | /foglamp/plugins | @@ -37,7 +34,6 @@ _LOGGER = logger.setup(__name__, level=logging.INFO) -# TODO: Add unit tests async def add_plugin(request: web.Request) -> web.Response: """ add plugin @@ -54,6 +50,7 @@ async def add_plugin(request: web.Request) -> web.Response: url = data.get('url', None) file_format = data.get('format', None) compressed = data.get('compressed', None) + # FIXME: plugin_type only needed when format is tar plugin_type = data.get('type', None) checksum = data.get('checksum', None) if not url or not file_format or not plugin_type or not checksum: @@ -86,7 +83,10 @@ async def add_plugin(request: web.Request) -> web.Response: _LOGGER.info("Files {} {}".format(files, type(files))) copy_file_install_requirement(files, 
plugin_type) else: - install_debian(file_name) + code = install_debian(file_name) + if code != 0: + # FIXME: proper message + raise ValueError('Something went wrong!') except FileNotFoundError as ex: raise web.HTTPNotFound(reason=str(ex)) except (TypeError, ValueError) as ex: @@ -128,10 +128,12 @@ def extract_file(file_name: str, is_compressed: bool) -> list: def install_debian(file_name: str): - # FIXME: Not working seems like we need to manipulate in /etc/sudoers.d/foglamp file - # subprocess.run(["sudo cp {} /var/cache/apt/archives/.".format(file_name)], shell=True, check=True) - # subprocess.run(["sudo apt install /var/cache/apt/archives/{}".format(file_name)], shell=True, check=True) - pass + apt_install = subprocess.run(["{}/extras/C/cmdutil".format(_FOGLAMP_ROOT), "apt-install", + "{}/data/plugins/{}".format(_FOGLAMP_ROOT, file_name)], + stdout=subprocess.PIPE, stderr=subprocess.PIPE) + msg = apt_install.stdout.decode("utf-8") if apt_install.returncode != 0 else apt_install.stderr.decode("utf-8") + _LOGGER.exception("O/P....{}...{}".format(msg, apt_install.returncode)) + return msg def copy_file_install_requirement(dir_files: list, plugin_type: str): @@ -154,15 +156,23 @@ def copy_file_install_requirement(dir_files: list, plugin_type: str): plugin_name = _dir[0] _LOGGER.info("Plugin name {} and Dir {} ".format(plugin_name, _dir)) plugin_path = "python/foglamp/plugins" if py_file else "plugins" - dest_path = "{}/{}/{}/".format(_FOGLAMP_ROOT, plugin_path, plugin_type) - _LOGGER.info("Destination Path {}".format(dest_path)) + full_path = "{}/{}/{}/".format(_FOGLAMP_ROOT, plugin_path, plugin_type) + dest_path = "{}/{}/".format(plugin_path, plugin_type) + + # Check if plugin dir exists then remove (for cleanup ONLY) otherwise create dir + if os.path.exists(full_path + plugin_name) and os.path.isdir(full_path + plugin_name): + cmd = "{}/extras/C/cmdutil rm {}".format(_FOGLAMP_ROOT, dest_path + plugin_name) + subprocess.run([cmd], stdout=subprocess.PIPE, 
stderr=subprocess.PIPE, shell=True) + else: + cmd = "{}/extras/C/cmdutil mkdir {}".format(_FOGLAMP_ROOT, dest_path + plugin_name) + subprocess.run([cmd], stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True) - # FIXME: shutil with sudo permissions (bypass) - if os.path.exists(dest_path + plugin_name) and os.path.isdir(dest_path + plugin_name): - shutil.rmtree(dest_path + plugin_name) - shutil.copytree(_PATH + plugin_name, dest_path + plugin_name) - _LOGGER.info("File copied to {}".format(dest_path)) + # copy plugin files to the relative plugins directory. + cmd = "{}/extras/C/cmdutil cp {} {}".format(_FOGLAMP_ROOT, _PATH + plugin_name, dest_path) + subprocess.run([cmd], stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True) + _LOGGER.info("{} File copied to {}".format(cmd, full_path)) if "requirements.sh" in _dir: - _LOGGER.info("Installing external deps required for plugins.... {}".format(dest_path + plugin_name + "/" + "requirements.sh")) - subprocess.run(["sh {}".format(dest_path + plugin_name + "/" + "requirements.sh")], shell=True) + _LOGGER.info("Installing external deps required for plugins.... 
{}".format( + full_path + plugin_name + "/" + "requirements.sh")) + subprocess.run(["sh {}".format(full_path + plugin_name + "/" + "requirements.sh")], shell=True) diff --git a/tests/unit/python/foglamp/services/core/api/plugins/test_install.py b/tests/unit/python/foglamp/services/core/api/plugins/test_install.py index 79c38252c4..a900d0d315 100644 --- a/tests/unit/python/foglamp/services/core/api/plugins/test_install.py +++ b/tests/unit/python/foglamp/services/core/api/plugins/test_install.py @@ -139,8 +139,28 @@ async def async_mock(): param = {"url": url_value, "format": "deb", "type": "south", "checksum": checksum_value} with patch.object(plugins_install, 'download', return_value=async_mock()) as download_patch: with patch.object(plugins_install, 'validate_checksum', return_value=True) as checksum_patch: - resp = await client.post('/foglamp/plugins', data=json.dumps(param)) - assert 200 == resp.status - # FIXME: Right now pass as no handling for debian case yet + with patch.object(plugins_install, 'install_debian', return_value=0) as debian_patch: + resp = await client.post('/foglamp/plugins', data=json.dumps(param)) + assert 200 == resp.status + debian_patch.assert_called_once_with(plugin_name) checksum_patch.assert_called_once_with(checksum_value, plugin_name) download_patch.assert_called_once_with([url_value]) + + async def test_post_plugins_install_with_bad_debian(self, client): + async def async_mock(): + return [plugin_name, '{}/__init__.py'.format(plugin_name), '{}/README.rst'.format(plugin_name), + '{}/{}.py'.format(plugin_name, plugin_name), '{}/requirements.sh'.format(plugin_name)] + + plugin_name = 'coap' + checksum_value = "4015c2dea1cc71dbf70a23f6a203eeb6" + url_value = "http://10.2.5.26:5000/download/foglamp-south-coap-1.5.2.deb" + param = {"url": url_value, "format": "deb", "type": "south", "checksum": checksum_value} + with patch.object(plugins_install, 'download', return_value=async_mock()) as download_patch: + with 
patch.object(plugins_install, 'validate_checksum', return_value=True) as checksum_patch: + with patch.object(plugins_install, 'install_debian', return_value=100) as debian_patch: + resp = await client.post('/foglamp/plugins', data=json.dumps(param)) + assert 400 == resp.status + assert 'Something went wrong!' == resp.reason + debian_patch.assert_called_once_with(plugin_name) + checksum_patch.assert_called_once_with(checksum_value, plugin_name) + download_patch.assert_called_once_with([url_value]) \ No newline at end of file From fbd97c0e0681a1bbd627833578b1a0eef3272edb Mon Sep 17 00:00:00 2001 From: ashish-jabble Date: Thu, 18 Apr 2019 19:20:23 +0530 Subject: [PATCH 050/161] copy/paster code unit fixes --- .../foglamp/common/test_configuration_manager.py | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/tests/unit/python/foglamp/common/test_configuration_manager.py b/tests/unit/python/foglamp/common/test_configuration_manager.py index a123b91467..57bc51df88 100644 --- a/tests/unit/python/foglamp/common/test_configuration_manager.py +++ b/tests/unit/python/foglamp/common/test_configuration_manager.py @@ -2645,16 +2645,23 @@ async def async_mock(return_value): new_value_entry = '25' optional_key_name = 'maximum' storage_value_entry = {'readonly': 'true', 'type': 'string', 'order': '4', 'description': 'Test Optional', 'minimum': '2', 'value': '13', 'maximum': '20', 'default': '13'} + new_storage_value_entry = {'readonly': 'true', 'type': 'string', 'order': '4', 'description': 'Test Optional', 'minimum': '2', 'value': '13', 'maximum': new_value_entry, 'default': '13'} payload = {"return": ["key", "description", {"column": "ts", "format": "YYYY-MM-DD HH24:MI:SS.MS"}, "value"], "json_properties": [{"column": "value", "path": [item_name, optional_key_name], "value": new_value_entry}], "where": {"column": "key", "condition": "=", "value": category_name}} update_result = {"response": "updated", "rows_affected": 1} - 
c_mgr._cacheManager.update(category_name, {item_name: storage_value_entry}) - with patch.object(ConfigurationManager, '_read_item_val', return_value=async_mock(storage_value_entry)) as readpatch: + with patch.object(ConfigurationManager, '_read_item_val', side_effect=[async_mock(storage_value_entry), async_mock(new_storage_value_entry)]) as readpatch: with patch.object(c_mgr._storage, 'update_tbl', return_value=async_mock(update_result)) as patch_update: await c_mgr.set_optional_value_entry(category_name, item_name, optional_key_name, new_value_entry) args, kwargs = patch_update.call_args assert 'configuration' == args[0] assert payload == json.loads(args[1]) - readpatch.assert_called_once_with(category_name, item_name) + assert 2 == readpatch.call_count + calls = readpatch.call_args_list + args, kwargs = calls[0] + assert category_name == args[0] + assert item_name == args[1] + args, kwargs = calls[1] + assert category_name == args[0] + assert item_name == args[1] @pytest.mark.parametrize("optional_key_name, new_value_entry, exc_msg", [ ('maximum', '1', 'Maximum value should be greater than equal to Minimum value'), From c9ead4cb900cb9376c63205f9873ea0ddce00aea Mon Sep 17 00:00:00 2001 From: ashish-jabble Date: Thu, 18 Apr 2019 19:45:31 +0530 Subject: [PATCH 051/161] condition check with numeric operands (int or float) rather than with string operand for max, min optional attributes --- .../foglamp/common/configuration_manager.py | 13 ++++++-- .../common/test_configuration_manager.py | 32 ++++++++++++------- 2 files changed, 31 insertions(+), 14 deletions(-) diff --git a/python/foglamp/common/configuration_manager.py b/python/foglamp/common/configuration_manager.py index 3f88ec4584..c5345b1720 100644 --- a/python/foglamp/common/configuration_manager.py +++ b/python/foglamp/common/configuration_manager.py @@ -799,14 +799,21 @@ async def set_optional_value_entry(self, category_name, item_name, optional_entr # Validation is fairly minimal, minimum, maximum like # 
maximum should be greater than minimum or vice-versa # And no link between minimum, maximum and length is needed. + # condition check with numeric operands (int or float) rather than with string operands + def convert(value, _type): + return int(value) if _type == "integer" else float(value) if _type == "float" else value + if optional_entry_name == 'minimum': - if new_value_entry >= storage_value_entry['maximum']: + new = convert(new_value_entry, storage_value_entry['type']) + old = convert(storage_value_entry['maximum'], storage_value_entry['type']) + if new > old: raise ValueError('Minimum value should be less than equal to Maximum value') if optional_entry_name == 'maximum': - if new_value_entry <= storage_value_entry['minimum']: + new = convert(new_value_entry, storage_value_entry['type']) + old = convert(storage_value_entry['minimum'], storage_value_entry['type']) + if new < old: raise ValueError('Maximum value should be greater than equal to Minimum value') - payload = PayloadBuilder().SELECT("key", "description", "ts", "value") \ .JSON_PROPERTY(("value", [item_name, optional_entry_name], new_value_entry)) \ .FORMAT("return", ("ts", "YYYY-MM-DD HH24:MI:SS.MS")) \ diff --git a/tests/unit/python/foglamp/common/test_configuration_manager.py b/tests/unit/python/foglamp/common/test_configuration_manager.py index 57bc51df88..11a578f3be 100644 --- a/tests/unit/python/foglamp/common/test_configuration_manager.py +++ b/tests/unit/python/foglamp/common/test_configuration_manager.py @@ -2663,25 +2663,35 @@ async def async_mock(return_value): assert category_name == args[0] assert item_name == args[1] - @pytest.mark.parametrize("optional_key_name, new_value_entry, exc_msg", [ - ('maximum', '1', 'Maximum value should be greater than equal to Minimum value'), - ('minimum', '30', 'Minimum value should be less than equal to Maximum value'), - ('readonly', '1', "For catname category, entry value must be boolean for optional item name readonly; got "), - ('deprecated', '1', 
"For catname category, entry value must be boolean for optional item name deprecated; got "), - ('rule', 2, "For catname category, entry value must be string for optional item rule; got "), - ('displayName', 123, "For catname category, entry value must be string for optional item displayName; got "), - ('length', '1a', "For catname category, entry value must be an integer for optional item length; got "), - ('maximum', 'blah', "For catname category, entry value must be an integer or float for optional item maximum; got ") + @pytest.mark.parametrize("_type, optional_key_name, new_value_entry, exc_msg", [ + (int, 'maximum', '1', 'Maximum value should be greater than equal to Minimum value'), + (int, 'maximum', '00100', 'Maximum value should be greater than equal to Minimum value'), + (float, 'maximum', '11.2', 'Maximum value should be greater than equal to Minimum value'), + (int, 'minimum', '30', 'Minimum value should be less than equal to Maximum value'), + (float, 'minimum', '50.0', 'Minimum value should be less than equal to Maximum value'), + (None, 'readonly', '1', "For catname category, entry value must be boolean for optional item name readonly; got "), + (None, 'deprecated', '1', "For catname category, entry value must be boolean for optional item name deprecated; got "), + (None, 'rule', 2, "For catname category, entry value must be string for optional item rule; got "), + (None, 'displayName', 123, "For catname category, entry value must be string for optional item displayName; got "), + (None, 'length', '1a', "For catname category, entry value must be an integer for optional item length; got "), + (None, 'maximum', 'blah', "For catname category, entry value must be an integer or float for optional item maximum; got ") ]) - async def test_set_optional_value_entry_bad_update(self, reset_singleton, optional_key_name, new_value_entry, exc_msg): + async def test_set_optional_value_entry_bad_update(self, reset_singleton, _type, optional_key_name, 
new_value_entry, exc_msg): async def async_mock(return_value): return return_value + min = '2' + max = '20' + if _type is not None: + if isinstance(1.1, _type): + min = '12.5' + max = '40.3' + storage_client_mock = MagicMock(spec=StorageClientAsync) c_mgr = ConfigurationManager(storage_client_mock) category_name = 'catname' item_name = 'itemname' - storage_value_entry = {'length': '255', 'displayName': category_name, 'rule': 'value * 3 == 6', 'deprecated': 'false', 'readonly': 'true', 'type': 'string', 'order': '4', 'description': 'Test Optional', 'minimum': '2', 'value': '13', 'maximum': '20', 'default': '13'} + storage_value_entry = {'length': '255', 'displayName': category_name, 'rule': 'value * 3 == 6', 'deprecated': 'false', 'readonly': 'true', 'type': 'string', 'order': '4', 'description': 'Test Optional', 'minimum': min, 'value': '13', 'maximum': max, 'default': '13'} with patch.object(ConfigurationManager, '_read_item_val', return_value=async_mock(storage_value_entry)) as readpatch: with pytest.raises(Exception) as excinfo: await c_mgr.set_optional_value_entry(category_name, item_name, optional_key_name, new_value_entry) From 07e8e300e8a0c787a06749b95b64ccbb6bf60d4e Mon Sep 17 00:00:00 2001 From: stefano Date: Fri, 19 Apr 2019 11:26:25 +0200 Subject: [PATCH 052/161] FOGL-2747: fix the backup/restore in relation to the WAL mechanism --- .../sqlite/backup_restore/backup_sqlite.py | 15 +++++++++++++++ .../sqlite/backup_restore/restore_sqlite.py | 7 +++++++ 2 files changed, 22 insertions(+) diff --git a/python/foglamp/plugins/storage/sqlite/backup_restore/backup_sqlite.py b/python/foglamp/plugins/storage/sqlite/backup_restore/backup_sqlite.py index 35410f4bce..c9f0a01914 100644 --- a/python/foglamp/plugins/storage/sqlite/backup_restore/backup_sqlite.py +++ b/python/foglamp/plugins/storage/sqlite/backup_restore/backup_sqlite.py @@ -260,6 +260,21 @@ def _run_backup_command(self, _backup_file): self._logger.debug("{func} - file_name 
|{file}|".format(func="_run_backup_command", file=_backup_file)) + # Force the checkpoint - WAL mechanism + cmd = "{sqlite_cmd} {path}/{db} 'PRAGMA wal_checkpoint(PASSIVE);'".format( + sqlite_cmd=self._backup_lib.SQLITE_SQLITE, + path=self._backup_lib.dir_foglamp_data, + db=self._backup_lib.config['database-filename'] + ) + + # noinspection PyArgumentEqualDefault + _exit_code, output = lib.exec_wait_retry(cmd, + output_capture=True, + exit_code_ok=0, + max_retry=self._backup_lib.config['max_retry'], + timeout=self._backup_lib.config['timeout'] + ) + # Prepares the backup command cmd = "{sqlite_cmd} {path}/{db} '{backup_cmd} {file}'".format( sqlite_cmd=self._backup_lib.SQLITE_SQLITE, diff --git a/python/foglamp/plugins/storage/sqlite/backup_restore/restore_sqlite.py b/python/foglamp/plugins/storage/sqlite/backup_restore/restore_sqlite.py index b5affd6288..3a4540d833 100644 --- a/python/foglamp/plugins/storage/sqlite/backup_restore/restore_sqlite.py +++ b/python/foglamp/plugins/storage/sqlite/backup_restore/restore_sqlite.py @@ -563,6 +563,13 @@ def _run_restore_command(self, backup_file): if status != 0: raise exceptions.RestoreFailed + # Delete files related to the WAL mechanism + cmd = "rm {path}/foglamp.db-shm ".format(path=self._restore_lib.dir_foglamp_data) + status, output = lib.exec_wait_retry(cmd, True, timeout=self._restore_lib.config['timeout']) + + cmd = "rm {path}/foglamp.db-wal ".format(path=self._restore_lib.dir_foglamp_data) + status, output = lib.exec_wait_retry(cmd, True, timeout=self._restore_lib.config['timeout']) + def _foglamp_start(self): """ Starts FogLAMP after the execution of the restore From 148d5992b775bca3b1b5f840d0b1bd62007af148 Mon Sep 17 00:00:00 2001 From: ashish-jabble Date: Mon, 22 Apr 2019 11:53:06 +0530 Subject: [PATCH 053/161] plugin_type post param check restricted ONLY when format is tar & other minor comment fixes --- .../services/core/api/plugins/install.py | 17 +++++++----- .../services/core/api/plugins/test_install.py | 26 
+++++++++---------- 2 files changed, 24 insertions(+), 19 deletions(-) diff --git a/python/foglamp/services/core/api/plugins/install.py b/python/foglamp/services/core/api/plugins/install.py index c806621935..259a4d36a4 100644 --- a/python/foglamp/services/core/api/plugins/install.py +++ b/python/foglamp/services/core/api/plugins/install.py @@ -50,16 +50,17 @@ async def add_plugin(request: web.Request) -> web.Response: url = data.get('url', None) file_format = data.get('format', None) compressed = data.get('compressed', None) - # FIXME: plugin_type only needed when format is tar plugin_type = data.get('type', None) checksum = data.get('checksum', None) - if not url or not file_format or not plugin_type or not checksum: - raise TypeError('URL, checksum, plugin type and format post params are mandatory.') - if plugin_type not in ['south', 'north', 'filter', 'notificationDelivery', 'notificationRule']: - raise ValueError("Invalid plugin type. Must be 'north' or 'south' or 'filter' " - "or 'notificationDelivery' or 'notificationRule'") + if not url or not file_format or not checksum: + raise TypeError('URL, checksum and format post params are mandatory.') if file_format not in ["tar", "deb"]: raise ValueError("Invalid format. Must be 'tar' or 'deb'") + if file_format == "tar" and not plugin_type: + raise ValueError("Plugin type param is required.") + if file_format == "tar" and plugin_type not in ['south', 'north', 'filter', 'notificationDelivery', 'notificationRule']: + raise ValueError("Invalid plugin type. 
Must be 'north' or 'south' or 'filter' " + "or 'notificationDelivery' or 'notificationRule'") if compressed: if compressed not in ['true', 'false', True, False]: raise ValueError('Only "true", "false", true, false are allowed for value of compressed.') @@ -172,6 +173,10 @@ def copy_file_install_requirement(dir_files: list, plugin_type: str): subprocess.run([cmd], stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True) _LOGGER.info("{} File copied to {}".format(cmd, full_path)) + # TODO: OPTIONAL (If any external dependency required to install plugin we will use this sh file), + # but this is most risky thing to run with as sudo + # Use case: plugins like opcua, usb4704 (external dep) + # dht11- For pip packages we have requirements.txt file, as this plugin needs wiringpi apt package to install; so where to put this command? if "requirements.sh" in _dir: _LOGGER.info("Installing external deps required for plugins.... {}".format( full_path + plugin_name + "/" + "requirements.sh")) diff --git a/tests/unit/python/foglamp/services/core/api/plugins/test_install.py b/tests/unit/python/foglamp/services/core/api/plugins/test_install.py index a900d0d315..1abdbb1a00 100644 --- a/tests/unit/python/foglamp/services/core/api/plugins/test_install.py +++ b/tests/unit/python/foglamp/services/core/api/plugins/test_install.py @@ -31,24 +31,24 @@ def client(self, loop, test_client): return loop.run_until_complete(test_client(app)) @pytest.mark.parametrize("param, message", [ - ({"create": "blah"}, "URL, checksum, plugin type and format post params are mandatory."), - ({"url": "http://blah.co.in"}, "URL, checksum, plugin type and format post params are mandatory."), - ({"format": "tar"}, "URL, checksum, plugin type and format post params are mandatory."), - ({"compressed": "false"}, "URL, checksum, plugin type and format post params are mandatory."), - ({"type": "north"}, "URL, checksum, plugin type and format post params are mandatory."), + ({"create": "blah"}, "URL, checksum and 
format post params are mandatory."), + ({"url": "http://blah.co.in"}, "URL, checksum and format post params are mandatory."), + ({"format": "tar"}, "URL, checksum and format post params are mandatory."), + ({"compressed": "false"}, "URL, checksum and format post params are mandatory."), + ({"type": "north"}, "URL, checksum and format post params are mandatory."), ({"checksum": "4015c2dea1cc71dbf70a23f6a203eeb6"}, - "URL, checksum, plugin type and format post params are mandatory."), + "URL, checksum and format post params are mandatory."), ({"url": "http://blah.co.in", "checksum": "4015c2dea1cc71dbf70a23f6a203eeb6"}, - "URL, checksum, plugin type and format post params are mandatory."), + "URL, checksum and format post params are mandatory."), ({"format": "tar", "checksum": "4015c2dea1cc71dbf70a23f6a203eeb6"}, - "URL, checksum, plugin type and format post params are mandatory."), + "URL, checksum and format post params are mandatory."), ({"format": "tar", "compressed": "false", "checksum": "4015c2dea1cc71dbf70a23f6a203eeb6"}, - "URL, checksum, plugin type and format post params are mandatory."), + "URL, checksum and format post params are mandatory."), ({"type": "north", "checksum": "4015c2dea1cc71dbf70a23f6a203eeb6"}, - "URL, checksum, plugin type and format post params are mandatory."), - ({"url": "http://blah.co.in", "format": "deb", "checksum": "4015c2dea1cc71dbf70a23f6a203eeb6"}, - "URL, checksum, plugin type and format post params are mandatory."), - ({"url": "http://blah.co.in", "format": "deb", "type": "blah", "checksum": "4015c2dea1cc71dbf70a23f6a203eeb6"}, + "URL, checksum and format post params are mandatory."), + ({"url": "http://blah.co.in", "format": "tar", "checksum": "4015c2dea1cc71dbf70a23f6a203eeb6"}, + "Plugin type param is required."), + ({"url": "http://blah.co.in", "format": "tar", "type": "blah", "checksum": "4015c2dea1cc71dbf70a23f6a203eeb6"}, "Invalid plugin type. 
Must be 'north' or 'south' or 'filter' or 'notificationDelivery' or 'notificationRule'"), ({"url": "http://blah.co.in", "format": "blah", "type": "filter", "checksum": "4015c2dea1cc71dbf70a23f6a203ee"}, "Invalid format. Must be 'tar' or 'deb'"), From 8794a6315dd018eedadae23f7b9e99c1377ae033 Mon Sep 17 00:00:00 2001 From: Vaibhav Singhal Date: Mon, 22 Apr 2019 15:33:04 +0530 Subject: [PATCH 054/161] all verification steps complete --- tests/system/python/e2e/test_e2e_rpi_ephat.py | 202 ++++++++++++++---- 1 file changed, 158 insertions(+), 44 deletions(-) diff --git a/tests/system/python/e2e/test_e2e_rpi_ephat.py b/tests/system/python/e2e/test_e2e_rpi_ephat.py index 1c40093f81..312c8e411e 100644 --- a/tests/system/python/e2e/test_e2e_rpi_ephat.py +++ b/tests/system/python/e2e/test_e2e_rpi_ephat.py @@ -9,7 +9,7 @@ Egress: PI Server (C) plugin """ -import platform +import os import http.client import json import time @@ -28,19 +28,25 @@ SOUTH_PLUGIN = "envirophat" SVC_NAME = "Room-1" ASSET_PREFIX = "envirophat/" # default for envirophat South plugin -ASSET_NAME = "weather" -SENSOR_READ_KEY = "temperature" +ASSET_NAME_W = "weather" +SENSOR_READ_KEY_W = {"temperature", "altitude", "pressure"} + +ASSET_NAME_M = "magnetometer" +SENSOR_READ_KEY_M = {"x", "y", "z"} + +ASSET_NAME_A = "accelerometer" +SENSOR_READ_KEY_A = {"x", "y", "z"} + +ASSET_NAME_C = "rgb" +SENSOR_READ_KEY_C = {"r", "g", "b"} TASK_NAME = "North v2 PI" -@pytest.mark.skipif(platform.platform().find("arm") == -1, reason="RPi only (ePhat) test") -# RPi Linux-4.14.98-v7+-armv7l-with-debian-9.8 +@pytest.mark.skipif('raspberrypi' != os.uname()[1], reason="RPi only (ePhat) test") +# sysname='Linux', nodename='raspberrypi', release='4.14.98+', version='#1200 ', machine='armv6l' class TestE2eRPiEphatEgress: - # def test_Rpi(self): - # assert -1 != platform.platform().find("arm"), "ePhat tests are expected to be run on RPi only!" 
- def get_ping_status(self, foglamp_url): conn = http.client.HTTPConnection(foglamp_url) conn.request("GET", '/foglamp/ping') @@ -60,7 +66,7 @@ def get_statistics_map(self, foglamp_url): return utils.serialize_stats_map(jdoc) @pytest.fixture - def start_south_north(self, reset_and_start_foglamp, add_south, south_branch, + def start_south_north(self, reset_and_start_foglamp, add_south, south_branch, disable_schedule, remove_data_file, remove_directories, enable_schedule, foglamp_url, start_north_pi_server_c, pi_host, pi_port, pi_token, wait_time): """ This fixture clones given south & filter plugin repo, and starts south and PI north C instance with filter @@ -73,6 +79,7 @@ def start_south_north(self, reset_and_start_foglamp, add_south, south_branch, # let the readings ingress time.sleep(wait_time) + disable_schedule(foglamp_url, SVC_NAME) enable_schedule(foglamp_url, TASK_NAME) @@ -83,27 +90,34 @@ def start_south_north(self, reset_and_start_foglamp, add_south, south_branch, def test_end_to_end(self, start_south_north, read_data_from_pi, foglamp_url, pi_host, pi_admin, pi_passwd, pi_db, wait_time, retries, skip_verify_north_interface): + # let the readings egress + time.sleep(wait_time * 2) self._verify_ping_and_statistics(foglamp_url) self._verify_ingest(foglamp_url) - # if not skip_verify_north_interface: - # self._verify_egress(read_data_from_pi, pi_host, pi_admin, pi_passwd, pi_db, wait_time, retries) + if not skip_verify_north_interface: + self._verify_egress(read_data_from_pi, pi_host, pi_admin, pi_passwd, pi_db, wait_time, retries) def _verify_ping_and_statistics(self, foglamp_url): ping_response = self.get_ping_status(foglamp_url) - assert ping_response["dataRead"] - # assert ping_response["dataSent"] + assert ping_response["dataRead"] > 0 + assert ping_response["dataSent"] > 0 actual_stats_map = self.get_statistics_map(foglamp_url) - key_asset_name_with_prefix = "{}{}".format(ASSET_PREFIX.upper(), ASSET_NAME.upper()) - assert 
actual_stats_map[key_asset_name_with_prefix] + assert actual_stats_map["{}{}".format(ASSET_PREFIX.upper(), ASSET_NAME_W.upper())] + assert actual_stats_map["{}{}".format(ASSET_PREFIX.upper(), ASSET_NAME_M.upper())] + assert actual_stats_map["{}{}".format(ASSET_PREFIX.upper(), ASSET_NAME_A.upper())] + assert actual_stats_map["{}{}".format(ASSET_PREFIX.upper(), ASSET_NAME_C.upper())] assert actual_stats_map['READINGS'] - # assert actual_stats_map[TASK_NAME] - # assert actual_stats_map['Readings Sent'] + assert actual_stats_map[TASK_NAME] + assert actual_stats_map['Readings Sent'] def _verify_ingest(self, foglamp_url): - asset_name_with_prefix = "{}{}".format(ASSET_PREFIX, ASSET_NAME) + asset_name_with_prefix_w = "{}{}".format(ASSET_PREFIX, ASSET_NAME_W) + asset_name_with_prefix_m = "{}{}".format(ASSET_PREFIX, ASSET_NAME_M) + asset_name_with_prefix_a = "{}{}".format(ASSET_PREFIX, ASSET_NAME_A) + asset_name_with_prefix_c = "{}{}".format(ASSET_PREFIX, ASSET_NAME_C) conn = http.client.HTTPConnection(foglamp_url) conn.request("GET", '/foglamp/asset') @@ -113,45 +127,145 @@ def _verify_ingest(self, foglamp_url): jdoc = json.loads(r) assert len(jdoc), "No asset found" actual_assets = [i["assetCode"] for i in jdoc] - assert asset_name_with_prefix in actual_assets + assert asset_name_with_prefix_w in actual_assets + assert asset_name_with_prefix_m in actual_assets + assert asset_name_with_prefix_a in actual_assets + assert asset_name_with_prefix_c in actual_assets assert jdoc[0]["count"] - expected_assets = Counter(["envirophat/magnetometer", "envirophat/rgb", "envirophat/accelerometer", "envirophat/weather"]) + expected_assets = Counter([asset_name_with_prefix_w, asset_name_with_prefix_m, + asset_name_with_prefix_a, asset_name_with_prefix_c]) assert Counter(actual_assets) == expected_assets # foglamp/asset/envirophat%2Fweather - conn.request("GET", '/foglamp/asset/{}'.format(quote(asset_name_with_prefix, safe=''))) + conn.request("GET", 
'/foglamp/asset/{}'.format(quote(asset_name_with_prefix_w, safe=''))) r = conn.getresponse() assert 200 == r.status r = r.read().decode() - jdoc = json.loads(r) - assert len(jdoc), "No asset found" - assert jdoc[0]["reading"][SENSOR_READ_KEY] + jdoc_asset = json.loads(r) + + for _sensor in SENSOR_READ_KEY_W: + assert len(jdoc_asset), "No data found for asset '{}'".format(asset_name_with_prefix_w) + assert jdoc_asset[0]["reading"][_sensor] is not None + conn.request("GET", '/foglamp/asset/{}/{}'.format(quote(asset_name_with_prefix_w, safe=''), _sensor)) + r = conn.getresponse() + assert 200 == r.status + r = r.read().decode() + jdoc = json.loads(r) + assert len(jdoc), "No data found for asset '{}' and datapoint '{}'".format(asset_name_with_prefix_w, _sensor) + + # foglamp/asset/envirophat%2Fmagnetometer + conn.request("GET", '/foglamp/asset/{}'.format(quote(asset_name_with_prefix_m, safe=''))) + r = conn.getresponse() + assert 200 == r.status + r = r.read().decode() + jdoc_asset = json.loads(r) + + for _sensor in SENSOR_READ_KEY_M: + assert len(jdoc_asset), "No data found for asset '{}'".format(asset_name_with_prefix_m) + assert jdoc_asset[0]["reading"][_sensor] is not None + conn.request("GET", '/foglamp/asset/{}/{}'.format(quote(asset_name_with_prefix_m, safe=''), _sensor)) + r = conn.getresponse() + assert 200 == r.status + r = r.read().decode() + jdoc = json.loads(r) + assert len(jdoc), "No data found for asset '{}' and datapoint '{}'".format(asset_name_with_prefix_m, + _sensor) + + # foglamp/asset/envirophat%2Faccelerometer + conn.request("GET", '/foglamp/asset/{}'.format(quote(asset_name_with_prefix_a, safe=''))) + r = conn.getresponse() + assert 200 == r.status + r = r.read().decode() + jdoc_asset = json.loads(r) + + for _sensor in SENSOR_READ_KEY_A: + assert len(jdoc_asset), "No data found for asset '{}'".format(asset_name_with_prefix_a) + assert jdoc_asset[0]["reading"][_sensor] is not None + conn.request("GET", 
'/foglamp/asset/{}/{}'.format(quote(asset_name_with_prefix_a, safe=''), _sensor)) + r = conn.getresponse() + assert 200 == r.status + r = r.read().decode() + jdoc = json.loads(r) + assert len(jdoc), "No data found for asset '{}' and datapoint '{}'".format(asset_name_with_prefix_a, + _sensor) + # foglamp/asset/envirophat%2Frgb + conn.request("GET", '/foglamp/asset/{}'.format(quote(asset_name_with_prefix_c, safe=''))) + r = conn.getresponse() + assert 200 == r.status + r = r.read().decode() + jdoc_asset = json.loads(r) - weather_sensors = ["temperature", "altitude", "pressure"] - for s in weather_sensors: - conn.request("GET", '/foglamp/asset/{}/{}'.format(quote(asset_name_with_prefix, safe=''), s)) + for _sensor in SENSOR_READ_KEY_C: + assert len(jdoc_asset), "No data found for asset '{}'".format(asset_name_with_prefix_c) + assert jdoc_asset[0]["reading"][_sensor] is not None + conn.request("GET", '/foglamp/asset/{}/{}'.format(quote(asset_name_with_prefix_c, safe=''), _sensor)) r = conn.getresponse() assert 200 == r.status r = r.read().decode() jdoc = json.loads(r) - assert len(jdoc), "No asset found" + assert len(jdoc), "No data found for asset '{}' and datapoint '{}'".format(asset_name_with_prefix_c, + _sensor) # verify summary (avg|min|max) # foglamp/asset/envirophat%2Fweather/temperature/summary + # This will be covered in browser API test + + def _verify_egress(self, read_data_from_pi, pi_host, pi_admin, pi_passwd, pi_db, wait_time, retries): + retry_count = 0 + data_from_pi_w = None + data_from_pi_m = None + data_from_pi_a = None + data_from_pi_c = None + + asset_name_with_prefix_w = "{}{}".format(ASSET_PREFIX, ASSET_NAME_W) + asset_name_with_prefix_a = "{}{}".format(ASSET_PREFIX, ASSET_NAME_A) + asset_name_with_prefix_m = "{}{}".format(ASSET_PREFIX, ASSET_NAME_M) + asset_name_with_prefix_c = "{}{}".format(ASSET_PREFIX, ASSET_NAME_C) + + while (data_from_pi_w is None or data_from_pi_w == []) and retry_count < retries: + data_from_pi_w = 
read_data_from_pi(pi_host, pi_admin, pi_passwd, pi_db, asset_name_with_prefix_w, + SENSOR_READ_KEY_W) + + data_from_pi_m = read_data_from_pi(pi_host, pi_admin, pi_passwd, pi_db, asset_name_with_prefix_m, + SENSOR_READ_KEY_M) + + data_from_pi_a = read_data_from_pi(pi_host, pi_admin, pi_passwd, pi_db, asset_name_with_prefix_a, + SENSOR_READ_KEY_A) + + data_from_pi_c = read_data_from_pi(pi_host, pi_admin, pi_passwd, pi_db, asset_name_with_prefix_c, + SENSOR_READ_KEY_C) + + retry_count += 1 + time.sleep(wait_time * 2) + + if data_from_pi_w is None or data_from_pi_m is None or data_from_pi_a is None or data_from_pi_c is None\ + or retry_count == retries: + assert False, "Failed to read data from PI" + # TODO: Remove prints, used for debugging purpose only + print("data\nWeather={}\nMagnet={}\nAcel={}\nColor={}\n".format(data_from_pi_w, data_from_pi_m, data_from_pi_a, + data_from_pi_c)) + + for _s in SENSOR_READ_KEY_W: + assert _s in data_from_pi_w + print("Weather, Sensor={}".format(_s), sum([abs(number) for number in data_from_pi_w[_s]])) + # Verify that sum of absolute values of each sensor of weather (6-7 datapoints) is not zero + assert sum([abs(number) for number in data_from_pi_w[_s]]) != 0 + + for _s in SENSOR_READ_KEY_A: + assert _s in data_from_pi_a + print("Accelerometer, Sensor={}".format(_s), sum([abs(number) for number in data_from_pi_a[_s]])) + # Verify that sum of absolute values of each sensor of accelerometer (6-7 datapoints) is not zero + assert sum([abs(number) for number in data_from_pi_a[_s]]) != 0 + + for _s in SENSOR_READ_KEY_M: + assert _s in data_from_pi_m + print("Magnetometer, Sensor={}".format(_s), sum([abs(number) for number in data_from_pi_m[_s]])) + # Verify that sum of absolute values of each sensor of magnetometer (6-7 datapoints) is not zero + assert sum([abs(number) for number in data_from_pi_m[_s]]) != 0 + + for _s in SENSOR_READ_KEY_C: + assert _s in data_from_pi_c + print("Color, Sensor={}".format(_s), sum([abs(number) for number in 
data_from_pi_c[_s]])) + # Verify that sum of absolute values of each sensor of color rgb (6-7 datapoints) is not zero + assert sum([abs(number) for number in data_from_pi_c[_s]]) != 0 - # def _verify_egress(self, read_data_from_pi, pi_host, pi_admin, pi_passwd, pi_db, wait_time, retries): - # retry_count = 0 - # data_from_pi = None - # while (data_from_pi is None or data_from_pi == []) and retry_count < retries: - # asset_name_with_prefix = "{}{}".format(ASSET_PREFIX, ASSET_NAME) - # data_from_pi = read_data_from_pi(pi_host, pi_admin, pi_passwd, pi_db, asset_name_with_prefix, {READ_KEY}) - # retry_count += 1 - # time.sleep(wait_time * 2) - # - # if data_from_pi is None or retry_count == retries: - # assert False, "Failed to read data from PI" - # - # assert SENSOR_READ_KEY in data_from_pi - # assert isinstance(data_from_pi[SENSOR_READ_KEY], list) - # for n in data_from_pi[SENSOR_READ_KEY]]: - # assert round(n, 1) > 0.0 From 9d458e7cc7ba2fc6d818996a29cb881d6fa1e89e Mon Sep 17 00:00:00 2001 From: Praveen Garg Date: Mon, 22 Apr 2019 16:16:31 +0530 Subject: [PATCH 055/161] minor refactoring and moved to rpi directory --- tests/system/python/rpi/test_e2e_rpi_ephat.py | 269 ++++++++++++++++++ 1 file changed, 269 insertions(+) create mode 100644 tests/system/python/rpi/test_e2e_rpi_ephat.py diff --git a/tests/system/python/rpi/test_e2e_rpi_ephat.py b/tests/system/python/rpi/test_e2e_rpi_ephat.py new file mode 100644 index 0000000000..bcb6b6f2b1 --- /dev/null +++ b/tests/system/python/rpi/test_e2e_rpi_ephat.py @@ -0,0 +1,269 @@ +# -*- coding: utf-8 -*- + +# FOGLAMP_BEGIN +# See: http://foglamp.readthedocs.io/ +# FOGLAMP_END + +""" Test end to end flow with: + Ingress: ePhat south plugin + Egress: PI Server (C) plugin +""" + +import os +import http.client +import json +import time +import pytest +import utils +from urllib.parse import quote +from collections import Counter + + +__author__ = "Praveen Garg, Vaibhav Singhal" +__copyright__ = "Copyright (c) 2019 Dianomic 
Systems" +__license__ = "Apache 2.0" +__version__ = "${VERSION}" + + +SOUTH_PLUGIN = "envirophat" +SVC_NAME = "Room-1" + +ASSET_PREFIX = "envirophat/" # default for envirophat South plugin + +ASSET_NAME_W = "weather" +SENSOR_READ_KEY_W = {"temperature", "altitude", "pressure"} + +ASSET_NAME_M = "magnetometer" +SENSOR_READ_KEY_M = {"x", "y", "z"} + +ASSET_NAME_A = "accelerometer" +SENSOR_READ_KEY_A = {"x", "y", "z"} + +ASSET_NAME_C = "rgb" +SENSOR_READ_KEY_C = {"r", "g", "b"} + +TASK_NAME = "North v2 PI" + + +@pytest.mark.skipif('raspberrypi' != os.uname()[1], reason="RPi only (ePhat) test") +# sysname='Linux', nodename='raspberrypi', release='4.14.98+', version='#1200 ', machine='armv6l' +class TestE2eRPiEphatEgress: + + def get_ping_status(self, foglamp_url): + conn = http.client.HTTPConnection(foglamp_url) + conn.request("GET", '/foglamp/ping') + r = conn.getresponse() + assert 200 == r.status + r = r.read().decode() + jdoc = json.loads(r) + return jdoc + + def get_statistics_map(self, foglamp_url): + conn = http.client.HTTPConnection(foglamp_url) + conn.request("GET", '/foglamp/statistics') + r = conn.getresponse() + assert 200 == r.status + r = r.read().decode() + jdoc = json.loads(r) + return utils.serialize_stats_map(jdoc) + + @pytest.fixture + def start_south_north(self, reset_and_start_foglamp, add_south, south_branch, disable_schedule, + remove_data_file, remove_directories, enable_schedule, foglamp_url, + start_north_pi_server_c, pi_host, pi_port, pi_token, wait_time): + """ This fixture clones given south & filter plugin repo, and starts south and PI north C instance + + """ + + add_south(SOUTH_PLUGIN, south_branch, foglamp_url, service_name=SVC_NAME) + + start_north_pi_server_c(foglamp_url, pi_host, pi_port, pi_token, taskname=TASK_NAME, start_task=False) + + # let the readings ingress + time.sleep(wait_time) + disable_schedule(foglamp_url, SVC_NAME) + + enable_schedule(foglamp_url, TASK_NAME) + + yield self.start_south_north + + 
remove_directories("/tmp/foglamp-south-{}".format(SOUTH_PLUGIN)) + + def test_end_to_end(self, start_south_north, read_data_from_pi, foglamp_url, pi_host, pi_admin, + pi_passwd, pi_db, wait_time, retries, skip_verify_north_interface): + + # let the readings egress + time.sleep(wait_time * 2) + self._verify_ping_and_statistics(foglamp_url) + + self._verify_ingest(foglamp_url) + + if not skip_verify_north_interface: + self._verify_egress(read_data_from_pi, pi_host, pi_admin, pi_passwd, pi_db, wait_time, retries) + + def _verify_ping_and_statistics(self, foglamp_url): + ping_response = self.get_ping_status(foglamp_url) + assert ping_response["dataRead"] + assert ping_response["dataSent"] + + actual_stats_map = self.get_statistics_map(foglamp_url) + assert actual_stats_map["{}{}".format(ASSET_PREFIX.upper(), ASSET_NAME_W.upper())] + assert actual_stats_map["{}{}".format(ASSET_PREFIX.upper(), ASSET_NAME_M.upper())] + assert actual_stats_map["{}{}".format(ASSET_PREFIX.upper(), ASSET_NAME_A.upper())] + assert actual_stats_map["{}{}".format(ASSET_PREFIX.upper(), ASSET_NAME_C.upper())] + assert actual_stats_map['READINGS'] + assert actual_stats_map[TASK_NAME] + assert actual_stats_map['Readings Sent'] + + def _verify_ingest(self, foglamp_url): + asset_name_with_prefix_w = "{}{}".format(ASSET_PREFIX, ASSET_NAME_W) + asset_name_with_prefix_m = "{}{}".format(ASSET_PREFIX, ASSET_NAME_M) + asset_name_with_prefix_a = "{}{}".format(ASSET_PREFIX, ASSET_NAME_A) + asset_name_with_prefix_c = "{}{}".format(ASSET_PREFIX, ASSET_NAME_C) + conn = http.client.HTTPConnection(foglamp_url) + + conn.request("GET", '/foglamp/asset') + r = conn.getresponse() + assert 200 == r.status + r = r.read().decode() + jdoc = json.loads(r) + assert len(jdoc), "No asset found" + actual_assets = [i["assetCode"] for i in jdoc] + assert asset_name_with_prefix_w in actual_assets + assert asset_name_with_prefix_m in actual_assets + assert asset_name_with_prefix_a in actual_assets + assert asset_name_with_prefix_c 
in actual_assets + assert jdoc[0]["count"] + expected_assets = Counter([asset_name_with_prefix_w, asset_name_with_prefix_m, + asset_name_with_prefix_a, asset_name_with_prefix_c]) + assert Counter(actual_assets) == expected_assets + + # foglamp/asset/envirophat%2Fweather + conn.request("GET", '/foglamp/asset/{}'.format(quote(asset_name_with_prefix_w, safe=''))) + r = conn.getresponse() + assert 200 == r.status + r = r.read().decode() + jdoc_asset = json.loads(r) + + for _sensor in SENSOR_READ_KEY_W: + assert len(jdoc_asset), "No data found for asset '{}'".format(asset_name_with_prefix_w) + assert jdoc_asset[0]["reading"][_sensor] is not None + conn.request("GET", '/foglamp/asset/{}/{}'.format(quote(asset_name_with_prefix_w, safe=''), _sensor)) + r = conn.getresponse() + assert 200 == r.status + r = r.read().decode() + jdoc = json.loads(r) + assert len(jdoc), "No data found for asset '{}' and datapoint '{}'".format(asset_name_with_prefix_w, _sensor) + + # foglamp/asset/envirophat%2Fmagnetometer + conn.request("GET", '/foglamp/asset/{}'.format(quote(asset_name_with_prefix_m, safe=''))) + r = conn.getresponse() + assert 200 == r.status + r = r.read().decode() + jdoc_asset = json.loads(r) + + for _sensor in SENSOR_READ_KEY_M: + assert len(jdoc_asset), "No data found for asset '{}'".format(asset_name_with_prefix_m) + assert jdoc_asset[0]["reading"][_sensor] is not None + conn.request("GET", '/foglamp/asset/{}/{}'.format(quote(asset_name_with_prefix_m, safe=''), _sensor)) + r = conn.getresponse() + assert 200 == r.status + r = r.read().decode() + jdoc = json.loads(r) + assert len(jdoc), "No data found for asset '{}' and datapoint '{}'".format(asset_name_with_prefix_m, + _sensor) + + # foglamp/asset/envirophat%2Faccelerometer + conn.request("GET", '/foglamp/asset/{}'.format(quote(asset_name_with_prefix_a, safe=''))) + r = conn.getresponse() + assert 200 == r.status + r = r.read().decode() + jdoc_asset = json.loads(r) + + for _sensor in SENSOR_READ_KEY_A: + assert 
len(jdoc_asset), "No data found for asset '{}'".format(asset_name_with_prefix_a) + assert jdoc_asset[0]["reading"][_sensor] is not None + conn.request("GET", '/foglamp/asset/{}/{}'.format(quote(asset_name_with_prefix_a, safe=''), _sensor)) + r = conn.getresponse() + assert 200 == r.status + r = r.read().decode() + jdoc = json.loads(r) + assert len(jdoc), "No data found for asset '{}' and datapoint '{}'".format(asset_name_with_prefix_a, + _sensor) + # foglamp/asset/envirophat%2Frgb + conn.request("GET", '/foglamp/asset/{}'.format(quote(asset_name_with_prefix_c, safe=''))) + r = conn.getresponse() + assert 200 == r.status + r = r.read().decode() + jdoc_asset = json.loads(r) + + for _sensor in SENSOR_READ_KEY_C: + assert len(jdoc_asset), "No data found for asset '{}'".format(asset_name_with_prefix_c) + assert jdoc_asset[0]["reading"][_sensor] is not None + conn.request("GET", '/foglamp/asset/{}/{}'.format(quote(asset_name_with_prefix_c, safe=''), _sensor)) + r = conn.getresponse() + assert 200 == r.status + r = r.read().decode() + jdoc = json.loads(r) + assert len(jdoc), "No data found for asset '{}' and datapoint '{}'".format(asset_name_with_prefix_c, + _sensor) + + def _verify_egress(self, read_data_from_pi, pi_host, pi_admin, pi_passwd, pi_db, wait_time, retries): + retry_count = 0 + + data_from_pi_w = None + data_from_pi_m = None + data_from_pi_a = None + data_from_pi_c = None + + asset_name_with_prefix_w = "{}{}".format(ASSET_PREFIX, ASSET_NAME_W) + asset_name_with_prefix_a = "{}{}".format(ASSET_PREFIX, ASSET_NAME_A) + asset_name_with_prefix_m = "{}{}".format(ASSET_PREFIX, ASSET_NAME_M) + asset_name_with_prefix_c = "{}{}".format(ASSET_PREFIX, ASSET_NAME_C) + + while (data_from_pi_w is None or data_from_pi_w == []) and retry_count < retries: + data_from_pi_w = read_data_from_pi(pi_host, pi_admin, pi_passwd, pi_db, asset_name_with_prefix_w, + SENSOR_READ_KEY_W) + + data_from_pi_m = read_data_from_pi(pi_host, pi_admin, pi_passwd, pi_db, asset_name_with_prefix_m, + 
SENSOR_READ_KEY_M) + + data_from_pi_a = read_data_from_pi(pi_host, pi_admin, pi_passwd, pi_db, asset_name_with_prefix_a, + SENSOR_READ_KEY_A) + + data_from_pi_c = read_data_from_pi(pi_host, pi_admin, pi_passwd, pi_db, asset_name_with_prefix_c, + SENSOR_READ_KEY_C) + + retry_count += 1 + time.sleep(wait_time * 2) + + if data_from_pi_w is None or data_from_pi_m is None or data_from_pi_a is None or data_from_pi_c is None\ + or retry_count == retries: + assert False, "Failed to read data from PI" + + print("Data read from PI System:\nWeather={}\nMagnetometer={}\nAccelerometer={}\nrgbColor={}\n".format( + data_from_pi_w, data_from_pi_m, data_from_pi_a, data_from_pi_c)) + + for w in SENSOR_READ_KEY_W: + assert w in data_from_pi_w + abs_sum_w = sum([abs(n) for n in data_from_pi_w[w]]) + print("Weather (sum of absolute values), Sensor={}".format(w), abs_sum_w) + assert abs_sum_w, "Sum of weather sensor absolute values is 0" + + for a in SENSOR_READ_KEY_A: + assert a in data_from_pi_a + abs_sum_a = sum([abs(n) for n in data_from_pi_a[a]]) + print("Accelerometer (sum of absolute values), Sensor={}".format(a), abs_sum_a) + assert abs_sum_a, "Sum of accelerometer sensor absolute values is 0" + + for m in SENSOR_READ_KEY_M: + assert m in data_from_pi_m + abs_sum_m = sum([abs(n) for n in data_from_pi_a[m]]) + print("Magnetometer (sum of absolute values), Sensor={}".format(m), abs_sum_m) + assert abs_sum_m, "Sum of magnetometer sensor absolute values is 0" + + for c in SENSOR_READ_KEY_C: + assert c in data_from_pi_c + abs_sum_c = sum([abs(n) for n in data_from_pi_c[c]]) + print("RGB colors (sum of absolute values), Sensor={}".format(c), abs_sum_c) + assert abs_sum_c, "Sum of rgb sensors absolute values is 0" From ab6ea416871c6f183f25c276deda21affdb74c06 Mon Sep 17 00:00:00 2001 From: Praveen Garg Date: Mon, 22 Apr 2019 16:17:49 +0530 Subject: [PATCH 056/161] moved to rpi directory --- tests/system/python/e2e/test_e2e_rpi_ephat.py | 271 ------------------ 1 file changed, 271 
deletions(-) delete mode 100644 tests/system/python/e2e/test_e2e_rpi_ephat.py diff --git a/tests/system/python/e2e/test_e2e_rpi_ephat.py b/tests/system/python/e2e/test_e2e_rpi_ephat.py deleted file mode 100644 index 312c8e411e..0000000000 --- a/tests/system/python/e2e/test_e2e_rpi_ephat.py +++ /dev/null @@ -1,271 +0,0 @@ -# -*- coding: utf-8 -*- - -# FOGLAMP_BEGIN -# See: http://foglamp.readthedocs.io/ -# FOGLAMP_END - -""" Test end to end flow with: - Ingress: ePhat south plugin - Egress: PI Server (C) plugin -""" - -import os -import http.client -import json -import time -import pytest -import utils -from urllib.parse import quote -from collections import Counter - - -__author__ = "Praveen Garg" -__copyright__ = "Copyright (c) 2019 Dianomic Systems" -__license__ = "Apache 2.0" -__version__ = "${VERSION}" - - -SOUTH_PLUGIN = "envirophat" -SVC_NAME = "Room-1" -ASSET_PREFIX = "envirophat/" # default for envirophat South plugin -ASSET_NAME_W = "weather" -SENSOR_READ_KEY_W = {"temperature", "altitude", "pressure"} - -ASSET_NAME_M = "magnetometer" -SENSOR_READ_KEY_M = {"x", "y", "z"} - -ASSET_NAME_A = "accelerometer" -SENSOR_READ_KEY_A = {"x", "y", "z"} - -ASSET_NAME_C = "rgb" -SENSOR_READ_KEY_C = {"r", "g", "b"} - -TASK_NAME = "North v2 PI" - - -@pytest.mark.skipif('raspberrypi' != os.uname()[1], reason="RPi only (ePhat) test") -# sysname='Linux', nodename='raspberrypi', release='4.14.98+', version='#1200 ', machine='armv6l' -class TestE2eRPiEphatEgress: - - def get_ping_status(self, foglamp_url): - conn = http.client.HTTPConnection(foglamp_url) - conn.request("GET", '/foglamp/ping') - r = conn.getresponse() - assert 200 == r.status - r = r.read().decode() - jdoc = json.loads(r) - return jdoc - - def get_statistics_map(self, foglamp_url): - conn = http.client.HTTPConnection(foglamp_url) - conn.request("GET", '/foglamp/statistics') - r = conn.getresponse() - assert 200 == r.status - r = r.read().decode() - jdoc = json.loads(r) - return utils.serialize_stats_map(jdoc) - 
- @pytest.fixture - def start_south_north(self, reset_and_start_foglamp, add_south, south_branch, disable_schedule, - remove_data_file, remove_directories, enable_schedule, foglamp_url, - start_north_pi_server_c, pi_host, pi_port, pi_token, wait_time): - """ This fixture clones given south & filter plugin repo, and starts south and PI north C instance with filter - - """ - - add_south(SOUTH_PLUGIN, south_branch, foglamp_url, service_name=SVC_NAME) - - start_north_pi_server_c(foglamp_url, pi_host, pi_port, pi_token, taskname=TASK_NAME, start_task=False) - - # let the readings ingress - time.sleep(wait_time) - disable_schedule(foglamp_url, SVC_NAME) - - enable_schedule(foglamp_url, TASK_NAME) - - yield self.start_south_north - - remove_directories("/tmp/foglamp-south-{}".format(SOUTH_PLUGIN)) - - def test_end_to_end(self, start_south_north, read_data_from_pi, foglamp_url, pi_host, pi_admin, - pi_passwd, pi_db, wait_time, retries, skip_verify_north_interface): - - # let the readings egress - time.sleep(wait_time * 2) - self._verify_ping_and_statistics(foglamp_url) - - self._verify_ingest(foglamp_url) - - if not skip_verify_north_interface: - self._verify_egress(read_data_from_pi, pi_host, pi_admin, pi_passwd, pi_db, wait_time, retries) - - def _verify_ping_and_statistics(self, foglamp_url): - ping_response = self.get_ping_status(foglamp_url) - assert ping_response["dataRead"] > 0 - assert ping_response["dataSent"] > 0 - - actual_stats_map = self.get_statistics_map(foglamp_url) - assert actual_stats_map["{}{}".format(ASSET_PREFIX.upper(), ASSET_NAME_W.upper())] - assert actual_stats_map["{}{}".format(ASSET_PREFIX.upper(), ASSET_NAME_M.upper())] - assert actual_stats_map["{}{}".format(ASSET_PREFIX.upper(), ASSET_NAME_A.upper())] - assert actual_stats_map["{}{}".format(ASSET_PREFIX.upper(), ASSET_NAME_C.upper())] - assert actual_stats_map['READINGS'] - assert actual_stats_map[TASK_NAME] - assert actual_stats_map['Readings Sent'] - - def _verify_ingest(self, foglamp_url): 
- asset_name_with_prefix_w = "{}{}".format(ASSET_PREFIX, ASSET_NAME_W) - asset_name_with_prefix_m = "{}{}".format(ASSET_PREFIX, ASSET_NAME_M) - asset_name_with_prefix_a = "{}{}".format(ASSET_PREFIX, ASSET_NAME_A) - asset_name_with_prefix_c = "{}{}".format(ASSET_PREFIX, ASSET_NAME_C) - conn = http.client.HTTPConnection(foglamp_url) - - conn.request("GET", '/foglamp/asset') - r = conn.getresponse() - assert 200 == r.status - r = r.read().decode() - jdoc = json.loads(r) - assert len(jdoc), "No asset found" - actual_assets = [i["assetCode"] for i in jdoc] - assert asset_name_with_prefix_w in actual_assets - assert asset_name_with_prefix_m in actual_assets - assert asset_name_with_prefix_a in actual_assets - assert asset_name_with_prefix_c in actual_assets - assert jdoc[0]["count"] - expected_assets = Counter([asset_name_with_prefix_w, asset_name_with_prefix_m, - asset_name_with_prefix_a, asset_name_with_prefix_c]) - assert Counter(actual_assets) == expected_assets - - # foglamp/asset/envirophat%2Fweather - conn.request("GET", '/foglamp/asset/{}'.format(quote(asset_name_with_prefix_w, safe=''))) - r = conn.getresponse() - assert 200 == r.status - r = r.read().decode() - jdoc_asset = json.loads(r) - - for _sensor in SENSOR_READ_KEY_W: - assert len(jdoc_asset), "No data found for asset '{}'".format(asset_name_with_prefix_w) - assert jdoc_asset[0]["reading"][_sensor] is not None - conn.request("GET", '/foglamp/asset/{}/{}'.format(quote(asset_name_with_prefix_w, safe=''), _sensor)) - r = conn.getresponse() - assert 200 == r.status - r = r.read().decode() - jdoc = json.loads(r) - assert len(jdoc), "No data found for asset '{}' and datapoint '{}'".format(asset_name_with_prefix_w, _sensor) - - # foglamp/asset/envirophat%2Fmagnetometer - conn.request("GET", '/foglamp/asset/{}'.format(quote(asset_name_with_prefix_m, safe=''))) - r = conn.getresponse() - assert 200 == r.status - r = r.read().decode() - jdoc_asset = json.loads(r) - - for _sensor in SENSOR_READ_KEY_M: - assert 
len(jdoc_asset), "No data found for asset '{}'".format(asset_name_with_prefix_m) - assert jdoc_asset[0]["reading"][_sensor] is not None - conn.request("GET", '/foglamp/asset/{}/{}'.format(quote(asset_name_with_prefix_m, safe=''), _sensor)) - r = conn.getresponse() - assert 200 == r.status - r = r.read().decode() - jdoc = json.loads(r) - assert len(jdoc), "No data found for asset '{}' and datapoint '{}'".format(asset_name_with_prefix_m, - _sensor) - - # foglamp/asset/envirophat%2Faccelerometer - conn.request("GET", '/foglamp/asset/{}'.format(quote(asset_name_with_prefix_a, safe=''))) - r = conn.getresponse() - assert 200 == r.status - r = r.read().decode() - jdoc_asset = json.loads(r) - - for _sensor in SENSOR_READ_KEY_A: - assert len(jdoc_asset), "No data found for asset '{}'".format(asset_name_with_prefix_a) - assert jdoc_asset[0]["reading"][_sensor] is not None - conn.request("GET", '/foglamp/asset/{}/{}'.format(quote(asset_name_with_prefix_a, safe=''), _sensor)) - r = conn.getresponse() - assert 200 == r.status - r = r.read().decode() - jdoc = json.loads(r) - assert len(jdoc), "No data found for asset '{}' and datapoint '{}'".format(asset_name_with_prefix_a, - _sensor) - # foglamp/asset/envirophat%2Frgb - conn.request("GET", '/foglamp/asset/{}'.format(quote(asset_name_with_prefix_c, safe=''))) - r = conn.getresponse() - assert 200 == r.status - r = r.read().decode() - jdoc_asset = json.loads(r) - - for _sensor in SENSOR_READ_KEY_C: - assert len(jdoc_asset), "No data found for asset '{}'".format(asset_name_with_prefix_c) - assert jdoc_asset[0]["reading"][_sensor] is not None - conn.request("GET", '/foglamp/asset/{}/{}'.format(quote(asset_name_with_prefix_c, safe=''), _sensor)) - r = conn.getresponse() - assert 200 == r.status - r = r.read().decode() - jdoc = json.loads(r) - assert len(jdoc), "No data found for asset '{}' and datapoint '{}'".format(asset_name_with_prefix_c, - _sensor) - - # verify summary (avg|min|max) - # 
foglamp/asset/envirophat%2Fweather/temperature/summary - # This will be covered in browser API test - - def _verify_egress(self, read_data_from_pi, pi_host, pi_admin, pi_passwd, pi_db, wait_time, retries): - retry_count = 0 - data_from_pi_w = None - data_from_pi_m = None - data_from_pi_a = None - data_from_pi_c = None - - asset_name_with_prefix_w = "{}{}".format(ASSET_PREFIX, ASSET_NAME_W) - asset_name_with_prefix_a = "{}{}".format(ASSET_PREFIX, ASSET_NAME_A) - asset_name_with_prefix_m = "{}{}".format(ASSET_PREFIX, ASSET_NAME_M) - asset_name_with_prefix_c = "{}{}".format(ASSET_PREFIX, ASSET_NAME_C) - - while (data_from_pi_w is None or data_from_pi_w == []) and retry_count < retries: - data_from_pi_w = read_data_from_pi(pi_host, pi_admin, pi_passwd, pi_db, asset_name_with_prefix_w, - SENSOR_READ_KEY_W) - - data_from_pi_m = read_data_from_pi(pi_host, pi_admin, pi_passwd, pi_db, asset_name_with_prefix_m, - SENSOR_READ_KEY_M) - - data_from_pi_a = read_data_from_pi(pi_host, pi_admin, pi_passwd, pi_db, asset_name_with_prefix_a, - SENSOR_READ_KEY_A) - - data_from_pi_c = read_data_from_pi(pi_host, pi_admin, pi_passwd, pi_db, asset_name_with_prefix_c, - SENSOR_READ_KEY_C) - - retry_count += 1 - time.sleep(wait_time * 2) - - if data_from_pi_w is None or data_from_pi_m is None or data_from_pi_a is None or data_from_pi_c is None\ - or retry_count == retries: - assert False, "Failed to read data from PI" - # TODO: Remove prints, used for debugging purpose only - print("data\nWeather={}\nMagnet={}\nAcel={}\nColor={}\n".format(data_from_pi_w, data_from_pi_m, data_from_pi_a, - data_from_pi_c)) - - for _s in SENSOR_READ_KEY_W: - assert _s in data_from_pi_w - print("Weather, Sensor={}".format(_s), sum([abs(number) for number in data_from_pi_w[_s]])) - # Verify that sum of absolute values of each sensor of weather (6-7 datapoints) is not zero - assert sum([abs(number) for number in data_from_pi_w[_s]]) != 0 - - for _s in SENSOR_READ_KEY_A: - assert _s in data_from_pi_a - 
print("Accelerometer, Sensor={}".format(_s), sum([abs(number) for number in data_from_pi_a[_s]])) - # Verify that sum of absolute values of each sensor of accelerometer (6-7 datapoints) is not zero - assert sum([abs(number) for number in data_from_pi_a[_s]]) != 0 - - for _s in SENSOR_READ_KEY_M: - assert _s in data_from_pi_m - print("Magnetometer, Sensor={}".format(_s), sum([abs(number) for number in data_from_pi_m[_s]])) - # Verify that sum of absolute values of each sensor of magnetometer (6-7 datapoints) is not zero - assert sum([abs(number) for number in data_from_pi_m[_s]]) != 0 - - for _s in SENSOR_READ_KEY_C: - assert _s in data_from_pi_c - print("Color, Sensor={}".format(_s), sum([abs(number) for number in data_from_pi_c[_s]])) - # Verify that sum of absolute values of each sensor of color rgb (6-7 datapoints) is not zero - assert sum([abs(number) for number in data_from_pi_c[_s]]) != 0 - From 795bbb177472f64286358d2a45301663b80a8672 Mon Sep 17 00:00:00 2001 From: Praveen Garg Date: Mon, 22 Apr 2019 16:20:21 +0530 Subject: [PATCH 057/161] fixed verification --- tests/system/python/rpi/test_e2e_rpi_ephat.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/system/python/rpi/test_e2e_rpi_ephat.py b/tests/system/python/rpi/test_e2e_rpi_ephat.py index bcb6b6f2b1..75136797a5 100644 --- a/tests/system/python/rpi/test_e2e_rpi_ephat.py +++ b/tests/system/python/rpi/test_e2e_rpi_ephat.py @@ -258,7 +258,7 @@ def _verify_egress(self, read_data_from_pi, pi_host, pi_admin, pi_passwd, pi_db, for m in SENSOR_READ_KEY_M: assert m in data_from_pi_m - abs_sum_m = sum([abs(n) for n in data_from_pi_a[m]]) + abs_sum_m = sum([abs(n) for n in data_from_pi_m[m]]) print("Magnetometer (sum of absolute values), Sensor={}".format(m), abs_sum_m) assert abs_sum_m, "Sum of magnetometer sensor absolute values is 0" From 0508537da40e98cca5beb486d8a9ef5d42b391c2 Mon Sep 17 00:00:00 2001 From: Praveen Garg Date: Mon, 22 Apr 2019 16:50:30 +0530 Subject: [PATCH 058/161] 
print more info --- tests/system/python/rpi/test_e2e_rpi_ephat.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/tests/system/python/rpi/test_e2e_rpi_ephat.py b/tests/system/python/rpi/test_e2e_rpi_ephat.py index 75136797a5..2f79ab45e1 100644 --- a/tests/system/python/rpi/test_e2e_rpi_ephat.py +++ b/tests/system/python/rpi/test_e2e_rpi_ephat.py @@ -247,23 +247,23 @@ def _verify_egress(self, read_data_from_pi, pi_host, pi_admin, pi_passwd, pi_db, for w in SENSOR_READ_KEY_W: assert w in data_from_pi_w abs_sum_w = sum([abs(n) for n in data_from_pi_w[w]]) - print("Weather (sum of absolute values), Sensor={}".format(w), abs_sum_w) + print("Weather (sum of {} absolute values), Sensor={}".format(len(data_from_pi_w[w]), w), abs_sum_w) assert abs_sum_w, "Sum of weather sensor absolute values is 0" for a in SENSOR_READ_KEY_A: assert a in data_from_pi_a abs_sum_a = sum([abs(n) for n in data_from_pi_a[a]]) - print("Accelerometer (sum of absolute values), Sensor={}".format(a), abs_sum_a) + print("Accelerometer (sum of {} absolute values, Sensor={}".format(len(data_from_pi_a[a]), a), abs_sum_a) assert abs_sum_a, "Sum of accelerometer sensor absolute values is 0" for m in SENSOR_READ_KEY_M: assert m in data_from_pi_m abs_sum_m = sum([abs(n) for n in data_from_pi_m[m]]) - print("Magnetometer (sum of absolute values), Sensor={}".format(m), abs_sum_m) + print("Magnetometer (sum of {} absolute values), Sensor={}".format(len(data_from_pi_m[m]), m), abs_sum_m) assert abs_sum_m, "Sum of magnetometer sensor absolute values is 0" for c in SENSOR_READ_KEY_C: assert c in data_from_pi_c abs_sum_c = sum([abs(n) for n in data_from_pi_c[c]]) - print("RGB colors (sum of absolute values), Sensor={}".format(c), abs_sum_c) + print("RGB colors (sum of {} absolute values), Sensor={}".format(len(data_from_pi_c[c]), c), abs_sum_c) assert abs_sum_c, "Sum of rgb sensors absolute values is 0" From e48ba05cfe0ab3120704bf06465000c61ed5bf95 Mon Sep 17 00:00:00 2001 From: 
ashish-jabble Date: Mon, 22 Apr 2019 17:51:54 +0530 Subject: [PATCH 059/161] Prevent update/delete to configuration items for which optional attribute readonly has been set to TRUE --- .../services/core/api/configuration.py | 31 +++- python/foglamp/services/core/server.py | 2 + .../services/core/api/test_configuration.py | 152 ++++++++++++++---- 3 files changed, 151 insertions(+), 34 deletions(-) diff --git a/python/foglamp/services/core/api/configuration.py b/python/foglamp/services/core/api/configuration.py index b74696f6b0..feced8d348 100644 --- a/python/foglamp/services/core/api/configuration.py +++ b/python/foglamp/services/core/api/configuration.py @@ -244,6 +244,14 @@ async def set_configuration_item(request): raise web.HTTPBadRequest(reason='Missing required value for {}'.format(config_item)) try: if not found_optional: + try: + is_core_mgt = request.is_core_mgt + except AttributeError: + storage_value_entry = await cf_mgr.get_category_item(category_name, config_item) + if 'readonly' in storage_value_entry: + if storage_value_entry['readonly'] == 'true': + raise TypeError("Update not allowed for {} item_name as it has readonly attribute set".format(config_item)) + await cf_mgr.set_category_item_value_entry(category_name, config_item, value) else: await cf_mgr.set_optional_value_entry(category_name, config_item, list(found_optional.keys())[0], list(found_optional.values())[0]) @@ -273,6 +281,18 @@ async def update_configuration_item_bulk(request): if not data: return web.HTTPBadRequest(reason='Nothing to update') cf_mgr = ConfigurationManager(connect.get_storage_async()) + try: + is_core_mgt = request.is_core_mgt + except AttributeError: + for item_name, new_val in data.items(): + storage_value_entry = await cf_mgr.get_category_item(category_name, item_name) + if storage_value_entry is None: + raise KeyError("{} config item not found".format(item_name)) + else: + if 'readonly' in storage_value_entry: + if storage_value_entry['readonly'] == 'true': + raise 
TypeError( + "Bulk update not allowed for {} item_name as it has readonly attribute set".format(item_name)) await cf_mgr.update_configuration_item_bulk(category_name, data) except (NameError, KeyError) as ex: raise web.HTTPNotFound(reason=ex) @@ -377,16 +397,23 @@ async def delete_configuration_item_value(request): category_name = urllib.parse.unquote(category_name) if category_name is not None else None config_item = urllib.parse.unquote(config_item) if config_item is not None else None - # TODO: make it optimized and elegant cf_mgr = ConfigurationManager(connect.get_storage_async()) try: category_item = await cf_mgr.get_category_item(category_name, config_item) if category_item is None: raise ValueError - + try: + is_core_mgt = request.is_core_mgt + except AttributeError: + if 'readonly' in category_item: + if category_item['readonly'] == 'true': + raise TypeError( + "Delete not allowed for {} item_name as it has readonly attribute set".format(config_item)) await cf_mgr.set_category_item_value_entry(category_name, config_item, category_item['default']) except ValueError: raise web.HTTPNotFound(reason="No detail found for the category_name: {} and config_item: {}".format(category_name, config_item)) + except TypeError as ex: + raise web.HTTPBadRequest(reason=str(ex)) result = await cf_mgr.get_category_item(category_name, config_item) diff --git a/python/foglamp/services/core/server.py b/python/foglamp/services/core/server.py index a21c881bbd..ccc212fd0d 100755 --- a/python/foglamp/services/core/server.py +++ b/python/foglamp/services/core/server.py @@ -1327,11 +1327,13 @@ async def get_configuration_item(cls, request): @classmethod async def update_configuration_item(cls, request): + request.is_core_mgt = True res = await conf_api.set_configuration_item(request) return res @classmethod async def delete_configuration_item(cls, request): + request.is_core_mgt = True res = await conf_api.delete_configuration_item_value(request) return res diff --git 
a/tests/unit/python/foglamp/services/core/api/test_configuration.py b/tests/unit/python/foglamp/services/core/api/test_configuration.py index 970db2c54f..3a24bdf137 100644 --- a/tests/unit/python/foglamp/services/core/api/test_configuration.py +++ b/tests/unit/python/foglamp/services/core/api/test_configuration.py @@ -211,28 +211,31 @@ async def async_mock(): patch_get_cat_item.assert_called_once_with(category_name, item_name) async def test_set_config_item(self, client, category_name='rest_api', item_name='http_port'): + async def async_mock(return_value): + return return_value + payload = {"value": '8082'} result = {'value': '8082', 'type': 'integer', 'default': '8081', 'description': 'The port to accept HTTP connections on'} - - async def async_mock_set_item(): - return None - - async def async_mock(): - return result - storage_client_mock = MagicMock(StorageClientAsync) c_mgr = ConfigurationManager(storage_client_mock) with patch.object(connect, 'get_storage_async', return_value=storage_client_mock): - with patch.object(c_mgr, 'set_category_item_value_entry', return_value=async_mock_set_item()) as patch_set_entry: - with patch.object(c_mgr, 'get_category_item', return_value=async_mock()) as patch_get_cat_item: + with patch.object(c_mgr, 'set_category_item_value_entry', return_value=async_mock(None)) as patch_set_entry: + with patch.object(c_mgr, 'get_category_item', side_effect=[async_mock(result), async_mock(result)]) as patch_get_cat_item: resp = await client.put('/foglamp/category/{}/{}'.format(category_name, item_name), data=json.dumps(payload)) assert 200 == resp.status r = await resp.text() json_response = json.loads(r) assert result == json_response - patch_get_cat_item.assert_called_once_with(category_name, item_name) + assert 2 == patch_get_cat_item.call_count + calls = patch_get_cat_item.call_args_list + args, kwargs = calls[0] + assert category_name == args[0] + assert item_name == args[1] + args, kwargs = calls[1] + assert category_name == args[0] + 
assert item_name == args[1] patch_set_entry.assert_called_once_with(category_name, item_name, payload['value']) @pytest.mark.parametrize("payload, message", [ @@ -249,32 +252,46 @@ async def test_set_config_item_bad_request(self, client, payload, message, categ assert message == resp.reason async def test_set_config_item_not_found(self, client, category_name='rest_api', item_name='http_port'): - async def async_mock(): - return None + async def async_mock(return_value): + return return_value payload = {"value": '8082'} storage_client_mock = MagicMock(StorageClientAsync) c_mgr = ConfigurationManager(storage_client_mock) + storage_value_entry = {'value': '8082', 'type': 'integer', 'default': '8081', + 'description': 'The port to accept HTTP connections on'} with patch.object(connect, 'get_storage_async', return_value=storage_client_mock): - with patch.object(c_mgr, 'set_category_item_value_entry', return_value=async_mock()) as patch_set_entry: - with patch.object(c_mgr, 'get_category_item', return_value=async_mock()) as patch_get_cat_item: + with patch.object(c_mgr, 'set_category_item_value_entry', return_value=async_mock(None)) as patch_set_entry: + with patch.object(c_mgr, 'get_category_item', side_effect=[async_mock(storage_value_entry), async_mock(None)]) as patch_get_cat_item: resp = await client.put('/foglamp/category/{}/{}'.format(category_name, item_name), data=json.dumps(payload)) assert 404 == resp.status assert "No detail found for the category_name: {} and config_item: {}".format(category_name, item_name) == resp.reason - patch_get_cat_item.assert_called_once_with(category_name, item_name) + assert 2 == patch_get_cat_item.call_count + calls = patch_get_cat_item.call_args_list + args, kwargs = calls[0] + assert category_name == args[0] + assert item_name == args[1] + args, kwargs = calls[1] + assert category_name == args[0] + assert item_name == args[1] patch_set_entry.assert_called_once_with(category_name, item_name, payload['value']) - async def 
test_set_config_item_exception(self, client, category_name='rest_api', item_name='http_port'): + async def test_set_config_item_not_allowed(self, client, category_name='rest_api', item_name='http_port'): + async def async_mock(return_value): + return return_value + payload = {"value": '8082'} storage_client_mock = MagicMock(StorageClientAsync) c_mgr = ConfigurationManager(storage_client_mock) + storage_value_entry = {'value': '8082', 'type': 'integer', 'default': '8081', + 'description': 'The port to accept HTTP connections on', 'readonly': 'true'} with patch.object(connect, 'get_storage_async', return_value=storage_client_mock): - with patch.object(c_mgr, 'set_category_item_value_entry', side_effect=ValueError) as patch_set_entry: + with patch.object(c_mgr, 'get_category_item', return_value=async_mock(storage_value_entry)) as patch_get_cat: resp = await client.put('/foglamp/category/{}/{}'.format(category_name, item_name), data=json.dumps(payload)) - assert 404 == resp.status - assert resp.reason is None - patch_set_entry.assert_called_once_with(category_name, item_name, payload['value']) + assert 400 == resp.status + assert 'Update not allowed for {} item_name as it has readonly attribute set'.format(item_name) == resp.reason + patch_get_cat.assert_called_once_with(category_name, item_name) @pytest.mark.parametrize("value", [ '', @@ -341,6 +358,25 @@ async def async_mock(): assert category_name == args[0] assert item_name == args[1] + async def test_delete_config_item_not_allowed(self, client, category_name='rest_api', item_name='http_port'): + result = {'value': '8081', 'type': 'integer', 'default': '8081', + 'description': 'The port to accept HTTP connections on', 'readonly': 'true'} + + async def async_mock(): + return result + + storage_client_mock = MagicMock(StorageClientAsync) + c_mgr = ConfigurationManager(storage_client_mock) + with patch.object(connect, 'get_storage_async', return_value=storage_client_mock): + with patch.object(c_mgr, 
'get_category_item', side_effect=[async_mock()]) as patch_get_cat_item: + resp = await client.delete('/foglamp/category/{}/{}/value'.format(category_name, item_name)) + assert 400 == resp.status + assert 'Delete not allowed for {} item_name as it has readonly attribute set'.format(item_name) == resp.reason + assert 1 == patch_get_cat_item.call_count + args, kwargs = patch_get_cat_item.call_args + assert category_name == args[0] + assert item_name == args[1] + async def test_delete_config_item_not_found_before_set_config(self, client, category_name='rest_api', item_name='http_port'): async def async_mock(): return None @@ -696,11 +732,53 @@ async def test_update_bulk_config_exception(self, client, code, exception_name, storage_client_mock = MagicMock(spec=StorageClientAsync) c_mgr = ConfigurationManager(storage_client_mock) with patch.object(connect, 'get_storage_async', return_value=storage_client_mock): - with patch.object(c_mgr, 'update_configuration_item_bulk', side_effect=exception_name) as patch_update_bulk: + with patch.object(c_mgr, 'get_category_item', side_effect=exception_name) as patch_get_cat_item: resp = await client.put('/foglamp/category/{}'.format(category_name), data=json.dumps(payload)) assert code == resp.status assert resp.reason is None - patch_update_bulk.assert_called_once_with(category_name, payload) + assert 1 == patch_get_cat_item.call_count + calls = patch_get_cat_item.call_args_list + args, kwargs = calls[0] + assert category_name == args[0] + assert list(payload)[0] == args[1] + + async def test_update_bulk_config_item_not_found(self, client, category_name='rest_api'): + async def async_mock(return_value): + return return_value + + payload = {"http_port": "8082", "authentication": "mandatory"} + storage_client_mock = MagicMock(spec=StorageClientAsync) + c_mgr = ConfigurationManager(storage_client_mock) + with patch.object(connect, 'get_storage_async', return_value=storage_client_mock): + with patch.object(c_mgr, 'get_category_item', 
return_value=async_mock(None)) as patch_get_cat_item: + resp = await client.put('/foglamp/category/{}'.format(category_name), data=json.dumps(payload)) + assert 404 == resp.status + assert "'{} config item not found'".format(list(payload)[0]) == resp.reason + assert 1 == patch_get_cat_item.call_count + calls = patch_get_cat_item.call_args_list + args, kwargs = calls[0] + assert category_name == args[0] + assert list(payload)[0] == args[1] + + async def test_update_bulk_config_not_allowed(self, client, category_name='rest_api'): + async def async_mock(return_value): + return return_value + + payload = {"http_port": "8082", "authentication": "mandatory"} + storage_client_mock = MagicMock(spec=StorageClientAsync) + c_mgr = ConfigurationManager(storage_client_mock) + storage_value_entry = {'description': 'Port to accept HTTP connections on', 'displayName': 'HTTP Port', + 'value': '8081', 'default': '8081', 'order': '2', 'type': 'integer', 'readonly': 'true'} + with patch.object(connect, 'get_storage_async', return_value=storage_client_mock): + with patch.object(c_mgr, 'get_category_item', return_value=async_mock(storage_value_entry)) as patch_get_cat_item: + resp = await client.put('/foglamp/category/{}'.format(category_name), data=json.dumps(payload)) + assert 400 == resp.status + assert 'Bulk update not allowed for {} item_name as it has readonly attribute set'.format(list(payload)[0]) == resp.reason + assert 1 == patch_get_cat_item.call_count + calls = patch_get_cat_item.call_args_list + args, kwargs = calls[0] + assert category_name == args[0] + assert list(payload)[0] == args[1] @pytest.mark.parametrize("category_name", [ "rest_api", "Rest $API" @@ -717,16 +795,27 @@ async def async_mock(return_value): payload = {"http_port": "8082", "authentication": "mandatory"} storage_client_mock = MagicMock(spec=StorageClientAsync) c_mgr = ConfigurationManager(storage_client_mock) + storage_value_entry1 = {'description': 'Port to accept HTTP connections on', 'displayName': 
'HTTP Port', 'value': '8081', 'default': '8081', 'order': '2', 'type': 'integer'} + storage_value_entry2 = {'options': ['mandatory', 'optional'], 'description': 'API Call Authentication', 'displayName': 'Authentication', 'value': 'optional', 'default': 'optional', 'order': '5', 'type': 'enumeration'} with patch.object(connect, 'get_storage_async', return_value=storage_client_mock): - with patch.object(c_mgr, 'update_configuration_item_bulk', return_value=async_mock(response)) as patch_update_bulk: - with patch.object(c_mgr, 'get_category_all_items', return_value=async_mock(result)) as patch_get_all_items: - resp = await client.put('/foglamp/category/{}'.format(category_name), data=json.dumps(payload)) - assert 200 == resp.status - r = await resp.text() - json_response = json.loads(r) - assert result == json_response - patch_get_all_items.assert_called_once_with(category_name) - patch_update_bulk.assert_called_once_with(category_name, payload) + with patch.object(c_mgr, 'get_category_item', side_effect=[async_mock(storage_value_entry1), async_mock(storage_value_entry2)]) as patch_get_cat_item: + with patch.object(c_mgr, 'update_configuration_item_bulk', return_value=async_mock(response)) as patch_update_bulk: + with patch.object(c_mgr, 'get_category_all_items', return_value=async_mock(result)) as patch_get_all_items: + resp = await client.put('/foglamp/category/{}'.format(category_name), data=json.dumps(payload)) + assert 200 == resp.status + r = await resp.text() + json_response = json.loads(r) + assert result == json_response + patch_get_all_items.assert_called_once_with(category_name) + patch_update_bulk.assert_called_once_with(category_name, payload) + assert 2 == patch_get_cat_item.call_count + calls = patch_get_cat_item.call_args_list + args, kwargs = calls[0] + assert category_name == args[0] + assert list(payload)[0] == args[1] + args, kwargs = calls[1] + assert category_name == args[0] + assert list(payload)[1] == args[1] async def 
test_delete_configuration(self, client, category_name='rest_api'): result = {'result': 'Category {} deleted successfully.'.format(category_name)} @@ -742,4 +831,3 @@ async def test_delete_configuration(self, client, category_name='rest_api'): assert 1 == patch_delete_cat.call_count args, kwargs = patch_delete_cat.call_args assert category_name == args[0] - From 8af5f5e55ff75d04cfaea8a0ea6d32b6c4003d2c Mon Sep 17 00:00:00 2001 From: Cloud User Date: Mon, 22 Apr 2019 23:30:05 +0000 Subject: [PATCH 060/161] FOGL-2751 changes to compile on RHEL --- C/common/config_category.cpp | 4 +++- C/services/south/ingest.cpp | 2 +- CMakeLists.txt | 6 ++++++ 3 files changed, 10 insertions(+), 2 deletions(-) diff --git a/C/common/config_category.cpp b/C/common/config_category.cpp index 3893202b5c..dce4fdf791 100644 --- a/C/common/config_category.cpp +++ b/C/common/config_category.cpp @@ -19,6 +19,8 @@ #include #include #include +#include + using namespace std; using namespace rapidjson; @@ -1507,7 +1509,7 @@ ConfigCategoryChange::ConfigCategoryChange(const string& json) catch (exception* e) { Logger::getLogger()->error("Configuration parse error in category %s item '%s', %s: %s", - m_name, + m_name.c_str(), itr->name.GetString(), json.c_str(), e->what()); diff --git a/C/services/south/ingest.cpp b/C/services/south/ingest.cpp index 0ace99507e..90caaf03eb 100644 --- a/C/services/south/ingest.cpp +++ b/C/services/south/ingest.cpp @@ -380,7 +380,7 @@ vector* newQ = new vector(); lock_guard guard(m_qMutex); // BUffer current data in m_data - m_queue->insert(m_queue->cbegin(), + m_queue->insert(m_queue->begin(), m_data->begin(), m_data->end()); // Is it possible that some of the readings are stored in DB, and others are not? 
diff --git a/CMakeLists.txt b/CMakeLists.txt index c6696579ba..3d312134db 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -1,7 +1,13 @@ cmake_minimum_required (VERSION 2.8.8) project (FogLAMP) +set(CMAKE_BUILD_TYPE Debug) +set(CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG} -g -Wall") set(CMAKE_CXX_FLAGS "-std=c++11 -O3") +set(PYTHON_LIBRARY /opt/rh/rh-python36/root/usr/lib64) +set(PYTHON_INCLUDE_DIR /opt/rh/rh-python36/root/usr/include/python3.6m) +set(SQLITE_INCLUDE_DIR /usr/local/include) +set(SQLITE_LIBRARIES /usr/local/lib) add_subdirectory(C/common) add_subdirectory(C/services/common) From c1f39ddab7da16e40d58bbfc98d8fd7536003652 Mon Sep 17 00:00:00 2001 From: stefano Date: Tue, 23 Apr 2019 15:41:47 +0200 Subject: [PATCH 061/161] FOGL-2740: PG backup fixed. --- python/foglamp/plugins/storage/postgres/backup_restore/lib.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/python/foglamp/plugins/storage/postgres/backup_restore/lib.py b/python/foglamp/plugins/storage/postgres/backup_restore/lib.py index 0da14b7e9b..c3d59c47c3 100644 --- a/python/foglamp/plugins/storage/postgres/backup_restore/lib.py +++ b/python/foglamp/plugins/storage/postgres/backup_restore/lib.py @@ -206,7 +206,7 @@ class BackupStatus (object): class BackupRestoreLib(object): """ Library of functionalities for the backup restore operations that requires information/state to be stored """ - STORAGE_EXE = "/services/storage" + STORAGE_EXE = "/services/foglamp.services.storage" MAX_NUMBER_OF_BACKUPS_TO_RETRIEVE = 9999 """" Maximum number of backup information to retrieve from the storage layer""" From eb39c15d8f5f03ce6672161c3dd5314f6755ed49 Mon Sep 17 00:00:00 2001 From: stefano Date: Tue, 23 Apr 2019 16:00:49 +0200 Subject: [PATCH 062/161] FOGL-2740: PG restore fixed. 
--- .../storage/postgres/backup_restore/restore_postgres.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/python/foglamp/plugins/storage/postgres/backup_restore/restore_postgres.py b/python/foglamp/plugins/storage/postgres/backup_restore/restore_postgres.py index 006f7ef9b2..5495ee7e92 100644 --- a/python/foglamp/plugins/storage/postgres/backup_restore/restore_postgres.py +++ b/python/foglamp/plugins/storage/postgres/backup_restore/restore_postgres.py @@ -50,6 +50,7 @@ import signal import uuid +from foglamp.common.parser import Parser from foglamp.services.core import server from foglamp.common.process import FoglampProcess from foglamp.common import logger @@ -238,8 +239,9 @@ def __init__(self): # Handled Restore command line parameters try: - self._backup_id = super().get_arg_value("--backup-id") - self._file_name = super().get_arg_value("--file") + self._backup_id = Parser.get('--backup-id') + self._file_name = Parser.get('--file') + except Exception as _ex: _message = _MESSAGES_LIST["e000003"].format(_ex) From 63e04538630ae576b3d03bcea8ef1ac17b601280 Mon Sep 17 00:00:00 2001 From: ashish-jabble Date: Wed, 24 Apr 2019 13:04:06 +0530 Subject: [PATCH 063/161] install debian cmd fixes with non-cmdutil --- python/foglamp/services/core/api/plugins/install.py | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/python/foglamp/services/core/api/plugins/install.py b/python/foglamp/services/core/api/plugins/install.py index 259a4d36a4..f68fd47a83 100644 --- a/python/foglamp/services/core/api/plugins/install.py +++ b/python/foglamp/services/core/api/plugins/install.py @@ -129,12 +129,11 @@ def extract_file(file_name: str, is_compressed: bool) -> list: def install_debian(file_name: str): - apt_install = subprocess.run(["{}/extras/C/cmdutil".format(_FOGLAMP_ROOT), "apt-install", - "{}/data/plugins/{}".format(_FOGLAMP_ROOT, file_name)], - stdout=subprocess.PIPE, stderr=subprocess.PIPE) - msg = 
apt_install.stdout.decode("utf-8") if apt_install.returncode != 0 else apt_install.stderr.decode("utf-8") - _LOGGER.exception("O/P....{}...{}".format(msg, apt_install.returncode)) - return msg + cmd = "sudo apt -y install {}/data/plugins/{}".format(_FOGLAMP_ROOT, file_name) + ret_code = os.system(cmd) + # apt is invoked exactly once above; ret_code holds its exit status + _LOGGER.exception("Return Code....{}".format(ret_code)) + return ret_code def copy_file_install_requirement(dir_files: list, plugin_type: str): From 3b4cc52838753c785e8926dfc22636abbe4e67f1 Mon Sep 17 00:00:00 2001 From: ashish-jabble Date: Wed, 24 Apr 2019 13:37:56 +0530 Subject: [PATCH 064/161] FOGL-2614 browser system API tests added (#1465) * browser system API tests added; initial commit with basic test data * browser tests added with programmatic plugin * change in plugin name to avoid discovery by pytest --- .../system/python/api/test_browser_assets.py | 340 ++++++++++++++++++ tests/system/python/data/dummyplugin.py | 173 +++++++++ 2 files changed, 513 insertions(+) create mode 100644 tests/system/python/api/test_browser_assets.py create mode 100644 tests/system/python/data/dummyplugin.py diff --git a/tests/system/python/api/test_browser_assets.py b/tests/system/python/api/test_browser_assets.py new file mode 100644 index 0000000000..8441ab417c --- /dev/null +++ b/tests/system/python/api/test_browser_assets.py @@ -0,0 +1,340 @@ +# -*- coding: utf-8 -*- + +# FOGLAMP_BEGIN +# See: http://foglamp.readthedocs.io/ +# FOGLAMP_END + +""" Test Browser Assets REST API """ + + +import os +import shutil +import http.client +import time +import json +import pytest +from datetime import datetime + + +__author__ = "Ashish Jabble, Vaibhav Singhal" +__copyright__ = "Copyright (c) 2019 Dianomic Systems" +__license__ = "Apache 2.0" +__version__ = "${VERSION}" + +ASSET_NAME = 'test-loudness' +SENSOR = 'loudness' +SENSOR_VALUES = [1, 2, 3, 4, 5, 6] +SOUTH_PLUGIN_NAME = 'dummyplugin' +SERVICE_NAME = 'TestBrowserAPI' + + +def 
validate_date_format(dt_txt, fmt): + try: + datetime.strptime(dt_txt, fmt) + except ValueError: + return False + else: + return True + + +class TestBrowserAssets: + + @pytest.fixture + def start_south(self, reset_and_start_foglamp, remove_directories, foglamp_url, south_plugin=SOUTH_PLUGIN_NAME): + """ This fixture clone a south repo and starts south instance + reset_and_start_foglamp: Fixture that resets and starts foglamp, no explicit invocation, called at start + remove_directories: Fixture that remove directories created during the tests""" + + # Create a south plugin + plugin_dir = os.path.join(os.path.expandvars('${FOGLAMP_ROOT}'), 'python/foglamp/plugins/south/dummyplugin') + plugin_file = os.path.join(os.path.expandvars('${FOGLAMP_ROOT}'), 'tests/system/python/data/dummyplugin.py') + try: + os.mkdir(plugin_dir) + except FileExistsError: + print("Directory ", plugin_dir, " already exists") + + shutil.copy2(plugin_file, plugin_dir) + # Create south service + conn = http.client.HTTPConnection(foglamp_url) + data = {"name": "{}".format(SERVICE_NAME), "type": "South", "plugin": "{}".format(south_plugin), + "enabled": "true", "config": {}} + conn.request("POST", '/foglamp/service', json.dumps(data)) + r = conn.getresponse() + assert 200 == r.status + r = r.read().decode() + retval = json.loads(r) + assert SERVICE_NAME == retval["name"] + + yield self.start_south + + # Cleanup code that runs after the caller test is over + remove_directories(plugin_dir) + + def test_get_asset_counts(self, start_south, foglamp_url, wait_time): + """Test that browsing an asset gives correct asset name and asset count""" + time.sleep(wait_time * 2) + conn = http.client.HTTPConnection(foglamp_url) + conn.request("GET", '/foglamp/asset') + r = conn.getresponse() + assert 200 == r.status + r = r.read().decode() + jdoc = json.loads(r) + assert len(jdoc), "No data found" + assert ASSET_NAME == jdoc[0]["assetCode"] + assert 6 == jdoc[0]["count"] + + def test_get_asset(self, foglamp_url): + 
"""Test that browsing an asset gives correct asset values""" + conn = http.client.HTTPConnection(foglamp_url) + conn.request("GET", '/foglamp/asset/{}'.format(ASSET_NAME)) + r = conn.getresponse() + assert 200 == r.status + r = r.read().decode() + jdoc = json.loads(r) + assert len(jdoc), "No data found" + i = 0 + for val in SENSOR_VALUES: + assert {SENSOR: val} == jdoc[i]['reading'] + i += 1 + + @pytest.mark.parametrize(("query", "expected_count", "expected_values"), [ + ('?limit=1', 1, [SENSOR_VALUES[0]]), + ('?limit=1&skip=1', 1, [SENSOR_VALUES[1]]), + ('?seconds=59', 2, SENSOR_VALUES[0:2]), + ('?minutes=15', 4, SENSOR_VALUES[0:4]), + ('?hours=4', 5, SENSOR_VALUES[0:5]), + ('?hours=20&minutes=20&seconds=59&limit=20', 2, SENSOR_VALUES[0:2]), # Verify that if a combination of hrs, min, sec is used, shortest period will apply + ('?limit=&hours=&minutes=&seconds=', 6, SENSOR_VALUES) + # In case of empty params, all values are returned + ]) + def test_get_asset_query(self, foglamp_url, query, expected_count, expected_values): + """Test that browsing an asset with query parameters gives correct asset values""" + conn = http.client.HTTPConnection(foglamp_url) + conn.request("GET", '/foglamp/asset/{}{}'.format(ASSET_NAME, query)) + r = conn.getresponse() + assert 200 == r.status + r = r.read().decode() + jdoc = json.loads(r) + assert len(jdoc) == expected_count + i = 0 + for item in expected_values: + assert {SENSOR: item} == jdoc[i]['reading'] + i += 1 + + @pytest.mark.parametrize("request_params, response_code, response_message", [ + ('?limit=invalid', 400, "Limit must be a positive integer"), + ('?limit=-1', 400, "Limit must be a positive integer"), + ('?skip=invalid', 400, "Skip/Offset must be a positive integer"), + ('?skip=-1', 400, "Skip/Offset must be a positive integer"), + ('?minutes=-1', 400, "Time must be a positive integer"), + ('?minutes=blah', 400, "Time must be a positive integer"), + ('?seconds=-1', 400, "Time must be a positive integer"), + 
('?seconds=blah', 400, "Time must be a positive integer"), + ('?hours=-1', 400, "Time must be a positive integer"), + ('?hours=blah', 400, "Time must be a positive integer") + ]) + def test_get_asset_query_bad_data(self, foglamp_url, request_params, response_code, response_message): + """Test that browsing an asset with invalid query parameters generates http errors""" + conn = http.client.HTTPConnection(foglamp_url) + conn.request("GET", '/foglamp/asset/{}{}'.format(ASSET_NAME, request_params)) + r = conn.getresponse() + conn.close() + assert response_code == r.status + assert response_message == r.reason + + def test_get_asset_reading(self, foglamp_url): + """Test that browsing an asset's data point gives correct asset data point values""" + conn = http.client.HTTPConnection(foglamp_url) + conn.request("GET", '/foglamp/asset/{}/{}'.format(ASSET_NAME, SENSOR)) + r = conn.getresponse() + assert 200 == r.status + r = r.read().decode() + jdoc = json.loads(r) + assert len(jdoc), "No data found" + i = 0 + for val in SENSOR_VALUES: + assert val == jdoc[i][SENSOR] + i += 1 + + @pytest.mark.parametrize(("query", "expected_count", "expected_values"), [ + ('?limit=1', 1, [SENSOR_VALUES[0]]), + ('?limit=1&skip=1', 1, [SENSOR_VALUES[1]]), + ('?seconds=59', 2, SENSOR_VALUES[0:2]), + ('?minutes=15', 4, SENSOR_VALUES[0:4]), + ('?hours=4', 5, SENSOR_VALUES[0:5]), + ('?hours=20&minutes=20&seconds=59&limit=20', 2, SENSOR_VALUES[0:2]), # Verify that if a combination of hrs, min, sec is used, shortest period will apply + ('?limit=&hours=&minutes=&seconds=', 6, SENSOR_VALUES) + # In case of empty params, all values are returned + ]) + def test_get_asset_readings_query(self, foglamp_url, query, expected_count, expected_values): + """Test that browsing an asset's data point with query parameters gives correct asset data point values""" + conn = http.client.HTTPConnection(foglamp_url) + conn.request("GET", '/foglamp/asset/{}/{}{}'.format(ASSET_NAME, SENSOR, query)) + r = 
conn.getresponse() + assert 200 == r.status + r = r.read().decode() + jdoc = json.loads(r) + assert len(jdoc) == expected_count + i = 0 + for item in expected_values: + assert item == jdoc[i][SENSOR] + i += 1 + + def test_get_asset_summary(self, foglamp_url): + """Test that browsing an asset's summary gives correct min, max and average values""" + conn = http.client.HTTPConnection(foglamp_url) + conn.request("GET", '/foglamp/asset/{}/summary'.format(ASSET_NAME)) + r = conn.getresponse() + assert 200 == r.status + r = r.read().decode() + jdoc = json.loads(r) + assert len(jdoc), "No data found" + summary = jdoc[0][SENSOR] + avg = sum(SENSOR_VALUES) / len(SENSOR_VALUES) + assert avg == summary['average'] + assert max(SENSOR_VALUES) == summary['max'] + assert min(SENSOR_VALUES) == summary['min'] + + def test_get_asset_readings_summary_invalid_sensor(self, foglamp_url): + """Test that browsing a non existing asset's data point summary gives blank min, max and average values""" + conn = http.client.HTTPConnection(foglamp_url) + invalid_sensor = "invalid" + conn.request("GET", '/foglamp/asset/{}/{}/summary'.format(ASSET_NAME, invalid_sensor)) + r = conn.getresponse() + assert 200 == r.status + r = r.read().decode() + jdoc = json.loads(r) + summary = jdoc[invalid_sensor] + assert "" == summary['average'] + assert "" == summary['max'] + assert "" == summary['min'] + + def test_get_asset_readings_summary(self, foglamp_url): + """Test that browsing an asset's data point summary gives correct min, max and average values""" + conn = http.client.HTTPConnection(foglamp_url) + conn.request("GET", '/foglamp/asset/{}/{}/summary'.format(ASSET_NAME, SENSOR)) + r = conn.getresponse() + assert 200 == r.status + r = r.read().decode() + jdoc = json.loads(r) + assert len(jdoc), "No data found" + summary = jdoc[SENSOR] + avg = sum(SENSOR_VALUES) / len(SENSOR_VALUES) + assert avg == summary['average'] + assert max(SENSOR_VALUES) == summary['max'] + assert min(SENSOR_VALUES) == summary['min'] 
+ + def test_get_asset_series(self, foglamp_url): + """Test that browsing an asset's data point time series gives correct min, max and average values + for all timestamps""" + conn = http.client.HTTPConnection(foglamp_url) + conn.request("GET", '/foglamp/asset/{}/{}/series'.format(ASSET_NAME, SENSOR)) + r = conn.getresponse() + assert 200 == r.status + r = r.read().decode() + jdoc = json.loads(r) + i = 0 + # Min, average and max values of a time series data is noting but the value itself if readings were ingested at + # different timestamps + for val in SENSOR_VALUES: + assert val == jdoc[i]['min'] + assert val == jdoc[i]['average'] + assert val == jdoc[i]['max'] + i += 1 + + @pytest.mark.parametrize(("query", "expected_count", "expected_values"), [ + ('?limit=1', 1, [SENSOR_VALUES[0]]), + ('?limit=1&skip=1', 1, [SENSOR_VALUES[1]]), + ('?seconds=59', 2, SENSOR_VALUES[0:2]), + ('?minutes=15', 4, SENSOR_VALUES[0:4]), + ('?hours=4', 5, SENSOR_VALUES[0:5]), + ('?hours=20&minutes=20&seconds=59&limit=20', 2, SENSOR_VALUES[0:2]), # Verify that if a combination of hrs, min, sec is used, shortest period will apply + ('?limit=&hours=&minutes=&seconds=', 6, SENSOR_VALUES) + # In case of empty params, all values are returned + ]) + def test_get_asset_series_query_time_limit(self, foglamp_url, query, expected_count, expected_values): + """Test that browsing an asset's data point time series with query parameter + gives correct min, max and average values for all timestamps""" + conn = http.client.HTTPConnection(foglamp_url) + conn.request("GET", '/foglamp/asset/{}/{}/series{}'.format(ASSET_NAME, SENSOR, query)) + r = conn.getresponse() + assert 200 == r.status + r = r.read().decode() + jdoc = json.loads(r) + assert len(jdoc) == expected_count + i = 0 + for item in expected_values: + assert item == jdoc[i]['min'] + assert item == jdoc[i]['average'] + assert item == jdoc[i]['max'] + i += 1 + + def test_get_asset_series_query_group_sec(self, foglamp_url): + """Test that browsing 
an asset's data point time series with seconds grouping + gives correct min, max and average values""" + conn = http.client.HTTPConnection(foglamp_url) + conn.request("GET", '/foglamp/asset/{}/{}/series{}'.format(ASSET_NAME, SENSOR, '?group=seconds')) + r = conn.getresponse() + assert 200 == r.status + r = r.read().decode() + jdoc = json.loads(r) + assert len(jdoc) == 6 + for i in range(0, len(jdoc)): + assert SENSOR_VALUES[i] == jdoc[i]['min'] + assert SENSOR_VALUES[i] == jdoc[i]['average'] + assert SENSOR_VALUES[i] == jdoc[i]['max'] + assert validate_date_format(jdoc[i]['timestamp'], '%Y-%m-%d %H:%M:%S'), "timestamp format do not match" + + def test_get_asset_series_query_group_min(self, foglamp_url): + """Test that browsing an asset's data point time series with minutes grouping + gives correct min, max and average values""" + conn = http.client.HTTPConnection(foglamp_url) + conn.request("GET", '/foglamp/asset/{}/{}/series{}'.format(ASSET_NAME, SENSOR, '?group=minutes')) + r = conn.getresponse() + assert 200 == r.status + r = r.read().decode() + jdoc = json.loads(r) + assert len(jdoc) == 5 + + assert (sum(SENSOR_VALUES[0:2]) / len(SENSOR_VALUES[0:2])) == jdoc[0]['average'] + assert min(SENSOR_VALUES[0:2]) == jdoc[0]['min'] + assert max(SENSOR_VALUES[0:2]) == jdoc[0]['max'] + assert validate_date_format(jdoc[0]['timestamp'], '%Y-%m-%d %H:%M'), "timestamp format do not match" + + for i in range(1, len(jdoc) - 1): + assert SENSOR_VALUES[i + 1] == jdoc[i]['min'] + assert SENSOR_VALUES[i + 1] == jdoc[i]['average'] + assert SENSOR_VALUES[i + 1] == jdoc[i]['max'] + assert validate_date_format(jdoc[i + 1]['timestamp'], '%Y-%m-%d %H:%M'), "timestamp format do not match" + + def test_get_asset_series_query_group_hrs(self, foglamp_url): + """Test that browsing an asset's data point time series with hour grouping + gives correct min, max and average values""" + conn = http.client.HTTPConnection(foglamp_url) + conn.request("GET", 
'/foglamp/asset/{}/{}/series{}'.format(ASSET_NAME, SENSOR, '?group=hours')) + r = conn.getresponse() + assert 200 == r.status + r = r.read().decode() + jdoc = json.loads(r) + assert len(jdoc) == 3 + + assert (sum(SENSOR_VALUES[0:4]) / len(SENSOR_VALUES[0:4])) == jdoc[0]['average'] + assert min(SENSOR_VALUES[0:4]) == jdoc[0]['min'] + assert max(SENSOR_VALUES[0:4]) == jdoc[0]['max'] + assert validate_date_format(jdoc[0]['timestamp'], '%Y-%m-%d %H'), "timestamp format do not match" + + for i in range(4, 6): + assert SENSOR_VALUES[i] == jdoc[i - 3]['min'] + assert SENSOR_VALUES[i] == jdoc[i - 3]['average'] + assert SENSOR_VALUES[i] == jdoc[i - 3]['max'] + assert validate_date_format(jdoc[i - 3]['timestamp'], '%Y-%m-%d %H'), "timestamp format do not match" + + def test_get_asset_sensor_readings_invalid_group(self, foglamp_url): + """Test that browsing an asset's data point time series with invalid grouping + gives http error""" + conn = http.client.HTTPConnection(foglamp_url) + conn.request("GET", '/foglamp/asset/{}/{}/series?group=blah'.format(ASSET_NAME, SENSOR)) + r = conn.getresponse() + conn.close() + assert r.status == 400 + assert r.reason == "blah is not a valid group" diff --git a/tests/system/python/data/dummyplugin.py b/tests/system/python/data/dummyplugin.py new file mode 100644 index 0000000000..f47de77468 --- /dev/null +++ b/tests/system/python/data/dummyplugin.py @@ -0,0 +1,173 @@ +# FOGLAMP_BEGIN +# See: http://foglamp.readthedocs.io/ +# FOGLAMP_END + +""" Async Plugin used for testing purpose """ +import asyncio +import copy +import uuid +import logging +import async_ingest + +from foglamp.common import logger +from foglamp.services.south import exceptions +from threading import Thread +from datetime import datetime, timezone, timedelta + +__author__ = "Vaibhav Singhal" +__copyright__ = "Copyright (c) 2019 Dianomic Systems" +__license__ = "Apache 2.0" +__version__ = "${VERSION}" + +c_callback = None +c_ingest_ref = None +loop = None +_task = None +t = 
None + +_DEFAULT_CONFIG = { + 'plugin': { + 'description': 'Test Async Plugin', + 'type': 'string', + 'default': 'dummyplugin', + 'readonly': 'true' + }, + 'assetPrefix': { + 'description': 'Prefix of asset name', + 'type': 'string', + 'default': 'test-', + 'order': '1', + 'displayName': 'Asset Name Prefix' + }, + 'loudnessAssetName': { + 'description': 'Loudness sensor asset name', + 'type': 'string', + 'default': 'loudness', + 'order': '3', + 'displayName': 'Loudness Sensor Asset Name' + } +} + +_LOGGER = logger.setup(__name__, level=logging.INFO) + + +def plugin_info(): + """ Returns information about the plugin. + Args: + Returns: + dict: plugin information + Raises: + """ + return { + 'name': 'TEST Async Plugin', + 'version': '2.0.0', + 'mode': 'async', + 'type': 'south', + 'interface': '1.0', + 'config': _DEFAULT_CONFIG + } + + +def plugin_init(config): + """ Initialise the plugin. + Args: + config: JSON configuration document for the South plugin configuration category + Returns: + data: JSON object to be used in future calls to the plugin + Raises: + """ + handle = copy.deepcopy(config) + return handle + + +def plugin_start(handle): + """ Extracts data from the sensor and returns it in a JSON document as a Python dict. + Available for async mode only. 
+ Args: + handle: handle returned by the plugin initialisation call + Returns: + returns a sensor reading in a JSON document, as a Python dict, if it is available + None - If no reading is available + Raises: + TimeoutError + """ + global _task, loop, t + loop = asyncio.new_event_loop() + _task = asyncio.ensure_future(_start_aiotest(handle), loop=loop) + + def run(): + global loop + loop.run_forever() + + t = Thread(target=run) + t.start() + + +async def _start_aiotest(handle): + # This plugin adds 6 data points 2 within same min, 2 within same hour and 2 within same day + # this data is useful when testing asset browsing based on timestamps + ts_lst = list() + ts_lst.append(str(datetime.now(timezone.utc).astimezone())) + ts_lst.append(str(datetime.now(timezone.utc).astimezone() - timedelta(seconds=3))) + ts_lst.append(str(datetime.now(timezone.utc).astimezone() - timedelta(minutes=5))) + ts_lst.append(str(datetime.now(timezone.utc).astimezone() - timedelta(minutes=6))) + ts_lst.append(str(datetime.now(timezone.utc).astimezone() - timedelta(hours=3))) + ts_lst.append(str(datetime.now(timezone.utc).astimezone() - timedelta(hours=5))) + + i = 1 + for user_ts in ts_lst: + try: + data = list() + data.append({ + 'asset': '{}{}'.format(handle['assetPrefix']['value'], handle['loudnessAssetName']['value']), + 'timestamp': user_ts, + 'key': str(uuid.uuid4()), + 'readings': {"loudness": i} + }) + async_ingest.ingest_callback(c_callback, c_ingest_ref, data) + await asyncio.sleep(0.1) + + except (Exception, RuntimeError) as ex: + _LOGGER.exception("TEST exception: {}".format(str(ex))) + raise exceptions.DataRetrievalError(ex) + else: + i += 1 + + +def plugin_register_ingest(handle, callback, ingest_ref): + """Required plugin interface component to communicate to South C server + + Args: + handle: handle returned by the plugin initialisation call + callback: C opaque object required to passed back to C->ingest method + ingest_ref: C opaque object required to passed back to 
C->ingest method + """ + global c_callback, c_ingest_ref + c_callback = callback + c_ingest_ref = ingest_ref + + +def plugin_reconfigure(handle, new_config): + """ Reconfigures the plugin + + Args: + handle: handle returned by the plugin initialisation call + new_config: JSON object representing the new configuration category for the category + Returns: + new_handle: new handle to be used in the future calls + """ + _LOGGER.info("Old config for TEST plugin {} \n new config {}".format(handle, new_config)) + new_handle = copy.deepcopy(new_config) + return new_handle + + +def plugin_shutdown(handle): + """ Shutdowns the plugin doing required cleanup, to be called prior to the South plugin service being shut down. + + Args: + handle: handle returned by the plugin initialisation call + Returns: + plugin shutdown + """ + _LOGGER.info('TEST plugin shut down.') + From 46103dcc02f146bf5cfd3c587fd58947dd3e4479 Mon Sep 17 00:00:00 2001 From: ashish-jabble Date: Wed, 24 Apr 2019 14:42:56 +0530 Subject: [PATCH 065/161] return code and proper message reason fixes --- .../services/core/api/plugins/install.py | 17 +++++++++++------ 1 file changed, 11 insertions(+), 6 deletions(-) diff --git a/python/foglamp/services/core/api/plugins/install.py b/python/foglamp/services/core/api/plugins/install.py index f68fd47a83..d2e008e4ae 100644 --- a/python/foglamp/services/core/api/plugins/install.py +++ b/python/foglamp/services/core/api/plugins/install.py @@ -84,10 +84,9 @@ async def add_plugin(request: web.Request) -> web.Response: _LOGGER.info("Files {} {}".format(files, type(files))) copy_file_install_requirement(files, plugin_type) else: - code = install_debian(file_name) + code, msg = install_debian(file_name) if code != 0: - # FIXME: proper message - raise ValueError('Something went wrong!') + raise ValueError(msg) except FileNotFoundError as ex: raise web.HTTPNotFound(reason=str(ex)) except (TypeError, ValueError) as ex: @@ -129,11 +128,17 @@ def extract_file(file_name: str, 
is_compressed: bool) -> list: def install_debian(file_name: str): - cmd = "sudo apt -y install {}/data/plugins/{}".format(_FOGLAMP_ROOT, file_name) - ret_code = os.system(cmd) + cmd = "sudo apt -y install {}/data/plugins/{} > {}/data/plugins/output.txt 2>&1".format(_FOGLAMP_ROOT, file_name, _FOGLAMP_ROOT) + _LOGGER.exception("CMD....{}".format(cmd)) ret_code = os.system(cmd) _LOGGER.exception("Return Code....{}".format(ret_code)) - return ret_code + msg = "" + with open("{}/data/plugins/output.txt".format(_FOGLAMP_ROOT), 'r') as fh: + for line in fh: + line = line.rstrip("\n") + msg += line + _LOGGER.exception("Message.....{}", msg) + return ret_code, msg def copy_file_install_requirement(dir_files: list, plugin_type: str): From 73250d258687b2ff7a99a61fad4e95abf8e988fe Mon Sep 17 00:00:00 2001 From: Vaibhav Singhal Date: Wed, 24 Apr 2019 15:42:51 +0530 Subject: [PATCH 066/161] bypass user confirmation to add to knownhost --- tests/system/python/e2e/test_e2e_foglamp_pair.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/tests/system/python/e2e/test_e2e_foglamp_pair.py b/tests/system/python/e2e/test_e2e_foglamp_pair.py index e29a9b6144..adc9eb7655 100644 --- a/tests/system/python/e2e/test_e2e_foglamp_pair.py +++ b/tests/system/python/e2e/test_e2e_foglamp_pair.py @@ -76,17 +76,17 @@ def reset_and_start_foglamp_remote(self, storage_plugin, remote_user, remote_ip, """ if remote_foglamp_path is None: remote_foglamp_path = '/home/{}/FogLAMP'.format(remote_user) - subprocess.run(["ssh -i {} {}@{} 'export FOGLAMP_ROOT={};$FOGLAMP_ROOT/scripts/foglamp kill'".format(key_path, remote_user, + subprocess.run(["ssh -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -i {} {}@{} 'export FOGLAMP_ROOT={};$FOGLAMP_ROOT/scripts/foglamp kill'".format(key_path, remote_user, remote_ip, remote_foglamp_path)], shell=True, check=True) if storage_plugin == 'postgres': - subprocess.run(["ssh -i {} {}@{} sed -i 's/sqlite/postgres/g' 
{}/data/etc/storage.json".format(key_path, remote_user, remote_ip, remote_foglamp_path)], shell=True, check=True) + subprocess.run(["ssh -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -i {} {}@{} sed -i 's/sqlite/postgres/g' {}/data/etc/storage.json".format(key_path, remote_user, remote_ip, remote_foglamp_path)], shell=True, check=True) else: - subprocess.run(["ssh -i {} {}@{} sed -i 's/postgres/sqlite/g' {}/data/etc/storage.json".format(key_path, remote_user, remote_ip, remote_foglamp_path)], shell=True, check=True) + subprocess.run(["ssh -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -i {} {}@{} sed -i 's/postgres/sqlite/g' {}/data/etc/storage.json".format(key_path, remote_user, remote_ip, remote_foglamp_path)], shell=True, check=True) - subprocess.run(["ssh -i {} {}@{} 'export FOGLAMP_ROOT={};echo YES | $FOGLAMP_ROOT/scripts/foglamp reset'".format(key_path, remote_user, remote_ip, remote_foglamp_path)], shell=True, check=True) - subprocess.run(["ssh -i {} {}@{} 'export FOGLAMP_ROOT={};$FOGLAMP_ROOT/scripts/foglamp start'".format(key_path, remote_user, remote_ip, remote_foglamp_path)], shell=True) - stat = subprocess.run(["ssh -i {} {}@{} 'export FOGLAMP_ROOT={}; $FOGLAMP_ROOT/scripts/foglamp status'".format(key_path, remote_user, remote_ip, remote_foglamp_path)], shell=True, stdout=subprocess.PIPE) + subprocess.run(["ssh -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -i {} {}@{} 'export FOGLAMP_ROOT={};echo YES | $FOGLAMP_ROOT/scripts/foglamp reset'".format(key_path, remote_user, remote_ip, remote_foglamp_path)], shell=True, check=True) + subprocess.run(["ssh -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -i {} {}@{} 'export FOGLAMP_ROOT={};$FOGLAMP_ROOT/scripts/foglamp start'".format(key_path, remote_user, remote_ip, remote_foglamp_path)], shell=True) + stat = subprocess.run(["ssh -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -i {} {}@{} 'export FOGLAMP_ROOT={}; $FOGLAMP_ROOT/scripts/foglamp 
status'".format(key_path, remote_user, remote_ip, remote_foglamp_path)], shell=True, stdout=subprocess.PIPE) assert "FogLAMP not running." not in stat.stdout.decode("utf-8") @pytest.fixture @@ -114,7 +114,7 @@ def start_south_north_remote(self, reset_and_start_foglamp_remote, use_pip_cache # Install http_south python plugin on remote machine try: subprocess.run([ - "scp -i {} $FOGLAMP_ROOT/tests/system/python/scripts/install_python_plugin {}@{}:/tmp/".format( + "scp -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -i {} $FOGLAMP_ROOT/tests/system/python/scripts/install_python_plugin {}@{}:/tmp/".format( key_path, remote_user, remote_ip)], shell=True, check=True) subprocess.run(["ssh -i {} {}@{} 'export FOGLAMP_ROOT={}; /tmp/install_python_plugin {} south {} {}'".format( key_path, remote_user, remote_ip, remote_foglamp_path, south_branch, south_plugin, use_pip_cache)], From eb70ad8072e2a9f1431403d9e4ade5ad43a9bd3b Mon Sep 17 00:00:00 2001 From: ashish-jabble Date: Wed, 24 Apr 2019 17:02:49 +0530 Subject: [PATCH 067/161] test fixes and foglamp.sudoer changes --- python/foglamp/services/core/api/plugins/install.py | 2 +- scripts/extras/foglamp.sudoers | 3 +-- .../python/foglamp/services/core/api/plugins/test_install.py | 4 ++-- 3 files changed, 4 insertions(+), 5 deletions(-) diff --git a/python/foglamp/services/core/api/plugins/install.py b/python/foglamp/services/core/api/plugins/install.py index d2e008e4ae..1cca541cfb 100644 --- a/python/foglamp/services/core/api/plugins/install.py +++ b/python/foglamp/services/core/api/plugins/install.py @@ -137,7 +137,7 @@ def install_debian(file_name: str): for line in fh: line = line.rstrip("\n") msg += line - _LOGGER.exception("Message.....{}", msg) + _LOGGER.exception("Message.....{}".format(msg)) return ret_code, msg diff --git a/scripts/extras/foglamp.sudoers b/scripts/extras/foglamp.sudoers index 12c0471785..48c60ab4cc 100644 --- a/scripts/extras/foglamp.sudoers +++ b/scripts/extras/foglamp.sudoers @@ -1,2 +1 @@ 
-%sudo ALL=(ALL) NOPASSWD: /usr/bin/apt-get -y update, /usr/bin/apt-get -y install foglamp - +%sudo ALL=(ALL) NOPASSWD: /usr/bin/apt-get -y update, /usr/bin/apt-get -y install foglamp, /usr/bin/apt -y install /usr/local/foglamp/data/plugins/foglamp*.deb diff --git a/tests/unit/python/foglamp/services/core/api/plugins/test_install.py b/tests/unit/python/foglamp/services/core/api/plugins/test_install.py index 1abdbb1a00..4d686022cb 100644 --- a/tests/unit/python/foglamp/services/core/api/plugins/test_install.py +++ b/tests/unit/python/foglamp/services/core/api/plugins/test_install.py @@ -139,7 +139,7 @@ async def async_mock(): param = {"url": url_value, "format": "deb", "type": "south", "checksum": checksum_value} with patch.object(plugins_install, 'download', return_value=async_mock()) as download_patch: with patch.object(plugins_install, 'validate_checksum', return_value=True) as checksum_patch: - with patch.object(plugins_install, 'install_debian', return_value=0) as debian_patch: + with patch.object(plugins_install, 'install_debian', return_value=(0, 'Success')) as debian_patch: resp = await client.post('/foglamp/plugins', data=json.dumps(param)) assert 200 == resp.status debian_patch.assert_called_once_with(plugin_name) @@ -157,7 +157,7 @@ async def async_mock(): param = {"url": url_value, "format": "deb", "type": "south", "checksum": checksum_value} with patch.object(plugins_install, 'download', return_value=async_mock()) as download_patch: with patch.object(plugins_install, 'validate_checksum', return_value=True) as checksum_patch: - with patch.object(plugins_install, 'install_debian', return_value=100) as debian_patch: + with patch.object(plugins_install, 'install_debian', return_value=(256, 'Something went wrong!')) as debian_patch: resp = await client.post('/foglamp/plugins', data=json.dumps(param)) assert 400 == resp.status assert 'Something went wrong!' 
== resp.reason From 417d2c4fdbe7e82db9d113319160dcc2b199e89a Mon Sep 17 00:00:00 2001 From: ashish-jabble Date: Wed, 24 Apr 2019 18:13:32 +0530 Subject: [PATCH 068/161] apt-install removed from cmdutil --- C/plugins/utils/cmdutil.cpp | 12 +++--------- 1 file changed, 3 insertions(+), 9 deletions(-) diff --git a/C/plugins/utils/cmdutil.cpp b/C/plugins/utils/cmdutil.cpp index 4feacad718..ba81da9285 100644 --- a/C/plugins/utils/cmdutil.cpp +++ b/C/plugins/utils/cmdutil.cpp @@ -31,23 +31,21 @@ bool checkFile(char *rootdir, char *file) return (access(path, F_OK) == 0); } -const char *cmds[] = {"tar-extract", "cp", "rm", "apt-install", "pip3-pkg", "pip3-req", "mkdir"}; +const char *cmds[] = {"tar-extract", "cp", "rm", "pip3-pkg", "pip3-req", "mkdir"}; typedef enum { TAR_EXTRACT, CP, RM, - APT_INSTALL, PIP3_PKG, PIP3_REQ, MKDIR } cmdtype_t; -char *argsArray[7][6] = { +char *argsArray[][6] = { {(char *) "/bin/tar", (char *) "-C", (char *) "PLACEHOLDER", (char *) "-xf", (char *) "PLACEHOLDER", NULL}, {(char *) "/bin/cp", (char *) "-r", (char *) "PLACEHOLDER", (char *) "PLACEHOLDER", NULL, NULL}, {(char *) "/bin/rm", (char *) "-rf", (char *) "PLACEHOLDER", NULL, NULL, NULL}, - {(char *) "apt", (char *) "install", (char *) "-y", (char *) "PLACEHOLDER", NULL, NULL}, {(char *) "pip3", (char *) "install", (char *) "PLACEHOLDER", (char *) "--no-cache-dir", NULL, NULL}, {(char *) "pip3", (char *) "install", (char *) "-Ir", (char *) "PLACEHOLDER", (char *) "--no-cache-dir", NULL}, {(char *) "mkdir", (char *) "-p", (char *) "PLACEHOLDER", NULL, NULL, NULL} @@ -72,7 +70,6 @@ int getCmdType(const char *cmd) * sudo tar -C $FOGLAMP_ROOT -xf abc.tar.gz cmdutil tar-extract abc.tar.gz * sudo cp -r abc $FOGLAMP_ROOT/xyz cmdutil cp abc xyz * sudo rm -rf $FOGLAMP_ROOT/abc cmdutil rm abc - * sudo apt install -y wiringpi cmdutil apt-install wiringpi * * sudo pip3 install aiocoap==0.3 --no-cache-dir cmdutil pip3-pkg aiocoap==0.3 * sudo pip3 install -Ir requirements.txt --no-cache-dir cmdutil 
pip3-req requirements.txt @@ -140,9 +137,6 @@ int main(int argc, char *argv[]) buf[sizeof(buf)-1] = '\0'; // force null terminate args[2] = buf; break; - case APT_INSTALL: - args[3] = argv[2]; - break; case PIP3_PKG: args[2] = argv[2]; break; @@ -159,7 +153,7 @@ int main(int argc, char *argv[]) return 3; } - printf("cmd=%s %s %s %s %s %s\n", args[0], args[1], args[2], args[3]?args[3]:"", args[4]?args[4]:"", args[5]?args[5]:""); + //printf("cmd=%s %s %s %s %s %s\n", args[0], args[1], args[2], args[3]?args[3]:"", args[4]?args[4]:"", args[5]?args[5]:""); errno = 0; int rc = execvp(args[0], args); From 7c46077ee78261d011adadd176dc158ccb6570ed Mon Sep 17 00:00:00 2001 From: ashish-jabble Date: Wed, 24 Apr 2019 19:06:59 +0530 Subject: [PATCH 069/161] debug logger added and other code refactoring --- .../services/core/api/plugins/install.py | 43 +++++++++++-------- .../services/core/api/plugins/test_install.py | 4 +- 2 files changed, 28 insertions(+), 19 deletions(-) diff --git a/python/foglamp/services/core/api/plugins/install.py b/python/foglamp/services/core/api/plugins/install.py index 1cca541cfb..b0f9b1bbd8 100644 --- a/python/foglamp/services/core/api/plugins/install.py +++ b/python/foglamp/services/core/api/plugins/install.py @@ -78,13 +78,13 @@ async def add_plugin(request: web.Request) -> web.Response: if validate_checksum(checksum, file_name) is False: raise ValueError("Checksum is failed.") - _LOGGER.info("Found {} format with compressed {}".format(file_format, is_compressed)) + _LOGGER.debug("Found {} format with compressed {}".format(file_format, is_compressed)) if file_format == 'tar': files = extract_file(file_name, is_compressed) - _LOGGER.info("Files {} {}".format(files, type(files))) + _LOGGER.debug("Files {} {}".format(files, type(files))) copy_file_install_requirement(files, plugin_type) else: - code, msg = install_debian(file_name) + code, msg = install_deb(file_name) if code != 0: raise ValueError(msg) except FileNotFoundError as ex: @@ -121,23 
+121,31 @@ def validate_checksum(checksum: str, file_name: str) -> bool: def extract_file(file_name: str, is_compressed: bool) -> list: mode = "r:gz" if is_compressed else "r" tar = tarfile.open(_PATH + file_name, mode) - _LOGGER.info("Extracted to {}".format(_PATH)) + _LOGGER.debug("Extracted to {}".format(_PATH)) tar.extractall(_PATH) - _LOGGER.info("Extraction Done!!") return tar.getnames() -def install_debian(file_name: str): - cmd = "sudo apt -y install {}/data/plugins/{} > {}/data/plugins/output.txt 2>&1".format(_FOGLAMP_ROOT, file_name, _FOGLAMP_ROOT) - _LOGGER.exception("CMD....{}".format(cmd)) +def install_deb(file_name: str): + deb_file_path = "data/plugins/{}".format(file_name) + stdout_file_path = "data/plugins/output.txt" + cmd = "sudo apt -y install {} > {} 2>&1".format(_FOGLAMP_ROOT + deb_file_path, _FOGLAMP_ROOT + stdout_file_path) + _LOGGER.debug("CMD....{}".format(cmd)) ret_code = os.system(cmd) - _LOGGER.exception("Return Code....{}".format(ret_code)) + _LOGGER.debug("Return Code....{}".format(ret_code)) msg = "" - with open("{}/data/plugins/output.txt".format(_FOGLAMP_ROOT), 'r') as fh: + with open("{}".format(_FOGLAMP_ROOT + stdout_file_path), 'r') as fh: for line in fh: line = line.rstrip("\n") msg += line - _LOGGER.exception("Message.....{}".format(msg)) + _LOGGER.debug("Message.....{}".format(msg)) + # Remove stdout file + cmd = "{}/extras/C/cmdutil rm {}".format(_FOGLAMP_ROOT, stdout_file_path) + subprocess.run([cmd], stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True) + + # Remove downloaded debian file + cmd = "{}/extras/C/cmdutil rm {}".format(_FOGLAMP_ROOT, deb_file_path) + subprocess.run([cmd], stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True) return ret_code, msg @@ -159,7 +167,7 @@ def copy_file_install_requirement(dir_files: list, plugin_type: str): assert len(_dir), "No data found" plugin_name = _dir[0] - _LOGGER.info("Plugin name {} and Dir {} ".format(plugin_name, _dir)) + _LOGGER.debug("Plugin name {} and Dir 
{} ".format(plugin_name, _dir)) plugin_path = "python/foglamp/plugins" if py_file else "plugins" full_path = "{}/{}/{}/".format(_FOGLAMP_ROOT, plugin_path, plugin_type) dest_path = "{}/{}/".format(plugin_path, plugin_type) @@ -175,13 +183,14 @@ def copy_file_install_requirement(dir_files: list, plugin_type: str): # copy plugin files to the relative plugins directory. cmd = "{}/extras/C/cmdutil cp {} {}".format(_FOGLAMP_ROOT, _PATH + plugin_name, dest_path) subprocess.run([cmd], stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True) - _LOGGER.info("{} File copied to {}".format(cmd, full_path)) + _LOGGER.debug("{} File copied to {}".format(cmd, full_path)) # TODO: OPTIONAL (If any external dependency required to install plugin we will use this sh file), # but this is most risky thing to run with as sudo # Use case: plugins like opcua, usb4704 (external dep) # dht11- For pip packages we have requirements.txt file, as this plugin needs wiringpi apt package to install; so where to put this command? - if "requirements.sh" in _dir: - _LOGGER.info("Installing external deps required for plugins.... {}".format( - full_path + plugin_name + "/" + "requirements.sh")) - subprocess.run(["sh {}".format(full_path + plugin_name + "/" + "requirements.sh")], shell=True) + # if "requirements.sh" in _dir: + # _LOGGER.info("Installing external deps required for plugins.... 
{}".format( + # full_path + plugin_name + "/" + "requirements.sh")) + # subprocess.run(["sh {}".format(full_path + plugin_name + "/" + "requirements.sh")], shell=True) + # Also removed downloaded and extracted tar file at the end diff --git a/tests/unit/python/foglamp/services/core/api/plugins/test_install.py b/tests/unit/python/foglamp/services/core/api/plugins/test_install.py index 4d686022cb..23282ccf14 100644 --- a/tests/unit/python/foglamp/services/core/api/plugins/test_install.py +++ b/tests/unit/python/foglamp/services/core/api/plugins/test_install.py @@ -139,7 +139,7 @@ async def async_mock(): param = {"url": url_value, "format": "deb", "type": "south", "checksum": checksum_value} with patch.object(plugins_install, 'download', return_value=async_mock()) as download_patch: with patch.object(plugins_install, 'validate_checksum', return_value=True) as checksum_patch: - with patch.object(plugins_install, 'install_debian', return_value=(0, 'Success')) as debian_patch: + with patch.object(plugins_install, 'install_deb', return_value=(0, 'Success')) as debian_patch: resp = await client.post('/foglamp/plugins', data=json.dumps(param)) assert 200 == resp.status debian_patch.assert_called_once_with(plugin_name) @@ -157,7 +157,7 @@ async def async_mock(): param = {"url": url_value, "format": "deb", "type": "south", "checksum": checksum_value} with patch.object(plugins_install, 'download', return_value=async_mock()) as download_patch: with patch.object(plugins_install, 'validate_checksum', return_value=True) as checksum_patch: - with patch.object(plugins_install, 'install_debian', return_value=(256, 'Something went wrong!')) as debian_patch: + with patch.object(plugins_install, 'install_deb', return_value=(256, 'Something went wrong!')) as debian_patch: resp = await client.post('/foglamp/plugins', data=json.dumps(param)) assert 400 == resp.status assert 'Something went wrong!' 
== resp.reason From c81086b8b69786fef78d2f9bf723bc08be621143 Mon Sep 17 00:00:00 2001 From: ashish-jabble Date: Thu, 25 Apr 2019 15:37:46 +0530 Subject: [PATCH 070/161] handle requirements.txt installation via pip3-req cmd utility and other minor fixes along with bad unit tests --- .../services/core/api/plugins/install.py | 38 ++++++++++----- .../services/core/api/plugins/test_install.py | 47 +++++++++++++++---- 2 files changed, 66 insertions(+), 19 deletions(-) diff --git a/python/foglamp/services/core/api/plugins/install.py b/python/foglamp/services/core/api/plugins/install.py index b0f9b1bbd8..cefded8403 100644 --- a/python/foglamp/services/core/api/plugins/install.py +++ b/python/foglamp/services/core/api/plugins/install.py @@ -58,7 +58,8 @@ async def add_plugin(request: web.Request) -> web.Response: raise ValueError("Invalid format. Must be 'tar' or 'deb'") if file_format == "tar" and not plugin_type: raise ValueError("Plugin type param is required.") - if file_format == "tar" and plugin_type not in ['south', 'north', 'filter', 'notificationDelivery', 'notificationRule']: + if file_format == "tar" and plugin_type not in ['south', 'north', 'filter', 'notificationDelivery', + 'notificationRule']: raise ValueError("Invalid plugin type. 
Must be 'north' or 'south' or 'filter' " "or 'notificationDelivery' or 'notificationRule'") if compressed: @@ -82,7 +83,9 @@ async def add_plugin(request: web.Request) -> web.Response: if file_format == 'tar': files = extract_file(file_name, is_compressed) _LOGGER.debug("Files {} {}".format(files, type(files))) - copy_file_install_requirement(files, plugin_type) + code, msg = copy_file_install_requirement(files, plugin_type, file_name) + if code != 0: + raise ValueError(msg) else: code, msg = install_deb(file_name) if code != 0: @@ -149,7 +152,7 @@ def install_deb(file_name: str): return ret_code, msg -def copy_file_install_requirement(dir_files: list, plugin_type: str): +def copy_file_install_requirement(dir_files: list, plugin_type: str, file_name: str) -> tuple: py_file = any(f.endswith(".py") for f in dir_files) so_1_file = any(f.endswith(".so.1") for f in dir_files) # regular file so_file = any(f.endswith(".so") for f in dir_files) # symlink file @@ -185,12 +188,25 @@ def copy_file_install_requirement(dir_files: list, plugin_type: str): subprocess.run([cmd], stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True) _LOGGER.debug("{} File copied to {}".format(cmd, full_path)) - # TODO: OPTIONAL (If any external dependency required to install plugin we will use this sh file), - # but this is most risky thing to run with as sudo + # TODO: FOGL-2760 Handle external dependency for plugins which can be installed via tar file # Use case: plugins like opcua, usb4704 (external dep) - # dht11- For pip packages we have requirements.txt file, as this plugin needs wiringpi apt package to install; so where to put this command? - # if "requirements.sh" in _dir: - # _LOGGER.info("Installing external deps required for plugins.... 
{}".format( - # full_path + plugin_name + "/" + "requirements.sh")) - # subprocess.run(["sh {}".format(full_path + plugin_name + "/" + "requirements.sh")], shell=True) - # Also removed downloaded and extracted tar file at the end + # dht11- For pip packages we have requirements.txt file, as this plugin needs wiringpi apt package to install + py_req = filter(lambda x: x.startswith('requirement') and x.endswith('.txt'), _dir) + requirement = list(py_req) + code = 0 + msg = "" + if requirement: + cmd = "{}/extras/C/cmdutil pip3-req {}{}/{}".format(_FOGLAMP_ROOT, _PATH, plugin_name, requirement[0]) + s = subprocess.run([cmd], stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True) + code = s.returncode + msg = s.stderr.decode("utf-8") if code != 0 else s.stdout.decode("utf-8") + msg = msg.replace("\n", "").strip() + _LOGGER.debug("Return code {} and msg {}".format(code, msg)) + + # Also removed downloaded and extracted tar file + cmd = "{}/extras/C/cmdutil rm data/plugins/{}".format(_FOGLAMP_ROOT, file_name) + subprocess.run([cmd], stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True) + cmd = "{}/extras/C/cmdutil rm data/plugins/{}".format(_FOGLAMP_ROOT, plugin_name) + subprocess.run([cmd], stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True) + + return code, msg diff --git a/tests/unit/python/foglamp/services/core/api/plugins/test_install.py b/tests/unit/python/foglamp/services/core/api/plugins/test_install.py index 23282ccf14..cadec9cd25 100644 --- a/tests/unit/python/foglamp/services/core/api/plugins/test_install.py +++ b/tests/unit/python/foglamp/services/core/api/plugins/test_install.py @@ -76,6 +76,34 @@ async def async_mock(): checksum_patch.assert_called_once_with(checksum_value, tar_file_name) download_patch.assert_called_once_with([url_value]) + async def test_bad_post_plugins_install_with_tar(self, client): + async def async_mock(ret_val): + return ret_val + + plugin_name = 'mqtt_sparkplug' + sub_dir = 'sparkplug_b' + tar_file_name = 
'foglamp-south-mqtt_sparkplug-1.5.2.tar' + files = [plugin_name, '{}/__init__.py'.format(plugin_name), '{}/README.rst'.format(plugin_name), + '{}/{}.py'.format(plugin_name, plugin_name), '{}/requirements.txt'.format(plugin_name), + '{}/{}/__init__.py'.format(plugin_name, sub_dir), '{}/{}/{}.py'.format(plugin_name, sub_dir, sub_dir), + '{}/{}/{}_pb2.py'.format(plugin_name, sub_dir, sub_dir)] + checksum_value = "77b74584e09fc28467599636e47f3fc5" + url_value = "http://10.2.5.26:5000/download/{}".format(tar_file_name) + msg = 'Could not find a version that satisfies the requirement pt==1.4.0' + param = {"url": url_value, "format": "tar", "type": "south", "checksum": checksum_value} + with patch.object(plugins_install, 'download', return_value=async_mock([tar_file_name])) as download_patch: + with patch.object(plugins_install, 'validate_checksum', return_value=True) as checksum_patch: + with patch.object(plugins_install, 'extract_file', return_value=async_mock(files)) as extract_patch: + with patch.object(plugins_install, 'copy_file_install_requirement', + return_value=(1, msg)) as copy_file_install_requirement_patch: + resp = await client.post('/foglamp/plugins', data=json.dumps(param)) + assert 400 == resp.status + assert msg == resp.reason + assert copy_file_install_requirement_patch.called + extract_patch.assert_called_once_with(tar_file_name, False) + checksum_patch.assert_called_once_with(checksum_value, tar_file_name) + download_patch.assert_called_once_with([url_value]) + async def test_post_plugins_install_with_tar(self, client): async def async_mock(ret_val): return ret_val @@ -83,14 +111,14 @@ async def async_mock(ret_val): plugin_name = 'coap' tar_file_name = 'foglamp-south-coap-1.5.2.tar' files = [plugin_name, '{}/__init__.py'.format(plugin_name), '{}/README.rst'.format(plugin_name), - '{}/{}.py'.format(plugin_name, plugin_name), '{}/requirements.sh'.format(plugin_name)] + '{}/{}.py'.format(plugin_name, plugin_name), 
'{}/requirements.txt'.format(plugin_name)] checksum_value = "4015c2dea1cc71dbf70a23f6a203eeb6" url_value = "http://10.2.5.26:5000/download/{}".format(tar_file_name) param = {"url": url_value, "format": "tar", "type": "south", "checksum": checksum_value} with patch.object(plugins_install, 'download', return_value=async_mock([tar_file_name])) as download_patch: with patch.object(plugins_install, 'validate_checksum', return_value=True) as checksum_patch: with patch.object(plugins_install, 'extract_file', return_value=async_mock(files)) as extract_patch: - with patch.object(plugins_install, 'copy_file_install_requirement') \ + with patch.object(plugins_install, 'copy_file_install_requirement', return_value=(0, 'Success')) \ as copy_file_install_requirement_patch: resp = await client.post('/foglamp/plugins', data=json.dumps(param)) assert 200 == resp.status @@ -116,7 +144,7 @@ async def async_mock(ret_val): with patch.object(plugins_install, 'download', return_value=async_mock([tar_file_name])) as download_patch: with patch.object(plugins_install, 'validate_checksum', return_value=True) as checksum_patch: with patch.object(plugins_install, 'extract_file', return_value=async_mock(files)) as extract_patch: - with patch.object(plugins_install, 'copy_file_install_requirement') \ + with patch.object(plugins_install, 'copy_file_install_requirement', return_value=(0, 'Success')) \ as copy_file_install_requirement_patch: resp = await client.post('/foglamp/plugins', data=json.dumps(param)) assert 200 == resp.status @@ -131,7 +159,7 @@ async def async_mock(ret_val): async def test_post_plugins_install_with_debian(self, client): async def async_mock(): return [plugin_name, '{}/__init__.py'.format(plugin_name), '{}/README.rst'.format(plugin_name), - '{}/{}.py'.format(plugin_name, plugin_name), '{}/requirements.sh'.format(plugin_name)] + '{}/{}.py'.format(plugin_name, plugin_name), '{}/requirements.txt'.format(plugin_name)] plugin_name = 'coap' checksum_value = 
"4015c2dea1cc71dbf70a23f6a203eeb6" @@ -146,7 +174,7 @@ async def async_mock(): checksum_patch.assert_called_once_with(checksum_value, plugin_name) download_patch.assert_called_once_with([url_value]) - async def test_post_plugins_install_with_bad_debian(self, client): + async def test_bad_post_plugins_install_with_debian(self, client): async def async_mock(): return [plugin_name, '{}/__init__.py'.format(plugin_name), '{}/README.rst'.format(plugin_name), '{}/{}.py'.format(plugin_name, plugin_name), '{}/requirements.sh'.format(plugin_name)] @@ -155,12 +183,15 @@ async def async_mock(): checksum_value = "4015c2dea1cc71dbf70a23f6a203eeb6" url_value = "http://10.2.5.26:5000/download/foglamp-south-coap-1.5.2.deb" param = {"url": url_value, "format": "deb", "type": "south", "checksum": checksum_value} + msg = 'The following packages have unmet dependencies: foglamp-south-coap:armhf : Depends: ' \ + 'foglamp:armhf (>= 1.5) but it is not installableE: Unable to correct problems, ' \ + 'you have held broken packages.' with patch.object(plugins_install, 'download', return_value=async_mock()) as download_patch: with patch.object(plugins_install, 'validate_checksum', return_value=True) as checksum_patch: - with patch.object(plugins_install, 'install_deb', return_value=(256, 'Something went wrong!')) as debian_patch: + with patch.object(plugins_install, 'install_deb', return_value=(256, msg)) as debian_patch: resp = await client.post('/foglamp/plugins', data=json.dumps(param)) assert 400 == resp.status - assert 'Something went wrong!' 
== resp.reason + assert msg == resp.reason debian_patch.assert_called_once_with(plugin_name) checksum_patch.assert_called_once_with(checksum_value, plugin_name) - download_patch.assert_called_once_with([url_value]) \ No newline at end of file + download_patch.assert_called_once_with([url_value]) From 93ca8e1886199749c7a994a8d7c12cadcd4be3e2 Mon Sep 17 00:00:00 2001 From: Vaibhav Singhal Date: Thu, 25 Apr 2019 15:54:41 +0530 Subject: [PATCH 071/161] fix left in FOGL-2661 --- tests/system/python/e2e/test_e2e_foglamp_pair.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/system/python/e2e/test_e2e_foglamp_pair.py b/tests/system/python/e2e/test_e2e_foglamp_pair.py index adc9eb7655..c4dd52bbc9 100644 --- a/tests/system/python/e2e/test_e2e_foglamp_pair.py +++ b/tests/system/python/e2e/test_e2e_foglamp_pair.py @@ -116,7 +116,7 @@ def start_south_north_remote(self, reset_and_start_foglamp_remote, use_pip_cache subprocess.run([ "scp -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -i {} $FOGLAMP_ROOT/tests/system/python/scripts/install_python_plugin {}@{}:/tmp/".format( key_path, remote_user, remote_ip)], shell=True, check=True) - subprocess.run(["ssh -i {} {}@{} 'export FOGLAMP_ROOT={}; /tmp/install_python_plugin {} south {} {}'".format( + subprocess.run(["ssh -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -i {} {}@{} 'export FOGLAMP_ROOT={}; /tmp/install_python_plugin {} south {} {}'".format( key_path, remote_user, remote_ip, remote_foglamp_path, south_branch, south_plugin, use_pip_cache)], shell=True, check=True) From a2f22c1c2945271cbe7a8f838a04fa25e9b34bfb Mon Sep 17 00:00:00 2001 From: ashish-jabble Date: Thu, 25 Apr 2019 18:03:51 +0530 Subject: [PATCH 072/161] some path fixes when debian installation --- python/foglamp/services/core/api/plugins/install.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/python/foglamp/services/core/api/plugins/install.py 
b/python/foglamp/services/core/api/plugins/install.py index cefded8403..6e29fe18c4 100644 --- a/python/foglamp/services/core/api/plugins/install.py +++ b/python/foglamp/services/core/api/plugins/install.py @@ -130,8 +130,8 @@ def extract_file(file_name: str, is_compressed: bool) -> list: def install_deb(file_name: str): - deb_file_path = "data/plugins/{}".format(file_name) - stdout_file_path = "data/plugins/output.txt" + deb_file_path = "/data/plugins/{}".format(file_name) + stdout_file_path = "/data/plugins/output.txt" cmd = "sudo apt -y install {} > {} 2>&1".format(_FOGLAMP_ROOT + deb_file_path, _FOGLAMP_ROOT + stdout_file_path) _LOGGER.debug("CMD....{}".format(cmd)) ret_code = os.system(cmd) @@ -204,9 +204,9 @@ def copy_file_install_requirement(dir_files: list, plugin_type: str, file_name: _LOGGER.debug("Return code {} and msg {}".format(code, msg)) # Also removed downloaded and extracted tar file - cmd = "{}/extras/C/cmdutil rm data/plugins/{}".format(_FOGLAMP_ROOT, file_name) + cmd = "{}/extras/C/cmdutil rm /data/plugins/{}".format(_FOGLAMP_ROOT, file_name) subprocess.run([cmd], stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True) - cmd = "{}/extras/C/cmdutil rm data/plugins/{}".format(_FOGLAMP_ROOT, plugin_name) + cmd = "{}/extras/C/cmdutil rm /data/plugins/{}".format(_FOGLAMP_ROOT, plugin_name) subprocess.run([cmd], stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True) return code, msg From 0130131d0533ce5f4f23902172cd9d0521ea1b11 Mon Sep 17 00:00:00 2001 From: Cloud User Date: Thu, 25 Apr 2019 12:42:03 +0000 Subject: [PATCH 073/161] RHEL related make changes --- C/plugins/common/CMakeLists.txt | 1 + C/plugins/storage/sqlite/CMakeLists.txt | 3 +++ C/services/common/CMakeLists.txt | 10 +++++-- .../python/CMakeLists.txt | 8 ++++-- .../async_ingest_pymodule/CMakeLists.txt | 6 +++-- CMakeLists.txt | 26 ++++++++++++++++--- 6 files changed, 44 insertions(+), 10 deletions(-) diff --git a/C/plugins/common/CMakeLists.txt b/C/plugins/common/CMakeLists.txt 
index 46fbfd6ebe..8cb324cdbf 100644 --- a/C/plugins/common/CMakeLists.txt +++ b/C/plugins/common/CMakeLists.txt @@ -28,6 +28,7 @@ include_directories(../../services/common/include) include_directories(../../thirdparty/Simple-Web-Server) include_directories(../../thirdparty/rapidjson/include) +link_directories(${PYTHON_LIBRARY_DIRS}) set(CMAKE_LIBRARY_OUTPUT_DIRECTORY ${PROJECT_BINARY_DIR}/../../lib) # Create shared library diff --git a/C/plugins/storage/sqlite/CMakeLists.txt b/C/plugins/storage/sqlite/CMakeLists.txt index b2491ce3a0..784fa647fc 100644 --- a/C/plugins/storage/sqlite/CMakeLists.txt +++ b/C/plugins/storage/sqlite/CMakeLists.txt @@ -27,6 +27,9 @@ set_target_properties(${PROJECT_NAME} PROPERTIES SOVERSION 1) set(CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH} "${CMAKE_CURRENT_SOURCE_DIR}") find_package(sqlite3) +message("SQLITE_INCLUDE_DIR: ${SQLITE_INCLUDE_DIR}") +message("SQLITE_LIBRARIES: ${SQLITE_LIBRARIES}") + # Link with SQLite3 library target_link_libraries(${PROJECT_NAME} -lsqlite3) diff --git a/C/services/common/CMakeLists.txt b/C/services/common/CMakeLists.txt index 4ef1d3ca6a..730d79d107 100644 --- a/C/services/common/CMakeLists.txt +++ b/C/services/common/CMakeLists.txt @@ -1,4 +1,4 @@ -cmake_minimum_required(VERSION 2.4.0) +cmake_minimum_required(VERSION 2.6.0) project(services-common-lib) @@ -10,13 +10,19 @@ set(DLLIB -ldl) file(GLOB SOURCES *.cpp) # Find python3.x dev/lib package -find_package(PythonLibs 3 REQUIRED) +#set(Python_ADDITIONAL_VERSIONS 3.6) +#find_package(PythonLibs 3 REQUIRED) +PKG_CHECK_MODULES(PYTHON REQUIRED python3) + +message("PYTHON_LIBRARIES: ${PYTHON_LIBRARIES}") +message("PYTHON_INCLUDE_DIRS: ${PYTHON_INCLUDE_DIRS}") # Include header files include_directories(include ../../common/include ../../thirdparty/Simple-Web-Server ../../thirdparty/rapidjson/include) # Add Python 3.x header files include_directories(${PYTHON_INCLUDE_DIRS}) +link_directories(${PYTHON_LIBRARY_DIRS}) set(CMAKE_LIBRARY_OUTPUT_DIRECTORY 
${PROJECT_BINARY_DIR}/../../lib) diff --git a/C/services/south-plugin-interfaces/python/CMakeLists.txt b/C/services/south-plugin-interfaces/python/CMakeLists.txt index 3138f79fdb..fa148c34e1 100644 --- a/C/services/south-plugin-interfaces/python/CMakeLists.txt +++ b/C/services/south-plugin-interfaces/python/CMakeLists.txt @@ -1,4 +1,4 @@ -cmake_minimum_required(VERSION 2.6.0) +cmake_minimum_required(VERSION 2.8.12) project(south-plugin-python-interface) @@ -12,7 +12,9 @@ set(SERVICE_COMMON_LIB services-common-lib) file(GLOB SOURCES python_plugin_interface.cpp pyobject_reading_parser.cpp) # Find Python.h 3.x dev/lib package -find_package(PythonLibs 3 REQUIRED) +#set(Python_ADDITIONAL_VERSIONS 3.6) +#find_package(PythonLibs 3 REQUIRED) +PKG_CHECK_MODULES(PYTHON REQUIRED python3) # Include header files include_directories(include ../../../common/include ../../../services/common/include ../../../services/south/include ../../../thirdparty/rapidjson/include) @@ -20,6 +22,8 @@ include_directories(include ../../../common/include ../../../services/common/inc # Add Python 3.x header files include_directories(${PYTHON_INCLUDE_DIRS}) +link_directories(${PYTHON_LIBRARY_DIRS}) + set(CMAKE_LIBRARY_OUTPUT_DIRECTORY ${PROJECT_BINARY_DIR}/../../../lib) # Create shared library diff --git a/C/services/south-plugin-interfaces/python/async_ingest_pymodule/CMakeLists.txt b/C/services/south-plugin-interfaces/python/async_ingest_pymodule/CMakeLists.txt index c2e8ce92d6..9afe1e8de9 100644 --- a/C/services/south-plugin-interfaces/python/async_ingest_pymodule/CMakeLists.txt +++ b/C/services/south-plugin-interfaces/python/async_ingest_pymodule/CMakeLists.txt @@ -12,8 +12,9 @@ set(SERVICE_COMMON_LIB services-common-lib) file(GLOB SOURCES ingest_callback_pymodule.cpp ../pyobject_reading_parser.cpp) # Find Python 3.5 or higher dev/lib/interp package -find_package(PythonLibs 3.5 REQUIRED) -find_package(PythonInterp 3.5 REQUIRED) +#find_package(PythonLibs 3.5 REQUIRED) +#find_package(PythonInterp 
3.5 REQUIRED) +PKG_CHECK_MODULES(PYTHON REQUIRED python3) # Include header files include_directories(include ../../../../common/include ../../../../services/common/include ../../../../services/south/include ../../../../thirdparty/rapidjson/include) @@ -21,6 +22,7 @@ include_directories(include ../../../../common/include ../../../../services/comm # Add Python 3.x header files include_directories(${PYTHON_INCLUDE_DIRS}) +link_directories(${PYTHON_LIBRARY_DIRS}) link_directories(${PROJECT_BINARY_DIR}/../../../../lib) set(CMAKE_LIBRARY_OUTPUT_DIRECTORY ${PROJECT_BINARY_DIR}/../../../../../../python) diff --git a/CMakeLists.txt b/CMakeLists.txt index 3d312134db..31f978dac1 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -1,11 +1,29 @@ cmake_minimum_required (VERSION 2.8.8) project (FogLAMP) -set(CMAKE_BUILD_TYPE Debug) -set(CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG} -g -Wall") +#set(CMAKE_BUILD_TYPE Debug) +#set(CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG} -g -Wall") set(CMAKE_CXX_FLAGS "-std=c++11 -O3") -set(PYTHON_LIBRARY /opt/rh/rh-python36/root/usr/lib64) -set(PYTHON_INCLUDE_DIR /opt/rh/rh-python36/root/usr/include/python3.6m) +#set(PYTHON_LIBRARIES /opt/rh/rh-python36/root/usr/lib64/libpython3.6m.so) +#set(PYTHON_INCLUDE_DIRS /opt/rh/rh-python36/root/usr/include/python3.6m) +#set(PYTHON_EXECUTABLE /opt/rh/rh-python36/root/usr/bin/python3.6) +#execute_process(COMMAND python -c "from distutils.sysconfig import get_python_inc; print(get_python_inc())" OUTPUT_VARIABLE PYTHON_INCLUDE_DIR) +#execute_process(COMMAND python -c "import distutils.sysconfig as sysconfig; print(sysconfig.get_config_var('LIBDIR'))" OUT_VARIABLE PYTHON_LIBRARY) +#execute_process(COMMAND which python3 OUT_VARIABLE PYTHON_EXECUTABLE) + +FIND_PACKAGE(PkgConfig REQUIRED) +#set(CMAKE_PREFIX_PATH "/opt/rh/rh-python36/root/usr/lib64/pkgconfig") +#set(ENV{PKG_CONFIG_PATH} "$ENV{PKG_CONFIG_PATH}:/opt/rh/rh-python36/root/usr/lib64/pkgconfig") +#PKG_CHECK_MODULES(PYTHON REQUIRED python3) 
+#PKG_SEARCH_MODULE(PYTHON REQUIRED python3) + +# PYTHON_INCLUDE_DIRS & PYTHON_LIBRARIES are set here +#message("PYTHON_INCLUDE_DIRS: ${PYTHON_INCLUDE_DIRS}") +#message("PYTHON_LIBRARIES: ${PYTHON_LIBRARIES}") +#message("PYTHON_LIBRARY_DIRS: ${PYTHON_LIBRARY_DIRS}") + +#set(PYTHON_LIBRARIES "${PYTHON_LIBRARY_DIRS}/${PYTHON_LIBRARIES}") + set(SQLITE_INCLUDE_DIR /usr/local/include) set(SQLITE_LIBRARIES /usr/local/lib) From 189395bfad406b988f2eba3bd45b55aa9820defe Mon Sep 17 00:00:00 2001 From: Amandeep Singh Arora Date: Thu, 25 Apr 2019 15:13:25 +0000 Subject: [PATCH 074/161] Further changes --- C/services/common/CMakeLists.txt | 13 ++----------- 1 file changed, 2 insertions(+), 11 deletions(-) diff --git a/C/services/common/CMakeLists.txt b/C/services/common/CMakeLists.txt index 730d79d107..d8fdc508f8 100644 --- a/C/services/common/CMakeLists.txt +++ b/C/services/common/CMakeLists.txt @@ -1,4 +1,4 @@ -cmake_minimum_required(VERSION 2.6.0) +cmake_minimum_required(VERSION 2.8.0) project(services-common-lib) @@ -10,19 +10,13 @@ set(DLLIB -ldl) file(GLOB SOURCES *.cpp) # Find python3.x dev/lib package -#set(Python_ADDITIONAL_VERSIONS 3.6) -#find_package(PythonLibs 3 REQUIRED) -PKG_CHECK_MODULES(PYTHON REQUIRED python3) - -message("PYTHON_LIBRARIES: ${PYTHON_LIBRARIES}") -message("PYTHON_INCLUDE_DIRS: ${PYTHON_INCLUDE_DIRS}") +pkg_check_modules(PYTHON REQUIRED python3) # Include header files include_directories(include ../../common/include ../../thirdparty/Simple-Web-Server ../../thirdparty/rapidjson/include) # Add Python 3.x header files include_directories(${PYTHON_INCLUDE_DIRS}) -link_directories(${PYTHON_LIBRARY_DIRS}) set(CMAKE_LIBRARY_OUTPUT_DIRECTORY ${PROJECT_BINARY_DIR}/../../lib) @@ -30,9 +24,6 @@ set(CMAKE_LIBRARY_OUTPUT_DIRECTORY ${PROJECT_BINARY_DIR}/../../lib) add_library(${PROJECT_NAME} SHARED ${SOURCES}) target_link_libraries(${PROJECT_NAME} ${DLLIB}) -# Add Python 3.x library -target_link_libraries(${PROJECT_NAME} ${PYTHON_LIBRARIES}) - 
set_target_properties(${PROJECT_NAME} PROPERTIES SOVERSION 1) # Install library From 12a64d0c4a5d51a909a99a282511696ae4ac7e8e Mon Sep 17 00:00:00 2001 From: Amandeep Singh Arora Date: Thu, 25 Apr 2019 15:39:53 +0000 Subject: [PATCH 075/161] Cleanup --- C/plugins/common/CMakeLists.txt | 1 - C/plugins/storage/sqlite/CMakeLists.txt | 3 --- C/services/common/CMakeLists.txt | 2 +- .../python/CMakeLists.txt | 6 ++---- .../python/async_ingest_pymodule/CMakeLists.txt | 3 +-- CMakeLists.txt | 16 ---------------- 6 files changed, 4 insertions(+), 27 deletions(-) diff --git a/C/plugins/common/CMakeLists.txt b/C/plugins/common/CMakeLists.txt index 8cb324cdbf..46fbfd6ebe 100644 --- a/C/plugins/common/CMakeLists.txt +++ b/C/plugins/common/CMakeLists.txt @@ -28,7 +28,6 @@ include_directories(../../services/common/include) include_directories(../../thirdparty/Simple-Web-Server) include_directories(../../thirdparty/rapidjson/include) -link_directories(${PYTHON_LIBRARY_DIRS}) set(CMAKE_LIBRARY_OUTPUT_DIRECTORY ${PROJECT_BINARY_DIR}/../../lib) # Create shared library diff --git a/C/plugins/storage/sqlite/CMakeLists.txt b/C/plugins/storage/sqlite/CMakeLists.txt index 784fa647fc..b2491ce3a0 100644 --- a/C/plugins/storage/sqlite/CMakeLists.txt +++ b/C/plugins/storage/sqlite/CMakeLists.txt @@ -27,9 +27,6 @@ set_target_properties(${PROJECT_NAME} PROPERTIES SOVERSION 1) set(CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH} "${CMAKE_CURRENT_SOURCE_DIR}") find_package(sqlite3) -message("SQLITE_INCLUDE_DIR: ${SQLITE_INCLUDE_DIR}") -message("SQLITE_LIBRARIES: ${SQLITE_LIBRARIES}") - # Link with SQLite3 library target_link_libraries(${PROJECT_NAME} -lsqlite3) diff --git a/C/services/common/CMakeLists.txt b/C/services/common/CMakeLists.txt index d8fdc508f8..13c32932bf 100644 --- a/C/services/common/CMakeLists.txt +++ b/C/services/common/CMakeLists.txt @@ -1,4 +1,4 @@ -cmake_minimum_required(VERSION 2.8.0) +cmake_minimum_required(VERSION 2.4.0) project(services-common-lib) diff --git 
a/C/services/south-plugin-interfaces/python/CMakeLists.txt b/C/services/south-plugin-interfaces/python/CMakeLists.txt index fa148c34e1..dd7791fa57 100644 --- a/C/services/south-plugin-interfaces/python/CMakeLists.txt +++ b/C/services/south-plugin-interfaces/python/CMakeLists.txt @@ -1,4 +1,4 @@ -cmake_minimum_required(VERSION 2.8.12) +cmake_minimum_required(VERSION 2.6.0) project(south-plugin-python-interface) @@ -12,9 +12,7 @@ set(SERVICE_COMMON_LIB services-common-lib) file(GLOB SOURCES python_plugin_interface.cpp pyobject_reading_parser.cpp) # Find Python.h 3.x dev/lib package -#set(Python_ADDITIONAL_VERSIONS 3.6) -#find_package(PythonLibs 3 REQUIRED) -PKG_CHECK_MODULES(PYTHON REQUIRED python3) +pkg_check_modules(PYTHON REQUIRED python3) # Include header files include_directories(include ../../../common/include ../../../services/common/include ../../../services/south/include ../../../thirdparty/rapidjson/include) diff --git a/C/services/south-plugin-interfaces/python/async_ingest_pymodule/CMakeLists.txt b/C/services/south-plugin-interfaces/python/async_ingest_pymodule/CMakeLists.txt index 9afe1e8de9..e76512717a 100644 --- a/C/services/south-plugin-interfaces/python/async_ingest_pymodule/CMakeLists.txt +++ b/C/services/south-plugin-interfaces/python/async_ingest_pymodule/CMakeLists.txt @@ -12,9 +12,8 @@ set(SERVICE_COMMON_LIB services-common-lib) file(GLOB SOURCES ingest_callback_pymodule.cpp ../pyobject_reading_parser.cpp) # Find Python 3.5 or higher dev/lib/interp package -#find_package(PythonLibs 3.5 REQUIRED) #find_package(PythonInterp 3.5 REQUIRED) -PKG_CHECK_MODULES(PYTHON REQUIRED python3) +pkg_check_modules(PYTHON REQUIRED python3) # Include header files include_directories(include ../../../../common/include ../../../../services/common/include ../../../../services/south/include ../../../../thirdparty/rapidjson/include) diff --git a/CMakeLists.txt b/CMakeLists.txt index 31f978dac1..ba17c266eb 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -4,25 +4,9 
@@ project (FogLAMP) #set(CMAKE_BUILD_TYPE Debug) #set(CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG} -g -Wall") set(CMAKE_CXX_FLAGS "-std=c++11 -O3") -#set(PYTHON_LIBRARIES /opt/rh/rh-python36/root/usr/lib64/libpython3.6m.so) -#set(PYTHON_INCLUDE_DIRS /opt/rh/rh-python36/root/usr/include/python3.6m) -#set(PYTHON_EXECUTABLE /opt/rh/rh-python36/root/usr/bin/python3.6) -#execute_process(COMMAND python -c "from distutils.sysconfig import get_python_inc; print(get_python_inc())" OUTPUT_VARIABLE PYTHON_INCLUDE_DIR) -#execute_process(COMMAND python -c "import distutils.sysconfig as sysconfig; print(sysconfig.get_config_var('LIBDIR'))" OUT_VARIABLE PYTHON_LIBRARY) #execute_process(COMMAND which python3 OUT_VARIABLE PYTHON_EXECUTABLE) FIND_PACKAGE(PkgConfig REQUIRED) -#set(CMAKE_PREFIX_PATH "/opt/rh/rh-python36/root/usr/lib64/pkgconfig") -#set(ENV{PKG_CONFIG_PATH} "$ENV{PKG_CONFIG_PATH}:/opt/rh/rh-python36/root/usr/lib64/pkgconfig") -#PKG_CHECK_MODULES(PYTHON REQUIRED python3) -#PKG_SEARCH_MODULE(PYTHON REQUIRED python3) - -# PYTHON_INCLUDE_DIRS & PYTHON_LIBRARIES are set here -#message("PYTHON_INCLUDE_DIRS: ${PYTHON_INCLUDE_DIRS}") -#message("PYTHON_LIBRARIES: ${PYTHON_LIBRARIES}") -#message("PYTHON_LIBRARY_DIRS: ${PYTHON_LIBRARY_DIRS}") - -#set(PYTHON_LIBRARIES "${PYTHON_LIBRARY_DIRS}/${PYTHON_LIBRARIES}") set(SQLITE_INCLUDE_DIR /usr/local/include) set(SQLITE_LIBRARIES /usr/local/lib) From 53cf4a6abccd4409a3dca79a5196c7e8ddc36059 Mon Sep 17 00:00:00 2001 From: Amandeep Singh Arora Date: Fri, 26 Apr 2019 05:52:27 +0000 Subject: [PATCH 076/161] Cleanup --- CMakeLists.txt | 8 +------- 1 file changed, 1 insertion(+), 7 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index ba17c266eb..78555c70f5 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -1,15 +1,9 @@ cmake_minimum_required (VERSION 2.8.8) project (FogLAMP) -#set(CMAKE_BUILD_TYPE Debug) -#set(CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG} -g -Wall") set(CMAKE_CXX_FLAGS "-std=c++11 -O3") 
-#execute_process(COMMAND which python3 OUT_VARIABLE PYTHON_EXECUTABLE) -FIND_PACKAGE(PkgConfig REQUIRED) - -set(SQLITE_INCLUDE_DIR /usr/local/include) -set(SQLITE_LIBRARIES /usr/local/lib) +find_package(PkgConfig REQUIRED) add_subdirectory(C/common) add_subdirectory(C/services/common) From 35c0a57ac6a44f1c6aabd170dc3f3480767281cc Mon Sep 17 00:00:00 2001 From: ashish-jabble Date: Fri, 26 Apr 2019 11:59:28 +0530 Subject: [PATCH 077/161] unit test fixes --- .../services/core/api/test_configuration.py | 38 ++++++------------- 1 file changed, 11 insertions(+), 27 deletions(-) diff --git a/tests/unit/python/foglamp/services/core/api/test_configuration.py b/tests/unit/python/foglamp/services/core/api/test_configuration.py index 3a24bdf137..b76423a3a6 100644 --- a/tests/unit/python/foglamp/services/core/api/test_configuration.py +++ b/tests/unit/python/foglamp/services/core/api/test_configuration.py @@ -728,7 +728,8 @@ async def test_update_bulk_config_bad_request(self, client, category_name='rest_ (500, Exception) ]) async def test_update_bulk_config_exception(self, client, code, exception_name, category_name='rest_api'): - payload = {"http_port": "8082", "authentication": "mandatory"} + config_item_name = "authentication" + payload = {config_item_name: "required"} storage_client_mock = MagicMock(spec=StorageClientAsync) c_mgr = ConfigurationManager(storage_client_mock) with patch.object(connect, 'get_storage_async', return_value=storage_client_mock): @@ -736,35 +737,29 @@ async def test_update_bulk_config_exception(self, client, code, exception_name, resp = await client.put('/foglamp/category/{}'.format(category_name), data=json.dumps(payload)) assert code == resp.status assert resp.reason is None - assert 1 == patch_get_cat_item.call_count - calls = patch_get_cat_item.call_args_list - args, kwargs = calls[0] - assert category_name == args[0] - assert list(payload)[0] == args[1] + patch_get_cat_item.assert_called_once_with(category_name, config_item_name) async def 
test_update_bulk_config_item_not_found(self, client, category_name='rest_api'): async def async_mock(return_value): return return_value - payload = {"http_port": "8082", "authentication": "mandatory"} + config_item_name = "https" + payload = {config_item_name: "8082"} storage_client_mock = MagicMock(spec=StorageClientAsync) c_mgr = ConfigurationManager(storage_client_mock) with patch.object(connect, 'get_storage_async', return_value=storage_client_mock): with patch.object(c_mgr, 'get_category_item', return_value=async_mock(None)) as patch_get_cat_item: resp = await client.put('/foglamp/category/{}'.format(category_name), data=json.dumps(payload)) assert 404 == resp.status - assert "'{} config item not found'".format(list(payload)[0]) == resp.reason - assert 1 == patch_get_cat_item.call_count - calls = patch_get_cat_item.call_args_list - args, kwargs = calls[0] - assert category_name == args[0] - assert list(payload)[0] == args[1] + assert "'{} config item not found'".format(config_item_name) == resp.reason + patch_get_cat_item.assert_called_once_with(category_name, config_item_name) async def test_update_bulk_config_not_allowed(self, client, category_name='rest_api'): async def async_mock(return_value): return return_value - payload = {"http_port": "8082", "authentication": "mandatory"} + config_item_name = "http_port" + payload = {config_item_name: "8082"} storage_client_mock = MagicMock(spec=StorageClientAsync) c_mgr = ConfigurationManager(storage_client_mock) storage_value_entry = {'description': 'Port to accept HTTP connections on', 'displayName': 'HTTP Port', @@ -773,12 +768,8 @@ async def async_mock(return_value): with patch.object(c_mgr, 'get_category_item', return_value=async_mock(storage_value_entry)) as patch_get_cat_item: resp = await client.put('/foglamp/category/{}'.format(category_name), data=json.dumps(payload)) assert 400 == resp.status - assert 'Bulk update not allowed for {} item_name as it has readonly attribute set'.format(list(payload)[0]) == 
resp.reason - assert 1 == patch_get_cat_item.call_count - calls = patch_get_cat_item.call_args_list - args, kwargs = calls[0] - assert category_name == args[0] - assert list(payload)[0] == args[1] + assert 'Bulk update not allowed for {} item_name as it has readonly attribute set'.format(config_item_name) == resp.reason + patch_get_cat_item.assert_called_once_with(category_name, config_item_name) @pytest.mark.parametrize("category_name", [ "rest_api", "Rest $API" @@ -809,13 +800,6 @@ async def async_mock(return_value): patch_get_all_items.assert_called_once_with(category_name) patch_update_bulk.assert_called_once_with(category_name, payload) assert 2 == patch_get_cat_item.call_count - calls = patch_get_cat_item.call_args_list - args, kwargs = calls[0] - assert category_name == args[0] - assert list(payload)[0] == args[1] - args, kwargs = calls[1] - assert category_name == args[0] - assert list(payload)[1] == args[1] async def test_delete_configuration(self, client, category_name='rest_api'): result = {'result': 'Category {} deleted successfully.'.format(category_name)} From 50e2c25c44c71224e5fc2191f01161c3521372a7 Mon Sep 17 00:00:00 2001 From: Amandeep Singh Arora Date: Fri, 26 Apr 2019 12:45:45 +0530 Subject: [PATCH 078/161] Fix test cases compilation/linking --- tests/unit/C/CMakeLists.txt | 3 ++- tests/unit/C/common/CMakeLists.txt | 2 +- tests/unit/C/plugins/common/CMakeLists.txt | 2 +- tests/unit/C/services/core/CMakeLists.txt | 2 +- 4 files changed, 5 insertions(+), 4 deletions(-) diff --git a/tests/unit/C/CMakeLists.txt b/tests/unit/C/CMakeLists.txt index bd85a89276..0b13bcb7e1 100644 --- a/tests/unit/C/CMakeLists.txt +++ b/tests/unit/C/CMakeLists.txt @@ -16,7 +16,8 @@ find_package(Boost 1.53.0 COMPONENTS ${BOOST_COMPONENTS} REQUIRED) include_directories(SYSTEM ${Boost_INCLUDE_DIR}) # Find python3.x dev/lib package -find_package(PythonLibs 3 REQUIRED) +find_package(PkgConfig REQUIRED) +pkg_check_modules(PYTHON REQUIRED python3) 
include_directories(../../../C/common/include) include_directories(../../../C/plugins/common/include) diff --git a/tests/unit/C/common/CMakeLists.txt b/tests/unit/C/common/CMakeLists.txt index 26866b23ed..fcad084a73 100644 --- a/tests/unit/C/common/CMakeLists.txt +++ b/tests/unit/C/common/CMakeLists.txt @@ -32,7 +32,7 @@ set(PLUGINS_COMMON_LIB plugins-common-lib) file(GLOB unittests "*.cpp") # Find python3.x dev/lib package -find_package(PythonLibs 3 REQUIRED) +pkg_check_modules(PYTHON REQUIRED python3) # Add Python 3.x header files include_directories(${PYTHON_INCLUDE_DIRS}) diff --git a/tests/unit/C/plugins/common/CMakeLists.txt b/tests/unit/C/plugins/common/CMakeLists.txt index b3353b263a..12f30dff35 100644 --- a/tests/unit/C/plugins/common/CMakeLists.txt +++ b/tests/unit/C/plugins/common/CMakeLists.txt @@ -32,7 +32,7 @@ set(PLUGINS_COMMON_LIB plugins-common-lib) file(GLOB unittests "*.cpp") # Find python3.x dev/lib package -find_package(PythonLibs 3 REQUIRED) +pkg_check_modules(PYTHON REQUIRED python3) # Add Python 3.x header files include_directories(${PYTHON_INCLUDE_DIRS}) diff --git a/tests/unit/C/services/core/CMakeLists.txt b/tests/unit/C/services/core/CMakeLists.txt index 0c2ac21b5f..f16de0495b 100644 --- a/tests/unit/C/services/core/CMakeLists.txt +++ b/tests/unit/C/services/core/CMakeLists.txt @@ -32,7 +32,7 @@ file(GLOB test_sources "../../../../../C/services/core/*.cpp") file(GLOB unittests "*.cpp") # Find python3.x dev/lib package -find_package(PythonLibs 3 REQUIRED) +pkg_check_modules(PYTHON REQUIRED python3) # Add Python 3.x header files include_directories(${PYTHON_INCLUDE_DIRS}) From cb9a56c2a45b3e6e8e7117a95d507391ce9dbba8 Mon Sep 17 00:00:00 2001 From: Amandeep Singh Arora Date: Fri, 26 Apr 2019 15:15:35 +0530 Subject: [PATCH 079/161] Fixing C unit test cases w.r.t. 
pkg-config usage --- requirements.sh | 1 + tests/unit/C/common/CMakeLists.txt | 1 + tests/unit/C/plugins/common/CMakeLists.txt | 1 + tests/unit/C/services/core/CMakeLists.txt | 1 + 4 files changed, 4 insertions(+) diff --git a/requirements.sh b/requirements.sh index 1330f6ad45..765c46d86b 100755 --- a/requirements.sh +++ b/requirements.sh @@ -31,4 +31,5 @@ sudo apt install -y cmake g++ make build-essential autoconf automake uuid-dev sudo apt install -y libtool libboost-dev libboost-system-dev libboost-thread-dev libpq-dev libssl-dev libz-dev sudo apt install -y python-dbus python-dev python3-dev python3-pip sudo apt install -y sqlite3 libsqlite3-dev +sudo apt install -y pkg-config # sudo apt install -y postgresql diff --git a/tests/unit/C/common/CMakeLists.txt b/tests/unit/C/common/CMakeLists.txt index fcad084a73..fe5151c41f 100644 --- a/tests/unit/C/common/CMakeLists.txt +++ b/tests/unit/C/common/CMakeLists.txt @@ -32,6 +32,7 @@ set(PLUGINS_COMMON_LIB plugins-common-lib) file(GLOB unittests "*.cpp") # Find python3.x dev/lib package +find_package(PkgConfig REQUIRED) pkg_check_modules(PYTHON REQUIRED python3) # Add Python 3.x header files diff --git a/tests/unit/C/plugins/common/CMakeLists.txt b/tests/unit/C/plugins/common/CMakeLists.txt index 12f30dff35..a0458684de 100644 --- a/tests/unit/C/plugins/common/CMakeLists.txt +++ b/tests/unit/C/plugins/common/CMakeLists.txt @@ -32,6 +32,7 @@ set(PLUGINS_COMMON_LIB plugins-common-lib) file(GLOB unittests "*.cpp") # Find python3.x dev/lib package +find_package(PkgConfig REQUIRED) pkg_check_modules(PYTHON REQUIRED python3) # Add Python 3.x header files diff --git a/tests/unit/C/services/core/CMakeLists.txt b/tests/unit/C/services/core/CMakeLists.txt index f16de0495b..0f002d9201 100644 --- a/tests/unit/C/services/core/CMakeLists.txt +++ b/tests/unit/C/services/core/CMakeLists.txt @@ -32,6 +32,7 @@ file(GLOB test_sources "../../../../../C/services/core/*.cpp") file(GLOB unittests "*.cpp") # Find python3.x dev/lib package 
+find_package(PkgConfig REQUIRED) pkg_check_modules(PYTHON REQUIRED python3) # Add Python 3.x header files From 5a6af5e4abfa6971b1c0fda96878ab854dc0e94b Mon Sep 17 00:00:00 2001 From: Vaibhav Singhal Date: Fri, 26 Apr 2019 16:26:47 +0530 Subject: [PATCH 080/161] removed bash test framework for testing --- docs/building_foglamp/index.rst | 1 - docs/building_foglamp/systemtest.rst | 389 ----------------- tests/system/README.rst | 101 ----- .../end_to_end_OCS/e/0010_prepare.expected | 1 - .../end_to_end_OCS/e/0020_start.expected | 1 - .../e/0025_prepare_plugins.expected | 5 - .../e/0030_prepare_OCS.expected | 2 - .../e/0035_wait_for_start.expected | 0 .../end_to_end_OCS/e/0040_inject.expected | 1 - .../e/0050_wait_for_flush.expected | 0 .../e/0060_read_from_REST.expected | 9 - .../e/0070_read_from_OCS.expected | 1 - .../system/suites/end_to_end_OCS/e/README.rst | 6 - .../system/suites/end_to_end_OCS/r/README.rst | 6 - tests/system/suites/end_to_end_OCS/suite.cfg | 46 -- tests/system/suites/end_to_end_OCS/suite.desc | 1 - .../suites/end_to_end_OCS/t/0010_prepare.test | 15 - .../suites/end_to_end_OCS/t/0020_start.desc | 1 - .../suites/end_to_end_OCS/t/0020_start.test | 31 -- .../t/0025_prepare_plugins.desc | 1 - .../t/0025_prepare_plugins.test | 7 - .../end_to_end_OCS/t/0030_prepare_OCS.test | 61 --- .../end_to_end_OCS/t/0035_wait_for_start.test | 3 - .../suites/end_to_end_OCS/t/0040_inject.test | 8 - .../end_to_end_OCS/t/0050_wait_for_flush.test | 4 - .../end_to_end_OCS/t/0060_read_from_REST.test | 16 - .../end_to_end_OCS/t/0070_read_from_OCS.test | 26 -- .../system/suites/end_to_end_OCS/t/README.rst | 6 - .../end_to_end_PI/e/0010_prepare.expected | 1 - .../end_to_end_PI/e/0020_start.expected | 1 - .../e/0025_prepare_plugins.expected | 5 - .../end_to_end_PI/e/0030_prepare_PI.expected | 1 - .../e/0035_wait_for_start.expected | 0 .../end_to_end_PI/e/0040_inject.expected | 1 - .../e/0050_wait_for_flush.expected | 0 .../e/0060_read_from_REST.expected | 9 - 
.../e/0070_read_from_PI.expected | 1 - .../system/suites/end_to_end_PI/e/README.rst | 6 - .../system/suites/end_to_end_PI/r/README.rst | 6 - tests/system/suites/end_to_end_PI/suite.cfg | 40 -- tests/system/suites/end_to_end_PI/suite.desc | 1 - .../suites/end_to_end_PI/t/0010_prepare.test | 15 - .../suites/end_to_end_PI/t/0020_start.desc | 1 - .../suites/end_to_end_PI/t/0020_start.test | 29 -- .../end_to_end_PI/t/0025_prepare_plugins.desc | 1 - .../end_to_end_PI/t/0025_prepare_plugins.test | 7 - .../end_to_end_PI/t/0030_prepare_PI.test | 64 --- .../end_to_end_PI/t/0035_wait_for_start.test | 3 - .../suites/end_to_end_PI/t/0040_inject.test | 9 - .../end_to_end_PI/t/0050_wait_for_flush.test | 4 - .../end_to_end_PI/t/0060_read_from_REST.test | 10 - .../end_to_end_PI/t/0070_read_from_PI.test | 141 ------ .../system/suites/end_to_end_PI/t/README.rst | 6 - tests/system/suites/foglamp-test | 400 ------------------ .../suites/smoke/e/0010_prepare.expected | 1 - .../system/suites/smoke/e/0020_start.expected | 1 - .../smoke/e/0030_prepare_plugins.expected | 5 - .../smoke/e/0035_wait_for_start.expected | 0 .../suites/smoke/e/0040_inject.expected | 1 - .../smoke/e/0050_wait_for_flush.expected | 0 .../smoke/e/0060_read_from_REST.expected | 9 - tests/system/suites/smoke/e/README.rst | 6 - tests/system/suites/smoke/r/README.rst | 6 - tests/system/suites/smoke/suite.cfg | 41 -- tests/system/suites/smoke/suite.desc | 1 - tests/system/suites/smoke/t/0010_prepare.test | 12 - tests/system/suites/smoke/t/0020_start.desc | 1 - tests/system/suites/smoke/t/0020_start.test | 34 -- .../suites/smoke/t/0030_prepare_plugins.desc | 1 - .../suites/smoke/t/0030_prepare_plugins.test | 7 - .../suites/smoke/t/0035_wait_for_start.test | 3 - tests/system/suites/smoke/t/0040_inject.test | 6 - .../suites/smoke/t/0050_wait_for_flush.test | 4 - .../suites/smoke/t/0060_read_from_REST.test | 6 - tests/system/suites/smoke/t/README.rst | 6 - tests/system/tests/README.rst | 6 - tests/system/tests/__init__.py | 0 
tests/system/tests/bash/README.rst | 10 - .../tests/bash/check_foglamp_status.bash | 5 - .../tests/bash/check_foglamp_status.desc | 1 - .../system/tests/bash/count_assets_http.bash | 3 - .../system/tests/bash/count_assets_https.bash | 3 - .../system/tests/bash/enable_plugin_coap.bash | 101 ----- .../tests/bash/exec_any_foglamp_command.bash | 5 - .../tests/bash/exec_any_foglamp_command.desc | 1 - .../tests/bash/inject_fogbench_data.bash | 5 - tests/system/tests/bash/pi_delete.bash | 60 --- tests/system/tests/bash/pi_delete.desc | 1 - .../system/tests/bash/read_an_asset_http.bash | 7 - .../tests/bash/read_an_asset_https.bash | 5 - tests/system/tests/bash/sleep.bash | 4 - .../system/tests/bash/wait_creation_cfg.bash | 36 -- .../system/tests/bash/wait_creation_cfg.desc | 1 - .../tests/bash/wait_foglamp_status.bash | 55 --- .../tests/bash/wait_foglamp_status.desc | 2 - .../tests/bash/wait_plugin_available.bash | 36 -- .../tests/bash/wait_plugin_available.desc | 1 - tests/system/tests/python/README.rst | 10 - tests/system/tests/python/__init__.py | 0 tests/system/tests/python/common/__init__.py | 0 tests/system/tests/python/common/ocs.py | 129 ------ .../tests/python/ocs_clean_namespace.desc | 1 - .../tests/python/ocs_clean_namespace.py | 28 -- .../tests/python/ocs_read_an_asset.desc | 1 - .../system/tests/python/ocs_read_an_asset.py | 41 -- .../tests/python/ocs_read_streams_list.desc | 1 - .../tests/python/ocs_read_streams_list.py | 31 -- 107 files changed, 2252 deletions(-) delete mode 100644 docs/building_foglamp/systemtest.rst delete mode 100644 tests/system/README.rst delete mode 100644 tests/system/suites/end_to_end_OCS/e/0010_prepare.expected delete mode 100644 tests/system/suites/end_to_end_OCS/e/0020_start.expected delete mode 100644 tests/system/suites/end_to_end_OCS/e/0025_prepare_plugins.expected delete mode 100644 tests/system/suites/end_to_end_OCS/e/0030_prepare_OCS.expected delete mode 100644 
tests/system/suites/end_to_end_OCS/e/0035_wait_for_start.expected delete mode 100644 tests/system/suites/end_to_end_OCS/e/0040_inject.expected delete mode 100644 tests/system/suites/end_to_end_OCS/e/0050_wait_for_flush.expected delete mode 100644 tests/system/suites/end_to_end_OCS/e/0060_read_from_REST.expected delete mode 100644 tests/system/suites/end_to_end_OCS/e/0070_read_from_OCS.expected delete mode 100644 tests/system/suites/end_to_end_OCS/e/README.rst delete mode 100644 tests/system/suites/end_to_end_OCS/r/README.rst delete mode 100644 tests/system/suites/end_to_end_OCS/suite.cfg delete mode 100644 tests/system/suites/end_to_end_OCS/suite.desc delete mode 100755 tests/system/suites/end_to_end_OCS/t/0010_prepare.test delete mode 100644 tests/system/suites/end_to_end_OCS/t/0020_start.desc delete mode 100755 tests/system/suites/end_to_end_OCS/t/0020_start.test delete mode 100644 tests/system/suites/end_to_end_OCS/t/0025_prepare_plugins.desc delete mode 100755 tests/system/suites/end_to_end_OCS/t/0025_prepare_plugins.test delete mode 100755 tests/system/suites/end_to_end_OCS/t/0030_prepare_OCS.test delete mode 100755 tests/system/suites/end_to_end_OCS/t/0035_wait_for_start.test delete mode 100755 tests/system/suites/end_to_end_OCS/t/0040_inject.test delete mode 100755 tests/system/suites/end_to_end_OCS/t/0050_wait_for_flush.test delete mode 100755 tests/system/suites/end_to_end_OCS/t/0060_read_from_REST.test delete mode 100755 tests/system/suites/end_to_end_OCS/t/0070_read_from_OCS.test delete mode 100644 tests/system/suites/end_to_end_OCS/t/README.rst delete mode 100644 tests/system/suites/end_to_end_PI/e/0010_prepare.expected delete mode 100644 tests/system/suites/end_to_end_PI/e/0020_start.expected delete mode 100644 tests/system/suites/end_to_end_PI/e/0025_prepare_plugins.expected delete mode 100644 tests/system/suites/end_to_end_PI/e/0030_prepare_PI.expected delete mode 100644 tests/system/suites/end_to_end_PI/e/0035_wait_for_start.expected delete mode 
100644 tests/system/suites/end_to_end_PI/e/0040_inject.expected delete mode 100644 tests/system/suites/end_to_end_PI/e/0050_wait_for_flush.expected delete mode 100644 tests/system/suites/end_to_end_PI/e/0060_read_from_REST.expected delete mode 100644 tests/system/suites/end_to_end_PI/e/0070_read_from_PI.expected delete mode 100644 tests/system/suites/end_to_end_PI/e/README.rst delete mode 100644 tests/system/suites/end_to_end_PI/r/README.rst delete mode 100644 tests/system/suites/end_to_end_PI/suite.cfg delete mode 100644 tests/system/suites/end_to_end_PI/suite.desc delete mode 100755 tests/system/suites/end_to_end_PI/t/0010_prepare.test delete mode 100644 tests/system/suites/end_to_end_PI/t/0020_start.desc delete mode 100755 tests/system/suites/end_to_end_PI/t/0020_start.test delete mode 100644 tests/system/suites/end_to_end_PI/t/0025_prepare_plugins.desc delete mode 100755 tests/system/suites/end_to_end_PI/t/0025_prepare_plugins.test delete mode 100755 tests/system/suites/end_to_end_PI/t/0030_prepare_PI.test delete mode 100755 tests/system/suites/end_to_end_PI/t/0035_wait_for_start.test delete mode 100755 tests/system/suites/end_to_end_PI/t/0040_inject.test delete mode 100755 tests/system/suites/end_to_end_PI/t/0050_wait_for_flush.test delete mode 100755 tests/system/suites/end_to_end_PI/t/0060_read_from_REST.test delete mode 100755 tests/system/suites/end_to_end_PI/t/0070_read_from_PI.test delete mode 100644 tests/system/suites/end_to_end_PI/t/README.rst delete mode 100755 tests/system/suites/foglamp-test delete mode 100644 tests/system/suites/smoke/e/0010_prepare.expected delete mode 100644 tests/system/suites/smoke/e/0020_start.expected delete mode 100644 tests/system/suites/smoke/e/0030_prepare_plugins.expected delete mode 100644 tests/system/suites/smoke/e/0035_wait_for_start.expected delete mode 100644 tests/system/suites/smoke/e/0040_inject.expected delete mode 100644 tests/system/suites/smoke/e/0050_wait_for_flush.expected delete mode 100644 
tests/system/suites/smoke/e/0060_read_from_REST.expected delete mode 100644 tests/system/suites/smoke/e/README.rst delete mode 100644 tests/system/suites/smoke/r/README.rst delete mode 100644 tests/system/suites/smoke/suite.cfg delete mode 100644 tests/system/suites/smoke/suite.desc delete mode 100755 tests/system/suites/smoke/t/0010_prepare.test delete mode 100644 tests/system/suites/smoke/t/0020_start.desc delete mode 100755 tests/system/suites/smoke/t/0020_start.test delete mode 100644 tests/system/suites/smoke/t/0030_prepare_plugins.desc delete mode 100755 tests/system/suites/smoke/t/0030_prepare_plugins.test delete mode 100755 tests/system/suites/smoke/t/0035_wait_for_start.test delete mode 100755 tests/system/suites/smoke/t/0040_inject.test delete mode 100755 tests/system/suites/smoke/t/0050_wait_for_flush.test delete mode 100755 tests/system/suites/smoke/t/0060_read_from_REST.test delete mode 100644 tests/system/suites/smoke/t/README.rst delete mode 100644 tests/system/tests/README.rst delete mode 100644 tests/system/tests/__init__.py delete mode 100644 tests/system/tests/bash/README.rst delete mode 100755 tests/system/tests/bash/check_foglamp_status.bash delete mode 100644 tests/system/tests/bash/check_foglamp_status.desc delete mode 100755 tests/system/tests/bash/count_assets_http.bash delete mode 100755 tests/system/tests/bash/count_assets_https.bash delete mode 100755 tests/system/tests/bash/enable_plugin_coap.bash delete mode 100755 tests/system/tests/bash/exec_any_foglamp_command.bash delete mode 100644 tests/system/tests/bash/exec_any_foglamp_command.desc delete mode 100755 tests/system/tests/bash/inject_fogbench_data.bash delete mode 100755 tests/system/tests/bash/pi_delete.bash delete mode 100644 tests/system/tests/bash/pi_delete.desc delete mode 100755 tests/system/tests/bash/read_an_asset_http.bash delete mode 100755 tests/system/tests/bash/read_an_asset_https.bash delete mode 100755 tests/system/tests/bash/sleep.bash delete mode 100755 
tests/system/tests/bash/wait_creation_cfg.bash delete mode 100644 tests/system/tests/bash/wait_creation_cfg.desc delete mode 100755 tests/system/tests/bash/wait_foglamp_status.bash delete mode 100644 tests/system/tests/bash/wait_foglamp_status.desc delete mode 100755 tests/system/tests/bash/wait_plugin_available.bash delete mode 100644 tests/system/tests/bash/wait_plugin_available.desc delete mode 100644 tests/system/tests/python/README.rst delete mode 100644 tests/system/tests/python/__init__.py delete mode 100644 tests/system/tests/python/common/__init__.py delete mode 100644 tests/system/tests/python/common/ocs.py delete mode 100644 tests/system/tests/python/ocs_clean_namespace.desc delete mode 100644 tests/system/tests/python/ocs_clean_namespace.py delete mode 100644 tests/system/tests/python/ocs_read_an_asset.desc delete mode 100644 tests/system/tests/python/ocs_read_an_asset.py delete mode 100644 tests/system/tests/python/ocs_read_streams_list.desc delete mode 100644 tests/system/tests/python/ocs_read_streams_list.py diff --git a/docs/building_foglamp/index.rst b/docs/building_foglamp/index.rst index 1bbb70be54..05853c80ec 100644 --- a/docs/building_foglamp/index.rst +++ b/docs/building_foglamp/index.rst @@ -10,6 +10,5 @@ Building FogLAMP Developers Guide building_foglamp 04_installation 06_testing - systemtest 04_utilities 05_tasks \ No newline at end of file diff --git a/docs/building_foglamp/systemtest.rst b/docs/building_foglamp/systemtest.rst deleted file mode 100644 index b510251c47..0000000000 --- a/docs/building_foglamp/systemtest.rst +++ /dev/null @@ -1,389 +0,0 @@ -.. Developers' Guide - -.. |br| raw:: html - -
- -.. Images - - -.. Links - -.. Links in new tabs - -.. |unit test| raw:: html - - here - -.. |integration test| raw:: html - - here - -.. |system test| raw:: html - - here - - -.. ============================================= - - -****************************** -System Test Utility and Suites -****************************** - -FogLAMP comes with a test utility that facilitates the work of developers who wish to implement new plugins or modify the core of the platform. The utility is simple yet powerful: it relies on a set of scripts that allow the full automation of the tests. - - -First, Some Terminology -======================= - -Before we start our adventure in the world of FogLAMP testing, let's set a common background. These are some terms that are used by the FogLAMP developers that you will find in this documentation: - -- **Unit tests**: these are tests that developers prepare to test their functions and modules. Unit tests can vary in complexity, but in principle they are meant to test a small piece of code. A unit test does not have a particular meaning to users, but it is an essential part of the software development: by running unit tests, developers can check if a modified piece of code still behaves as expected and it will not cause issues to the whole system. Unit testing is explained |unit test|. -- **Integration tests**: these are tests that developers prepare to test larger modules or microservices. Integration tests usually require a significant development effort in building other mocked modules and services. We are not planning to provide any integration test in the foreseeable future. Integration testing is explained |integration test|. -- **System tests**: these are tests that can be used by developers and even by power users who want to test FogLAMP running with all the necessary microservices running. In order to execute these tests, FogLAMP must be built, installed (although it could be in a development environment) and ready to run. 
System testing is explained |system test|. -- **Test suite**: this is a set of tests combined together to execute a meaningful test of the system. Examples of test suites are a *smoke test* (a quick and simple system test used to verify that FogLAMP is working properly), or *end-to-end test* (a test used to verify that FogLAMP can successfully collect, store and forward data from South to North and East/West or from North to South and East/West. -- **End to end tests**: this is a type of system test that, in FogLAMP terms, can test the collection, storage and transfer of data. For example, we can call "End to End" test a suite that tests the collection of data from a South plugin and the storage of the same data in a PI server, through the North plugin. The concept of "End to End" relies on the fact that data is collected from one end (the South plugin) and it is tested all the way up to the other end (the PI Server). - - -FogLAMP System Test Principles -============================== - - -Test Files ----------- - -Tests rely on a set of test files that can be combined together following the logic of a specific test. Test files are like bricks or building blocks that can be used to create test suites. Test files may, for example, stop and start FogLAMP, inject a set of data points, read data from FogLAMP or send data North. They may be optionally paired with *description files* to provide a brief description of the test file, and with *ReStructuredText files*, to provide a more detailed explanation. Test, description and ReStructuredText files must have the same name. |br| Test files are organized by type, for example bash and Python scripts or executables. - - -Suite Directories ------------------ - -Each test suite is "physically" a directory, and the name of the test suite is the name of the directory. Suites are self-contained, i.e. 
everything that is defined or executed in a suite is stored in the suite directory, with the only exception of the test files, which are referenced in the *suite files*. - - -Suite Files ------------ - -Suite files are bash scripts that are automatically executed in alphabetical order. They are optionally paired with *description files*, to provide a brief description of the suite file, and with *ReStructuredText files*, to provide a more detailed explanation. Suite, description and ReStructuredText files must have the same name. |br| Suite files automatically generate an output that is compared with expected results. When the output of a suite file does not match the expected result, the test fails. - - -foglamp-test ------------- -*foglamp-test* is the utility used to execute test suites. The utility is position-dependent, i.e. it must be executed from the directory where it is stored. All the directories at the same level of the utility are identified at test suites and the name if the directory is the name of the suite. In fact, the utility must be executed as ``./foglamp-test ``, where *test-suite* is the name of the suite to execute and the name of the suite directory. - - -Directories and Files ---------------------- - -This is a list of directories, sub-directories and files that are used in system tests. |br| - -The starting point is the **system test base directory**. You will find it in the FogLAMP source repository under *tests/system*. - -- Under the system test base directory we have: - - - **suites**: the directory containing the test suites. - - - *foglamp-test*: the system test utility. It must be executed from this position with the command ``./foglamp-test ``, where *test-suite* is a sub-directory and the name of the suite that you want to execute. - - *test-suite*: any sub-directory at this position is a test suite. - - - *suite.desc*: an optional file containing a brief description of the test suite. 
- - *suite.rst*: an optional ReStructuredText file contaning a more comprehensive description of the test suite. - - **e**: a directory containing all the expected results from the execution of the test suite. Files in this directory have the same name of the suite files and suffix *.expected*. - - **r**: a directory containing all the results from the execution of a test suite. This directory contains files with the same name of the suite files, with two suffixes: - - - *.result*: files containing standard output and standard error as a result of the execution of the suite files. - - *.temp*: temporary files generated by the suite files contaning temporary and intermidate information, often used to prepare the result files. - - - **t**: a directory containing the suite files that will be executed in chronological order. This directory contains files with the same name and the following suffixes: - - - *.test*: the bash script suite file. - - *.desc*: an optional file containing a short description of the suite file. - - *.rst*: an optional ReStructuredText file containing a more comprehensive description of the suite file. - - - **tests**: the directory containing the test files. Test files are organized in these sub-directories: - - - **bash**: a directory containing test files written in bash. The directory contains files with teh same name and the following suffixes: - - - *.bash*: the test file - - *.desc*: an optional file containing a short description of the test file. - - *.rst*: an optional ReStructuredText file containing a more comprehensive description of the test file. - -This is an example of a direcory tree from the system test base directory: - -.. code-block:: console - - foglamp@vbox-dev:~/FogLAMP/tests/system$ tree - . 
- ├── README.rst - ├── suites - │   ├── foglamp-test - │   └── smoke - │   ├── e - │   │   ├── 001_prepare.expected - │   │   ├── 002_start.expected - │   │   ├── 003_inject.expected - │   │   ├── 004_wait_for_flush.expected - │   │   ├── 005_read_from_REST.expected - │   │   └── README.rst - │   ├── r - │   │   ├── 001_prepare.result - │   │   ├── 002_start.result - │   │   ├── 002_start.temp - │   │   ├── 003_inject.1.temp - │   │   ├── 003_inject.2.temp - │   │   ├── 003_inject.result - │   │   ├── 004_wait_for_flush.result - │   │   ├── 005_read_from_REST.result - │   │   ├── 005_read_from_REST.temp - │   │   └── README.rst - │   ├── suite.desc - │   └── t - │   ├── 001_prepare.test - │   ├── 001_start.desc - │   ├── 002_start.test - │   ├── 003_inject.test - │   ├── 004_wait_for_flush.test - │   ├── 005_read_from_REST.test - │   └── README.rst - └── tests - ├── bash - │   ├── check_foglamp_status.bash - │   ├── check_foglamp_status.desc - │   ├── count_assets_http.bash - │   ├── exec_any_foglamp_command.bash - │   ├── exec_any_foglamp_command.desc - │   ├── inject_fogbench_data.bash - │   ├── read_an_asset_http.bash - │   ├── README.rst - │   └── sleep.bash - └── README.rst - - 7 directories, 36 files - foglamp@vbox-dev:~/FogLAMP/tests/system$ - - -How to Prepare a Test Suite -=========================== - -In this section we will see how to prepare a new test suite. The objective is to familiarize with the various components, so that you can create your own suite. - - -Step 1: the Building Blocks, i.e. the Test Files ------------------------------------------------- - -The first thing to do is to create some building blocks. These are test files, normally written in bash or Python, that can be reused as many times as you wish in multiple test suites. 
|br| There are no limitations in the logic you may want to add to each test file, but you should consider these guidelines: - -- **Verify the consistency of each file**: the most common error in the test suite is the modification of a test file that is used in many test suites. When you modify a test file, make sure that the file will produce the same results. If the results change, then you must modify all the result files affected by the test file. -- **Document the test file**: it is the most obvious suggestion, but also the one that is often ignored. Try to avoid to create test files that are obscured, with unknown behaviour, because there is a high risk of recreating many times the very same test file simply because you are not aware that there is another test file with the same logic. -- **Do not make test files too generic**: it is ok to pass parameters to test files and make them act as called libraries, but also consider that the more generic the file is, the more it is likely that the execution will produce an unexpected behavior that will cause false failures. -- **Use predefined environment variables**: *foglamp-test* creates some environment variables ready for developer to use. You should use these variables instead of trying to set the same set of variables in a test file. - -These are examples of test files: - -- *check_foglamp_status*: this script executes the ``foglamp status`` command, but it only provides the first line of the command, i.e. if FogLAMP is running or not. -- *exec_any_foglamp_command*: this is a script used to generically call any command of the *foglamp* utility. - - -Pre-defined Variables -~~~~~~~~~~~~~~~~~~~~~ - -These pre-defined variables are helpful in the test files: - -- **FOGLAMP_EXE**: the foglamp script. Based on the *FOGLAMP_ROOT* variable and the presence of the *foglamp* command, the *foglamp-test* utility has already selected the script for you. 
By using the variable, you will have consistent executions along the whole suite. -- **FOGBENCH_EXE**: the fogbench script. As for *FOGLAMP_EXE*, this variable guarantees the consistency of the execution along the whole suite. -- **SUITE_NAME**: the name of the suite that is currently executed. The variable is also the name of the directory containing the suite files. -- **SUITE_BASEDIR**: the path to the suite directory, i.e. the directory containing all the suite files. -- **TEST_BASEDIR**: the path to the tests directory, i.e. the directory containing the building blocks (the test files) for the suites. -- **RESULT_DIR**: the path to the result directory, which is part of the suite. -- **TEST_NAME**: name of the suite file currently in execution. From the content of this variable, you can find the suite file (suffix .test), the expected file (suffix .expected), the result file (suffix .result) and the temporary files (suffix .temp). - - -Step 2: the Suite Files ------------------------ - -Once you have a set of test files available, you can combine them together in the suite files. There are no limitations to the number of test files added to the suite files, or to the logic added to the suite file to support the execution of the test: theoretically, a developer may completely ignore the test files and add all the logic in the suite file, but in doing so he/she will certainly replicate most of the logic. |br| Here are some guidelines you may want to adopt when you prepare a suite file: - -- **Send unnecessary output to /dev/null**: if you do not want to include the output of a command or a test file in the result file, simply add ``> /dev/null 2>&1`` to the line in the suite file. -- **Send intermediate data to a temporary file**: the correct format is to add ``> $RESULT_DIR/$TEST_NAME.temp 2>&1`` to the command that you need to review the output before it will become part of the result file. 
-- **Use** ``echo -e`` **or** ``expect`` **to manage interactive input**: some scripts require interactive input, and commands like ``echo -e`` can help in automating the input. - -The suite files have access to the same environment variables used by the test files. - - -Step 3: Putting Everything Together ------------------------------------ - -Now you are almost ready to execute your first suite, there is still one important thing missing: the result files. Result files are necessary to provide a comparison between the expected behavior and the actual reasult of a test. The creation of result files is easy, just follow these guidelines: - -- **Prepare the test and suite files**: first, you need to select which test files to use and combine them in the suite file. -- **Execute the foglamp-test utility**: it is likely that the utility will stop at the first test with a failure. This happens if the suite file generates an output (a *.result* file), but there are not expected files (a *.expect* file) to compare. -- **Check and approve the result file**: once you are happy with the content of the result file, simply move the file into the *e* (as in "expected") directory, by changing the suffix to *.expected*. -- **Repeat again until the suite is completed**: when you execute the foglamp-test utility again, the first test will pass, but then the utility will stop on the second test. You must repeat this procedure for all the suite files. - -One last point: don't worry about the *.result* and *.temp* files left by the utility: *foglamp-test* will remove these files right before the same suite is executed. - - -Executing a Test Suite -====================== - -A test suite is executed with the *foglamp-test* utility. You simply move to the *suites* directory in the system test base directory, select the suite you want to execute and run it. The *--list* arguments shows a list of the available suites: - -.. 
code-block:: console - - foglamp@vbox-dev:~/FogLAMP/tests/system/suites$ ./foglamp-test --list - ##### FogLAMP System Test ##### - Available test suites: - smoke: Smoke Test suite - foglamp@vbox-dev:~/FogLAMP/tests/system/suites$ - - -If you want to see details of a suite, select a suite and add the *--list* argument again: - -.. code-block:: console - - foglamp@vbox-dev:~/FogLAMP/tests/system/suites$ ./foglamp-test smoke --list - ##### FogLAMP System Test ##### - Test Suite: smoke - Smoke Test suite - - Tests in the smoke suite: - 001_prepare: - >>> bash/exec_any_foglamp_command: Execute the foglamp command with any paremeter. - >>> bash/check_foglamp_status: Execute the foglamp status command and retrieves the result. - >>> bash/exec_any_foglamp_command: Execute the foglamp command with any paremeter. - 002_start: - >>> bash/exec_any_foglamp_command: Execute the foglamp command with any paremeter. - >>> bash/check_foglamp_status: Execute the foglamp status command and retrieves the result. - 003_inject: - >>> bash/inject_fogbench_data: - 004_wait_for_flush: - >>> bash/sleep: - 005_read_from_REST: - >>> bash/count_assets_http: - >>> bash/read_an_asset_http: - - foglamp@vbox-dev:~/FogLAMP/tests/system/suites$ - - -Once you have selected the test suite you want to execute, you can run it by simply passing it as a parameter to the *foglamp-test*: - -.. 
code-block:: console - - foglamp@vbox-dev:~/FogLAMP/tests/system/suites$ ./foglamp-test smoke - ##### FogLAMP System Test ##### - Script Suite: smoke - Suite DIR: /home/foglamp/FogLAMP/tests/system/suites/smoke - Test DIR: /home/foglamp/FogLAMP/tests/system/tests - FogLAMP Root: /usr/local/foglamp - FogLAMP Data: - - Suite Start: 2018-01-31 15:15:06.082467 - [2018-01-31 15:15:06.087680] - 001_prepare - [2018-01-31 15:15:07.067504] (.972 seconds) - PASSED - [2018-01-31 15:15:07.076041] - 002_start - [2018-01-31 15:15:13.492360] (6.412 seconds) - PASSED - [2018-01-31 15:15:13.499524] - 003_inject - [2018-01-31 15:15:13.659411] (.155 seconds) - PASSED - [2018-01-31 15:15:13.666810] - 004_wait_for_flush - [2018-01-31 15:15:23.678761] (10.006 seconds) - PASSED - [2018-01-31 15:15:23.686204] - 005_read_from_REST - [2018-01-31 15:15:23.732168] (.41 seconds) - PASSED - Total Execution Time: 17.652 seconds. - Suite End: 2018-01-31 15:15:23.740412 - COMPLETED - foglamp@vbox-dev:~/FogLAMP/tests/system/suites$ - - -Let's have a look at the output of this execution: - -- The first part of the output shows some generic info, such as the directories and environment variables used by the suite files. -- The second part is about the test execution. It begins with the date and time of when the tests start. -- The following information is related to the individual test files, one for each line. Information are: - - - Starting date and time of the execution of the suite file - - Name of the suite file - - Ending date and time of the execution of the suite file - - Elapsed time in seconds for the execution - - Result of the execution, i.e. *PASSED* or *FAILED* - -- The last part of the output shows the total execution time for the suite and the final date and time. - -In the example above, you may see the suite has been completed and all the tests have passed. - - -Checking a Failed Test ----------------------- - -This is what you may see if one of the tests if the suite fails: - - -.. 
code-block:: console - - foglamp@vbox-dev:~/FogLAMP/tests/system/suites$ ./foglamp-test smoke - ##### FogLAMP System Test ##### - Script Suite: smoke - Suite DIR: /home/foglamp/FogLAMP-997-system_test/tests/system/suites/smoke - Test DIR: /home/foglamp/FogLAMP-997-system_test/tests/system/tests - FogLAMP Root: /usr/local/foglamp - FogLAMP Data: - - Suite Start: 2018-01-31 16:57:59.332437 - [2018-01-31 16:57:59.337390] - 001_prepare - [2018-01-31 16:58:00.369863] (1.026 seconds) - PASSED - [2018-01-31 16:58:00.376950] - 002_start - [2018-01-31 16:58:06.792647] (6.410 seconds) - PASSED - [2018-01-31 16:58:06.800447] - 003_inject - [2018-01-31 16:58:06.960875] (.155 seconds) - PASSED - [2018-01-31 16:58:06.970094] - 004_wait_for_flush - [2018-01-31 16:58:16.980510] (10.005 seconds) - PASSED - [2018-01-31 16:58:16.987632] - 005_read_from_REST - [2018-01-31 16:58:17.031112] (.39 seconds) - FAILED - Expect/Result MISMATCH - Total Execution Time: 17.701 seconds. - Suite End: 2018-01-31 16:58:17.039578 - INCOMPLETED - foglamp@vbox-dev:~/FogLAMP/tests/system/suites$ - - -As you can see, the result of the execution of the suite file *005_read_from_REST* has failed. The utility also gives you an idea of the reason why it fails. Possible reasons are: - -- **Expect/Result MISMATCH**: the result of the execution is different from the expected result -- **MISSING Result**: the execution has not generated any result file -- **UNEXPECTED Result**: the execution has generated a result file, but there are no expected results for the execution of this suite file - -In the case presented above, the mismatch would be pretty simple to check using the diff utility: - - -.. 
code-block:: console - - foglamp@vbox-dev:~/FogLAMP/tests/system/suites$ diff smoke/e/005_read_from_REST.expected smoke/r/005_read_from_REST.result - 4c4 - < "count": 2 - --- - > "count": 1 - foglamp@vbox-dev:~/FogLAMP/tests/system/suites$ - - -So here there is a mismatch between the JSON part of an output expected and the actual result of an executed suite file. Now the developer should figure out what is the issue. - - -The *smoke* Suite -================= - -The *smoke* suite is a simple set of tests that checks if a running version of FogLAMP can perform the basic operations expected by the platform. In its current form, it executes 5 steps: - -1. **Test preparation**: stop FogLAMP (if it is currently running) and reset the internal database. Building blocks are: - - - *exec_any_foglamp_command* - passing the ``stop`` argument - - *check_foglamp_status* - - *exec_any_foglamp_command* - passing the ``reset`` argument - -2. **Test start**: start FogLAMP and make sure that the necessary services are running. Building blocks are: - - - *exec_any_foglamp_command* - passing the ``start`` argument - - *check_foglamp_status* - -3. **Inject a new reading via CoAP using Fogbench**: prepare the injection template and run fogbench. Building blocks are: - - - *inject_fogbench_data* - passing a template prepared by the suite file - -4. **Wait for the data to flush**: data is flushed every 5 seconds, so the test waits for 10 seconds before it proceeds to the next step. Building blocks are: - - - *sleep* - passing *10* as a value in seconds - -5. **Read data via REST API**: check the count of readings and the content of the reading. Building blocks are: - - - *count_assets_http* - the default (HTTP) connection is used - - *read_an_asset_http* - passing the key used in the injection step as argument - - -All steps are replicable and available to Intel and ARM architectures. 
- diff --git a/tests/system/README.rst b/tests/system/README.rst deleted file mode 100644 index e2fd002121..0000000000 --- a/tests/system/README.rst +++ /dev/null @@ -1,101 +0,0 @@ - -.. |System Test Utility and Suites| raw:: html - - System Test Utility and Suites - -.. |installed| raw:: html - - installed - -.. |build| raw:: html - - build - -.. |set| raw:: html - - set - -.. ============================================= - -******************** -FogLAMP System Tests -******************** - -System tests are the third category of test in FogLAMP. These test ensures that end to end flow of a FogLAMP system is -working as expected. - -A typical example can be ingesting asset data in FogLAMP database, and sending to a cloud system with different set of -configuration rules. - -Since these kinds of tests interacts between two or more heterogeneous systems, these are often slow in nature. - -Running FogLAMP System tests -============================== - -Test Prerequisites ------------------- - -Install the following prerequisites to run a System tests suite :: - - apt-get install jq - -Also, foglamp must have: - - 1. All dependencies |installed| - 2. |build| - 3. and FogLAMP_ROOT must be |set| - - -Test Execution --------------- - -The complete documentation on the System test suite is available as this page |System Test Utility and Suites|. - -Some tests suite, ``end_to_end_PI`` and ``end_to_end_OCS``, requires some information to be executed -like for example the PI-Server or the OCS account that should be used. - -The configuration file ``suite.cfg``, available in each tests suite directory, should be edited proving -the information related to the specific environment. 
- -Tests suite end_to_end_PI -+++++++++++++++++++++++++ - -The following variables should be properly updated :: - - export PI_SERVER=pi-server - export PI_SERVER_PORT=5460 - export PI_SERVER_UID=pi-server-uid - export PI_SERVER_PWD=pi-server-pwd - export PI_SERVER_DATABASE=pi-server-db - export CONNECTOR_RELAY_VERSION=x.x - - export OMF_PRODUCER_TOKEN=xxx - -Tests suite end_to_end_OCS -++++++++++++++++++++++++++ - -The following variables should be properly update :: - - export OCS_TENANT="ocs_tenant_id" - export OCS_CLIENT_ID="ocs_client_id" - export OCS_CLIENT_SECRET="ocs_client_secret" - - export OCS_NAMESPACE="ocs_namespace_0001" - - export OCS_TOKEN="ocs_north_0001" - - - -Samples execution -+++++++++++++++++ - -List the tests available in the ``smoke`` tests suite :: - - cd ${FOGLAMP_ROOT}/tests/system/suites - ./foglamp-test smoke -l - -Execute all the tests of the ``smoke`` tests suite :: - - cd ${FOGLAMP_ROOT}/tests/system/suites - ./foglamp-test smoke - diff --git a/tests/system/suites/end_to_end_OCS/e/0010_prepare.expected b/tests/system/suites/end_to_end_OCS/e/0010_prepare.expected deleted file mode 100644 index 3f926844de..0000000000 --- a/tests/system/suites/end_to_end_OCS/e/0010_prepare.expected +++ /dev/null @@ -1 +0,0 @@ -FogLAMP not running. diff --git a/tests/system/suites/end_to_end_OCS/e/0020_start.expected b/tests/system/suites/end_to_end_OCS/e/0020_start.expected deleted file mode 100644 index 5b80673bf5..0000000000 --- a/tests/system/suites/end_to_end_OCS/e/0020_start.expected +++ /dev/null @@ -1 +0,0 @@ -FogLAMP started. 
diff --git a/tests/system/suites/end_to_end_OCS/e/0025_prepare_plugins.expected b/tests/system/suites/end_to_end_OCS/e/0025_prepare_plugins.expected deleted file mode 100644 index 359be07fa3..0000000000 --- a/tests/system/suites/end_to_end_OCS/e/0025_prepare_plugins.expected +++ /dev/null @@ -1,5 +0,0 @@ -{ - "name": "coap", - "type": "Southbound", - "status": "running" -} diff --git a/tests/system/suites/end_to_end_OCS/e/0030_prepare_OCS.expected b/tests/system/suites/end_to_end_OCS/e/0030_prepare_OCS.expected deleted file mode 100644 index 74c5438db5..0000000000 --- a/tests/system/suites/end_to_end_OCS/e/0030_prepare_OCS.expected +++ /dev/null @@ -1,2 +0,0 @@ -[] -FogLAMP started. diff --git a/tests/system/suites/end_to_end_OCS/e/0035_wait_for_start.expected b/tests/system/suites/end_to_end_OCS/e/0035_wait_for_start.expected deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/tests/system/suites/end_to_end_OCS/e/0040_inject.expected b/tests/system/suites/end_to_end_OCS/e/0040_inject.expected deleted file mode 100644 index 0420691aae..0000000000 --- a/tests/system/suites/end_to_end_OCS/e/0040_inject.expected +++ /dev/null @@ -1 +0,0 @@ -Total Messages Transferred: 1 diff --git a/tests/system/suites/end_to_end_OCS/e/0050_wait_for_flush.expected b/tests/system/suites/end_to_end_OCS/e/0050_wait_for_flush.expected deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/tests/system/suites/end_to_end_OCS/e/0060_read_from_REST.expected b/tests/system/suites/end_to_end_OCS/e/0060_read_from_REST.expected deleted file mode 100644 index 2771fa8d05..0000000000 --- a/tests/system/suites/end_to_end_OCS/e/0060_read_from_REST.expected +++ /dev/null @@ -1,9 +0,0 @@ -[ - { - "assetCode": "fogbench_smoke_test", - "count": 1 - } -] -{ - "sensor": 10 -} diff --git a/tests/system/suites/end_to_end_OCS/e/0070_read_from_OCS.expected b/tests/system/suites/end_to_end_OCS/e/0070_read_from_OCS.expected deleted file mode 100644 index f599e28b8a..0000000000 --- 
a/tests/system/suites/end_to_end_OCS/e/0070_read_from_OCS.expected +++ /dev/null @@ -1 +0,0 @@ -10 diff --git a/tests/system/suites/end_to_end_OCS/e/README.rst b/tests/system/suites/end_to_end_OCS/e/README.rst deleted file mode 100644 index 9b46d9d2a2..0000000000 --- a/tests/system/suites/end_to_end_OCS/e/README.rst +++ /dev/null @@ -1,6 +0,0 @@ -***************** -Extpected Results -***************** - -Files in this directory are the expected results of a test. -All output files have .expected suffix. diff --git a/tests/system/suites/end_to_end_OCS/r/README.rst b/tests/system/suites/end_to_end_OCS/r/README.rst deleted file mode 100644 index 365c8ab43d..0000000000 --- a/tests/system/suites/end_to_end_OCS/r/README.rst +++ /dev/null @@ -1,6 +0,0 @@ -************** -Actual Results -************** - -Files in this directory are the actual results of a test. -All output files have .result suffix. diff --git a/tests/system/suites/end_to_end_OCS/suite.cfg b/tests/system/suites/end_to_end_OCS/suite.cfg deleted file mode 100644 index 0231042a35..0000000000 --- a/tests/system/suites/end_to_end_OCS/suite.cfg +++ /dev/null @@ -1,46 +0,0 @@ -#!/bin/bash - -# FogLAMP server references -export FOGLAMP_SERVER=localhost -export FOGLAMP_PORT=8081 - -export TMP_DIR=/tmp - -# Plugins information -export PLUGIN_COAP_NAME=foglamp-south-coap -export PLUGIN_COAP_REPO=https://github.com/foglamp/${PLUGIN_COAP_NAME} - -# Configurations related to FogLAMP -export SENDING_PROCESS_OCS_DATA="North_Readings_to_OCS" -export TMP_FILE_ADD_NORTH_OCS_READINGS="${TMP_DIR}/add_north_ocs_readings.json" - -# Related to the specific OCS account -export OCS_TENANT="ocs_tenant_id" -export OCS_CLIENT_ID="ocs_client_id" -export OCS_CLIENT_SECRET="ocs_client_secret" - -# OCS namespace where the information are stored -export OCS_NAMESPACE="ocs_namespace_0001" - -# Used to authenticate as a valid publisher and required to ingest data into OCS using OMF -export OCS_TOKEN="ocs_north_0001" - -# OCS entry point for 
OMF messages -# TENANT_ID_PLACEHOLDER and NAMESPACE_ID_PLACEHOLDER, if present, will be replaced with the values of OCS_TENANT and OCS_NAMESPACE -export OCS_URL="https://dat-a.osisoft.com/api/tenants/TENANT_ID_PLACEHOLDER/namespaces/NAMESPACE_ID_PLACEHOLDER/omf" - -# Identifies sensors and measurements types -export OCS_TYPE_ID=1 - -# Define the asset information -# The asset name in the OCS tests suite is different respect the one in the PI-Server tests suite -# for OCS limitation in the naming -export ASSET_CODE="fogbench_smoke_test" - -# Define the asset information to retrieve from OCS -export OCS_STREAM="${OCS_TYPE_ID}measurement_${ASSET_CODE}" -export START_TIMESTAMP="2018-01-01T00:00:00.000000Z" -export VALUES_COUNT=10000 - -# Maximum numbers of retries to attempt an operation (usually corresponds to 120 seconds) -export RETRY_COUNT=120 diff --git a/tests/system/suites/end_to_end_OCS/suite.desc b/tests/system/suites/end_to_end_OCS/suite.desc deleted file mode 100644 index 3b860ac6b2..0000000000 --- a/tests/system/suites/end_to_end_OCS/suite.desc +++ /dev/null @@ -1 +0,0 @@ -System test against OCS. \ No newline at end of file diff --git a/tests/system/suites/end_to_end_OCS/t/0010_prepare.test b/tests/system/suites/end_to_end_OCS/t/0010_prepare.test deleted file mode 100755 index 0d33e72f3f..0000000000 --- a/tests/system/suites/end_to_end_OCS/t/0010_prepare.test +++ /dev/null @@ -1,15 +0,0 @@ -#!/bin/bash - -# Reads configuration setting -source ${SUITE_BASEDIR}/suite.cfg - -if [[ ${OCS_TOKEN} == "ocs_north_0001" ]] -then - echo ERROR : the file suite.cfg should be modified using the values for the specific OCS account. 
- exit 1 -fi - -$TEST_BASEDIR/bash/exec_any_foglamp_command.bash stop > /dev/null 2>&1 -$TEST_BASEDIR/bash/check_foglamp_status.bash - -echo -e "YES" | $TEST_BASEDIR/bash/exec_any_foglamp_command.bash reset > /dev/null 2>&1 diff --git a/tests/system/suites/end_to_end_OCS/t/0020_start.desc b/tests/system/suites/end_to_end_OCS/t/0020_start.desc deleted file mode 100644 index 3affc1d52a..0000000000 --- a/tests/system/suites/end_to_end_OCS/t/0020_start.desc +++ /dev/null @@ -1 +0,0 @@ -Start FogLAMP diff --git a/tests/system/suites/end_to_end_OCS/t/0020_start.test b/tests/system/suites/end_to_end_OCS/t/0020_start.test deleted file mode 100755 index d51802b9c8..0000000000 --- a/tests/system/suites/end_to_end_OCS/t/0020_start.test +++ /dev/null @@ -1,31 +0,0 @@ -#!/bin/bash - - -# Declares used variables -declare TEST_BASEDIR -declare SUITE_BASEDIR -declare FOGLAMP_SERVER -declare FOGLAMP_PORT -declare RESULT_DIR -declare TEST_NAME -declare SENDING_PROCESS_DATA -declare PI_SERVER -declare PI_SERVER_PORT -declare OMF_PRODUCER_TOKEN -declare OMF_TYPE_ID - -# Reads configuration setting -source ${SUITE_BASEDIR}/suite.cfg - -$TEST_BASEDIR/bash/exec_any_foglamp_command.bash start > ${RESULT_DIR}/$TEST_NAME.temp 2>&1 -tail -n1 ${RESULT_DIR}/$TEST_NAME.temp - -$TEST_BASEDIR/bash/wait_foglamp_status.bash RUNNING 1>>"${RESULT_DIR}/${TEST_NAME}_out.temp" 2>>"${RESULT_DIR}/${TEST_NAME}_err.temp" - -# Checks if the FogLAMP server is reachable -curl -s -X GET http://${FOGLAMP_SERVER}:${FOGLAMP_PORT}/foglamp/category/ > /dev/null 2>&1 -if [[ "$?" != "0" ]]; then - - echo "ERROR : FogLAMP server not reachable, server:port -${FOGLAMP_SERVER}:${FOGLAMP_PORT}-." 
- exit 1 -fi diff --git a/tests/system/suites/end_to_end_OCS/t/0025_prepare_plugins.desc b/tests/system/suites/end_to_end_OCS/t/0025_prepare_plugins.desc deleted file mode 100644 index 4ccded1df8..0000000000 --- a/tests/system/suites/end_to_end_OCS/t/0025_prepare_plugins.desc +++ /dev/null @@ -1 +0,0 @@ -Enable all the required plugins. \ No newline at end of file diff --git a/tests/system/suites/end_to_end_OCS/t/0025_prepare_plugins.test b/tests/system/suites/end_to_end_OCS/t/0025_prepare_plugins.test deleted file mode 100755 index c1514df958..0000000000 --- a/tests/system/suites/end_to_end_OCS/t/0025_prepare_plugins.test +++ /dev/null @@ -1,7 +0,0 @@ -#!/bin/bash - -# Declares used variables -declare TEST_BASEDIR - -$TEST_BASEDIR/bash/enable_plugin_coap.bash - diff --git a/tests/system/suites/end_to_end_OCS/t/0030_prepare_OCS.test b/tests/system/suites/end_to_end_OCS/t/0030_prepare_OCS.test deleted file mode 100755 index 4348772ad0..0000000000 --- a/tests/system/suites/end_to_end_OCS/t/0030_prepare_OCS.test +++ /dev/null @@ -1,61 +0,0 @@ -#!/usr/bin/env bash - -declare TMP_FILE_ADD_NORTH_OCS_READINGS - -# Reads configuration setting -source ${SUITE_BASEDIR}/suite.cfg - - -# Redirects std out/err for all the following commands -exec 7>&1 # Backups stdout -exec 8>&1 # Backups stderr -exec 1>>"${RESULT_DIR}/${TEST_NAME}_out.temp" -exec 2>>"${RESULT_DIR}/${TEST_NAME}_err.temp" - - -# Enables the OCS plugin -bash -c "cat > ${TMP_FILE_ADD_NORTH_OCS_READINGS}" << 'EOF' - { - "name": "North_Readings_to_OCS", - "plugin": "ocs_V2", - "type": "north", - "schedule_type": 3, - "schedule_day": 0, - "schedule_time": 0, - "schedule_repeat": 30, - "schedule_enabled": true - } -EOF - -curl -X POST http://${FOGLAMP_SERVER}:${FOGLAMP_PORT}/foglamp/scheduled/task -d@${TMP_FILE_ADD_NORTH_OCS_READINGS} - - -# Waits until the OCS plugin has created the default configurations -${TEST_BASEDIR}/bash/wait_creation_cfg.bash "${SENDING_PROCESS_OCS_DATA}/producerToken" -if [[ "$?" 
!= "0" ]]; then - exit 1 -fi - -# Configures FogLAMP with the required settings -curl -s -X PUT http://${FOGLAMP_SERVER}:${FOGLAMP_PORT}/foglamp/category/"${SENDING_PROCESS_OCS_DATA}"/tenant_id -d '{ "value" : "'${OCS_TENANT}'" }' -curl -s -X PUT http://${FOGLAMP_SERVER}:${FOGLAMP_PORT}/foglamp/category/"${SENDING_PROCESS_OCS_DATA}"/client_id -d '{ "value" : "'${OCS_CLIENT_ID}'" }' -curl -s -X PUT http://${FOGLAMP_SERVER}:${FOGLAMP_PORT}/foglamp/category/"${SENDING_PROCESS_OCS_DATA}"/client_secret -d '{ "value" : "'${OCS_CLIENT_SECRET}'" }' -curl -s -X PUT http://${FOGLAMP_SERVER}:${FOGLAMP_PORT}/foglamp/category/"${SENDING_PROCESS_OCS_DATA}"/namespace -d '{ "value" : "'${OCS_NAMESPACE}'" }' - -curl -s -X PUT http://${FOGLAMP_SERVER}:${FOGLAMP_PORT}/foglamp/category/"${SENDING_PROCESS_OCS_DATA}"/producerToken -d '{ "value" : "'${OCS_TOKEN}'" }' - -# Initializes OCS cleaning all the content of the defined OCS NameSpace -python3 $TEST_BASEDIR/python/ocs_clean_namespace.py $@ - -# Restarts FogLAMP to ensure the new configurations are used -${TEST_BASEDIR}/bash/exec_any_foglamp_command.bash stop - -# Restore stdout/stderr -exec 1>&7 -exec 2>&8 - -# Retrieves the list of the Streams to ensure it is empty -python3 $TEST_BASEDIR/python/ocs_read_streams_list.py $@ - -${TEST_BASEDIR}/bash/exec_any_foglamp_command.bash start > ${RESULT_DIR}/${TEST_NAME}.2.temp 2>&1 -tail -n1 ${RESULT_DIR}/${TEST_NAME}.2.temp diff --git a/tests/system/suites/end_to_end_OCS/t/0035_wait_for_start.test b/tests/system/suites/end_to_end_OCS/t/0035_wait_for_start.test deleted file mode 100755 index 7e49d2dce1..0000000000 --- a/tests/system/suites/end_to_end_OCS/t/0035_wait_for_start.test +++ /dev/null @@ -1,3 +0,0 @@ -#!/bin/bash - -$TEST_BASEDIR/bash/sleep.bash 10 diff --git a/tests/system/suites/end_to_end_OCS/t/0040_inject.test b/tests/system/suites/end_to_end_OCS/t/0040_inject.test deleted file mode 100755 index 57bfc0e51b..0000000000 --- a/tests/system/suites/end_to_end_OCS/t/0040_inject.test 
+++ /dev/null @@ -1,8 +0,0 @@ -#!/bin/bash - -# Reads configuration setting -source ${SUITE_BASEDIR}/suite.cfg - -echo '[{"name":"'${ASSET_CODE}'","sensor_values":[{"name":"sensor","type":"number","min":10,"max":10,"precision":0}]}]' > $RESULT_DIR/$TEST_NAME.1.temp 2>&1 -$TEST_BASEDIR/bash/inject_fogbench_data.bash -t $RESULT_DIR/$TEST_NAME.1.temp > $RESULT_DIR/$TEST_NAME.2.temp 2>&1 -grep '^Total Messages Transferred: ' $RESULT_DIR/$TEST_NAME.2.temp diff --git a/tests/system/suites/end_to_end_OCS/t/0050_wait_for_flush.test b/tests/system/suites/end_to_end_OCS/t/0050_wait_for_flush.test deleted file mode 100755 index 3f22d4c7e5..0000000000 --- a/tests/system/suites/end_to_end_OCS/t/0050_wait_for_flush.test +++ /dev/null @@ -1,4 +0,0 @@ -#!/bin/bash - -$TEST_BASEDIR/bash/sleep.bash 10 - diff --git a/tests/system/suites/end_to_end_OCS/t/0060_read_from_REST.test b/tests/system/suites/end_to_end_OCS/t/0060_read_from_REST.test deleted file mode 100755 index 1c25c8bb85..0000000000 --- a/tests/system/suites/end_to_end_OCS/t/0060_read_from_REST.test +++ /dev/null @@ -1,16 +0,0 @@ -#!/bin/bash - -declare SUITE_BASEDIR -declare TEST_BASEDIR -declare RESULT_DIR -declare TEST_NAME -declare ASSET_CODE - -# Reads configuration setting -source ${SUITE_BASEDIR}/suite.cfg - - -$TEST_BASEDIR/bash/count_assets_http.bash -$TEST_BASEDIR/bash/read_an_asset_http.bash "${ASSET_CODE}" > $RESULT_DIR/$TEST_NAME.temp 2>&1 -jq '.[] | .reading' $RESULT_DIR/$TEST_NAME.temp - diff --git a/tests/system/suites/end_to_end_OCS/t/0070_read_from_OCS.test b/tests/system/suites/end_to_end_OCS/t/0070_read_from_OCS.test deleted file mode 100755 index 97589a2179..0000000000 --- a/tests/system/suites/end_to_end_OCS/t/0070_read_from_OCS.test +++ /dev/null @@ -1,26 +0,0 @@ -#!/bin/bash - -# Reads configuration setting -source ${SUITE_BASEDIR}/suite.cfg - -# Waits until either the data is available in OCS or it reaches the timeout -while [ true ] -do - python3 ${TEST_BASEDIR}/python/ocs_read_an_asset.py 
${OCS_STREAM} > $RESULT_DIR/${TEST_NAME}.temp 2>&1 - result=`jq '.[] | .sensor' $RESULT_DIR/$TEST_NAME.temp 2>> $RESULT_DIR/${TEST_NAME}_err.temp` - - if [[ "$result" != "" && "$result" != "null" ]] - then - echo ${result} - exit 0 - else - if [[ $count -le ${RETRY_COUNT} ]] - then - sleep 1 - count=$((count+1)) - else - exit 1 - fi - fi -done - diff --git a/tests/system/suites/end_to_end_OCS/t/README.rst b/tests/system/suites/end_to_end_OCS/t/README.rst deleted file mode 100644 index f311d8144e..0000000000 --- a/tests/system/suites/end_to_end_OCS/t/README.rst +++ /dev/null @@ -1,6 +0,0 @@ -************ -Test scripts -************ - -Files in this directory are bash scripts used to run a test. -All bash files have .test suffix. diff --git a/tests/system/suites/end_to_end_PI/e/0010_prepare.expected b/tests/system/suites/end_to_end_PI/e/0010_prepare.expected deleted file mode 100644 index 3f926844de..0000000000 --- a/tests/system/suites/end_to_end_PI/e/0010_prepare.expected +++ /dev/null @@ -1 +0,0 @@ -FogLAMP not running. diff --git a/tests/system/suites/end_to_end_PI/e/0020_start.expected b/tests/system/suites/end_to_end_PI/e/0020_start.expected deleted file mode 100644 index 5b80673bf5..0000000000 --- a/tests/system/suites/end_to_end_PI/e/0020_start.expected +++ /dev/null @@ -1 +0,0 @@ -FogLAMP started. 
diff --git a/tests/system/suites/end_to_end_PI/e/0025_prepare_plugins.expected b/tests/system/suites/end_to_end_PI/e/0025_prepare_plugins.expected deleted file mode 100644 index 359be07fa3..0000000000 --- a/tests/system/suites/end_to_end_PI/e/0025_prepare_plugins.expected +++ /dev/null @@ -1,5 +0,0 @@ -{ - "name": "coap", - "type": "Southbound", - "status": "running" -} diff --git a/tests/system/suites/end_to_end_PI/e/0030_prepare_PI.expected b/tests/system/suites/end_to_end_PI/e/0030_prepare_PI.expected deleted file mode 100644 index 5b80673bf5..0000000000 --- a/tests/system/suites/end_to_end_PI/e/0030_prepare_PI.expected +++ /dev/null @@ -1 +0,0 @@ -FogLAMP started. diff --git a/tests/system/suites/end_to_end_PI/e/0035_wait_for_start.expected b/tests/system/suites/end_to_end_PI/e/0035_wait_for_start.expected deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/tests/system/suites/end_to_end_PI/e/0040_inject.expected b/tests/system/suites/end_to_end_PI/e/0040_inject.expected deleted file mode 100644 index 0420691aae..0000000000 --- a/tests/system/suites/end_to_end_PI/e/0040_inject.expected +++ /dev/null @@ -1 +0,0 @@ -Total Messages Transferred: 1 diff --git a/tests/system/suites/end_to_end_PI/e/0050_wait_for_flush.expected b/tests/system/suites/end_to_end_PI/e/0050_wait_for_flush.expected deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/tests/system/suites/end_to_end_PI/e/0060_read_from_REST.expected b/tests/system/suites/end_to_end_PI/e/0060_read_from_REST.expected deleted file mode 100644 index 256390650d..0000000000 --- a/tests/system/suites/end_to_end_PI/e/0060_read_from_REST.expected +++ /dev/null @@ -1,9 +0,0 @@ -[ - { - "assetCode": "fogbench/smoke_test", - "count": 1 - } -] -{ - "sensor": 10 -} diff --git a/tests/system/suites/end_to_end_PI/e/0070_read_from_PI.expected b/tests/system/suites/end_to_end_PI/e/0070_read_from_PI.expected deleted file mode 100644 index f599e28b8a..0000000000 --- 
a/tests/system/suites/end_to_end_PI/e/0070_read_from_PI.expected +++ /dev/null @@ -1 +0,0 @@ -10 diff --git a/tests/system/suites/end_to_end_PI/e/README.rst b/tests/system/suites/end_to_end_PI/e/README.rst deleted file mode 100644 index 9b46d9d2a2..0000000000 --- a/tests/system/suites/end_to_end_PI/e/README.rst +++ /dev/null @@ -1,6 +0,0 @@ -***************** -Extpected Results -***************** - -Files in this directory are the expected results of a test. -All output files have .expected suffix. diff --git a/tests/system/suites/end_to_end_PI/r/README.rst b/tests/system/suites/end_to_end_PI/r/README.rst deleted file mode 100644 index 365c8ab43d..0000000000 --- a/tests/system/suites/end_to_end_PI/r/README.rst +++ /dev/null @@ -1,6 +0,0 @@ -************** -Actual Results -************** - -Files in this directory are the actual results of a test. -All output files have .result suffix. diff --git a/tests/system/suites/end_to_end_PI/suite.cfg b/tests/system/suites/end_to_end_PI/suite.cfg deleted file mode 100644 index 542011e0da..0000000000 --- a/tests/system/suites/end_to_end_PI/suite.cfg +++ /dev/null @@ -1,40 +0,0 @@ -#!/bin/bash - -# FogLAMP server references -export FOGLAMP_SERVER=localhost -export FOGLAMP_PORT=8081 - -export TMP_DIR=/tmp - -# Plugins information -export PLUGIN_COAP_NAME=foglamp-south-coap -export PLUGIN_COAP_REPO=https://github.com/foglamp/${PLUGIN_COAP_NAME} - -# Configurations related to FogLAMP -export SENDING_PROCESS_DATA="North_Readings_to_PI" -export SENDING_PROCESS_STAT="North_statistics_to_PI" -export TMP_FILE_ADD_NORTH_READINGS="${TMP_DIR}/add_north_readings.json" - -# PI server references -export PI_SERVER=pi-server -export PI_SERVER_PORT=5460 -export PI_SERVER_UID=pi-server-uid -export PI_SERVER_PWD=pi-server-pwd -export PI_SERVER_DATABASE=pi-server-db -export CONNECTOR_RELAY_VERSION=x.x - -if [[ ${CONNECTOR_RELAY_VERSION} == "1.x" ]]; then - - export OMF_PRODUCER_TOKEN=omf_north_0001 - -elif [[ ${CONNECTOR_RELAY_VERSION} == "2.x" 
]]; then - - export OMF_PRODUCER_TOKEN=xxx -fi - - -# Define the asset information -export ASSET_CODE="fogbench/smoke_test" - -# Maximum numbers of retries to attempt an operation (usually corresponds to 600 seconds) -export RETRY_COUNT=600 diff --git a/tests/system/suites/end_to_end_PI/suite.desc b/tests/system/suites/end_to_end_PI/suite.desc deleted file mode 100644 index d13bdc7540..0000000000 --- a/tests/system/suites/end_to_end_PI/suite.desc +++ /dev/null @@ -1 +0,0 @@ -System test against PI Server. diff --git a/tests/system/suites/end_to_end_PI/t/0010_prepare.test b/tests/system/suites/end_to_end_PI/t/0010_prepare.test deleted file mode 100755 index 376602ef9c..0000000000 --- a/tests/system/suites/end_to_end_PI/t/0010_prepare.test +++ /dev/null @@ -1,15 +0,0 @@ -#!/bin/bash - -# Reads configuration setting -source ${SUITE_BASEDIR}/suite.cfg - -if [[ ${PI_SERVER} == "pi-server" ]] -then - echo ERROR : the file suite.cfg should be modified using the appropriated values. - exit 1 -fi - -$TEST_BASEDIR/bash/exec_any_foglamp_command.bash stop > /dev/null 2>&1 -$TEST_BASEDIR/bash/check_foglamp_status.bash - -echo -e "YES" | $TEST_BASEDIR/bash/exec_any_foglamp_command.bash reset > /dev/null 2>&1 diff --git a/tests/system/suites/end_to_end_PI/t/0020_start.desc b/tests/system/suites/end_to_end_PI/t/0020_start.desc deleted file mode 100644 index 3affc1d52a..0000000000 --- a/tests/system/suites/end_to_end_PI/t/0020_start.desc +++ /dev/null @@ -1 +0,0 @@ -Start FogLAMP diff --git a/tests/system/suites/end_to_end_PI/t/0020_start.test b/tests/system/suites/end_to_end_PI/t/0020_start.test deleted file mode 100755 index 9f8583a63e..0000000000 --- a/tests/system/suites/end_to_end_PI/t/0020_start.test +++ /dev/null @@ -1,29 +0,0 @@ -#!/bin/bash - -# Declares used variables -declare TEST_BASEDIR -declare SUITE_BASEDIR -declare FOGLAMP_SERVER -declare FOGLAMP_PORT -declare RESULT_DIR -declare TEST_NAME -declare SENDING_PROCESS_DATA -declare PI_SERVER -declare PI_SERVER_PORT 
-declare OMF_PRODUCER_TOKEN - -# Reads configuration setting -source ${SUITE_BASEDIR}/suite.cfg - -$TEST_BASEDIR/bash/exec_any_foglamp_command.bash start > ${RESULT_DIR}/$TEST_NAME.temp 2>&1 -tail -n1 ${RESULT_DIR}/$TEST_NAME.temp - -$TEST_BASEDIR/bash/wait_foglamp_status.bash RUNNING 1>>"${RESULT_DIR}/${TEST_NAME}_out.temp" 2>>"${RESULT_DIR}/${TEST_NAME}_err.temp" - -# Checks if the FogLAMP server is reachable -curl -s -X GET http://${FOGLAMP_SERVER}:${FOGLAMP_PORT}/foglamp/category/ > /dev/null 2>&1 -if [[ "$?" != "0" ]]; then - - echo "ERROR : FogLAMP server not reachable, server:port -${FOGLAMP_SERVER}:${FOGLAMP_PORT}-." - exit 1 -fi diff --git a/tests/system/suites/end_to_end_PI/t/0025_prepare_plugins.desc b/tests/system/suites/end_to_end_PI/t/0025_prepare_plugins.desc deleted file mode 100644 index 4ccded1df8..0000000000 --- a/tests/system/suites/end_to_end_PI/t/0025_prepare_plugins.desc +++ /dev/null @@ -1 +0,0 @@ -Enable all the required plugins. \ No newline at end of file diff --git a/tests/system/suites/end_to_end_PI/t/0025_prepare_plugins.test b/tests/system/suites/end_to_end_PI/t/0025_prepare_plugins.test deleted file mode 100755 index c1514df958..0000000000 --- a/tests/system/suites/end_to_end_PI/t/0025_prepare_plugins.test +++ /dev/null @@ -1,7 +0,0 @@ -#!/bin/bash - -# Declares used variables -declare TEST_BASEDIR - -$TEST_BASEDIR/bash/enable_plugin_coap.bash - diff --git a/tests/system/suites/end_to_end_PI/t/0030_prepare_PI.test b/tests/system/suites/end_to_end_PI/t/0030_prepare_PI.test deleted file mode 100755 index abf4a1946d..0000000000 --- a/tests/system/suites/end_to_end_PI/t/0030_prepare_PI.test +++ /dev/null @@ -1,64 +0,0 @@ -#!/bin/bash - -# Declares used variables -declare TEST_BASEDIR -declare SUITE_BASEDIR -declare FOGLAMP_SERVER -declare FOGLAMP_PORT -declare RESULT_DIR -declare TEST_NAME -declare SENDING_PROCESS_DATA -declare PI_SERVER -declare PI_SERVER_PORT -declare OMF_PRODUCER_TOKEN - -# Reads configuration setting -source 
${SUITE_BASEDIR}/suite.cfg - - -# Redirects std out/err for all the following commands -exec 7>&1 # Backups stdout -exec 8>&1 # Backups stderr -exec 1>>"${RESULT_DIR}/${TEST_NAME}_out.temp" -exec 2>>"${RESULT_DIR}/${TEST_NAME}_err.temp" - - -# Enables the pi_server plugin -bash -c "cat > ${TMP_FILE_ADD_NORTH_READINGS}" << 'EOF' - { - "name": "North_Readings_to_PI", - "plugin": "PI_Server_V2", - "type": "north", - "schedule_type": 3, - "schedule_day": 0, - "schedule_time": 0, - "schedule_repeat": 30, - "schedule_enabled": true - } -EOF - -curl -X POST http://${FOGLAMP_SERVER}:${FOGLAMP_PORT}/foglamp/scheduled/task -d@${TMP_FILE_ADD_NORTH_READINGS} - - -# Waits until the OMF plugin has created the default configurations -${TEST_BASEDIR}/bash/wait_creation_cfg.bash "${SENDING_PROCESS_DATA}/producerToken" -if [[ "$?" != "0" ]]; then - exit 1 -fi - -# Configures FogLAMP with the required settings -curl -s -X PUT http://${FOGLAMP_SERVER}:${FOGLAMP_PORT}/foglamp/category/${SENDING_PROCESS_DATA}/URL -d '{ "value" : "https://'${PI_SERVER}':'${PI_SERVER_PORT}'/ingress/messages"}' -curl -s -X PUT http://${FOGLAMP_SERVER}:${FOGLAMP_PORT}/foglamp/category/${SENDING_PROCESS_DATA}/producerToken -d '{ "value" : "'${OMF_PRODUCER_TOKEN}'" }' - -# Restarts FogLAMP to ensure the new configurations are used -${TEST_BASEDIR}/bash/exec_any_foglamp_command.bash stop > /dev/null 2>&1 - -$TEST_BASEDIR/bash/wait_foglamp_status.bash STOPPED - -# Restore stdout/stderr -exec 1>&7 -exec 2>&8 - -${TEST_BASEDIR}/bash/exec_any_foglamp_command.bash start > ${RESULT_DIR}/${TEST_NAME}.2.temp 2>&1 -tail -n1 ${RESULT_DIR}/${TEST_NAME}.2.temp - diff --git a/tests/system/suites/end_to_end_PI/t/0035_wait_for_start.test b/tests/system/suites/end_to_end_PI/t/0035_wait_for_start.test deleted file mode 100755 index 8e752f7745..0000000000 --- a/tests/system/suites/end_to_end_PI/t/0035_wait_for_start.test +++ /dev/null @@ -1,3 +0,0 @@ -#!/bin/bash - -$TEST_BASEDIR/bash/sleep.bash 10 \ No newline at end of file 
diff --git a/tests/system/suites/end_to_end_PI/t/0040_inject.test b/tests/system/suites/end_to_end_PI/t/0040_inject.test deleted file mode 100755 index 739f64965c..0000000000 --- a/tests/system/suites/end_to_end_PI/t/0040_inject.test +++ /dev/null @@ -1,9 +0,0 @@ -#!/bin/bash - -# Reads configuration setting -source ${SUITE_BASEDIR}/suite.cfg - -echo '[{"name":"'${ASSET_CODE}'","sensor_values":[{"name":"sensor","type":"number","min":10,"max":10,"precision":0}]}]' > $RESULT_DIR/$TEST_NAME.1.temp 2>&1 -$TEST_BASEDIR/bash/inject_fogbench_data.bash -t $RESULT_DIR/$TEST_NAME.1.temp > $RESULT_DIR/$TEST_NAME.2.temp 2>&1 -grep '^Total Messages Transferred: ' $RESULT_DIR/$TEST_NAME.2.temp - diff --git a/tests/system/suites/end_to_end_PI/t/0050_wait_for_flush.test b/tests/system/suites/end_to_end_PI/t/0050_wait_for_flush.test deleted file mode 100755 index 3f22d4c7e5..0000000000 --- a/tests/system/suites/end_to_end_PI/t/0050_wait_for_flush.test +++ /dev/null @@ -1,4 +0,0 @@ -#!/bin/bash - -$TEST_BASEDIR/bash/sleep.bash 10 - diff --git a/tests/system/suites/end_to_end_PI/t/0060_read_from_REST.test b/tests/system/suites/end_to_end_PI/t/0060_read_from_REST.test deleted file mode 100755 index 7a3780c52e..0000000000 --- a/tests/system/suites/end_to_end_PI/t/0060_read_from_REST.test +++ /dev/null @@ -1,10 +0,0 @@ -#!/bin/bash - -declare ASSET_CODE - -# Reads configuration setting -source ${SUITE_BASEDIR}/suite.cfg - -$TEST_BASEDIR/bash/count_assets_http.bash -$TEST_BASEDIR/bash/read_an_asset_http.bash "${ASSET_CODE}" > $RESULT_DIR/$TEST_NAME.temp 2>&1 -jq '.[] | .reading' $RESULT_DIR/$TEST_NAME.temp diff --git a/tests/system/suites/end_to_end_PI/t/0070_read_from_PI.test b/tests/system/suites/end_to_end_PI/t/0070_read_from_PI.test deleted file mode 100755 index 83e78b369e..0000000000 --- a/tests/system/suites/end_to_end_PI/t/0070_read_from_PI.test +++ /dev/null @@ -1,141 +0,0 @@ -#!/bin/bash - -# Globals declaration -declare SUITE_BASEDIR -declare RESULT_DIR -declare TEST_NAME - 
-declare PI_SERVER_UID -declare PI_SERVER_PWD -declare PI_SERVER_DATABASE -declare PI_SERVER -declare OMF_PRODUCER_TOKEN -declare CONNECTOR_RELAY_VERSION -declare ASSET_CODE - -declare RETRY_COUNT - -# Reads configuration setting -source ${SUITE_BASEDIR}/suite.cfg - -function pi_web_retrieves_value_1x { - - url_assets_list="" - url_asset="" - value="" - - # Retrieves the asset list - url_assets_list=`curl -s -u ${PI_SERVER_UID}:${PI_SERVER_PWD} -X GET -k ${url_elements_list} | jq --raw-output '.Items | .[] | select(.Name=="'${OMF_PRODUCER_TOKEN}'") | .Links | .Elements' 2>> $RESULT_DIR/$TEST_NAME.2.temp ` - echo url_assets_list :${url_assets_list}: >> $RESULT_DIR/$TEST_NAME.1.temp 2>&1 - - if [[ "${url_assets_list}" != "" ]]; then - - # Retrieves asset information - url_asset=`curl -s -u ${PI_SERVER_UID}:${PI_SERVER_PWD} -X GET -k ${url_assets_list} | jq --raw-output '.Items | .[] | select(.Name=="'${ASSET_CODE}'") | .Links | .EndValue' 2>> $RESULT_DIR/$TEST_NAME.2.temp ` - echo url_asset :${url_asset}: >> $RESULT_DIR/$TEST_NAME.1.temp 2>&1 - fi - - if [[ "${url_asset}" != "" ]]; then - - # Retrieves the value - value=`curl -s -u ${PI_SERVER_UID}:${PI_SERVER_PWD} -X GET -k ${url_asset} | jq --raw-output '.Items | .[] | select(.Name=="sensor") | .Value | .Value' 2>> $RESULT_DIR/$TEST_NAME.2.temp ` - echo value :${value}: >> $RESULT_DIR/$TEST_NAME.1.temp 2>&1 - fi - - echo ${value} -} - -function pi_web_retrieves_value_2x { - - url_assets_list="" - url_asset="" - value="" - - # Retrieves the asset list - if [[ "${url_assets_list}" == "" && "${url_elements_list}" != "" ]]; then - - url_assets_list=`curl -s -u ${PI_SERVER_UID}:${PI_SERVER_PWD} -X GET -k ${url_elements_list} | jq --raw-output '.Items | .[] | select(.Name=="'${ASSET_CODE}'") | .Links | .EndValue' 2>> $RESULT_DIR/$TEST_NAME.2.temp ` - echo url_assets_list :${url_assets_list}: >> $RESULT_DIR/$TEST_NAME.1.temp 2>&1 - fi - - if [[ "${url_assets_list}" != "" ]]; then - - url_asset=${url_assets_list} - fi - - 
if [[ "${url_asset}" != "" ]]; then - - # Retrieves the value - value=`curl -s -u ${PI_SERVER_UID}:${PI_SERVER_PWD} -X GET -k ${url_asset} | jq --raw-output '.Items | .[] | select(.Name=="sensor") | .Value | .Value' 2>> $RESULT_DIR/$TEST_NAME.2.temp ` - echo value 1 :${value}: >> $RESULT_DIR/$TEST_NAME.1.temp 2>&1 - - # 2 reads to improve the stability of the test - sleep 5 - value=`curl -s -u ${PI_SERVER_UID}:${PI_SERVER_PWD} -X GET -k ${url_asset} | jq --raw-output '.Items | .[] | select(.Name=="sensor") | .Value | .Value' 2>> $RESULT_DIR/$TEST_NAME.2.temp ` - echo value 2 :${value}: >> $RESULT_DIR/$TEST_NAME.1.temp 2>&1 - fi - - echo ${value} -} - -# -# MAIN -# - -# -# Drills down into PI-WEB information to extract the expected value from the PI-Server -# -url_databases="" -url_elements="" -url_elements_list="" - -# Waits until either the data is available in the PI server or it reaches the timeout -count=0 -while [ true ] -do - - if [[ "${url_databases}" == "" ]]; then - - url_databases=`curl -s -u ${PI_SERVER_UID}:${PI_SERVER_PWD} -X GET -k https://${PI_SERVER}/piwebapi/assetservers | jq --raw-output '.Items | .[] | .Links | .Databases ' 2>> $RESULT_DIR/$TEST_NAME.2.temp ` - echo url_Databases :${url_databases}: >> $RESULT_DIR/$TEST_NAME.1.temp 2>&1 - fi - - if [[ "${url_elements}" == "" && "${url_databases}" != "" ]]; then - # - url_elements=`curl -s -u ${PI_SERVER_UID}:${PI_SERVER_PWD} -X GET -k ${url_databases} | jq --raw-output '.Items | .[] | select(.Name=="'${PI_SERVER_DATABASE}'") | .Links | .Elements' 2>> $RESULT_DIR/$TEST_NAME.2.temp ` - echo url_Elements :${url_elements}: >> $RESULT_DIR/$TEST_NAME.1.temp 2>&1 - fi - - # - if [[ "${url_elements_list}" == "" && "${url_elements}" != "" ]]; then - - url_elements_list=`curl -s -u ${PI_SERVER_UID}:${PI_SERVER_PWD} -X GET -k ${url_elements} | jq --raw-output '.Items | .[] | .Links | .Elements' 2>> $RESULT_DIR/$TEST_NAME.2.temp ` - echo url_Elements_list :${url_elements_list}: >> 
$RESULT_DIR/$TEST_NAME.1.temp 2>&1 - fi - - if [[ ${CONNECTOR_RELAY_VERSION} == "1.x" ]]; then - - value=$(pi_web_retrieves_value_1x) - - elif [[ ${CONNECTOR_RELAY_VERSION} == "2.x" ]]; then - - value=$(pi_web_retrieves_value_2x) - fi - - if [[ "${value}" != "" && "${value}" != *"PI Point not found"* ]]; then - - echo Value retrieved - N. of retries :${count}: >> $RESULT_DIR/$TEST_NAME.1.temp 2>&1 - echo ${value} - exit 0 - else - if [[ $count -le ${RETRY_COUNT} ]] - then - sleep 1 - count=$((count+1)) - else - echo Timeout reached - N. of retries :${count}: >> $RESULT_DIR/$TEST_NAME.1.temp 2>&1 - echo ${value} - exit 1 - fi - fi -done diff --git a/tests/system/suites/end_to_end_PI/t/README.rst b/tests/system/suites/end_to_end_PI/t/README.rst deleted file mode 100644 index f311d8144e..0000000000 --- a/tests/system/suites/end_to_end_PI/t/README.rst +++ /dev/null @@ -1,6 +0,0 @@ -************ -Test scripts -************ - -Files in this directory are bash scripts used to run a test. -All bash files have .test suffix. diff --git a/tests/system/suites/foglamp-test b/tests/system/suites/foglamp-test deleted file mode 100755 index 73dea8443e..0000000000 --- a/tests/system/suites/foglamp-test +++ /dev/null @@ -1,400 +0,0 @@ -#!/bin/bash -############################################################################## -# -# FogLAMP System Test Running Script -# Copyright (C) 2018 Dianomic Systems, Inc. -# -############################################################################## - - -############################################################################## -# -## This script is used to run system tests. It can call any test suite present -## in the local directory. 
-# -############################################################################## - -#set -e -#set -x - -# FOGLAMP_ROOT and FOGLAMP_DATAi: -# change these variables if you know the tests are going to need them -# Alternatively, let the script find FogLAMP -FACTORY_SETTING_ROOT="/usr/local/foglamp" -FACTORY_SETTING_DATA="/usr/local/foglamp/data" - -## -## Environment Variables -## These variables will be used in the test suites and files -## -export SUITE_NAME="" # Name of the suite to execute -export FOGLAMP_EXE="" # Call to the foglamp script -export FOGBENCH_EXE="" # Call to the fogbench script -export SUITE_BASEDIR="" # Suite Directory -export TEST_BASEDIR="" # Base Directory for tests -export RESULT_DIR="" # Result Directory -export TEST_NAME="" # Name of the suite file to execute - -## Other variables -foglamp_from_package=false # True if the scripts finds a foglamp script -print_list=false # By default do not print any list -AVAILABLE_TEST_SUITES="" # Array of available test suites -TEST_NAMES="" # Array of tests in a suite to execute - - -## Script Header -print_header() { - - echo "##### FogLAMP System Test #####" - -} - - -## Print the list of test suites -print_suite_list() { - - echo "Available test suites:" - for test_suite in $AVAILABLE_TEST_SUITES; do - suite_description=$(extract_desc_from_suite $test_suite) - echo "${test_suite}: ${suite_description}" - done - -} - - -## Extract the text from the suite.desc file in the suite directory -extract_desc_from_suite() { - - if [[ -e "${1}/suite.desc" ]]; then - suite_description=`cat ${1}/suite.desc` - else - suite_description="" - fi - echo "$suite_description" - -} - - -## Print the list of tests in a suite -print_test_list() { - - echo "Tests in the ${SUITE_NAME} suite:" - - for test_file in $TEST_NAMES; do - test_name=$(extract_file_test_name $test_file) - test_description=$(extract_desc_from_test $test_file) - row_to_print="${test_name}: ${test_description}" - echo "$row_to_print" - - # Extract 
original test files and descriptions - original_test_files=$(extract_test_files $test_file) - for single_test_file in ${original_test_files}; do - desc_file="${TEST_BASEDIR}/${single_test_file}.desc" - - if [[ -e "${desc_file}" ]]; then - desc=`cat ${desc_file}` - else - desc="" - fi - echo ">>> ${single_test_file}: ${desc}" - done - done - echo - -} - - -## Extract the text from the .desc file in the /t directory -extract_desc_from_test() { - - desc_file=`echo $1 | sed 's/.test$/.desc/'` - if [[ -e "${desc_file}" ]]; then - test_description=`cat ${desc_file}` - else - test_description="" - fi - echo "$test_description" - -} - - -## Get the test files from the test file suite -extract_test_files() { - test_files=`grep -v '^ *#' $1 | grep -o '\$TEST_BASEDIR/.*' | cut -d' ' -f1 | sed 's/$TEST_BASEDIR\///' | sed 's/.bash$//'` - echo $test_files -} - - -## Script variables print -print_vars() { - - echo "Script Suite: $SUITE_NAME" - echo "Suite DIR: $SUITE_BASEDIR" - echo "Test DIR: $TEST_BASEDIR" - echo "FogLAMP Root: $FOGLAMP_ROOT" - echo "FogLAMP Data: $FOGLAMP_DATA" - echo "foglamp command: $FOGLAMP_EXE" - echo "fogbench command: $FOGBENCH_EXE" - if [ "${foglamp_from_package}" = true ]; then - echo "FogLAMP from a package" - fi - echo - -} - - -## Check if a suite is in the suite list -check_suite_list() { - suite_found=false - for test_suite in $AVAILABLE_TEST_SUITES; do - if [[ "${test_suite}" == "${1}" ]]; then - suite_found=true - break - fi - done - - echo $suite_found -} - - -## Set FogLAMP variables -check_env_vars() { - - # Check if a foglamp command exists - that usually means that a snap is installed - if ! [[ -x "$(command -v foglamp)" ]]; then - - foglamp_from_package=false - - # Check FOGLAMP_ROOT and set it if it does not exist - if [ -z ${FOGLAMP_ROOT+x} ]; then - FOGLAMP_ROOT="$FACTORY_SETTING_ROOT" - export FOGLAMP_ROOT - fi - - # Check if the FOGLAMP_ROOT directory exists - if [[ ! 
-d "${FOGLAMP_ROOT}" ]]; then - echo "The test cannot be executed:" - echo "- foglamp is not a valid command" - echo "- ${FOGLAMP_ROOT} is not a valid directory." - exit 1 - else - if [[ -x "${FOGLAMP_ROOT}/bin/foglamp" ]]; then - FOGLAMP_EXE="${FOGLAMP_ROOT}/bin/foglamp" - else - if [[ -x "${FOGLAMP_ROOT}/scripts/foglamp" ]]; then - FOGLAMP_EXE="${FOGLAMP_ROOT}/scripts/foglamp" - else - echo "The test cannot be executed: the foglamp script is not present in ${FOGLAMP_ROOT}." - exit 1 - fi - fi - fi - else - foglamp_from_package=true - FOGLAMP_EXE="foglamp" - fi - - # Check if fogbench exists - if ! [[ -x "$(command -v foglamp.fogbench)" ]]; then - if [[ -x "${FOGLAMP_ROOT}/bin/fogbench" ]]; then - FOGBENCH_EXE="${FOGLAMP_ROOT}/bin/fogbench" - else - if [[ -x "${FOGLAMP_ROOT}/scripts/extras/fogbench" ]]; then - FOGBENCH_EXE="${FOGLAMP_ROOT}/scripts/extras/fogbench" - fi - fi - else - FOGBENCH_EXE="foglamp.fogbench" - fi - -} - - -## Get the file name of a test -extract_file_test_name() { - file_name=`basename $1` - echo $file_name | sed 's/.test$//' -} - - -## Clean the results directory -cleanup_results() { - rm -f ${SUITE_NAME}/r/*.result # Result files - rm -f ${SUITE_NAME}/r/*.temp # Temp files -} - - -## Convert milliseconds into seconds with decimals -convert_msec_to_sec() { - echo $1 | awk '{print substr($1,1,length($1)-3)"."substr($1,length($1)-2,3)}' -} - - -## Print the help screen -do_help() { - - echo - echo "foglamp-test : Execute a FogLAMP System Test Suite" - echo - echo "Usage: foglamp-test [test-suite] [parameters]" - echo - echo "test-suite: a directory suite in the local directory" - echo "parameters: a valid parameter with or without a test-suite name" - echo " -h | --help Print this help, it takes priority over the test-suite" - echo " -l | --list (without test-suite name) Print the list of test suites" - echo " (with a test-suite name) Print the list of tests in a suite" - echo - -} - - -################## -### MAIN LOGIC ### 
-################## - -# Retrieve the list of test suites -AVAILABLE_TEST_SUITES=`find * -maxdepth 0 -type d` - -### Parameter Check ### -while [ "$1" != "" ]; do - case "$1" in - -l | --list ) - print_list=true - ;; - -h | --help ) - do_help - exit 0 - ;; - * ) - # This may be the test suite name - if [[ -z "${SUITE_NAME}" ]]; then - if [ "$(check_suite_list $1)" = true ]; then - SUITE_NAME=$1 - else - echo "Not a valid test-suite name." - echo "Try foglamp-test --help for more info" - echo - exit 1 - fi - else - # It looks like the user typed more than one test-suite name - echo "You can only execute one test suite at a time" - exit 1 - fi - ;; - esac - shift -done - -# Time to decide what to do -if [[ -z "${SUITE_NAME}" ]]; then - - # No test-suite selected - if [ "${print_list}" = true ]; then - - # --list: print the list of test suites - print_header - print_suite_list - - else - - # Nothing passed: print usage - echo "Usage: foglamp-test [test-suite] [parameters] [--help]" - fi - -else - - # test-suite selected - - TEST_BASEDIR=`realpath ../tests` # Set the test dir - SUITE_BASEDIR=`realpath ${SUITE_NAME}` # Set the suite dir - RESULT_DIR=`realpath ${SUITE_NAME}/r` # Set the Result Dir - TEST_NAMES=${SUITE_NAME}/t/*.test # Collect the list of tests in the suite - - print_header - - if [ "${print_list}" = true ]; then - - echo "Test Suite: ${SUITE_NAME}" - echo $(extract_desc_from_suite ${SUITE_NAME}) - echo - - # --list: print the list of tests in the test suite - print_test_list - - else - - # Execute the test suite - check_env_vars - print_vars - cleanup_results - - suite_result="COMPLETED" - echo "Suite Start: $(date +'%Y-%m-%d %H:%M:%S.%6N')" - suite_start=$(date +%s%N) - - # Main Loop to execute tests - for test_to_execute in $TEST_NAMES; do - TEST_NAME=$(extract_file_test_name $test_to_execute) - echo -n "[$(date +'%Y-%m-%d %H:%M:%S.%6N')] - $TEST_NAME - " - test_start=$(date +%s%N) - $test_to_execute > ${SUITE_NAME}/r/${TEST_NAME}.result 2>&1 - 
test_end=$(date +%s%N) - elapsed_time=$((($test_end - $test_start)/1000000 )) - - # Check if the two files (expect and result) exist - if [[ -e "${SUITE_NAME}/e/${TEST_NAME}.expected" ]]; then - - # The expected output exists - if [[ -e "${SUITE_NAME}/r/${TEST_NAME}.result" ]]; then - - # Expected and result output exist - check_diff=`diff ${SUITE_NAME}/r/${TEST_NAME}.result ${SUITE_NAME}/e/${TEST_NAME}.expected | wc -l` - if [[ "$check_diff" -eq 0 ]]; then - test_result="PASSED" - else - test_result="FAILED" - extra_result_info="Expect/Result MISMATCH" - fi - - else # Expected exists - Result does not exist - - test_result="FAILED" - extra_result_info="MISSING Result" - - fi - - else # There is no expected output - - if [[ -e "${SUITE_NAME}/r/${TEST_NAME}.result" ]]; then - # Expected does not exist - Result exists - test_result="FAILED" - extra_result_info="UNEXPECTED Result" - else # Expected and Result do not exist - test_result="PASSED" - fi - - fi - - echo -n "[$(date +'%Y-%m-%d %H:%M:%S.%6N')] ($(convert_msec_to_sec $elapsed_time) seconds) - ${test_result}" - if [[ "${test_result}" == "FAILED" ]]; then - echo -n " - ${extra_result_info}" - echo - suite_result="INCOMPLETED" - break - else - echo - fi - done - - suite_end=$(date +%s%N) - elapsed_time=$((($suite_end - $suite_start)/1000000 )) - echo "Total Execution Time: $(convert_msec_to_sec $elapsed_time) seconds." - echo "Suite End: $(date +'%Y-%m-%d %H:%M:%S.%6N') - $suite_result" - - fi - -fi - -exit 0 - diff --git a/tests/system/suites/smoke/e/0010_prepare.expected b/tests/system/suites/smoke/e/0010_prepare.expected deleted file mode 100644 index 3f926844de..0000000000 --- a/tests/system/suites/smoke/e/0010_prepare.expected +++ /dev/null @@ -1 +0,0 @@ -FogLAMP not running. 
diff --git a/tests/system/suites/smoke/e/0020_start.expected b/tests/system/suites/smoke/e/0020_start.expected deleted file mode 100644 index 0764b28a1b..0000000000 --- a/tests/system/suites/smoke/e/0020_start.expected +++ /dev/null @@ -1 +0,0 @@ -FogLAMPvrunning diff --git a/tests/system/suites/smoke/e/0030_prepare_plugins.expected b/tests/system/suites/smoke/e/0030_prepare_plugins.expected deleted file mode 100644 index 359be07fa3..0000000000 --- a/tests/system/suites/smoke/e/0030_prepare_plugins.expected +++ /dev/null @@ -1,5 +0,0 @@ -{ - "name": "coap", - "type": "Southbound", - "status": "running" -} diff --git a/tests/system/suites/smoke/e/0035_wait_for_start.expected b/tests/system/suites/smoke/e/0035_wait_for_start.expected deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/tests/system/suites/smoke/e/0040_inject.expected b/tests/system/suites/smoke/e/0040_inject.expected deleted file mode 100644 index 0420691aae..0000000000 --- a/tests/system/suites/smoke/e/0040_inject.expected +++ /dev/null @@ -1 +0,0 @@ -Total Messages Transferred: 1 diff --git a/tests/system/suites/smoke/e/0050_wait_for_flush.expected b/tests/system/suites/smoke/e/0050_wait_for_flush.expected deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/tests/system/suites/smoke/e/0060_read_from_REST.expected b/tests/system/suites/smoke/e/0060_read_from_REST.expected deleted file mode 100644 index 256390650d..0000000000 --- a/tests/system/suites/smoke/e/0060_read_from_REST.expected +++ /dev/null @@ -1,9 +0,0 @@ -[ - { - "assetCode": "fogbench/smoke_test", - "count": 1 - } -] -{ - "sensor": 10 -} diff --git a/tests/system/suites/smoke/e/README.rst b/tests/system/suites/smoke/e/README.rst deleted file mode 100644 index 9b46d9d2a2..0000000000 --- a/tests/system/suites/smoke/e/README.rst +++ /dev/null @@ -1,6 +0,0 @@ -***************** -Extpected Results -***************** - -Files in this directory are the expected results of a test. 
-All output files have .expected suffix. diff --git a/tests/system/suites/smoke/r/README.rst b/tests/system/suites/smoke/r/README.rst deleted file mode 100644 index 365c8ab43d..0000000000 --- a/tests/system/suites/smoke/r/README.rst +++ /dev/null @@ -1,6 +0,0 @@ -************** -Actual Results -************** - -Files in this directory are the actual results of a test. -All output files have .result suffix. diff --git a/tests/system/suites/smoke/suite.cfg b/tests/system/suites/smoke/suite.cfg deleted file mode 100644 index de273bb171..0000000000 --- a/tests/system/suites/smoke/suite.cfg +++ /dev/null @@ -1,41 +0,0 @@ -#!/bin/bash - -# FogLAMP server references -export FOGLAMP_SERVER=localhost -export FOGLAMP_PORT=8081 - -export TMP_DIR=/tmp - -# Plugins information -export PLUGIN_COAP_NAME=foglamp-south-coap -export PLUGIN_COAP_REPO=https://github.com/foglamp/${PLUGIN_COAP_NAME} - -# Configurations related to FogLAMP -export SENDING_PROCESS_DATA="North%20Readings%20to%20PI" -export SENDING_PROCESS_STAT="North%20Statistics%20to%20PI" - -# PI server references -export PI_SERVER=pi-server -export PI_SERVER_PORT=5460 -export PI_SERVER_UID=pi-server-uid -export PI_SERVER_PWD=pi-server-pwd -export PI_SERVER_DATABASE=pi-server-db -export CONNECTOR_RELAY_VERSION=x.x - -# Identifies sensors and measurements types -export OMF_TYPE_ID=0001 - -if [[ ${CONNECTOR_RELAY_VERSION} == "1.x" ]]; then - - export OMF_PRODUCER_TOKEN=omf_north_${OMF_TYPE_ID} - -elif [[ ${CONNECTOR_RELAY_VERSION} == "2.x" ]]; then - - export OMF_PRODUCER_TOKEN=xxx -fi - -# Define the asset information -export ASSET_CODE="fogbench/smoke_test" - -# Maximum numbers of retries to attempt an operation (usually corresponds to 600 seconds) -export RETRY_COUNT=600 diff --git a/tests/system/suites/smoke/suite.desc b/tests/system/suites/smoke/suite.desc deleted file mode 100644 index 71c732699b..0000000000 --- a/tests/system/suites/smoke/suite.desc +++ /dev/null @@ -1 +0,0 @@ -Smoke Test suite diff --git 
a/tests/system/suites/smoke/t/0010_prepare.test b/tests/system/suites/smoke/t/0010_prepare.test deleted file mode 100755 index 7eb2a74039..0000000000 --- a/tests/system/suites/smoke/t/0010_prepare.test +++ /dev/null @@ -1,12 +0,0 @@ -#!/bin/bash - -$TEST_BASEDIR/bash/exec_any_foglamp_command.bash stop 1>>"${RESULT_DIR}/${TEST_NAME}_out.temp" 2>>"${RESULT_DIR}/${TEST_NAME}_err.temp" -$TEST_BASEDIR/bash/exec_any_foglamp_command.bash kill 1>>"${RESULT_DIR}/${TEST_NAME}_out.temp" 2>>"${RESULT_DIR}/${TEST_NAME}_err.temp" -$TEST_BASEDIR/bash/check_foglamp_status.bash - -# Double reset to ensure the execution -echo -e "YES" | $TEST_BASEDIR/bash/exec_any_foglamp_command.bash reset 1>>"${RESULT_DIR}/${TEST_NAME}_out.temp" 2>>"${RESULT_DIR}/${TEST_NAME}_err.temp" - -$TEST_BASEDIR/bash/sleep.bash 6 - -echo -e "YES" | $TEST_BASEDIR/bash/exec_any_foglamp_command.bash reset 1>>"${RESULT_DIR}/${TEST_NAME}_out.temp" 2>>"${RESULT_DIR}/${TEST_NAME}_err.temp" diff --git a/tests/system/suites/smoke/t/0020_start.desc b/tests/system/suites/smoke/t/0020_start.desc deleted file mode 100644 index 3affc1d52a..0000000000 --- a/tests/system/suites/smoke/t/0020_start.desc +++ /dev/null @@ -1 +0,0 @@ -Start FogLAMP diff --git a/tests/system/suites/smoke/t/0020_start.test b/tests/system/suites/smoke/t/0020_start.test deleted file mode 100755 index e162131714..0000000000 --- a/tests/system/suites/smoke/t/0020_start.test +++ /dev/null @@ -1,34 +0,0 @@ -#!/bin/bash - -# Declares used variables -declare TEST_BASEDIR -declare SUITE_BASEDIR -declare FOGLAMP_SERVER -declare FOGLAMP_PORT -declare RESULT_DIR -declare TEST_NAME -declare SENDING_PROCESS_DATA -declare PI_SERVER -declare PI_SERVER_PORT -declare OMF_PRODUCER_TOKEN -declare OMF_TYPE_ID - -# Reads configuration setting -source ${SUITE_BASEDIR}/suite.cfg - -$TEST_BASEDIR/bash/exec_any_foglamp_command.bash start 1>>"${RESULT_DIR}/${TEST_NAME}_out.temp" 2>>"${RESULT_DIR}/${TEST_NAME}_err.temp" - -$TEST_BASEDIR/bash/wait_foglamp_status.bash RUNNING 
1>>"${RESULT_DIR}/${TEST_NAME}_out.temp" 2>>"${RESULT_DIR}/${TEST_NAME}_err.temp" - -$TEST_BASEDIR/bash/check_foglamp_status.bash > ${RESULT_DIR}/$TEST_NAME.temp 2>&1 - -# Grab the 1st line and remove all digits (version #), spaces and dots -head -n1 ${RESULT_DIR}/$TEST_NAME.temp | tr -d '[0-9]. ' - -# Checks if the FogLAMP server is reachable -curl -s -X GET http://${FOGLAMP_SERVER}:${FOGLAMP_PORT}/foglamp/category/ > /dev/null 2>&1 -if [[ "$?" != "0" ]]; then - - echo "ERROR : FogLAMP server not reachable, server:port -${FOGLAMP_SERVER}:${FOGLAMP_PORT}-." - exit 1 -fi \ No newline at end of file diff --git a/tests/system/suites/smoke/t/0030_prepare_plugins.desc b/tests/system/suites/smoke/t/0030_prepare_plugins.desc deleted file mode 100644 index 4ccded1df8..0000000000 --- a/tests/system/suites/smoke/t/0030_prepare_plugins.desc +++ /dev/null @@ -1 +0,0 @@ -Enable all the required plugins. \ No newline at end of file diff --git a/tests/system/suites/smoke/t/0030_prepare_plugins.test b/tests/system/suites/smoke/t/0030_prepare_plugins.test deleted file mode 100755 index c1514df958..0000000000 --- a/tests/system/suites/smoke/t/0030_prepare_plugins.test +++ /dev/null @@ -1,7 +0,0 @@ -#!/bin/bash - -# Declares used variables -declare TEST_BASEDIR - -$TEST_BASEDIR/bash/enable_plugin_coap.bash - diff --git a/tests/system/suites/smoke/t/0035_wait_for_start.test b/tests/system/suites/smoke/t/0035_wait_for_start.test deleted file mode 100755 index 7e49d2dce1..0000000000 --- a/tests/system/suites/smoke/t/0035_wait_for_start.test +++ /dev/null @@ -1,3 +0,0 @@ -#!/bin/bash - -$TEST_BASEDIR/bash/sleep.bash 10 diff --git a/tests/system/suites/smoke/t/0040_inject.test b/tests/system/suites/smoke/t/0040_inject.test deleted file mode 100755 index f3bb67026c..0000000000 --- a/tests/system/suites/smoke/t/0040_inject.test +++ /dev/null @@ -1,6 +0,0 @@ -#!/bin/bash - -echo 
'[{"name":"fogbench/smoke_test","sensor_values":[{"name":"sensor","type":"number","min":10,"max":10,"precision":0}]}]' > $RESULT_DIR/$TEST_NAME.1.temp 2>&1 -$TEST_BASEDIR/bash/inject_fogbench_data.bash -t $RESULT_DIR/$TEST_NAME.1.temp > $RESULT_DIR/$TEST_NAME.2.temp 2>&1 -grep '^Total Messages Transferred: ' $RESULT_DIR/$TEST_NAME.2.temp - diff --git a/tests/system/suites/smoke/t/0050_wait_for_flush.test b/tests/system/suites/smoke/t/0050_wait_for_flush.test deleted file mode 100755 index 3f22d4c7e5..0000000000 --- a/tests/system/suites/smoke/t/0050_wait_for_flush.test +++ /dev/null @@ -1,4 +0,0 @@ -#!/bin/bash - -$TEST_BASEDIR/bash/sleep.bash 10 - diff --git a/tests/system/suites/smoke/t/0060_read_from_REST.test b/tests/system/suites/smoke/t/0060_read_from_REST.test deleted file mode 100755 index 0d764cd47a..0000000000 --- a/tests/system/suites/smoke/t/0060_read_from_REST.test +++ /dev/null @@ -1,6 +0,0 @@ -#!/bin/bash - -$TEST_BASEDIR/bash/count_assets_http.bash -$TEST_BASEDIR/bash/read_an_asset_http.bash "fogbench%2Fsmoke_test" > $RESULT_DIR/$TEST_NAME.temp 2>&1 -jq '.[] | .reading' $RESULT_DIR/$TEST_NAME.temp - diff --git a/tests/system/suites/smoke/t/README.rst b/tests/system/suites/smoke/t/README.rst deleted file mode 100644 index f311d8144e..0000000000 --- a/tests/system/suites/smoke/t/README.rst +++ /dev/null @@ -1,6 +0,0 @@ -************ -Test scripts -************ - -Files in this directory are bash scripts used to run a test. -All bash files have .test suffix. 
diff --git a/tests/system/tests/README.rst b/tests/system/tests/README.rst deleted file mode 100644 index a89a85e420..0000000000 --- a/tests/system/tests/README.rst +++ /dev/null @@ -1,6 +0,0 @@ -***** -Tests -***** - -This directory contains the tests in various formats: -- bash: directory for tests in bash shell diff --git a/tests/system/tests/__init__.py b/tests/system/tests/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/tests/system/tests/bash/README.rst b/tests/system/tests/bash/README.rst deleted file mode 100644 index bdbba867fd..0000000000 --- a/tests/system/tests/bash/README.rst +++ /dev/null @@ -1,10 +0,0 @@ -*************************** -Tests Written in Bash Shell -*************************** - -This directory contains the tests written in Bash shell. - -For each test: -- The test script has .bash suffix -- The test description has .desc suffix -- A long description is in a ReStructuredText file diff --git a/tests/system/tests/bash/check_foglamp_status.bash b/tests/system/tests/bash/check_foglamp_status.bash deleted file mode 100755 index fba60274e3..0000000000 --- a/tests/system/tests/bash/check_foglamp_status.bash +++ /dev/null @@ -1,5 +0,0 @@ -#!/bin/bash - -$FOGLAMP_EXE status - - diff --git a/tests/system/tests/bash/check_foglamp_status.desc b/tests/system/tests/bash/check_foglamp_status.desc deleted file mode 100644 index 18a257c9c3..0000000000 --- a/tests/system/tests/bash/check_foglamp_status.desc +++ /dev/null @@ -1 +0,0 @@ -Execute the foglamp status command and retrieves the result. diff --git a/tests/system/tests/bash/count_assets_http.bash b/tests/system/tests/bash/count_assets_http.bash deleted file mode 100755 index c3f255e3f6..0000000000 --- a/tests/system/tests/bash/count_assets_http.bash +++ /dev/null @@ -1,3 +0,0 @@ -#!/bin/bash - -curl -s http://localhost:8081/foglamp/asset | jq -S '.' 
diff --git a/tests/system/tests/bash/count_assets_https.bash b/tests/system/tests/bash/count_assets_https.bash deleted file mode 100755 index a98edf80aa..0000000000 --- a/tests/system/tests/bash/count_assets_https.bash +++ /dev/null @@ -1,3 +0,0 @@ -#!/bin/bash - -curl -sk https://localhost:1995/foglamp/asset | jq -S '.' diff --git a/tests/system/tests/bash/enable_plugin_coap.bash b/tests/system/tests/bash/enable_plugin_coap.bash deleted file mode 100755 index 47bbf5a4b7..0000000000 --- a/tests/system/tests/bash/enable_plugin_coap.bash +++ /dev/null @@ -1,101 +0,0 @@ -#!/bin/bash - -# Declares used variables -declare SUITE_BASEDIR -declare FOGLAMP_ROOT -declare TMP_DIR -declare FOGLAMP_SERVER -declare FOGLAMP_PORT - -declare TEST_BASEDIR -declare TEST_NAME -declare RESULT_DIR - -declare PLUGIN_COAP_NAME -declare PLUGIN_COAP_REPO - -# Reads configuration setting -source ${SUITE_BASEDIR}/suite.cfg - -# Definitions -COAPFile=${FOGLAMP_ROOT}/python/foglamp/plugins/south/coap/coap.py -COAPRequirementsFile=${TMP_DIR}/${PLUGIN_COAP_NAME}/python/requirements-coap.txt - -# Redirects std out/err for all the following commands -exec 8>&1 # Backups stdout -exec 1>>"${RESULT_DIR}/${TEST_NAME}_out.temp" -exec 2>>"${RESULT_DIR}/${TEST_NAME}_err.temp" - -# -# Checks if the COAP plugin code is already available in the temporary directory -# -if [[ ! -f "${COAPRequirementsFile}" ]] -then - echo "COAP plugin code does not exists in the temporary directory - |${COAPRequirementsFile}|, retrieving the code from the github repository." - - # Extracts the COAP plugin code - cd ${TMP_DIR} - rm -rf ${PLUGIN_COAP_NAME} - git clone ${PLUGIN_COAP_REPO} - cd ${PLUGIN_COAP_NAME} -else - echo "COAP plugin code is already available - |${COAPRequirementsFile}|" -fi - -# -# Checks if the COAP plugin code is already available in the FogLAMP directory tree -# -if [[ ! -f "${COAPFile}" ]] -then - echo "COAP plugin code does not exists in the FogLAMP directory- |${COAPFile}|, copying the code." 
- - # Copies the COAP plugin code into the FogLAMP directory tree - mkdir -p ${FOGLAMP_ROOT}/python/foglamp/plugins/south/coap - cp -r ${TMP_DIR}/${PLUGIN_COAP_NAME}/python/foglamp/plugins/south/coap/* ${FOGLAMP_ROOT}/python/foglamp/plugins/south/coap - -else - echo "COAP plugin code is already available - |${COAPFile}|" -fi - -# -# Installs python libraries required by the plugin -# -pip3 install --user -Ir "${COAPRequirementsFile}" --no-cache-dir -if [[ "$?" != "0" ]]; then - exit 1 -fi - -# -# Enables the plugin -# -curl -k -s -S -X POST http://${FOGLAMP_SERVER}:${FOGLAMP_PORT}/foglamp/service -d '{ "name" : "coap", "type" : "south", "plugin" : "coap", "enabled": true}' | jq -S "." - -# -# Waits the availability of the plugin -# -$TEST_BASEDIR/bash/wait_plugin_available.bash "coap" - -# Checks if the COAP plugin is enabled -export COAP_PLUGIN=`curl -k -s -S -X GET http://${FOGLAMP_SERVER}:${FOGLAMP_PORT}/foglamp/service| jq --raw-output '.services | .[] | select(.name=="coap") | .name'` - -echo COAP_PLUGIN -${COAP_PLUGIN}- - -if [[ "${COAP_PLUGIN}" == "" ]] -then - echo "COAP plugin is not already activated, enabling - |${COAP_PLUGIN}|" - - export SCHEDULE_ID=` curl -k -s -S -X GET http://${FOGLAMP_SERVER}:${FOGLAMP_PORT}/foglamp/schedule | jq --raw-output '.schedules | .[] | select(.processName=="coap") | .id'` - - echo SCHEDULE_ID -${SCHEDULE_ID}- - - curl -k -s -S -X GET http://${FOGLAMP_SERVER}:${FOGLAMP_PORT}/foglamp/schedule/${SCHEDULE_ID} | jq -S "." - - curl -k -s -S -X GET http://${FOGLAMP_SERVER}:${FOGLAMP_PORT}/foglamp/service | jq -S "." 
-else - echo "COAP plugin already active |${COAP_PLUGIN}|" -fi - -# Restore stdout -exec 1>&8 - -curl -k -s -S -X GET http://${FOGLAMP_SERVER}:${FOGLAMP_PORT}/foglamp/service| jq --raw-output '.services | .[] | select(.name=="coap") | {name,type,status}' diff --git a/tests/system/tests/bash/exec_any_foglamp_command.bash b/tests/system/tests/bash/exec_any_foglamp_command.bash deleted file mode 100755 index b7737e995c..0000000000 --- a/tests/system/tests/bash/exec_any_foglamp_command.bash +++ /dev/null @@ -1,5 +0,0 @@ -#!/bin/bash - -$FOGLAMP_EXE $@ - - diff --git a/tests/system/tests/bash/exec_any_foglamp_command.desc b/tests/system/tests/bash/exec_any_foglamp_command.desc deleted file mode 100644 index 6db6fc1493..0000000000 --- a/tests/system/tests/bash/exec_any_foglamp_command.desc +++ /dev/null @@ -1 +0,0 @@ -Execute the foglamp command with any paremeter. diff --git a/tests/system/tests/bash/inject_fogbench_data.bash b/tests/system/tests/bash/inject_fogbench_data.bash deleted file mode 100755 index 2e67c06437..0000000000 --- a/tests/system/tests/bash/inject_fogbench_data.bash +++ /dev/null @@ -1,5 +0,0 @@ -#!/bin/bash - -$FOGBENCH_EXE $@ - - diff --git a/tests/system/tests/bash/pi_delete.bash b/tests/system/tests/bash/pi_delete.bash deleted file mode 100755 index 2cfc7c9466..0000000000 --- a/tests/system/tests/bash/pi_delete.bash +++ /dev/null @@ -1,60 +0,0 @@ -#!/bin/bash - -# Reads configuration setting -source ${SUITE_BASEDIR}/suite.cfg - -# -# Main -# -url_databases=`curl -s -u ${PI_SERVER_UID}:${PI_SERVER_PWD} -X GET -k https://${PI_SERVER}/piwebapi/assetservers | jq --raw-output '.Items | .[] | .Links | .Databases '` -echo url_databases :${url_databases}: > /dev/tty - -# -url_elements=`curl -s -u ${PI_SERVER_UID}:${PI_SERVER_PWD} -X GET -k ${url_databases} | jq --raw-output '.Items | .[] | select(.Name=="'${PI_SERVER_DATABASE}'") | .Links | .Elements'` -echo url_elements :${url_elements}: > /dev/tty - -# -url_elements_list=`curl -s -u 
${PI_SERVER_UID}:${PI_SERVER_PWD} -X GET -k ${url_elements} | jq --raw-output '.Items | .[] | .Links | .Elements'` -echo url_elements_list :${url_elements_list}: > /dev/tty - -# -web_id=`curl -s -u ${PI_SERVER_UID}:${PI_SERVER_PWD} -X GET -k ${url_elements_list} | jq --raw-output '.Items | .[] | select(.Name=="'${OMF_PRODUCER_TOKEN}'") | .WebId '` -echo web_id :${web_id}: > /dev/tty - -# -# Deletes AF hierarchy -# -if [[ ${web_id} != "" ]]; then - - curl -u ${PI_SERVER_UID}:${PI_SERVER_PWD} -X DELETE -k https://${PI_SERVER}/piwebapi/elements/${web_id} -fi - -# -# Deletes AF ElementTemplates (type definition) -# -url_element_templates=`curl -s -u ${PI_SERVER_UID}:${PI_SERVER_PWD} -X GET -k ${url_databases} | jq --raw-output '.Items | .[] | select(.Name=="'${PI_SERVER_DATABASE}'") | .Links | .ElementTemplates'` -echo url_elementTemplates :${url_element_templates}: > /dev/tty - -web_id=`curl -s -u ${PI_SERVER_UID}:${PI_SERVER_PWD} -X GET -k ${url_element_templates} | jq --raw-output '.Items | .[] | select(.Name | contains("'${OMF_TYPE_ID}_${ASSET_CODE}'")) | .WebId '` -echo web_id :${web_id}: > /dev/tty - - -if [[ ${web_id} != "" ]]; then - - curl -u ${PI_SERVER_UID}:${PI_SERVER_PWD} -X DELETE -k https://${PI_SERVER}/piwebapi/elementtemplates/${web_id} -fi - - -# -# Deletes PI server data -# -url_points=`curl -s -u ${PI_SERVER_UID}:${PI_SERVER_PWD} -X GET -k https://${PI_SERVER}/piwebapi/dataservers | jq --raw-output '.Items | .[] | .Links | .Points '` -echo url_points :${url_points}: > /dev/tty - -web_id=`curl -s -u ${PI_SERVER_UID}:${PI_SERVER_PWD} -X GET -k ${url_points} | jq --raw-output '.Items | .[] | select(.Name | contains("'${OMF_PRODUCER_TOKEN}'")) | .WebId '` -echo web_id :${web_id}: > /dev/tty - -if [[ ${web_id} != "" ]]; then - - curl -u ${PI_SERVER_UID}:${PI_SERVER_PWD} -X DELETE -k https://${PI_SERVER}/piwebapi/points/${web_id} -fi diff --git a/tests/system/tests/bash/pi_delete.desc b/tests/system/tests/bash/pi_delete.desc deleted file mode 100644 index 
3767b5f85a..0000000000 --- a/tests/system/tests/bash/pi_delete.desc +++ /dev/null @@ -1 +0,0 @@ -Deletes from AF and PI server the asset created by the test suite end_to_end_PI. diff --git a/tests/system/tests/bash/read_an_asset_http.bash b/tests/system/tests/bash/read_an_asset_http.bash deleted file mode 100755 index 15e11b409a..0000000000 --- a/tests/system/tests/bash/read_an_asset_http.bash +++ /dev/null @@ -1,7 +0,0 @@ -#!/bin/bash - -asset_id="${1/\//%2F}" - -curl -s http://localhost:8081/foglamp/asset/${asset_id} - - diff --git a/tests/system/tests/bash/read_an_asset_https.bash b/tests/system/tests/bash/read_an_asset_https.bash deleted file mode 100755 index 5b88c16270..0000000000 --- a/tests/system/tests/bash/read_an_asset_https.bash +++ /dev/null @@ -1,5 +0,0 @@ -#!/bin/bash - -curl -sk https://localhost:1995/foglamp/asset/$1 - - diff --git a/tests/system/tests/bash/sleep.bash b/tests/system/tests/bash/sleep.bash deleted file mode 100755 index a3c4a30607..0000000000 --- a/tests/system/tests/bash/sleep.bash +++ /dev/null @@ -1,4 +0,0 @@ -#!/bin/bash - -sleep $1 - diff --git a/tests/system/tests/bash/wait_creation_cfg.bash b/tests/system/tests/bash/wait_creation_cfg.bash deleted file mode 100755 index 93ea1e4343..0000000000 --- a/tests/system/tests/bash/wait_creation_cfg.bash +++ /dev/null @@ -1,36 +0,0 @@ -#!/bin/bash - -# It waits until either the requested FogLAMP configuration is created or it reaches the timeout. -count=1 -while [ true ] -do - # Checks if the FogLAMP server is reachable - curl -s -X GET http://${FOGLAMP_SERVER}:${FOGLAMP_PORT}/foglamp/category/ > /dev/null 2>&1 - result=$? - - if [[ "$result" == "0" ]] - then - - curl -s -X GET http://${FOGLAMP_SERVER}:${FOGLAMP_PORT}/foglamp/category/${1} | jq '.value' > /dev/null 2>&1 - result=$? - - if [[ "$result" == "0" ]] - then - echo "FogLAMP configuration :${1}: available - N. 
of retries :${count}:" - exit 0 - else - if [[ $count -le ${RETRY_COUNT} ]] - then - echo "FogLAMP configuration :${1}: not available, result :${result}: - N. of retries :${count}:" - sleep 1 - count=$((count+1)) - else - echo "FogLAMP plugin :${1}: not available - N. of retries :${count}:" - exit 1 - fi - fi - else - echo "ERROR : FogLAMP server not reachable, server:port -${FOGLAMP_SERVER}:${FOGLAMP_PORT}-." > /dev/stderr - exit 1 - fi -done diff --git a/tests/system/tests/bash/wait_creation_cfg.desc b/tests/system/tests/bash/wait_creation_cfg.desc deleted file mode 100644 index c0784467a6..0000000000 --- a/tests/system/tests/bash/wait_creation_cfg.desc +++ /dev/null @@ -1 +0,0 @@ -It waits until either the requested FogLAMP configuration is created or it reaches the timeout. \ No newline at end of file diff --git a/tests/system/tests/bash/wait_foglamp_status.bash b/tests/system/tests/bash/wait_foglamp_status.bash deleted file mode 100755 index fd9668b699..0000000000 --- a/tests/system/tests/bash/wait_foglamp_status.bash +++ /dev/null @@ -1,55 +0,0 @@ -#!/bin/bash - -# -# Expected input parameters : -# -# $1 = FogLAMP status to achieve {RUNNING|STOPPED} -# - -function evaluate_foglamp_status { - - output=$(${FOGLAMP_EXE} status 2>&1) - - # To upper case - output=${output^^} - - if [[ ${output} =~ 'FOGLAMP UPTIME' ]]; then - - status="RUNNING" - - elif [[ ${output} =~ 'FOGLAMP NOT RUNNING.' ]]; then - - status="STOPPED" - else - status="NOT_DEFINED" - fi - - echo ${status} -} - -# Waits until either the requested status of FogLAMP is reached or it reaches the timeout. -count=0 -while [ true ] -do - - value=$(evaluate_foglamp_status) - - if [[ "${value}" == "$1" ]]; then - - echo FogLAMP status reached :${value}: - N. of retries :${count}: >> $RESULT_DIR/$TEST_NAME.1.temp 2>&1 - echo FogLAMP "${value}" - exit 0 - else - if [[ $count -le ${RETRY_COUNT} ]] - then - echo FogLAMP status :$1: not reached, currently :${value}: - N. 
of retries :${count}: >> $RESULT_DIR/$TEST_NAME.1.temp 2>&1 - sleep 1 - count=$((count+1)) - else - echo FogLAMP status reached :${value}: - N. of retries :${count}: >> $RESULT_DIR/$TEST_NAME.1.temp 2>&1 - echo FogLAMP "${value}" - exit 1 - fi - fi -done - diff --git a/tests/system/tests/bash/wait_foglamp_status.desc b/tests/system/tests/bash/wait_foglamp_status.desc deleted file mode 100644 index d87c22d3ce..0000000000 --- a/tests/system/tests/bash/wait_foglamp_status.desc +++ /dev/null @@ -1,2 +0,0 @@ -Waits until either the requested status of FogLAMP is reached or it reaches the timeout -sample call: wait_foglamp_status {"RUNNING"|"STOPPED"}. \ No newline at end of file diff --git a/tests/system/tests/bash/wait_plugin_available.bash b/tests/system/tests/bash/wait_plugin_available.bash deleted file mode 100755 index a9c502a06b..0000000000 --- a/tests/system/tests/bash/wait_plugin_available.bash +++ /dev/null @@ -1,36 +0,0 @@ -#!/bin/bash - -# -# Expected input parameters : -# -# $1 = FogLAMP plugin to evaluate -# - -declare FOGLAMP_SERVER -declare FOGLAMP_PORT -declare RETRY_COUNT - -# Waits until either the requested plug is loaded or the timeout is reached. -count=1 -while [ true ] -do - - # Checks if the plugin is available - value=$(curl -k -s -S -X GET http://${FOGLAMP_SERVER}:${FOGLAMP_PORT}/foglamp/service| jq --raw-output '.services | .[] | select(.name=="'${1}'") | .name') - - if [[ "${value}" == "$1" ]]; then - - echo "FogLAMP plugin :${value}: available - N. of retries :${count}:" - exit 0 - else - if [[ $count -le ${RETRY_COUNT} ]] - then - echo "FogLAMP plugin :${1}: not available, currently :${value}: - N. of retries :${count}:" - sleep 1 - count=$((count+1)) - else - echo "FogLAMP plugin :${1}: not available - N. 
of retries :${count}:" - exit 1 - fi - fi -done diff --git a/tests/system/tests/bash/wait_plugin_available.desc b/tests/system/tests/bash/wait_plugin_available.desc deleted file mode 100644 index 6ab2b6f927..0000000000 --- a/tests/system/tests/bash/wait_plugin_available.desc +++ /dev/null @@ -1 +0,0 @@ -Waits until either the requested plug is loaded or the timeout is reached. \ No newline at end of file diff --git a/tests/system/tests/python/README.rst b/tests/system/tests/python/README.rst deleted file mode 100644 index 5c949b4a93..0000000000 --- a/tests/system/tests/python/README.rst +++ /dev/null @@ -1,10 +0,0 @@ -*********************** -Tests Written in Python -*********************** - -This directory contains the tests written in Python. - -For each test: -- The test script has .py suffix -- The test description has .desc suffix -- A long description is in a RestructuredText file diff --git a/tests/system/tests/python/__init__.py b/tests/system/tests/python/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/tests/system/tests/python/common/__init__.py b/tests/system/tests/python/common/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/tests/system/tests/python/common/ocs.py b/tests/system/tests/python/common/ocs.py deleted file mode 100644 index a0aa4a64df..0000000000 --- a/tests/system/tests/python/common/ocs.py +++ /dev/null @@ -1,129 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- - -# FOGLAMP_BEGIN -# See: http://foglamp.readthedocs.io/ -# FOGLAMP_END - -""" Common module for the interaction with OSI OCS (OSIsoft Cloud Services) """ - -import requests -import json - -__author__ = "Stefano Simonelli" -__copyright__ = "Copyright (c) 2018 OSIsoft, LLC" -__license__ = "Apache 2.0" -__version__ = "${VERSION}" - -OCS_URL = "https://dat-a.osisoft.com" - - -def retrieve_authentication_token(tenant, client_id, client_secret,): - """" Retrieves from OCS the authentication token for the requested 
tenant/client """ - - url = 'https://login.windows.net/{0}/oauth2/token'.format(tenant) - - authorization = requests.post( - url, - data={ - 'grant_type': 'client_credentials', - 'client_id': client_id, - 'client_secret': client_secret, - 'resource': 'https://qihomeprod.onmicrosoft.com/ocsapi' - } - ) - - header = { - 'Authorization': 'bearer %s' % authorization.json()['access_token'], - 'Content-type': 'application/json', - 'Accept': 'text/plain' - } - - return header - - -def delete_object(headers, tenant, namespace, _object): - """" Deletes an OCS object, used against Types and Streams """ - - tenant_url = "/api/Tenants/{}".format(tenant) - api_url = "/Namespaces/{0}/{1}".format(namespace, _object) - url = OCS_URL + tenant_url + api_url - - response = requests.delete(url, headers=headers) - - print('--- Deleted {} -----------------------------------------'.format(api_url)) - - print('\nExit code: |{0}| \n\nText: |{1}| \n\nUrl: |{2}| '.format( - response.status_code, - response.text, - response.url, - )) - - return response.text - - -def delete_object_type(headers, tenant, namespace, _type): - """" Deletes all the items of a type, used for deleting Streams or/and Types """ - - # Retrieves the list of objects to delete - objects_list = call_api(headers, tenant, namespace, _type) - - if objects_list != "[]": - # the translations are needed to being able to convert the string into a dict - objects_list = objects_list.replace(": true", ": 1") - objects_list = objects_list.replace(": false", ": 0") - - object_list_dict = eval(objects_list) - print("\n Number of elements : namespace |{0}| - type |{1}| - N |{2}|".format(namespace, - _type, - len(object_list_dict))) - - for item in object_list_dict: - type_to_del = item['Id'] - - print("to delete |{}|".format(type_to_del)) - - api = "{0}/{1}".format(_type, type_to_del) - - delete_object(headers, tenant, namespace, api) - - -def delete_types_streams(headers, tenant, namespace): - """" Deletes all the types and streams in 
the provided namespace - WARNING: it deletes all the information in the namespace - """ - - delete_object_type(headers, tenant, namespace, "Streams") - - delete_object_type(headers, tenant, namespace, "Types") - - -def call_api(headers, tenant, name_space, api): - """" Calls (read operation) an OCS api and returns a string representing the JSON response """ - - tenant_url = "/api/Tenants/{}".format(tenant) - api_url = "/Namespaces/{0}/{1}".format(name_space, api) - url = OCS_URL + tenant_url + api_url - - response = requests.get(url, headers=headers) - - api_output = response.json() - - api_output_str = json.dumps(api_output) - - return api_output_str - - -def get_values_stream(headers, tenant, namespace, ocs_stream, start_timestamp, values_count): - """" Retrieves N values for a specific asset code """ - - api_url = "Streams/{0}/Data/GetRangeValues?" \ - "startIndex={1}"\ - "&count={2}"\ - .format(ocs_stream, - start_timestamp, - values_count) - - api_output_str = call_api(headers, tenant, namespace, api_url) - - return api_output_str diff --git a/tests/system/tests/python/ocs_clean_namespace.desc b/tests/system/tests/python/ocs_clean_namespace.desc deleted file mode 100644 index 48df606073..0000000000 --- a/tests/system/tests/python/ocs_clean_namespace.desc +++ /dev/null @@ -1 +0,0 @@ -Purges a Namespace deleting all the types and streams \ No newline at end of file diff --git a/tests/system/tests/python/ocs_clean_namespace.py b/tests/system/tests/python/ocs_clean_namespace.py deleted file mode 100644 index 856db2f631..0000000000 --- a/tests/system/tests/python/ocs_clean_namespace.py +++ /dev/null @@ -1,28 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- - -# FOGLAMP_BEGIN -# See: http://foglamp.readthedocs.io/ -# FOGLAMP_END - -""" Purges a Namespace deleting all the types and streams """ - -import common.ocs as ocs -import os - -__author__ = "Stefano Simonelli" -__copyright__ = "Copyright (c) 2018 OSIsoft, LLC" -__license__ = "Apache 2.0" -__version__ = 
"${VERSION}" - - -# ### Main ############################################################################## - -tenant = os.environ['OCS_TENANT'] -client_id = os.environ['OCS_CLIENT_ID'] -client_secret = os.environ['OCS_CLIENT_SECRET'] -namespace = os.environ['OCS_NAMESPACE'] - -headers = ocs.retrieve_authentication_token(tenant, client_id, client_secret) - -ocs.delete_types_streams(headers, tenant, namespace) diff --git a/tests/system/tests/python/ocs_read_an_asset.desc b/tests/system/tests/python/ocs_read_an_asset.desc deleted file mode 100644 index 3e61122471..0000000000 --- a/tests/system/tests/python/ocs_read_an_asset.desc +++ /dev/null @@ -1 +0,0 @@ -Retrieves x values of an OCS stream using the API GetRangeValues \ No newline at end of file diff --git a/tests/system/tests/python/ocs_read_an_asset.py b/tests/system/tests/python/ocs_read_an_asset.py deleted file mode 100644 index 2138d80947..0000000000 --- a/tests/system/tests/python/ocs_read_an_asset.py +++ /dev/null @@ -1,41 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- - -# FOGLAMP_BEGIN -# See: http://foglamp.readthedocs.io/ -# FOGLAMP_END - -""" Retrieves x values of an OCS stream using the API GetRangeValues """ - -import common.ocs as ocs -import os -import sys - -__author__ = "Stefano Simonelli" -__copyright__ = "Copyright (c) 2018 OSIsoft, LLC" -__license__ = "Apache 2.0" -__version__ = "${VERSION}" - - -# ## Main ############################################################################## - -if len(sys.argv) != 2: - print("ERROR: OCS stream name is required as an input parameter") - sys.exit(1) -else: - ocs_stream = sys.argv[1] - # asset_code = os.environ['ASSET_CODE'] - -tenant = os.environ['OCS_TENANT'] -client_id = os.environ['OCS_CLIENT_ID'] -client_secret = os.environ['OCS_CLIENT_SECRET'] -namespace = os.environ['OCS_NAMESPACE'] - -start_timestamp = os.environ['START_TIMESTAMP'] -values_count = os.environ['VALUES_COUNT'] - -headers = ocs.retrieve_authentication_token(tenant, 
client_id, client_secret) - -api_output = ocs.get_values_stream(headers, tenant, namespace, ocs_stream, start_timestamp, values_count) - -print("{0}".format(api_output)) diff --git a/tests/system/tests/python/ocs_read_streams_list.desc b/tests/system/tests/python/ocs_read_streams_list.desc deleted file mode 100644 index f8b4f8941c..0000000000 --- a/tests/system/tests/python/ocs_read_streams_list.desc +++ /dev/null @@ -1 +0,0 @@ -Retrieves the list of Streams defined in OCS for the specific Namespace \ No newline at end of file diff --git a/tests/system/tests/python/ocs_read_streams_list.py b/tests/system/tests/python/ocs_read_streams_list.py deleted file mode 100644 index 27eeea3fe9..0000000000 --- a/tests/system/tests/python/ocs_read_streams_list.py +++ /dev/null @@ -1,31 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- - -# FOGLAMP_BEGIN -# See: http://foglamp.readthedocs.io/ -# FOGLAMP_END - -""" Retrieves the list of Streams defined in OCS for the specific Namespace """ - -import common.ocs as ocs -import os - -__author__ = "Stefano Simonelli" -__copyright__ = "Copyright (c) 2018 OSIsoft, LLC" -__license__ = "Apache 2.0" -__version__ = "${VERSION}" - - -# ## Main ############################################################################## - -tenant = os.environ['OCS_TENANT'] -client_id = os.environ['OCS_CLIENT_ID'] -client_secret = os.environ['OCS_CLIENT_SECRET'] -name_space = os.environ['OCS_NAMESPACE'] - -headers = ocs.retrieve_authentication_token(tenant, client_id, client_secret) - -api = "Streams" -api_output = ocs.call_api(headers, tenant, name_space, api) - -print("{0}".format(api_output)) From 2a19b327cccc1863189c3bf73cd25d65accb5b15 Mon Sep 17 00:00:00 2001 From: pintomax Date: Mon, 29 Apr 2019 09:29:42 +0200 Subject: [PATCH 081/161] FOGL-2765: Fix SQLITE update of JSON columns when values have double quotes (#1538) FOGL-2765: Fix SQLITE update of JSON columns when values have double quotes --- C/plugins/storage/sqlite/common/connection.cpp | 
6 +++--- tests/unit/C/services/storage/sqlite/expected_ETC_UTC/28 | 2 +- .../unit/C/services/storage/sqlite/expected_EUROPE_ROME/28 | 2 +- tests/unit/C/services/storage/sqlite/payloads/update.json | 4 ++-- tests/unit/C/services/storage/sqlite/testSetup.sh | 3 ++- 5 files changed, 9 insertions(+), 8 deletions(-) diff --git a/C/plugins/storage/sqlite/common/connection.cpp b/C/plugins/storage/sqlite/common/connection.cpp index d798636a0b..3c9b43a077 100644 --- a/C/plugins/storage/sqlite/common/connection.cpp +++ b/C/plugins/storage/sqlite/common/connection.cpp @@ -1414,9 +1414,9 @@ SQLBuffer sql; } else { - sql.append("\""); - sql.append(str); - sql.append("\""); + sql.append('\''); + sql.append(escape(str)); + sql.append('\''); } } else if (value.IsDouble()) diff --git a/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/28 b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/28 index fd1a64c2c5..c09ca50e1e 100644 --- a/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/28 +++ b/tests/unit/C/services/storage/sqlite/expected_ETC_UTC/28 @@ -1 +1 @@ -{"count":1,"rows":[{"id":2,"key":"TEST2","description":"updated description","data":{"json":"inserted object"}}]} \ No newline at end of file +{"count":1,"rows":[{"id":2,"key":"TEST2","description":"The 'description' has been \"updated\"","data":{"json":"inserted object"}}]} \ No newline at end of file diff --git a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/28 b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/28 index fd1a64c2c5..c09ca50e1e 100644 --- a/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/28 +++ b/tests/unit/C/services/storage/sqlite/expected_EUROPE_ROME/28 @@ -1 +1 @@ -{"count":1,"rows":[{"id":2,"key":"TEST2","description":"updated description","data":{"json":"inserted object"}}]} \ No newline at end of file +{"count":1,"rows":[{"id":2,"key":"TEST2","description":"The 'description' has been \"updated\"","data":{"json":"inserted object"}}]} \ No newline at end of file 
diff --git a/tests/unit/C/services/storage/sqlite/payloads/update.json b/tests/unit/C/services/storage/sqlite/payloads/update.json index 176acdb392..ad15e9c2e3 100644 --- a/tests/unit/C/services/storage/sqlite/payloads/update.json +++ b/tests/unit/C/services/storage/sqlite/payloads/update.json @@ -5,6 +5,6 @@ "value" : 2 }, "values" : { - "description" : "updated description" - } + "description" : "The 'description' has been \"updated\"" + } } diff --git a/tests/unit/C/services/storage/sqlite/testSetup.sh b/tests/unit/C/services/storage/sqlite/testSetup.sh index ce6ec97c23..533ae6f654 100755 --- a/tests/unit/C/services/storage/sqlite/testSetup.sh +++ b/tests/unit/C/services/storage/sqlite/testSetup.sh @@ -40,7 +40,8 @@ delete from foglamp.configuration; CREATE TABLE IF NOT EXISTS foglamp.configuration ( key character varying(255) NOT NULL, -- Primary key - description character varying(255) NOT NULL, -- Description, in plain text + display_name character varying(255) NOT NULL, -- Display Name + description character varying(255) NOT NULL, -- Description, in plain text value JSON NOT NULL DEFAULT '{}', -- JSON object containing the configuration values ts DATETIME DEFAULT (STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW', 'localtime')), -- Timestamp, updated at every change CONSTRAINT configuration_pkey PRIMARY KEY (key) ); From 787ca38f90aecc11774f9e8fa20ac93db92a14d0 Mon Sep 17 00:00:00 2001 From: amarendra-dianomic Date: Mon, 29 Apr 2019 14:33:34 +0530 Subject: [PATCH 082/161] FOGL-2768 - obselete command extract_plugin_snapshot replaced with cmdutil --- python/foglamp/services/core/snapshot.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/python/foglamp/services/core/snapshot.py b/python/foglamp/services/core/snapshot.py index c606215aa8..a536cf939c 100644 --- a/python/foglamp/services/core/snapshot.py +++ b/python/foglamp/services/core/snapshot.py @@ -109,7 +109,7 @@ def write_to_tar(self, pyz, temp_file, data): def extract_files(self, pyz): # Extraction 
methods are different for production env and dev env if path.exists("{}/bin".format(_FOGLAMP_ROOT)) and path.exists("{}/bin/foglamp".format(_FOGLAMP_ROOT)): - cmd = "{}/extras/C/extract_plugin_snapshot {}".format(_FOGLAMP_ROOT, pyz) + cmd = "{}/extras/C/cmdutil tar-extract {}".format(_FOGLAMP_ROOT, pyz) retcode = os.system(cmd) if retcode != 0: raise OSError('Error {}: {}'.format(retcode, cmd)) From 16b2c1e0daee7bd85e98d39cec0f07026b99b20a Mon Sep 17 00:00:00 2001 From: Amandeep Singh Arora Date: Mon, 29 Apr 2019 12:57:56 +0000 Subject: [PATCH 083/161] Fix for storage server startup crash --- C/services/common/CMakeLists.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/C/services/common/CMakeLists.txt b/C/services/common/CMakeLists.txt index 13c32932bf..51b0f74e30 100644 --- a/C/services/common/CMakeLists.txt +++ b/C/services/common/CMakeLists.txt @@ -3,7 +3,7 @@ cmake_minimum_required(VERSION 2.4.0) project(services-common-lib) set(CMAKE_CXX_FLAGS_DEBUG "-O0 -ggdb") -set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11") +set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11 -O0") set(DLLIB -ldl) # Find source files From 55b6b479d4244bc92487a82dfb743f7caee93f8d Mon Sep 17 00:00:00 2001 From: pintomax Date: Mon, 29 Apr 2019 17:38:36 +0200 Subject: [PATCH 084/161] FOGL-2699: escape double quotes in category name, description and string values (#1540) FOGL-2699: escape double quotes in category name, description and string values --- C/common/config_category.cpp | 79 ++++-------------- C/common/include/config_category.h | 3 +- C/common/include/json_utils.h | 3 + C/common/include/management_client.h | 5 +- C/common/json_utils.cpp | 47 +++++++++++ C/common/management_client.cpp | 6 +- tests/unit/C/common/test_config_category.cpp | 65 +++++++++++++++ .../C/common/test_default_config_category.cpp | 83 ++++++++++++++++++- 8 files changed, 219 insertions(+), 72 deletions(-) diff --git a/C/common/config_category.cpp b/C/common/config_category.cpp index 
3893202b5c..6da8d6cfec 100644 --- a/C/common/config_category.cpp +++ b/C/common/config_category.cpp @@ -98,8 +98,8 @@ string ConfigCategoryDescription::toJSON() const { ostringstream convert; - convert << "{\"key\": \"" << m_name << "\", "; - convert << "\"description\" : \"" << m_description << "\"}"; + convert << "{\"key\": \"" << JSONescape(m_name) << "\", "; + convert << "\"description\" : \"" << JSONescape(m_description) << "\"}"; return convert.str(); } @@ -756,8 +756,8 @@ string ConfigCategory::toJSON(const bool full) const { ostringstream convert; - convert << "{ \"key\" : \"" << m_name << "\", "; - convert << "\"description\" : \"" << m_description << "\", \"value\" : "; + convert << "{ \"key\" : \"" << JSONescape(m_name) << "\", "; + convert << "\"description\" : \"" << JSONescape(m_description) << "\", \"value\" : "; // Add items convert << ConfigCategory::itemsToJSON(full); convert << " }"; @@ -924,7 +924,7 @@ ConfigCategory::CategoryItem::CategoryItem(const string& name, // use current string strbuf.GetString() : // Unescape the string - this->unescape(strbuf.GetString()); + JSONunescape(strbuf.GetString()); // If it's not a real eject, check the string buffer it is: if (!item["value"].IsObject()) @@ -1025,7 +1025,7 @@ ConfigCategory::CategoryItem::CategoryItem(const string& name, // use current string strbuf.GetString() : // Unescape the string - this->unescape(strbuf.GetString()); + JSONunescape(strbuf.GetString()); // If it's not a real eject, check the string buffer it is: if (!item["default"].IsObject()) @@ -1185,8 +1185,8 @@ string ConfigCategory::CategoryItem::toJSON(const bool full) const { ostringstream convert; - convert << "\"" << m_name << "\" : { "; - convert << "\"description\" : \"" << m_description << "\", "; + convert << "\"" << JSONescape(m_name) << "\" : { "; + convert << "\"description\" : \"" << JSONescape(m_description) << "\", "; if (! 
m_displayName.empty()) { convert << "\"displayName\" : \"" << m_displayName << "\", "; @@ -1208,8 +1208,8 @@ ostringstream convert; m_itemType == BoolItem || m_itemType == EnumerationItem) { - convert << "\"value\" : \"" << m_value << "\", "; - convert << "\"default\" : \"" << m_default << "\""; + convert << "\"value\" : \"" << JSONescape(m_value) << "\", "; + convert << "\"default\" : \"" << JSONescape(m_default) << "\""; } else if (m_itemType == JsonItem || m_itemType == NumberItem || @@ -1270,8 +1270,8 @@ string ConfigCategory::CategoryItem::defaultToJSON() const { ostringstream convert; - convert << "\"" << m_name << "\" : { "; - convert << "\"description\" : \"" << m_description << "\", "; + convert << "\"" << JSONescape(m_name) << "\" : { "; + convert << "\"description\" : \"" << JSONescape(m_description) << "\", "; convert << "\"type\" : \"" << m_type << "\""; if (!m_order.empty()) @@ -1319,7 +1319,7 @@ ostringstream convert; m_itemType == EnumerationItem || m_itemType == BoolItem) { - convert << ", \"default\" : \"" << m_default << "\" }"; + convert << ", \"default\" : \"" << JSONescape(m_default) << "\" }"; } /** * NOTE: @@ -1337,7 +1337,7 @@ ostringstream convert; m_itemType == DoubleItem || m_itemType == ScriptItem) { - convert << ", \"default\" : \"" << escape(m_default) << "\" }"; + convert << ", \"default\" : \"" << JSONescape(m_default) << "\" }"; } return convert.str(); } @@ -1366,8 +1366,8 @@ string DefaultConfigCategory::toJSON() const ostringstream convert; convert << "{ "; - convert << "\"key\" : \"" << m_name << "\", "; - convert << "\"description\" : \"" << m_description << "\", \"value\" : "; + convert << "\"key\" : \"" << JSONescape(m_name) << "\", "; + convert << "\"description\" : \"" << JSONescape(m_description) << "\", \"value\" : "; // Add items convert << DefaultConfigCategory::itemsToJSON(); convert << " }"; @@ -1396,20 +1396,6 @@ ostringstream convert; return convert.str(); } -std::string ConfigCategory::CategoryItem::escape(const 
std::string& subject) const -{ -size_t pos = 0; -string replace("\\\""); -string escaped = subject; - - while ((pos = escaped.find("\"", pos)) != std::string::npos) - { - escaped.replace(pos, 1, replace); - pos += replace.length(); - } - return escaped; -} - /** * Return JSON string of a category item * @param itemName The given item within current category @@ -1433,39 +1419,6 @@ string ConfigCategory::itemToJSON(const string& itemName) const return convert.str(); } -/** - * Return unescaped version of a JSON string - * - * Routine removes \" inside the string - * and leading and trailing " - * - * @param subject Input string - * @return Unescaped string - */ -std::string ConfigCategory::CategoryItem::unescape(const std::string& subject) const -{ - size_t pos = 0; - string replace(""); - string json = subject; - - // Replace '\"' with '"' - while ((pos = json.find("\\\"", pos)) != std::string::npos) - { - json.replace(pos, 1, ""); - } - // Remove leading '"' - if (json[0] == '\"') - { - json.erase(0, 1); - } - // Remove trainling '"' - if (json[json.length() - 1] == '\"') - { - json.erase(json.length() - 1, 1); - } - return json; -} - /** * Configuration Category constructor * diff --git a/C/common/include/config_category.h b/C/common/include/config_category.h index 7ba9031534..b1d1c2b793 100644 --- a/C/common/include/config_category.h +++ b/C/common/include/config_category.h @@ -14,6 +14,7 @@ #include #include #include +#include class ConfigCategoryDescription { public: @@ -119,8 +120,6 @@ class ConfigCategory { std::string toJSON(const bool full=false) const; // Return only "default" items std::string defaultToJSON() const; - std::string escape(const std::string& str) const; - std::string unescape(const std::string& subject) const; public: std::string m_name; diff --git a/C/common/include/json_utils.h b/C/common/include/json_utils.h index a93b0cc080..17156334a7 100644 --- a/C/common/include/json_utils.h +++ b/C/common/include/json_utils.h @@ -14,4 +14,7 @@ bool 
JSONStringToVectorString(std::vector& vectorString, const std::string& JSONString, const std::string& Key); +std::string JSONescape(const std::string& subject); +std::string JSONunescape(const std::string& subject); + #endif diff --git a/C/common/include/management_client.h b/C/common/include/management_client.h index c7f057fc30..8a28934434 100644 --- a/C/common/include/management_client.h +++ b/C/common/include/management_client.h @@ -17,6 +17,7 @@ #include #include #include +#include #include using HttpClient = SimpleWeb::Client; @@ -71,8 +72,8 @@ class ManagementClient { // Build the JSON payload std::ostringstream payload; - payload << "{ \"key\" : \"" << t.getName(); - payload << "\", \"description\" : \"" << t.getDescription(); + payload << "{ \"key\" : \"" << JSONescape(t.getName()); + payload << "\", \"description\" : \"" << JSONescape(t.getDescription()); payload << "\", \"value\" : " << t.itemsToJSON(); /** diff --git a/C/common/json_utils.cpp b/C/common/json_utils.cpp index 397f61297e..f56476c8cc 100644 --- a/C/common/json_utils.cpp +++ b/C/common/json_utils.cpp @@ -69,3 +69,50 @@ bool JSONStringToVectorString(std::vector& vectorString, return success; } + +string JSONescape(const std::string& subject) +{ +size_t pos = 0; +string replace("\\\""); +string escaped = subject; + + while ((pos = escaped.find("\"", pos)) != std::string::npos) + { + escaped.replace(pos, 1, replace); + pos += replace.length(); + } + return escaped; +} +/** + * Return unescaped version of a JSON string + * + * Routine removes \" inside the string + * and leading and trailing " + * + * @param subject Input string + * @return Unescaped string + */ +std::string JSONunescape(const std::string& subject) +{ + size_t pos = 0; + string replace(""); + string json = subject; + + // Replace '\"' with '"' + while ((pos = json.find("\\\"", pos)) != std::string::npos) + { + json.replace(pos, 1, ""); + } + // Remove leading '"' + if (json[0] == '\"') + { + json.erase(0, 1); + } + // Remove 
trainling '"' + if (json[json.length() - 1] == '\"') + { + json.erase(json.length() - 1, 1); + } + return json; +} + diff --git a/C/common/management_client.cpp b/C/common/management_client.cpp index 305053445d..bce48cf18f 100644 --- a/C/common/management_client.cpp +++ b/C/common/management_client.cpp @@ -238,7 +238,7 @@ ostringstream convert; return true; } try { - convert << "{ \"category\" : \"" << category << "\", "; + convert << "{ \"category\" : \"" << JSONescape(category) << "\", "; convert << "\"service\" : \"" << *m_uuid << "\" }"; auto res = this->getHttpClient()->request("POST", "/foglamp/interest", convert.str()); Document doc; @@ -483,7 +483,7 @@ string ManagementClient::addChildCategories(const string& parentCategory, for (auto it = children.begin(); it != children.end(); ++it) { - payload += "\"" + (*it)+ "\""; + payload += "\"" + JSONescape((*it)) + "\""; if ((it + 1) != children.end()) { payload += ", "; @@ -595,7 +595,7 @@ bool ManagementClient::addAssetTrackingTuple(const std::string& service, ostringstream convert; try { - convert << "{ \"service\" : \"" << service << "\", "; + convert << "{ \"service\" : \"" << JSONescape(service) << "\", "; convert << " \"plugin\" : \"" << plugin << "\", "; convert << " \"asset\" : \"" << asset << "\", "; convert << " \"event\" : \"" << event << "\" }"; diff --git a/tests/unit/C/common/test_config_category.cpp b/tests/unit/C/common/test_config_category.cpp index dd97dcae75..7bdddfc368 100644 --- a/tests/unit/C/common/test_config_category.cpp +++ b/tests/unit/C/common/test_config_category.cpp @@ -11,6 +11,10 @@ const char *categories = "{\"categories\": [" "{\"key\": \"cat1\", \"description\":\"First category\"}," "{\"key\": \"cat2\", \"description\":\"Second\"}]}"; +const char *categories_quoted = "{\"categories\": [" + "{\"key\": \"cat \\\"1\\\"\", \"description\":\"First \\\"category\\\"\"}," + "{\"key\": \"cat \\\"2\\\"\", \"description\":\"Second\"}]}"; + const char *myCategory = "{\"description\": {" 
"\"value\": \"The FogLAMP administrative API\"," "\"type\": \"string\"," @@ -27,6 +31,22 @@ const char *myCategory = "{\"description\": {" "\"default\": {\"first\" : \"FogLAMP\", \"second\" : \"json\" }," "\"description\": \"A JSON configuration parameter\"}}"; +const char *myCategory_quoted = "{\"description\": {" + "\"value\": \"The \\\"FogLAMP\\\" administrative API\"," + "\"type\": \"string\"," + "\"default\": \"The \\\"FogLAMP\\\" administrative API\"," + "\"description\": \"The description of this \\\"FogLAMP\\\" service\"}," + "\"name\": {" + "\"value\": \"\\\"FogLAMP\\\"\"," + "\"type\": \"string\"," + "\"default\": \"\\\"FogLAMP\\\"\"," + "\"description\": \"The name of this \\\"FogLAMP\\\" service\"}," + "\"complex\": {" \ + "\"value\": { \"first\" : \"FogLAMP\", \"second\" : \"json\" }," + "\"type\": \"json\"," + "\"default\": {\"first\" : \"FogLAMP\", \"second\" : \"json\" }," + "\"description\": \"A JSON configuration parameter\"}}"; + const char *myCategoryDisplayName = "{\"description\": {" "\"value\": \"The FogLAMP administrative API\"," "\"type\": \"string\"," @@ -204,6 +224,24 @@ const char *json = "{ \"key\" : \"test\", \"description\" : \"Test description\" "\"value\" : {\"first\":\"FogLAMP\",\"second\":\"json\"}, " "\"default\" : {\"first\":\"FogLAMP\",\"second\":\"json\"} }} }"; +const char *json_quoted = "{ \"key\" : \"test \\\"a\\\"\", \"description\" : \"Test \\\"description\\\"\", " + "\"value\" : {" + "\"description\" : { " + "\"description\" : \"The description of this \\\"FogLAMP\\\" service\", " + "\"type\" : \"string\", " + "\"value\" : \"The \\\"FogLAMP\\\" administrative API\", " + "\"default\" : \"The \\\"FogLAMP\\\" administrative API\" }, " + "\"name\" : { " + "\"description\" : \"The name of this \\\"FogLAMP\\\" service\", " + "\"type\" : \"string\", " + "\"value\" : \"\\\"FogLAMP\\\"\", " + "\"default\" : \"\\\"FogLAMP\\\"\" }, " + "\"complex\" : { " + "\"description\" : \"A JSON configuration parameter\", " + "\"type\" : 
\"json\", " + "\"value\" : {\"first\":\"FogLAMP\",\"second\":\"json\"}, " + "\"default\" : {\"first\":\"FogLAMP\",\"second\":\"json\"} }} }"; + const char *json_type_JSON = "{ \"key\" : \"test\", \"description\" : \"Test description\", " "\"value\" : {\"filter\" : { \"description\" : \"filter\", \"type\" : \"JSON\", " "\"value\" : {}, \"default\" : {\"pipeline\":[\"scale\",\"exceptional\"]} }} }"; @@ -216,6 +254,8 @@ const char *json_boolean_number = "{ \"key\" : \"test\", \"description\" : \"Tes "\"value\" : \"true\", \"default\" : \"false\" }} }"; const char *allCategories = "[{\"key\": \"cat1\", \"description\" : \"desc1\"}, {\"key\": \"cat2\", \"description\" : \"desc2\"}]"; +const char *allCategories_quoted = "[{\"key\": \"cat\\\"1\\\"\", \"description\" : \"desc\\\"1\\\"\"}, " + "{\"key\": \"cat\\\"2\\\"\", \"description\" : \"desc\\\"2\\\"\"}]"; const char *myCategoryEnumFull = "{\"description\": {" "\"value\": \"The FogLAMP administrative API\"," @@ -275,6 +315,12 @@ TEST(CategoriesTest, Count) ASSERT_EQ(2, confCategories.length()); } +TEST(CategoriesTestQuoted, CountQuoted) +{ + ConfigCategories confCategories(categories_quoted); + ASSERT_EQ(2, confCategories.length()); +} + TEST(CategoriesTest, Index) { ConfigCategories confCategories(categories); @@ -305,6 +351,18 @@ TEST(CategoriesTest, toJSON) ASSERT_EQ(0, result.compare(allCategories)); } +TEST(CategoriesTestQuoted, toJSONQuoted) +{ + ConfigCategories categories; + ConfigCategoryDescription *one = new ConfigCategoryDescription(string("cat\"1\""), string("desc\"1\"")); + ConfigCategoryDescription *two = new ConfigCategoryDescription(string("cat\"2\""), string("desc\"2\"")); + categories.addCategoryDescription(one); + categories.addCategoryDescription(two); + string result = categories.toJSON(); + ASSERT_EQ(2, categories.length()); + ASSERT_EQ(0, result.compare(allCategories_quoted)); +} + TEST(CategoriesTest, toJSONParameters) { // Arrange @@ -381,6 +439,13 @@ TEST(CategoryTest, toJSON) ASSERT_EQ(0, 
confCategory.toJSON().compare(json)); } +TEST(CategoryTestQuoted, toJSONQuoted) +{ + ConfigCategory confCategory("test \"a\"", myCategory_quoted); + confCategory.setDescription("Test \"description\""); + ASSERT_EQ(0, confCategory.toJSON().compare(json_quoted)); +} + TEST(CategoryTest, bool_and_number_ok) { ConfigCategory confCategory("test", myCategory_number_and_boolean_items); diff --git a/tests/unit/C/common/test_default_config_category.cpp b/tests/unit/C/common/test_default_config_category.cpp index dc2024c084..1e660fdbda 100644 --- a/tests/unit/C/common/test_default_config_category.cpp +++ b/tests/unit/C/common/test_default_config_category.cpp @@ -11,6 +11,10 @@ const char *default_categories = "{\"categories\": [" "{\"key\": \"cat1\", \"description\":\"First category\"}," "{\"key\": \"cat2\", \"description\":\"Second\"}]}"; +const char *default_categories_quoted = "{\"categories\": [" + "{\"key\": \"cat\\\"1\\\"\", \"description\":\"The \\\"First\\\" category\"}," + "{\"key\": \"cat\\\"2\\\"\", \"description\":\"The \\\"Second\\\" category\"}]}"; + const char *default_myCategory = "{\"description\": {" "\"type\": \"string\"," "\"value\": \"The FogLAMP administrative API\"," @@ -27,6 +31,21 @@ const char *default_myCategory = "{\"description\": {" "\"default\": {\"first\" : \"FogLAMP\", \"second\" : \"json\" }," "\"description\": \"A JSON configuration parameter\"}}"; +const char *default_myCategory_quoted = "{\"description\": {" + "\"type\": \"string\"," + "\"value\": \"The \\\"FogLAMP\\\" administrative API\"," + "\"default\": \"The \\\"FogLAMP\\\" administrative API\"," + "\"description\": \"The description of this \\\"FogLAMP\\\" service\"}," + "\"name\": {" + "\"type\": \"string\"," + "\"value\": \"\\\"FogLAMP\\\"\"," + "\"default\": \"\\\"FogLAMP\\\"\"," + "\"description\": \"The name of this \\\"FogLAMP\\\" service\"}," + "\"complex\": {" \ + "\"type\": \"json\"," + "\"value\": {\"first\" : \"FogLAMP\", \"second\" : \"json\" }," + "\"default\": 
{\"first\" : \"FogLAMP\", \"second\" : \"json\" }," + "\"description\": \"A JSON configuration parameter\"}}"; /** * The JSON output from DefaulltCategory::toJSON has "default" values olny */ @@ -45,6 +64,21 @@ const char *default_json = "{ \"key\" : \"test\", \"description\" : \"Test descr "\"type\" : \"json\", " "\"default\" : \"{\\\"first\\\":\\\"FogLAMP\\\",\\\"second\\\":\\\"json\\\"}\" }} }"; +const char *default_json_quoted = "{ \"key\" : \"test \\\"a\\\"\", \"description\" : \"Test \\\"description\\\"\", " + "\"value\" : {" + "\"description\" : { " + "\"description\" : \"The description of this \\\"FogLAMP\\\" service\", " + "\"type\" : \"string\", " + "\"default\" : \"The \\\"FogLAMP\\\" administrative API\" }, " + "\"name\" : { " + "\"description\" : \"The name of this \\\"FogLAMP\\\" service\", " + "\"type\" : \"string\", " + "\"default\" : \"\\\"FogLAMP\\\"\" }, " + "\"complex\" : { " + "\"description\" : \"A JSON configuration parameter\", " + "\"type\" : \"json\", " + "\"default\" : \"{\\\"first\\\":\\\"FogLAMP\\\",\\\"second\\\":\\\"json\\\"}\" }} }"; + const char *default_myCategory_number_and_boolean_items = "{\"factor\": {" "\"value\": \"101\"," "\"type\": \"integer\"," @@ -113,6 +147,12 @@ TEST(DefaultCategoriesTest, Count) ASSERT_EQ(2, confCategories.length()); } +TEST(DefaultCategoriesTestQuoted, CountQuoted) +{ + ConfigCategories confCategories(default_categories_quoted); + ASSERT_EQ(2, confCategories.length()); +} + TEST(DefaultCategoriesTest, Index) { ConfigCategories confCategories(default_categories); @@ -127,6 +167,12 @@ TEST(DefaultCategoryTest, Construct) ASSERT_EQ(3, confCategory.getCount()); } +TEST(DefaultCategoryTestQuoted, ConstructQuoted) +{ + DefaultConfigCategory confCategory("test", default_myCategory_quoted); + ASSERT_EQ(3, confCategory.getCount()); +} + TEST(DefaultCategoryTest, ExistsTest) { DefaultConfigCategory confCategory("test", default_myCategory); @@ -134,12 +180,25 @@ TEST(DefaultCategoryTest, ExistsTest) 
ASSERT_EQ(false, confCategory.itemExists("non-existance")); } +TEST(DefaultCategoryTestQuoted, ExistsTestQuoted) +{ + DefaultConfigCategory confCategory("test", default_myCategory_quoted); + ASSERT_EQ(true, confCategory.itemExists("name")); + ASSERT_EQ(false, confCategory.itemExists("non-existance")); +} + TEST(DefaultCategoryTest, getValue) { DefaultConfigCategory confCategory("test", default_myCategory); ASSERT_EQ(0, confCategory.getValue("name").compare("FogLAMP")); } +TEST(DefaultCategoryTestQuoted, getValueQuoted) +{ + DefaultConfigCategory confCategory("test", default_myCategory_quoted); + ASSERT_EQ(0, confCategory.getValue("name").compare("\"FogLAMP\"")); +} + TEST(DefaultCategoryTest, getType) { DefaultConfigCategory confCategory("test", default_myCategory); @@ -152,15 +211,27 @@ TEST(DefaultCategoryTest, getDefault) ASSERT_EQ(0, confCategory.getDefault("name").compare("FogLAMP")); } +TEST(DefaultCategoryTestQuoted, getDefaultQuoted) +{ + DefaultConfigCategory confCategory("test", default_myCategory_quoted); + ASSERT_EQ(0, confCategory.getDefault("name").compare("\"FogLAMP\"")); +} + TEST(DefaultCategoryTest, getDescription) { DefaultConfigCategory confCategory("test", default_myCategory); ASSERT_EQ(0, confCategory.getDescription("name").compare("The name of this FogLAMP service")); } -TEST(DefaultCategoryTest, isString) +TEST(DefaultCategoryTestQuoted, getDescriptionQuoted) { - DefaultConfigCategory confCategory("test", default_myCategory); + DefaultConfigCategory confCategory("test", default_myCategory_quoted); + ASSERT_EQ(0, confCategory.getDescription("name").compare("The name of this \"FogLAMP\" service")); +} + +TEST(DefaultCategoryTestQuoted, isStringQuoted) +{ + DefaultConfigCategory confCategory("test", default_myCategory_quoted); ASSERT_EQ(true, confCategory.isString("name")); ASSERT_EQ(false, confCategory.isString("complex")); } @@ -180,6 +251,14 @@ TEST(DefaultCategoryTest, toJSON) ASSERT_EQ(0, confCategory.toJSON().compare(default_json)); } 
+TEST(DefaultCategoryTestQuoted, toJSONQuoted) +{ + DefaultConfigCategory confCategory("test \"a\"", default_myCategory_quoted); + confCategory.setDescription("Test \"description\""); + // Only "default" value in the output + ASSERT_EQ(0, confCategory.toJSON().compare(default_json_quoted)); +} + TEST(DefaultCategoryTest, default_bool_and_number_ok) { DefaultConfigCategory confCategory("test", From cff018af0ce12417f30902bdcbe3cf1bf117a786 Mon Sep 17 00:00:00 2001 From: Bill Hunt Date: Tue, 30 Apr 2019 16:56:27 -0700 Subject: [PATCH 085/161] Correct Copyright holder --- LICENSE | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/LICENSE b/LICENSE index 2ee199310e..6c795f7982 100644 --- a/LICENSE +++ b/LICENSE @@ -186,7 +186,7 @@ same "printed page" as the copyright notice for easier identification within third-party archives. - Copyright 2017 OSIsoft, LLC + Copyright 2019 Dianomic Systems Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
From ee24cd896b5d958375a718b9c02a729e332940e0 Mon Sep 17 00:00:00 2001 From: Praveen Garg Date: Wed, 1 May 2019 17:47:29 +0530 Subject: [PATCH 086/161] FOGL-2694 Add coverage for C async plugin to service api system test (#1542) * Async C plugin * Refactored tests and completed async C plugin check --- tests/system/python/api/test_service.py | 109 +++++++++++++++--------- tests/system/python/data/vibration.csv | 1 + 2 files changed, 71 insertions(+), 39 deletions(-) create mode 100644 tests/system/python/data/vibration.csv diff --git a/tests/system/python/api/test_service.py b/tests/system/python/api/test_service.py index e6508f3ef3..ad88b64ef8 100644 --- a/tests/system/python/api/test_service.py +++ b/tests/system/python/api/test_service.py @@ -6,7 +6,7 @@ """ Test add service using poll and async plugins for both python & C version REST API """ -import subprocess +import os import http.client import json import time @@ -14,6 +14,7 @@ from collections import Counter from urllib.parse import quote import pytest + import plugin_and_service __author__ = "Ashish Jabble" @@ -26,7 +27,11 @@ SVC_NAME_2 = 'HTTP-SOUTH' SVC_NAME_3 = '1 Bench' SVC_NAME_4 = 'Rand 1 #3' -SVC_NAME_5 = 'randomwalk' + +SVC_NAME_5 = SVC_NAME_C_ASYNC = "Async 1" +SVC_NAME_6 = 'randomwalk' + + PLUGIN_FILTER = 'metadata' FILTER_NAME = 'meta' @@ -37,16 +42,15 @@ def install_plugins(): plugin_and_service.install('south', plugin='http') plugin_and_service.install('south', plugin='benchmark', plugin_lang='C') plugin_and_service.install('south', plugin='random', plugin_lang='C') - # TODO: FOGL-2662 C-async plugin - Once done add 1 more plugin - # plugin_and_service.install('south', plugin='?', plugin_lang='C') + plugin_and_service.install('south', plugin='csv-async', plugin_lang='C') def get_service(foglamp_url, path): conn = http.client.HTTPConnection(foglamp_url) conn.request("GET", path) - r = conn.getresponse() - assert 200 == r.status - r = r.read().decode() + res = conn.getresponse() + r = 
res.read().decode() + assert 200 == res.status jdoc = json.loads(r) return jdoc @@ -54,8 +58,7 @@ def get_service(foglamp_url, path): class TestService: def test_cleanup_and_setup(self, reset_and_start_foglamp, install_plugins): - # TODO: FOGL-2669 Remove this workaround - # Use better setup & teardown methods + # TODO: FOGL-2669 Better setup & teardown fixtures pass def test_default_service(self, foglamp_url): @@ -85,13 +88,17 @@ def test_default_service(self, foglamp_url): assert 'FogLAMP Core' == core_svc['name'] assert 'http' == core_svc['protocol'] + C_ASYNC_CONFIG = {"file": {"value": os.getenv("FOGLAMP_ROOT", "") + '/tests/system/python/data/vibration.csv'}} + @pytest.mark.parametrize("plugin, svc_name, display_svc_name, config, enabled, svc_count", [ ("randomwalk", SVC_NAME_1, SVC_NAME_1, None, True, 3), ("http_south", SVC_NAME_2, SVC_NAME_1, None, False, 3), ("Benchmark", SVC_NAME_3, SVC_NAME_3, None, True, 4), - ("Random", SVC_NAME_4, SVC_NAME_3, None, False, 4) + ("Random", SVC_NAME_4, SVC_NAME_3, None, False, 4), + ("CSV-Async", SVC_NAME_C_ASYNC, SVC_NAME_C_ASYNC, C_ASYNC_CONFIG, True, 5) ]) def test_add_service(self, foglamp_url, wait_time, plugin, svc_name, display_svc_name, config, enabled, svc_count): + jdoc = plugin_and_service.add_south_service(plugin, foglamp_url, svc_name, config, enabled) assert svc_name == jdoc['name'] assert UUID(jdoc['id'], version=4) @@ -100,6 +107,7 @@ def test_add_service(self, foglamp_url, wait_time, plugin, svc_name, display_svc jdoc = get_service(foglamp_url, '/foglamp/service') assert len(jdoc), "No data found" assert svc_count == len(jdoc['services']) + southbound_svc = jdoc['services'][svc_count - 1] assert isinstance(southbound_svc['management_port'], int) assert southbound_svc['service_port'] is None @@ -111,7 +119,7 @@ def test_add_service(self, foglamp_url, wait_time, plugin, svc_name, display_svc def test_add_service_with_config(self, foglamp_url, wait_time): # add service with config param - data = {"name": 
SVC_NAME_5, + data = {"name": SVC_NAME_6, "type": "South", "plugin": 'randomwalk', "config": {"maxValue": {"value": "20"}, "assetName": {"value": "Random"}}, @@ -123,11 +131,11 @@ def test_add_service_with_config(self, foglamp_url, wait_time): assert 200 == r.status r = r.read().decode() jdoc = json.loads(r) - assert SVC_NAME_5 == jdoc['name'] + assert SVC_NAME_6 == jdoc['name'] assert UUID(jdoc['id'], version=4) # verify config is correctly saved - conn.request("GET", '/foglamp/category/{}'.format(SVC_NAME_5)) + conn.request("GET", '/foglamp/category/{}'.format(SVC_NAME_6)) r = conn.getresponse() assert 200 == r.status r = r.read().decode() @@ -139,44 +147,67 @@ def test_add_service_with_config(self, foglamp_url, wait_time): time.sleep(wait_time) jdoc = get_service(foglamp_url, '/foglamp/service') assert len(jdoc), "No data found" - assert 5 == len(jdoc['services']) - assert SVC_NAME_5 == jdoc['services'][4]['name'] + assert 6 == len(jdoc['services']) + assert SVC_NAME_6 == jdoc['services'][5]['name'] @pytest.mark.parametrize("svc_name, status, svc_count", [ ("FogLAMP Storage", 404, 2), ("FogLAMP Core", 404, 2), - (SVC_NAME_1, 200, 4), - (SVC_NAME_2, 200, 4), - (SVC_NAME_3, 200, 3) + (SVC_NAME_1, 200, 5), + (SVC_NAME_2, 200, 5), + (SVC_NAME_3, 200, 4) ]) def test_delete_service(self, svc_name, status, svc_count, foglamp_url, wait_time): conn = http.client.HTTPConnection(foglamp_url) conn.request("DELETE", '/foglamp/service/{}'.format(quote(svc_name))) - r = conn.getresponse() - assert status == r.status - # FIXME: FOGL-2668 + res = conn.getresponse() + assert status == res.status + if status == 404: - assert '{} service does not exist.'.format(svc_name) == r.reason + # FIXME: FOGL-2668 expected 403 for Core and Storage + assert '{} service does not exist.'.format(svc_name) == res.reason else: - r = r.read().decode() + r = res.read().decode() jdoc = json.loads(r) assert 'Service {} deleted successfully.'.format(svc_name) == jdoc['result'] time.sleep(wait_time) + 
jdoc = get_service(foglamp_url, '/foglamp/service') assert len(jdoc), "No data found" assert svc_count == len(jdoc['services']) - services = [name['name'] for name in jdoc['services']] + services = [s['name'] for s in jdoc['services']] assert svc_name not in services + # no category (including its children) exists anymore for serviceName + conn = http.client.HTTPConnection(foglamp_url) + conn.request("GET", '/foglamp/category/{}'.format(quote(svc_name))) + res = conn.getresponse() + r = res.read().decode() + assert 404 == res.status + + conn.request("GET", '/foglamp/category/{}/children'.format(quote(svc_name))) + res = conn.getresponse() + r = res.read().decode() + assert 404 == res.status + + # no schedule exists anymore for serviceName + conn.request("GET", '/foglamp/schedule') + res = conn.getresponse() + r = res.read().decode() + jdoc = json.loads(r) + assert svc_name not in [s['name'] for s in jdoc["schedules"]] + + # TODO: verify FOGL-2718 no category interest exists anymore for serviceId in InterestRegistry + def test_service_with_enable_schedule(self, foglamp_url, wait_time, enable_schedule): enable_schedule(foglamp_url, SVC_NAME_4) time.sleep(wait_time) jdoc = get_service(foglamp_url, '/foglamp/service') assert len(jdoc), "No data found" - assert 4 == len(jdoc['services']) - assert SVC_NAME_4 == jdoc['services'][3]['name'] + assert 5 == len(jdoc['services']) + assert SVC_NAME_4 in [s['name'] for s in jdoc['services']] def test_service_with_disable_schedule(self, foglamp_url, wait_time, disable_schedule): disable_schedule(foglamp_url, SVC_NAME_4) @@ -184,9 +215,8 @@ def test_service_with_disable_schedule(self, foglamp_url, wait_time, disable_sch time.sleep(wait_time) jdoc = get_service(foglamp_url, '/foglamp/service') assert len(jdoc), "No data found" - assert 4 == len(jdoc['services']) - assert SVC_NAME_4 == jdoc['services'][3]['name'] - assert 'shutdown' == jdoc['services'][3]['status'] + assert 5 == len(jdoc['services']) + assert (SVC_NAME_4, 
'shutdown') in [(s['name'], s['status']) for s in jdoc['services']] def test_service_on_restart(self, foglamp_url, wait_time): conn = http.client.HTTPConnection(foglamp_url) @@ -201,29 +231,29 @@ def test_service_on_restart(self, foglamp_url, wait_time): time.sleep(wait_time * 3) jdoc = get_service(foglamp_url, '/foglamp/service') assert len(jdoc), "No data found" - assert 3 == len(jdoc['services']) + assert 4 == len(jdoc['services']) services = [name['name'] for name in jdoc['services']] assert SVC_NAME_4 not in services def test_delete_service_with_filters(self, foglamp_url, wait_time, add_filter, filter_branch, enable_schedule): # add filter - add_filter(PLUGIN_FILTER, filter_branch, FILTER_NAME, {"enable": "true"}, foglamp_url, SVC_NAME_5) + add_filter(PLUGIN_FILTER, filter_branch, FILTER_NAME, {"enable": "true"}, foglamp_url, SVC_NAME_6) # delete service conn = http.client.HTTPConnection(foglamp_url) - conn.request("DELETE", '/foglamp/service/{}'.format(SVC_NAME_5)) + conn.request("DELETE", '/foglamp/service/{}'.format(SVC_NAME_6)) r = conn.getresponse() r = r.read().decode() jdoc = json.loads(r) - assert 'Service {} deleted successfully.'.format(SVC_NAME_5) == jdoc['result'] + assert 'Service {} deleted successfully.'.format(SVC_NAME_6) == jdoc['result'] # verify service does not exist time.sleep(wait_time) jdoc = get_service(foglamp_url, '/foglamp/service') assert len(jdoc), "No data found" - assert 2 == len(jdoc['services']) + assert 3 == len(jdoc['services']) services = [name['name'] for name in jdoc['services']] - assert SVC_NAME_5 not in services + assert SVC_NAME_6 not in services # filter linked with SVC_NAME_4 data = {"pipeline": [FILTER_NAME]} @@ -241,8 +271,9 @@ def test_delete_service_with_filters(self, foglamp_url, wait_time, add_filter, f time.sleep(wait_time) jdoc = get_service(foglamp_url, '/foglamp/service') assert len(jdoc), "No data found" - assert 3 == len(jdoc['services']) - assert SVC_NAME_4 == jdoc['services'][2]['name'] + assert 4 == 
len(jdoc['services']) + services = [s['name'] for s in jdoc['services']] + assert SVC_NAME_4 in services # delete SVC_NAME_4 conn.request("DELETE", '/foglamp/service/{}'.format(quote(SVC_NAME_4))) @@ -255,8 +286,8 @@ def test_delete_service_with_filters(self, foglamp_url, wait_time, add_filter, f time.sleep(wait_time) jdoc = get_service(foglamp_url, '/foglamp/service') assert len(jdoc), "No data found" - assert 2 == len(jdoc['services']) - services = [name['name'] for name in jdoc['services']] + assert 3 == len(jdoc['services']) + services = [s['name'] for s in jdoc['services']] assert SVC_NAME_4 not in services def test_notification_service(self): diff --git a/tests/system/python/data/vibration.csv b/tests/system/python/data/vibration.csv new file mode 100644 index 0000000000..828afe12e0 --- /dev/null +++ b/tests/system/python/data/vibration.csv @@ -0,0 +1 @@ +2,3,4,5,6 From 0f9d61b188cb626d3fdf9a4b0b38ee988287919d Mon Sep 17 00:00:00 2001 From: Amandeep Singh Arora Date: Wed, 1 May 2019 14:31:03 +0000 Subject: [PATCH 087/161] Override libsqlite v3.7.17 present in /usr/lib64 with the compiled v3.28 --- C/plugins/storage/sqlite/CMakeLists.txt | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/C/plugins/storage/sqlite/CMakeLists.txt b/C/plugins/storage/sqlite/CMakeLists.txt index b2491ce3a0..a3cf7e99f6 100644 --- a/C/plugins/storage/sqlite/CMakeLists.txt +++ b/C/plugins/storage/sqlite/CMakeLists.txt @@ -28,7 +28,8 @@ set(CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH} "${CMAKE_CURRENT_SOURCE_DIR}") find_package(sqlite3) # Link with SQLite3 library -target_link_libraries(${PROJECT_NAME} -lsqlite3) +#target_link_libraries(${PROJECT_NAME} -lsqlite3) +target_link_libraries(${PROJECT_NAME} /usr/local/lib/libsqlite3.so) # Install library install(TARGETS ${PROJECT_NAME} DESTINATION foglamp/plugins/storage/${PROJECT_NAME}) From 34d8a752d37e968f187356ce7db16047b9f6f89e Mon Sep 17 00:00:00 2001 From: Amandeep Singh Arora Date: Wed, 1 May 2019 17:14:51 +0000 Subject: 
[PATCH 088/161] Adding conditional check for RHEL/CentOS system --- C/plugins/storage/sqlite/CMakeLists.txt | 10 ++++++++-- C/plugins/storage/sqlite/common/is_RHEL.sh | 4 ++++ 2 files changed, 12 insertions(+), 2 deletions(-) create mode 100755 C/plugins/storage/sqlite/common/is_RHEL.sh diff --git a/C/plugins/storage/sqlite/CMakeLists.txt b/C/plugins/storage/sqlite/CMakeLists.txt index a3cf7e99f6..b44437af56 100644 --- a/C/plugins/storage/sqlite/CMakeLists.txt +++ b/C/plugins/storage/sqlite/CMakeLists.txt @@ -28,8 +28,14 @@ set(CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH} "${CMAKE_CURRENT_SOURCE_DIR}") find_package(sqlite3) # Link with SQLite3 library -#target_link_libraries(${PROJECT_NAME} -lsqlite3) -target_link_libraries(${PROJECT_NAME} /usr/local/lib/libsqlite3.so) +execute_process(COMMAND bash "-c" "${CMAKE_CURRENT_SOURCE_DIR}/common/is_RHEL.sh" RESULT_VARIABLE ret) +if(ret EQUAL "0") + message(" Red Hat or CentOS system ") + target_link_libraries(${PROJECT_NAME} /usr/local/lib/libsqlite3.so) +else() + message(" Not a Red Hat or CentOS system ") + target_link_libraries(${PROJECT_NAME} -lsqlite3) +endif() # Install library install(TARGETS ${PROJECT_NAME} DESTINATION foglamp/plugins/storage/${PROJECT_NAME}) diff --git a/C/plugins/storage/sqlite/common/is_RHEL.sh b/C/plugins/storage/sqlite/common/is_RHEL.sh new file mode 100755 index 0000000000..fbb8c48e99 --- /dev/null +++ b/C/plugins/storage/sqlite/common/is_RHEL.sh @@ -0,0 +1,4 @@ +#!/bin/sh + +op=$(lsb_release -ds 2>/dev/null || cat /etc/*release 2>/dev/null | head -n1 || uname -om) +echo $op | egrep -q '(Red Hat|CentOS)' From 1c356c1bf73ea78a22f25e90cffe4ad5f25375cb Mon Sep 17 00:00:00 2001 From: Amandeep Singh Arora Date: Thu, 2 May 2019 07:22:32 +0000 Subject: [PATCH 089/161] Added support for running gtest on Red Hat Enterprise Linux 7.6 --- C/services/core/core_management_api.cpp | 15 +++++++++++++-- tests/unit/C/CMakeLists.txt | 1 - tests/unit/C/README.rst | 9 +++++++++ tests/unit/C/common/CMakeLists.txt | 2 
++ tests/unit/C/plugins/common/CMakeLists.txt | 2 ++ tests/unit/C/services/core/CMakeLists.txt | 2 ++ 6 files changed, 28 insertions(+), 3 deletions(-) diff --git a/C/services/core/core_management_api.cpp b/C/services/core/core_management_api.cpp index 1f59211750..df21f6d1dc 100644 --- a/C/services/core/core_management_api.cpp +++ b/C/services/core/core_management_api.cpp @@ -32,6 +32,14 @@ void registerInterestWrapper(shared_ptr response, << "Content-type: application/json\r\n\r\n" << payload; } +void replaceSubstr(std::string& str, const std::string& from, const std::string& to) { + size_t start_pos = 0; + while((start_pos = str.find(from, start_pos)) != std::string::npos) { + str.replace(start_pos, from.length(), to); + start_pos += to.length(); + } +} + /** * Easy wrapper for getting a specific service. * It is called to get storage service details: @@ -53,9 +61,12 @@ void getServiceWrapper(shared_ptr response, { string serviceName = queryString.substr(pos + strlen("name=")); // replace %20 with SPACE - serviceName = std::regex_replace(serviceName, + /*serviceName = std::regex_replace(serviceName, std::regex("%20"), - " "); + " "); */ + // RHEL 7.6 gcc pkg "gcc (GCC) 4.8.5 20150623 (Red Hat 4.8.5-36)" + // doesn't support std:regex and std::regex_replace + replaceSubstr(serviceName, "%20", " "); ServiceRegistry* registry = ServiceRegistry::getInstance(); ServiceRecord* foundService = registry->findService(serviceName); string payload; diff --git a/tests/unit/C/CMakeLists.txt b/tests/unit/C/CMakeLists.txt index 0b13bcb7e1..613c806225 100644 --- a/tests/unit/C/CMakeLists.txt +++ b/tests/unit/C/CMakeLists.txt @@ -47,7 +47,6 @@ file(GLOB SERVICES_COMMON_LIB_SOURCES ../../../C/services/common/*.cpp) # Create shared library add_library(services-common-lib SHARED ${SERVICES_COMMON_LIB_SOURCES}) target_link_libraries(services-common-lib ${COMMONLIB}) -target_link_libraries(services-common-lib ${PYTHON_LIBRARIES}) set_target_properties(services-common-lib PROPERTIES 
SOVERSION 1) diff --git a/tests/unit/C/README.rst b/tests/unit/C/README.rst index d0746e3914..af593f7781 100644 --- a/tests/unit/C/README.rst +++ b/tests/unit/C/README.rst @@ -9,6 +9,9 @@ Prequisite These tests are written using the Google Test framework. This should be installed on your machine +Ubuntu: +------- + - sudo apt-get install libgtest-dev Unfortunately this does not install the libraries and a manual build set is required @@ -19,6 +22,12 @@ Unfortunately this does not install the libraries and a manual build set is requ - sudo cmake --build build - sudo cp build/libgtest* /usr/lib +Red Hat Enterprise Linux: +------------------------- + +- sudo yum install gtest gtest-devel +- cd /tmp + Running Tests ============= diff --git a/tests/unit/C/common/CMakeLists.txt b/tests/unit/C/common/CMakeLists.txt index fe5151c41f..c688171784 100644 --- a/tests/unit/C/common/CMakeLists.txt +++ b/tests/unit/C/common/CMakeLists.txt @@ -38,6 +38,8 @@ pkg_check_modules(PYTHON REQUIRED python3) # Add Python 3.x header files include_directories(${PYTHON_INCLUDE_DIRS}) +link_directories(${PYTHON_LIBRARY_DIRS}) + link_directories(${PROJECT_BINARY_DIR}/../../lib) # Link runTests with what we want to test and the GTest and pthread library diff --git a/tests/unit/C/plugins/common/CMakeLists.txt b/tests/unit/C/plugins/common/CMakeLists.txt index a0458684de..1c783a4476 100644 --- a/tests/unit/C/plugins/common/CMakeLists.txt +++ b/tests/unit/C/plugins/common/CMakeLists.txt @@ -38,6 +38,8 @@ pkg_check_modules(PYTHON REQUIRED python3) # Add Python 3.x header files include_directories(${PYTHON_INCLUDE_DIRS}) +link_directories(${PYTHON_LIBRARY_DIRS}) + link_directories(${PROJECT_BINARY_DIR}/../../../lib) # Link runTests with what we want to test and the GTest and pthread library diff --git a/tests/unit/C/services/core/CMakeLists.txt b/tests/unit/C/services/core/CMakeLists.txt index 0f002d9201..a762eaf960 100644 --- a/tests/unit/C/services/core/CMakeLists.txt +++ 
b/tests/unit/C/services/core/CMakeLists.txt @@ -38,6 +38,8 @@ pkg_check_modules(PYTHON REQUIRED python3) # Add Python 3.x header files include_directories(${PYTHON_INCLUDE_DIRS}) +link_directories(${PYTHON_LIBRARY_DIRS}) + link_directories(${PROJECT_BINARY_DIR}/../../../lib) # Link runTests with what we want to test and the GTest and pthread library From 9254d26ed29445a699b21fffc6fe714ee1eede1f Mon Sep 17 00:00:00 2001 From: Vaibhav Singhal Date: Thu, 2 May 2019 15:41:48 +0530 Subject: [PATCH 090/161] lint fixes and code optimised --- python/foglamp/services/core/snapshot.py | 37 ++++++++++++++---------- 1 file changed, 21 insertions(+), 16 deletions(-) diff --git a/python/foglamp/services/core/snapshot.py b/python/foglamp/services/core/snapshot.py index a536cf939c..a85d2c81bb 100644 --- a/python/foglamp/services/core/snapshot.py +++ b/python/foglamp/services/core/snapshot.py @@ -9,11 +9,11 @@ import os from os import path from os.path import basename -import glob import json import tarfile import fnmatch import time +from collections import OrderedDict from foglamp.common import logger from foglamp.common.common import _FOGLAMP_ROOT @@ -28,6 +28,7 @@ _NO_OF_FILES_TO_RETAIN = 3 SNAPSHOT_PREFIX = "snapshot-plugin" + class SnapshotPluginBuilder: _out_file_path = None @@ -51,6 +52,8 @@ def reset(tarinfo): tarinfo.uid = tarinfo.gid = 0 tarinfo.uname = tarinfo.gname = "root" return tarinfo + + tar_file_name = "" try: snapshot_id = str(int(time.time())) snapshot_filename = "{}-{}.tar.gz".format(SNAPSHOT_PREFIX, snapshot_id) @@ -58,14 +61,16 @@ def reset(tarinfo): pyz = tarfile.open(tar_file_name, "w:gz") try: # files are being added to tarfile with relative path and NOT with absolute path. 
- pyz.add("{}/python/foglamp/plugins".format(_FOGLAMP_ROOT), arcname="python/foglamp/plugins", recursive=True) + pyz.add("{}/python/foglamp/plugins".format(_FOGLAMP_ROOT), + arcname="python/foglamp/plugins", recursive=True) # C plugins location is different with "make install" and "make" if path.exists("{}/bin".format(_FOGLAMP_ROOT)) and path.exists("{}/bin/foglamp".format(_FOGLAMP_ROOT)): pyz.add("{}/plugins".format(_FOGLAMP_ROOT), arcname="plugins", recursive=True, filter=reset) else: pyz.add("{}/C/plugins".format(_FOGLAMP_ROOT), arcname="C/plugins", recursive=True) pyz.add("{}/plugins".format(_FOGLAMP_ROOT), arcname="plugins", recursive=True) - pyz.add("{}/cmake_build/C/plugins".format(_FOGLAMP_ROOT), arcname="cmake_build/C/plugins", recursive=True) + pyz.add("{}/cmake_build/C/plugins".format(_FOGLAMP_ROOT), arcname="cmake_build/C/plugins", + recursive=True) finally: pyz.close() except Exception as ex: @@ -80,21 +85,21 @@ def reset(tarinfo): return snapshot_id, snapshot_filename def check_and_delete_plugins_tar_files(self, snapshot_plugin_dir): + valid_extension = '.tar.gz' + valid_files_to_delete = dict() try: - valid_extension = '.tar.gz' for root, dirs, files in os.walk(snapshot_plugin_dir): - valid_files = list( - filter(lambda f: f.endswith(valid_extension), files)) - list_files = list(map( - lambda x: {"id": x.split("snapshot-plugin-")[1].split(".tar.gz")[0], - "name": x}, valid_files)) - sorted_list = sorted(list_files, key=lambda k: k['id'], reverse=True) - if len(sorted_list) > _NO_OF_FILES_TO_RETAIN: - for f in sorted_list[_NO_OF_FILES_TO_RETAIN:]: - _LOGGER.warning("Removing plugin snapshot file %s.", os.path.join(snapshot_plugin_dir, f['name'])) - os.remove(os.path.join(snapshot_plugin_dir, f['name'])) - except: - pass + for _file in files: + if _file.endswith(valid_extension): + valid_files_to_delete[_file.split(".")[0]] = os.path.join(root, _file) + valid_files_to_delete_sorted = OrderedDict(sorted(valid_files_to_delete.items(), reverse=True)) + 
while len(valid_files_to_delete_sorted) > _NO_OF_FILES_TO_RETAIN: + _file, _path = valid_files_to_delete_sorted.popitem() + _LOGGER.warning("Removing plugin snapshot file %s.", _path) + os.remove(_path) + except OSError as ex: + _LOGGER.error("ERROR while deleting plugin file", str(ex)) + def check_and_delete_temp_files(self, snapshot_plugin_dir): # Delete all non *.tar.gz files for f in os.listdir(snapshot_plugin_dir): From 2ebaee94621d6d17ca57a8dfdd93d6fd11af815d Mon Sep 17 00:00:00 2001 From: Praveen Garg Date: Thu, 2 May 2019 19:38:47 +0530 Subject: [PATCH 091/161] updated pyjq to latest version; Requird for coral board --- python/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/python/requirements.txt b/python/requirements.txt index 089f7b7112..d73941ebb0 100644 --- a/python/requirements.txt +++ b/python/requirements.txt @@ -5,4 +5,4 @@ cchardet==2.1.1 pyjwt==1.6.4 # Transformation of data, Apply JqFilter -pyjq==2.2.0 +pyjq==2.3.1 From 999e8bcbb851aa7f9f41b7ad5a9484d16aff356c Mon Sep 17 00:00:00 2001 From: stefano Date: Thu, 2 May 2019 18:16:13 +0200 Subject: [PATCH 092/161] FOGL-2756: specific actions either for debian or redhat --- extras/scripts/foglamp.service | 11 ++++++++--- scripts/foglamp | 13 +++++++++++++ 2 files changed, 21 insertions(+), 3 deletions(-) diff --git a/extras/scripts/foglamp.service b/extras/scripts/foglamp.service index 9522950575..f31ff4992d 100755 --- a/extras/scripts/foglamp.service +++ b/extras/scripts/foglamp.service @@ -1,8 +1,13 @@ #!/bin/sh -# kFreeBSD do not accept scripts as interpreters, using #!/bin/sh and sourcing. -if [ true != "$INIT_D_SCRIPT_SOURCED" ] ; then - set "$0" "$@"; INIT_D_SCRIPT_SOURCED=true . /lib/init/init-d-script +# Debian specific +platform=$([ `cat /etc/os-release | grep -c -i -E "centos|red hat"` -eq 0 ] && echo debian || echo redhat) +if [[ $platform == "debian" ]] +then + # kFreeBSD do not accept scripts as interpreters, using #!/bin/sh and sourcing. 
+ if [ true != "$INIT_D_SCRIPT_SOURCED" ] ; then + set "$0" "$@"; INIT_D_SCRIPT_SOURCED=true . /lib/init/init-d-script + fi fi ### BEGIN INIT INFO # Provides: foglamp diff --git a/scripts/foglamp b/scripts/foglamp index 881f4ce2dc..1fd5160e8f 100755 --- a/scripts/foglamp +++ b/scripts/foglamp @@ -19,6 +19,19 @@ set -e #set -x +# RedHat specific +platform=$([ `cat /etc/os-release | grep -c -i -E "centos|red hat"` -eq 0 ] && echo debian || echo redhat) +if [[ $platform == "redhat" ]] +then + + export PATH=/opt/rh/rh-python36/root/usr/bin${PATH:+:${PATH}} + export LD_LIBRARY_PATH=/opt/rh/rh-python36/root/usr/lib64${LD_LIBRARY_PATH:+:${LD_LIBRARY_PATH}} + export MANPATH=/opt/rh/rh-python36/root/usr/share/man:$MANPATH + export PKG_CONFIG_PATH=/opt/rh/rh-python36/root/usr/lib64/pkgconfig${PKG_CONFIG_PATH:+:${PKG_CONFIG_PATH}} + export XDG_DATA_DIRS="/opt/rh/rh-python36/root/usr/share:${XDG_DATA_DIRS:-/usr/local/share:/usr/share}" + +fi + # # This is the startup script for foglamp # From 7e6ba00170a45ef55f19642317e31f61d083d087 Mon Sep 17 00:00:00 2001 From: stefano Date: Thu, 2 May 2019 18:39:27 +0200 Subject: [PATCH 093/161] FOGL-2756: added RH sudoer --- scripts/extras/foglamp.sudoers_rh | 1 + 1 file changed, 1 insertion(+) create mode 100644 scripts/extras/foglamp.sudoers_rh diff --git a/scripts/extras/foglamp.sudoers_rh b/scripts/extras/foglamp.sudoers_rh new file mode 100644 index 0000000000..77a59a86c8 --- /dev/null +++ b/scripts/extras/foglamp.sudoers_rh @@ -0,0 +1 @@ +%sudo ALL=(ALL) NOPASSWD: /usr/bin/yum -y update, /usr/bin/yum -y install foglamp, /usr/bin/yum -y install /usr/local/foglamp/data/plugins/foglamp*.rpm \ No newline at end of file From 41aca147c66a7741f9b320010b6b32a3f89e5bd8 Mon Sep 17 00:00:00 2001 From: stefano Date: Fri, 3 May 2019 14:47:09 +0200 Subject: [PATCH 094/161] FOGL-2756: makefile, added handling of foglamp.sudoers_rh --- Makefile | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/Makefile b/Makefile index 
903a2a5d6f..0047a56726 100644 --- a/Makefile +++ b/Makefile @@ -78,6 +78,7 @@ FOGLAMP_UPDATE_SRC := scripts/extras/foglamp_update UPDATE_TASK_APT_SRC := scripts/extras/update_task.apt UPDATE_TASK_SNAPPY_SRC := scripts/extras/update_task.snappy SUDOERS_SRC := scripts/extras/foglamp.sudoers +SUDOERS_SRC_RH := scripts/extras/foglamp.sudoers_rh # SCRIPTS TO INSTALL IN SCRIPTS DIR COMMON_SCRIPTS_SRC := scripts/common @@ -397,7 +398,8 @@ bin_install : $(BIN_INSTALL_DIR) $(FOGBENCH_SCRIPT_SRC) $(FOGLAMP_SCRIPT_SRC) $(CP) $(FOGLAMP_UPDATE_SRC) $(BIN_INSTALL_DIR) $(CP) $(UPDATE_TASK_APT_SRC) $(BIN_INSTALL_DIR) $(CP) $(UPDATE_TASK_SNAPPY_SRC) $(BIN_INSTALL_DIR) - $(CP) $(SUDOERS_SRC) $(BIN_INSTALL_DIR) + $(CP) $(SUDOERS_SRC) $(BIN_INSTALL_DIR) + $(CP) $(SUDOERS_SRC_RH) $(BIN_INSTALL_DIR) # create bin install dir $(BIN_INSTALL_DIR) : From f9c14028bfe1af2535c27cf26bf96472bdf993db Mon Sep 17 00:00:00 2001 From: stefano Date: Fri, 3 May 2019 15:05:25 +0200 Subject: [PATCH 095/161] FOGL-2756: add blank line foglamp.sudoers_rh --- scripts/extras/foglamp.sudoers_rh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/extras/foglamp.sudoers_rh b/scripts/extras/foglamp.sudoers_rh index 77a59a86c8..224cc01122 100644 --- a/scripts/extras/foglamp.sudoers_rh +++ b/scripts/extras/foglamp.sudoers_rh @@ -1 +1 @@ -%sudo ALL=(ALL) NOPASSWD: /usr/bin/yum -y update, /usr/bin/yum -y install foglamp, /usr/bin/yum -y install /usr/local/foglamp/data/plugins/foglamp*.rpm \ No newline at end of file +%sudo ALL=(ALL) NOPASSWD: /usr/bin/yum -y update, /usr/bin/yum -y install foglamp, /usr/bin/yum -y install /usr/local/foglamp/data/plugins/foglamp*.rpm From cf2cdaa8ffb2ffa82788edc283e2d011240a7fde Mon Sep 17 00:00:00 2001 From: Mark Riddoch Date: Sun, 5 May 2019 10:15:49 +0000 Subject: [PATCH 096/161] FOGL-2791 Fix process name so that log messages have name of process --- C/common/process.cpp | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/C/common/process.cpp 
b/C/common/process.cpp index 635ec497f0..178bf5d8b9 100644 --- a/C/common/process.cpp +++ b/C/common/process.cpp @@ -89,7 +89,6 @@ FogLampProcess::FogLampProcess(int argc, char** argv) : signal(SIGABRT, handler); string myName = LOG_SERVICE_NAME; - m_logger = new Logger(myName); try { @@ -101,6 +100,8 @@ FogLampProcess::FogLampProcess(int argc, char** argv) : { throw runtime_error(string("Error while parsing required options: ") + e.what()); } + myName = m_name; + m_logger = new Logger(myName); if (m_core_mngt_host.empty()) { From 0b428261bbb5f2a3984adc9d11f4806a72bdfded Mon Sep 17 00:00:00 2001 From: Aman <40791522+AmandeepArora@users.noreply.github.com> Date: Tue, 7 May 2019 17:25:54 +0530 Subject: [PATCH 097/161] FOGL-2751: Run Foglamp and plugins/filters on RHEL 7 (#1537) * FOGL-2751 changes to compile on RHEL * RHEL related make changes * Further changes * Cleanup * Cleanup * Fix test cases compilation/linking * Fixing C unit test cases w.r.t. pkg-config usage * Fix for storage server startup crash * Override libsqlite v3.7.17 present in /usr/lib64 with the compiled v3.28 * Adding conditional check for RHEL/CentOS system * Added support for running gtest on Red Hat Enterprise Linux 7.6 * Update to C unit test README : added epel-release dependency --- C/common/config_category.cpp | 4 +++- C/plugins/storage/sqlite/CMakeLists.txt | 9 ++++++++- C/plugins/storage/sqlite/common/is_RHEL.sh | 4 ++++ C/services/common/CMakeLists.txt | 7 ++----- C/services/core/core_management_api.cpp | 15 +++++++++++++-- .../south-plugin-interfaces/python/CMakeLists.txt | 4 +++- .../python/async_ingest_pymodule/CMakeLists.txt | 5 +++-- C/services/south/ingest.cpp | 2 +- CMakeLists.txt | 2 ++ requirements.sh | 1 + tests/unit/C/CMakeLists.txt | 4 ++-- tests/unit/C/README.rst | 10 ++++++++++ tests/unit/C/common/CMakeLists.txt | 5 ++++- tests/unit/C/plugins/common/CMakeLists.txt | 5 ++++- tests/unit/C/services/core/CMakeLists.txt | 5 ++++- 15 files changed, 64 insertions(+), 18 
deletions(-) create mode 100755 C/plugins/storage/sqlite/common/is_RHEL.sh diff --git a/C/common/config_category.cpp b/C/common/config_category.cpp index 6da8d6cfec..e47424ffd3 100644 --- a/C/common/config_category.cpp +++ b/C/common/config_category.cpp @@ -19,6 +19,8 @@ #include #include #include +#include + using namespace std; using namespace rapidjson; @@ -1460,7 +1462,7 @@ ConfigCategoryChange::ConfigCategoryChange(const string& json) catch (exception* e) { Logger::getLogger()->error("Configuration parse error in category %s item '%s', %s: %s", - m_name, + m_name.c_str(), itr->name.GetString(), json.c_str(), e->what()); diff --git a/C/plugins/storage/sqlite/CMakeLists.txt b/C/plugins/storage/sqlite/CMakeLists.txt index b2491ce3a0..b44437af56 100644 --- a/C/plugins/storage/sqlite/CMakeLists.txt +++ b/C/plugins/storage/sqlite/CMakeLists.txt @@ -28,7 +28,14 @@ set(CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH} "${CMAKE_CURRENT_SOURCE_DIR}") find_package(sqlite3) # Link with SQLite3 library -target_link_libraries(${PROJECT_NAME} -lsqlite3) +execute_process(COMMAND bash "-c" "${CMAKE_CURRENT_SOURCE_DIR}/common/is_RHEL.sh" RESULT_VARIABLE ret) +if(ret EQUAL "0") + message(" Red Hat or CentOS system ") + target_link_libraries(${PROJECT_NAME} /usr/local/lib/libsqlite3.so) +else() + message(" Not a Red Hat or CentOS system ") + target_link_libraries(${PROJECT_NAME} -lsqlite3) +endif() # Install library install(TARGETS ${PROJECT_NAME} DESTINATION foglamp/plugins/storage/${PROJECT_NAME}) diff --git a/C/plugins/storage/sqlite/common/is_RHEL.sh b/C/plugins/storage/sqlite/common/is_RHEL.sh new file mode 100755 index 0000000000..fbb8c48e99 --- /dev/null +++ b/C/plugins/storage/sqlite/common/is_RHEL.sh @@ -0,0 +1,4 @@ +#!/bin/sh + +op=$(lsb_release -ds 2>/dev/null || cat /etc/*release 2>/dev/null | head -n1 || uname -om) +echo $op | egrep -q '(Red Hat|CentOS)' diff --git a/C/services/common/CMakeLists.txt b/C/services/common/CMakeLists.txt index 4ef1d3ca6a..51b0f74e30 100644 --- 
a/C/services/common/CMakeLists.txt +++ b/C/services/common/CMakeLists.txt @@ -3,14 +3,14 @@ cmake_minimum_required(VERSION 2.4.0) project(services-common-lib) set(CMAKE_CXX_FLAGS_DEBUG "-O0 -ggdb") -set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11") +set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11 -O0") set(DLLIB -ldl) # Find source files file(GLOB SOURCES *.cpp) # Find python3.x dev/lib package -find_package(PythonLibs 3 REQUIRED) +pkg_check_modules(PYTHON REQUIRED python3) # Include header files include_directories(include ../../common/include ../../thirdparty/Simple-Web-Server ../../thirdparty/rapidjson/include) @@ -24,9 +24,6 @@ set(CMAKE_LIBRARY_OUTPUT_DIRECTORY ${PROJECT_BINARY_DIR}/../../lib) add_library(${PROJECT_NAME} SHARED ${SOURCES}) target_link_libraries(${PROJECT_NAME} ${DLLIB}) -# Add Python 3.x library -target_link_libraries(${PROJECT_NAME} ${PYTHON_LIBRARIES}) - set_target_properties(${PROJECT_NAME} PROPERTIES SOVERSION 1) # Install library diff --git a/C/services/core/core_management_api.cpp b/C/services/core/core_management_api.cpp index 1f59211750..df21f6d1dc 100644 --- a/C/services/core/core_management_api.cpp +++ b/C/services/core/core_management_api.cpp @@ -32,6 +32,14 @@ void registerInterestWrapper(shared_ptr response, << "Content-type: application/json\r\n\r\n" << payload; } +void replaceSubstr(std::string& str, const std::string& from, const std::string& to) { + size_t start_pos = 0; + while((start_pos = str.find(from, start_pos)) != std::string::npos) { + str.replace(start_pos, from.length(), to); + start_pos += to.length(); + } +} + /** * Easy wrapper for getting a specific service. 
* It is called to get storage service details: @@ -53,9 +61,12 @@ void getServiceWrapper(shared_ptr response, { string serviceName = queryString.substr(pos + strlen("name=")); // replace %20 with SPACE - serviceName = std::regex_replace(serviceName, + /*serviceName = std::regex_replace(serviceName, std::regex("%20"), - " "); + " "); */ + // RHEL 7.6 gcc pkg "gcc (GCC) 4.8.5 20150623 (Red Hat 4.8.5-36)" + // doesn't support std:regex and std::regex_replace + replaceSubstr(serviceName, "%20", " "); ServiceRegistry* registry = ServiceRegistry::getInstance(); ServiceRecord* foundService = registry->findService(serviceName); string payload; diff --git a/C/services/south-plugin-interfaces/python/CMakeLists.txt b/C/services/south-plugin-interfaces/python/CMakeLists.txt index 3138f79fdb..dd7791fa57 100644 --- a/C/services/south-plugin-interfaces/python/CMakeLists.txt +++ b/C/services/south-plugin-interfaces/python/CMakeLists.txt @@ -12,7 +12,7 @@ set(SERVICE_COMMON_LIB services-common-lib) file(GLOB SOURCES python_plugin_interface.cpp pyobject_reading_parser.cpp) # Find Python.h 3.x dev/lib package -find_package(PythonLibs 3 REQUIRED) +pkg_check_modules(PYTHON REQUIRED python3) # Include header files include_directories(include ../../../common/include ../../../services/common/include ../../../services/south/include ../../../thirdparty/rapidjson/include) @@ -20,6 +20,8 @@ include_directories(include ../../../common/include ../../../services/common/inc # Add Python 3.x header files include_directories(${PYTHON_INCLUDE_DIRS}) +link_directories(${PYTHON_LIBRARY_DIRS}) + set(CMAKE_LIBRARY_OUTPUT_DIRECTORY ${PROJECT_BINARY_DIR}/../../../lib) # Create shared library diff --git a/C/services/south-plugin-interfaces/python/async_ingest_pymodule/CMakeLists.txt b/C/services/south-plugin-interfaces/python/async_ingest_pymodule/CMakeLists.txt index c2e8ce92d6..e76512717a 100644 --- a/C/services/south-plugin-interfaces/python/async_ingest_pymodule/CMakeLists.txt +++ 
b/C/services/south-plugin-interfaces/python/async_ingest_pymodule/CMakeLists.txt @@ -12,8 +12,8 @@ set(SERVICE_COMMON_LIB services-common-lib) file(GLOB SOURCES ingest_callback_pymodule.cpp ../pyobject_reading_parser.cpp) # Find Python 3.5 or higher dev/lib/interp package -find_package(PythonLibs 3.5 REQUIRED) -find_package(PythonInterp 3.5 REQUIRED) +#find_package(PythonInterp 3.5 REQUIRED) +pkg_check_modules(PYTHON REQUIRED python3) # Include header files include_directories(include ../../../../common/include ../../../../services/common/include ../../../../services/south/include ../../../../thirdparty/rapidjson/include) @@ -21,6 +21,7 @@ include_directories(include ../../../../common/include ../../../../services/comm # Add Python 3.x header files include_directories(${PYTHON_INCLUDE_DIRS}) +link_directories(${PYTHON_LIBRARY_DIRS}) link_directories(${PROJECT_BINARY_DIR}/../../../../lib) set(CMAKE_LIBRARY_OUTPUT_DIRECTORY ${PROJECT_BINARY_DIR}/../../../../../../python) diff --git a/C/services/south/ingest.cpp b/C/services/south/ingest.cpp index 0ace99507e..90caaf03eb 100644 --- a/C/services/south/ingest.cpp +++ b/C/services/south/ingest.cpp @@ -380,7 +380,7 @@ vector* newQ = new vector(); lock_guard guard(m_qMutex); // BUffer current data in m_data - m_queue->insert(m_queue->cbegin(), + m_queue->insert(m_queue->begin(), m_data->begin(), m_data->end()); // Is it possible that some of the readings are stored in DB, and others are not? 
diff --git a/CMakeLists.txt b/CMakeLists.txt index c6696579ba..78555c70f5 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -3,6 +3,8 @@ project (FogLAMP) set(CMAKE_CXX_FLAGS "-std=c++11 -O3") +find_package(PkgConfig REQUIRED) + add_subdirectory(C/common) add_subdirectory(C/services/common) add_subdirectory(C/plugins/common) diff --git a/requirements.sh b/requirements.sh index 1330f6ad45..765c46d86b 100755 --- a/requirements.sh +++ b/requirements.sh @@ -31,4 +31,5 @@ sudo apt install -y cmake g++ make build-essential autoconf automake uuid-dev sudo apt install -y libtool libboost-dev libboost-system-dev libboost-thread-dev libpq-dev libssl-dev libz-dev sudo apt install -y python-dbus python-dev python3-dev python3-pip sudo apt install -y sqlite3 libsqlite3-dev +sudo apt install -y pkg-config # sudo apt install -y postgresql diff --git a/tests/unit/C/CMakeLists.txt b/tests/unit/C/CMakeLists.txt index bd85a89276..613c806225 100644 --- a/tests/unit/C/CMakeLists.txt +++ b/tests/unit/C/CMakeLists.txt @@ -16,7 +16,8 @@ find_package(Boost 1.53.0 COMPONENTS ${BOOST_COMPONENTS} REQUIRED) include_directories(SYSTEM ${Boost_INCLUDE_DIR}) # Find python3.x dev/lib package -find_package(PythonLibs 3 REQUIRED) +find_package(PkgConfig REQUIRED) +pkg_check_modules(PYTHON REQUIRED python3) include_directories(../../../C/common/include) include_directories(../../../C/plugins/common/include) @@ -46,7 +47,6 @@ file(GLOB SERVICES_COMMON_LIB_SOURCES ../../../C/services/common/*.cpp) # Create shared library add_library(services-common-lib SHARED ${SERVICES_COMMON_LIB_SOURCES}) target_link_libraries(services-common-lib ${COMMONLIB}) -target_link_libraries(services-common-lib ${PYTHON_LIBRARIES}) set_target_properties(services-common-lib PROPERTIES SOVERSION 1) diff --git a/tests/unit/C/README.rst b/tests/unit/C/README.rst index d0746e3914..1c0d759939 100644 --- a/tests/unit/C/README.rst +++ b/tests/unit/C/README.rst @@ -9,6 +9,9 @@ Prequisite These tests are written using the Google Test 
framework. This should be installed on your machine +Ubuntu: +------- + - sudo apt-get install libgtest-dev Unfortunately this does not install the libraries and a manual build set is required @@ -19,6 +22,13 @@ Unfortunately this does not install the libraries and a manual build set is requ - sudo cmake --build build - sudo cp build/libgtest* /usr/lib +Red Hat Enterprise Linux: +------------------------- + +- sudo yum install https://dl.fedoraproject.org/pub/epel/epel-release-latest-7.noarch.rpm +- sudo yum install gtest gtest-devel +- cd /tmp + Running Tests ============= diff --git a/tests/unit/C/common/CMakeLists.txt b/tests/unit/C/common/CMakeLists.txt index 26866b23ed..c688171784 100644 --- a/tests/unit/C/common/CMakeLists.txt +++ b/tests/unit/C/common/CMakeLists.txt @@ -32,11 +32,14 @@ set(PLUGINS_COMMON_LIB plugins-common-lib) file(GLOB unittests "*.cpp") # Find python3.x dev/lib package -find_package(PythonLibs 3 REQUIRED) +find_package(PkgConfig REQUIRED) +pkg_check_modules(PYTHON REQUIRED python3) # Add Python 3.x header files include_directories(${PYTHON_INCLUDE_DIRS}) +link_directories(${PYTHON_LIBRARY_DIRS}) + link_directories(${PROJECT_BINARY_DIR}/../../lib) # Link runTests with what we want to test and the GTest and pthread library diff --git a/tests/unit/C/plugins/common/CMakeLists.txt b/tests/unit/C/plugins/common/CMakeLists.txt index b3353b263a..1c783a4476 100644 --- a/tests/unit/C/plugins/common/CMakeLists.txt +++ b/tests/unit/C/plugins/common/CMakeLists.txt @@ -32,11 +32,14 @@ set(PLUGINS_COMMON_LIB plugins-common-lib) file(GLOB unittests "*.cpp") # Find python3.x dev/lib package -find_package(PythonLibs 3 REQUIRED) +find_package(PkgConfig REQUIRED) +pkg_check_modules(PYTHON REQUIRED python3) # Add Python 3.x header files include_directories(${PYTHON_INCLUDE_DIRS}) +link_directories(${PYTHON_LIBRARY_DIRS}) + link_directories(${PROJECT_BINARY_DIR}/../../../lib) # Link runTests with what we want to test and the GTest and pthread library diff 
--git a/tests/unit/C/services/core/CMakeLists.txt b/tests/unit/C/services/core/CMakeLists.txt index 0c2ac21b5f..a762eaf960 100644 --- a/tests/unit/C/services/core/CMakeLists.txt +++ b/tests/unit/C/services/core/CMakeLists.txt @@ -32,11 +32,14 @@ file(GLOB test_sources "../../../../../C/services/core/*.cpp") file(GLOB unittests "*.cpp") # Find python3.x dev/lib package -find_package(PythonLibs 3 REQUIRED) +find_package(PkgConfig REQUIRED) +pkg_check_modules(PYTHON REQUIRED python3) # Add Python 3.x header files include_directories(${PYTHON_INCLUDE_DIRS}) +link_directories(${PYTHON_LIBRARY_DIRS}) + link_directories(${PROJECT_BINARY_DIR}/../../../lib) # Link runTests with what we want to test and the GTest and pthread library From e5c8ba761ed4529a12938f44b8057a08cfa5e628 Mon Sep 17 00:00:00 2001 From: ashish-jabble Date: Tue, 7 May 2019 17:50:57 +0530 Subject: [PATCH 098/161] pem file support added for foglamp cert store --- python/foglamp/common/web/ssl_wrapper.py | 10 ++++++++ .../services/core/api/certificate_store.py | 25 ++++++++++++++++--- python/foglamp/services/core/server.py | 10 ++++++++ scripts/auth_certificates | 2 +- .../core/api/test_certificate_store.py | 8 +++--- 5 files changed, 47 insertions(+), 8 deletions(-) diff --git a/python/foglamp/common/web/ssl_wrapper.py b/python/foglamp/common/web/ssl_wrapper.py index 1c6bd31ace..19d067c09e 100644 --- a/python/foglamp/common/web/ssl_wrapper.py +++ b/python/foglamp/common/web/ssl_wrapper.py @@ -210,3 +210,13 @@ def set_ca_cert(cls, cert): @classmethod def set_user_cert(cls, cert): cls.user_cert = cert + + @classmethod + def verify_pem(cls, cert_path): + # FIXME: standard validation + with open(cert_path, 'r') as content_file: + content = content_file.read() + if "-----BEGIN CERTIFICATE-----" and "-----END CERTIFICATE-----" \ + "-----BEGIN RSA PRIVATE KEY-----" and "-----END RSA PRIVATE KEY-----" in content: + return True + return False diff --git a/python/foglamp/services/core/api/certificate_store.py 
b/python/foglamp/services/core/api/certificate_store.py index 126ed369d9..0bd1ff3dc0 100644 --- a/python/foglamp/services/core/api/certificate_store.py +++ b/python/foglamp/services/core/api/certificate_store.py @@ -8,6 +8,7 @@ from aiohttp import web from foglamp.services.core import connect from foglamp.common.configuration_manager import ConfigurationManager +from foglamp.common.web.ssl_wrapper import SSLVerifier __author__ = "Ashish Jabble" __copyright__ = "Copyright (c) 2017 OSIsoft, LLC" @@ -37,7 +38,7 @@ async def get_certs(request): # Get certs directory path certs_dir = _get_certs_dir() total_files = [] - valid_extensions = ('.key', '.cert') + valid_extensions = ('.pem', '.key', '.cert') for root, dirs, files in os.walk(certs_dir): total_files = [f for f in files if f.endswith(valid_extensions)] @@ -57,9 +58,9 @@ def search_file(fname): certs = [] for fname in unique_list: cert_pair = {'key': search_file('{}.key'.format(fname)), - 'cert': search_file('{}.cert'.format(fname))} + 'cert': search_file('{}.cert'.format(fname)), + 'pem': search_file('{}.pem'.format(fname))} certs.append(cert_pair) - return web.json_response({"certificates": certs}) @@ -75,6 +76,24 @@ async def upload(request): # contains the name of the file in string format key_file = data.get('key') cert_file = data.get('cert') + pem_file = data.get('pem') + if pem_file is not None: + pem_filename = pem_file.filename + if not pem_filename.endswith(".pem"): + raise web.HTTPBadRequest(reason="Accepted file extensions is .pem") + # TODO: overwrite? 
+ certs_dir = _get_certs_dir() + pem_file_data = data['pem'].file + pem_file_content = pem_file_data.read() + pem_file_path = str(certs_dir) + '/{}'.format(pem_filename) + with open(pem_file_path, 'wb') as f: + f.write(pem_file_content) + + if SSLVerifier.verify_pem(pem_file_path): + return web.json_response({"result": "{} has been uploaded successfully".format(pem_filename)}) + else: + os.remove(pem_file_path) + raise web.HTTPBadRequest(reason="Invalid pem file") # accepted values for overwrite are '0 and 1' allow_overwrite = data.get('overwrite', '0') diff --git a/python/foglamp/services/core/server.py b/python/foglamp/services/core/server.py index ccc212fd0d..58b1389dcf 100755 --- a/python/foglamp/services/core/server.py +++ b/python/foglamp/services/core/server.py @@ -299,6 +299,16 @@ def get_certificates(cls): Put these in $FOGLAMP_DATA/etc/certs, $FOGLAMP_ROOT/data/etc/certs or /usr/local/foglamp/data/etc/certs """ + pem = certs_dir + '/{}.pem'.format(cls.cert_file_name) + if not os.path.isfile(pem): + _logger.warning("%s pem file is missing. 
Hence using cert and key combination", cls.cert_file_name) + else: + # validate pem file + if SSLVerifier.verify_pem(pem): + return pem, None + else: + raise SSLVerifier.VerificationError('Invalid pem file') + cert = certs_dir + '/{}.cert'.format(cls.cert_file_name) key = certs_dir + '/{}.key'.format(cls.cert_file_name) diff --git a/scripts/auth_certificates b/scripts/auth_certificates index 14668774f3..cf099abac6 100755 --- a/scripts/auth_certificates +++ b/scripts/auth_certificates @@ -169,7 +169,7 @@ else certificate_log "err" "Could not create SSL certificate ${SSL_NAME} at ${SSL_LOCATION}" "all" "pretty" exit 1 fi - + cat "${SSL_LOCATION}/${SSL_NAME}.key" "${SSL_LOCATION}/${SSL_NAME}.cert" >"${SSL_LOCATION}/${SSL_NAME}.pem" certificate_log "info" "${AUTH_TYPE} certificate created successfully for ${SSL_NAME}, and placed in ${SSL_LOCATION}" "outonly" "pretty" fi diff --git a/tests/unit/python/foglamp/services/core/api/test_certificate_store.py b/tests/unit/python/foglamp/services/core/api/test_certificate_store.py index 9a555cb343..e2411180fc 100644 --- a/tests/unit/python/foglamp/services/core/api/test_certificate_store.py +++ b/tests/unit/python/foglamp/services/core/api/test_certificate_store.py @@ -40,12 +40,12 @@ def certs_path(self): return pathlib.Path(__file__).parent async def test_get_certs(self, client, certs_path): - response_content = [{"cert": "foglamp.cert", "key": "foglamp.key"}, - {"cert": "server.cert", "key": ""}] + response_content = [{"cert": "foglamp.cert", "key": "foglamp.key", "pem": "foglamp.pem"}, + {"cert": "server.cert", "key": "", "pem": ""}] with patch.object(certificate_store, '_get_certs_dir', return_value=certs_path / 'certs'): with patch('os.walk') as mockwalk: mockwalk.return_value = [ - (certs_path / 'certs', [], ['foglamp.cert', 'foglamp.key', 'foglamp.txt', 'server.cert']) + (certs_path / 'certs', [], ['foglamp.cert', 'foglamp.key', 'foglamp.txt', 'server.cert', 'foglamp.pem']) ] resp = await 
client.get('/foglamp/certificate') assert 200 == resp.status @@ -82,7 +82,7 @@ async def test_get_certs_if_bad_extension(self, client, certs_path): mockwalk.assert_called_once_with(certs_path / 'certs') async def test_get_certs_if_pair_is_missing(self, client, certs_path): - actual_response = {'certificates': [{'key': '', 'cert': 'server.cert'}]} + actual_response = {'certificates': [{'key': '', 'cert': 'server.cert', 'pem': ''}]} with patch.object(certificate_store, '_get_certs_dir', return_value=certs_path / 'certs'): with patch('os.walk') as mockwalk: mockwalk.return_value = [(certs_path / 'certs', [], ['server.cert'])] From daa8f7af7615e5ae3c7a33c05965bb92551a916c Mon Sep 17 00:00:00 2001 From: stefano Date: Tue, 7 May 2019 16:58:20 +0200 Subject: [PATCH 099/161] FOGL-2756: Changes in the foglamp makefile will be in a different branch. --- Makefile | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/Makefile b/Makefile index 0047a56726..903a2a5d6f 100644 --- a/Makefile +++ b/Makefile @@ -78,7 +78,6 @@ FOGLAMP_UPDATE_SRC := scripts/extras/foglamp_update UPDATE_TASK_APT_SRC := scripts/extras/update_task.apt UPDATE_TASK_SNAPPY_SRC := scripts/extras/update_task.snappy SUDOERS_SRC := scripts/extras/foglamp.sudoers -SUDOERS_SRC_RH := scripts/extras/foglamp.sudoers_rh # SCRIPTS TO INSTALL IN SCRIPTS DIR COMMON_SCRIPTS_SRC := scripts/common @@ -398,8 +397,7 @@ bin_install : $(BIN_INSTALL_DIR) $(FOGBENCH_SCRIPT_SRC) $(FOGLAMP_SCRIPT_SRC) $(CP) $(FOGLAMP_UPDATE_SRC) $(BIN_INSTALL_DIR) $(CP) $(UPDATE_TASK_APT_SRC) $(BIN_INSTALL_DIR) $(CP) $(UPDATE_TASK_SNAPPY_SRC) $(BIN_INSTALL_DIR) - $(CP) $(SUDOERS_SRC) $(BIN_INSTALL_DIR) - $(CP) $(SUDOERS_SRC_RH) $(BIN_INSTALL_DIR) + $(CP) $(SUDOERS_SRC) $(BIN_INSTALL_DIR) # create bin install dir $(BIN_INSTALL_DIR) : From 8e9fdc301fa1435b16fb06403a151636f6b31959 Mon Sep 17 00:00:00 2001 From: stefano Date: Wed, 8 May 2019 17:03:55 +0200 Subject: [PATCH 100/161] FOGL-2756: make file changes --- Makefile | 22 
++++++++++++++++++++-- 1 file changed, 20 insertions(+), 2 deletions(-) diff --git a/Makefile b/Makefile index 903a2a5d6f..592a33ba37 100644 --- a/Makefile +++ b/Makefile @@ -1,14 +1,27 @@ ############################################################################### ################################### COMMANDS ################################## ############################################################################### +# Check RedHat || CentOS +$(eval PLATFORM_RH=$(shell (lsb_release -ds 2>/dev/null || cat /etc/*release 2>/dev/null | head -n1 || uname -om) | egrep '(Red Hat|CentOS)')) +# Log Platform RedHat || CentOS +$(if $(PLATFORM_RH), $(info Platform is $(PLATFORM_RH))) + MKDIR_PATH := mkdir -p CD := cd LN := ln -sf CMAKE := cmake PIP_USER_FLAG = --user -PIP_INSTALL_REQUIREMENTS := pip3 install -Ir + USE_PIP_CACHE := no -PYTHON_BUILD_PACKAGE = python3 setup.py build -b ../$(PYTHON_BUILD_DIR) +# For RedHat || CentOS we need rh-python36 +ifneq ("$(PLATFORM_RH)","") + PIP_INSTALL_REQUIREMENTS := source scl_source enable rh-python36 && pip3 install -Ir + PYTHON_BUILD_PACKAGE = source scl_source enable rh-python36 && python3 setup.py build -b ../$(PYTHON_BUILD_DIR) +else + PIP_INSTALL_REQUIREMENTS := pip3 install -Ir + PYTHON_BUILD_PACKAGE = python3 setup.py build -b ../$(PYTHON_BUILD_DIR) +endif + RM_DIR := rm -r RM_FILE := rm MAKE_INSTALL = $(MAKE) install @@ -78,6 +91,7 @@ FOGLAMP_UPDATE_SRC := scripts/extras/foglamp_update UPDATE_TASK_APT_SRC := scripts/extras/update_task.apt UPDATE_TASK_SNAPPY_SRC := scripts/extras/update_task.snappy SUDOERS_SRC := scripts/extras/foglamp.sudoers +SUDOERS_SRC_RH := scripts/extras/foglamp.sudoers_rh # SCRIPTS TO INSTALL IN SCRIPTS DIR COMMON_SCRIPTS_SRC := scripts/common @@ -397,7 +411,11 @@ bin_install : $(BIN_INSTALL_DIR) $(FOGBENCH_SCRIPT_SRC) $(FOGLAMP_SCRIPT_SRC) $(CP) $(FOGLAMP_UPDATE_SRC) $(BIN_INSTALL_DIR) $(CP) $(UPDATE_TASK_APT_SRC) $(BIN_INSTALL_DIR) $(CP) $(UPDATE_TASK_SNAPPY_SRC) $(BIN_INSTALL_DIR) +ifneq 
("$(PLATFORM_RH)","") + $(CP) $(SUDOERS_SRC_RH) $(BIN_INSTALL_DIR) +else $(CP) $(SUDOERS_SRC) $(BIN_INSTALL_DIR) +endif # create bin install dir $(BIN_INSTALL_DIR) : From 905ff620a24b77f9f1335bf63c46c8948acdd292 Mon Sep 17 00:00:00 2001 From: ashish-jabble Date: Thu, 9 May 2019 13:37:30 +0530 Subject: [PATCH 101/161] server core changes reverted --- python/foglamp/services/core/server.py | 10 ---------- scripts/auth_certificates | 2 +- 2 files changed, 1 insertion(+), 11 deletions(-) diff --git a/python/foglamp/services/core/server.py b/python/foglamp/services/core/server.py index 58b1389dcf..ccc212fd0d 100755 --- a/python/foglamp/services/core/server.py +++ b/python/foglamp/services/core/server.py @@ -299,16 +299,6 @@ def get_certificates(cls): Put these in $FOGLAMP_DATA/etc/certs, $FOGLAMP_ROOT/data/etc/certs or /usr/local/foglamp/data/etc/certs """ - pem = certs_dir + '/{}.pem'.format(cls.cert_file_name) - if not os.path.isfile(pem): - _logger.warning("%s pem file is missing. Hence using cert and key combination", cls.cert_file_name) - else: - # validate pem file - if SSLVerifier.verify_pem(pem): - return pem, None - else: - raise SSLVerifier.VerificationError('Invalid pem file') - cert = certs_dir + '/{}.cert'.format(cls.cert_file_name) key = certs_dir + '/{}.key'.format(cls.cert_file_name) diff --git a/scripts/auth_certificates b/scripts/auth_certificates index cf099abac6..14668774f3 100755 --- a/scripts/auth_certificates +++ b/scripts/auth_certificates @@ -169,7 +169,7 @@ else certificate_log "err" "Could not create SSL certificate ${SSL_NAME} at ${SSL_LOCATION}" "all" "pretty" exit 1 fi - cat "${SSL_LOCATION}/${SSL_NAME}.key" "${SSL_LOCATION}/${SSL_NAME}.cert" >"${SSL_LOCATION}/${SSL_NAME}.pem" + certificate_log "info" "${AUTH_TYPE} certificate created successfully for ${SSL_NAME}, and placed in ${SSL_LOCATION}" "outonly" "pretty" fi From 059da0fd1ad00abe700b73eeb437b8033f84fa3e Mon Sep 17 00:00:00 2001 From: pintomax Date: Thu, 9 May 2019 12:44:30 +0200 
Subject: [PATCH 102/161] Fix for CentOS / RedHat (#1548) sudo make install fix for CentOS / RedHat --- Makefile | 23 +++++++++++++++++++---- 1 file changed, 19 insertions(+), 4 deletions(-) diff --git a/Makefile b/Makefile index 903a2a5d6f..7d0aec6f86 100644 --- a/Makefile +++ b/Makefile @@ -1,14 +1,29 @@ ############################################################################### ################################### COMMANDS ################################## ############################################################################### +# Check RedHat || CentOS +$(eval PLATFORM_RH=$(shell (lsb_release -ds 2>/dev/null || cat /etc/*release 2>/dev/null | head -n1 || uname -om) | egrep '(Red Hat|CentOS)')) + +# Log Platform RedHat || CentOS +$(if $(PLATFORM_RH), $(info Platform is $(PLATFORM_RH))) + +# For RedHat || CentOS we need rh-python36 +ifneq ("$(PLATFORM_RH)","") + PIP_INSTALL_REQUIREMENTS := source scl_source enable rh-python36 && pip3 install -Ir + PYTHON_BUILD_PACKAGE = source scl_source enable rh-python36 && python3 setup.py build -b ../$(PYTHON_BUILD_DIR) + CMAKE := source scl_source enable rh-python36 && cmake +else + PIP_INSTALL_REQUIREMENTS := pip3 install -Ir + PYTHON_BUILD_PACKAGE = python3 setup.py build -b ../$(PYTHON_BUILD_DIR) + CMAKE := cmake +endif + MKDIR_PATH := mkdir -p CD := cd LN := ln -sf -CMAKE := cmake PIP_USER_FLAG = --user -PIP_INSTALL_REQUIREMENTS := pip3 install -Ir USE_PIP_CACHE := no -PYTHON_BUILD_PACKAGE = python3 setup.py build -b ../$(PYTHON_BUILD_DIR) + RM_DIR := rm -r RM_FILE := rm MAKE_INSTALL = $(MAKE) install @@ -397,7 +412,7 @@ bin_install : $(BIN_INSTALL_DIR) $(FOGBENCH_SCRIPT_SRC) $(FOGLAMP_SCRIPT_SRC) $(CP) $(FOGLAMP_UPDATE_SRC) $(BIN_INSTALL_DIR) $(CP) $(UPDATE_TASK_APT_SRC) $(BIN_INSTALL_DIR) $(CP) $(UPDATE_TASK_SNAPPY_SRC) $(BIN_INSTALL_DIR) - $(CP) $(SUDOERS_SRC) $(BIN_INSTALL_DIR) + $(CP) $(SUDOERS_SRC) $(BIN_INSTALL_DIR) # create bin install dir $(BIN_INSTALL_DIR) : From 
5c3661a15b0fa7d2db125970fcfbee2886bfade7 Mon Sep 17 00:00:00 2001 From: Mark Riddoch Date: Thu, 9 May 2019 11:00:48 +0000 Subject: [PATCH 103/161] FOGL-2779 Fix for array data --- C/common/reading_set.cpp | 68 +++++++++++++++++++++++++++++++++++++--- 1 file changed, 63 insertions(+), 5 deletions(-) diff --git a/C/common/reading_set.cpp b/C/common/reading_set.cpp index ed20bee9e7..4b229c3798 100644 --- a/C/common/reading_set.cpp +++ b/C/common/reading_set.cpp @@ -20,6 +20,9 @@ #define ASSET_NAME_INVALID_READING "error_invalid_reading" +static const char* kTypeNames[] = + { "Null", "False", "True", "Object", "Array", "String", "Number" }; + using namespace std; using namespace rapidjson; @@ -225,8 +228,28 @@ JSONReading::JSONReading(const Value& json) { m_has_id = false; } - m_asset = json["asset_code"].GetString(); - stringToTimestamp(json["user_ts"].GetString(), &m_userTimestamp); + if (json.HasMember("asset_code")) + { + m_asset = json["asset_code"].GetString(); + } + else + { + string errMsg = "Malformed JSON reading, missing asset_code '"; + errMsg.append("value"); + errMsg += "'"; + throw new ReadingSetException(errMsg.c_str()); + } + if (json.HasMember("user_ts")) + { + stringToTimestamp(json["user_ts"].GetString(), &m_userTimestamp); + } + else + { + string errMsg = "Malformed JSON reading, missing user timestamp '"; + errMsg.append("value"); + errMsg += "'"; + throw new ReadingSetException(errMsg.c_str()); + } if (json.HasMember("ts")) { stringToTimestamp(json["ts"].GetString(), &m_timestamp); @@ -235,7 +258,10 @@ JSONReading::JSONReading(const Value& json) { m_timestamp = m_userTimestamp; } - m_uuid = json["read_key"].GetString(); + if (json.HasMember("read_key")) + { + m_uuid = json["read_key"].GetString(); + } // We have a single value here which is a number if (json.HasMember("value") && json["value"].IsNumber()) @@ -277,7 +303,7 @@ JSONReading::JSONReading(const Value& json) throw new ReadingSetException(errMsg.c_str()); } } - else + else if 
(json.HasMember("reading")) { if (json["reading"].IsObject()) { @@ -326,8 +352,36 @@ JSONReading::JSONReading(const Value& json) } } + case kArrayType: + { + vector arrayValues; + for (auto& v : m.value.GetArray()) + { + if (v.IsDouble()) + { + arrayValues.push_back(v.GetDouble()); + } + else if (v.IsInt() || v.IsUint()) + { + double i = (double)v.GetInt(); + arrayValues.push_back(i); + } + else if (v.IsInt64() || v.IsUint64()) + { + double i = (double)v.GetInt64(); + arrayValues.push_back(i); + } + } + DatapointValue value(arrayValues); + this->addDatapoint(new Datapoint(m.name.GetString(), + value)); + break; + + } + default: { - string errMsg = "Cannot handle unsupported type '" + m.value.GetType(); + string errMsg = "Cannot handle unsupported type '"; + errMsg += kTypeNames[m.value.GetType()]; errMsg += "' of reading element '"; errMsg.append(m.name.GetString()); errMsg += "'"; @@ -390,6 +444,10 @@ JSONReading::JSONReading(const Value& json) m_asset = string(ASSET_NAME_INVALID_READING) + string("_") + m_asset.c_str(); } } + else + { + Logger::getLogger()->error("Missing reading property for JSON reading, %s", m_asset.c_str()); + } } /** From d7932248385f396788320f1ed2a6896c05c6de63 Mon Sep 17 00:00:00 2001 From: ashish-jabble Date: Thu, 9 May 2019 17:01:25 +0530 Subject: [PATCH 104/161] GET, POST endpoint fixes as discussed; pending unit tests and DELETE endpoint --- .../services/core/api/certificate_store.py | 139 ++++++++---------- 1 file changed, 64 insertions(+), 75 deletions(-) diff --git a/python/foglamp/services/core/api/certificate_store.py b/python/foglamp/services/core/api/certificate_store.py index 0bd1ff3dc0..d58b6e3863 100644 --- a/python/foglamp/services/core/api/certificate_store.py +++ b/python/foglamp/services/core/api/certificate_store.py @@ -8,7 +8,6 @@ from aiohttp import web from foglamp.services.core import connect from foglamp.common.configuration_manager import ConfigurationManager -from foglamp.common.web.ssl_wrapper import SSLVerifier 
__author__ = "Ashish Jabble" __copyright__ = "Copyright (c) 2017 OSIsoft, LLC" @@ -34,40 +33,35 @@ async def get_certs(request): :Example: curl -X GET http://localhost:8081/foglamp/certificate """ - - # Get certs directory path - certs_dir = _get_certs_dir() - total_files = [] - valid_extensions = ('.pem', '.key', '.cert') - - for root, dirs, files in os.walk(certs_dir): - total_files = [f for f in files if f.endswith(valid_extensions)] - - # Get filenames without extension - file_names = [os.path.splitext(fname)[0] for fname in total_files] - - # Get unique list from file_names - unique_list = list(set(file_names)) - - def search_file(fname): - # Search file with extension, if found then filename with extension else empty - if fname in total_files: - return fname - return '' - + certs_dir = _get_certs_dir('/etc/certs') certs = [] - for fname in unique_list: - cert_pair = {'key': search_file('{}.key'.format(fname)), - 'cert': search_file('{}.cert'.format(fname)), - 'pem': search_file('{}.pem'.format(fname))} - certs.append(cert_pair) - return web.json_response({"certificates": certs}) + keys = [] + key_valid_extensions = ('.key', '.pem') + for root, dirs, files in os.walk(certs_dir): + if root.endswith('json'): + for f in files: + if f.endswith('.json'): + certs.append(f) + if root.endswith('pem'): + for f in files: + if f.endswith('.pem'): + certs.append(f) + for f in files: + if f.endswith('.cert'): + certs.append(f) + if f.endswith(key_valid_extensions): + keys.append(f) + return web.json_response({"certs": certs, "keys": keys}) async def upload(request): """ Upload a certificate :Example: + curl -F "cert=@filename.pem" http://localhost:8081/foglamp/certificate + curl -F "cert=@filename.json" http://localhost:8081/foglamp/certificate + curl -F "key=@filename.pem" -F "cert=@filename.pem" http://localhost:8081/foglamp/certificate + curl -F "key=@filename.key" -F "cert=@filename.json" http://localhost:8081/foglamp/certificate curl -F "key=@filename.key" -F 
"cert=@filename.cert" http://localhost:8081/foglamp/certificate curl -F "key=@filename.key" -F "cert=@filename.cert" -F "overwrite=1" http://localhost:8081/foglamp/certificate """ @@ -76,75 +70,71 @@ async def upload(request): # contains the name of the file in string format key_file = data.get('key') cert_file = data.get('cert') - pem_file = data.get('pem') - if pem_file is not None: - pem_filename = pem_file.filename - if not pem_filename.endswith(".pem"): - raise web.HTTPBadRequest(reason="Accepted file extensions is .pem") - # TODO: overwrite? - certs_dir = _get_certs_dir() - pem_file_data = data['pem'].file - pem_file_content = pem_file_data.read() - pem_file_path = str(certs_dir) + '/{}'.format(pem_filename) - with open(pem_file_path, 'wb') as f: - f.write(pem_file_content) - - if SSLVerifier.verify_pem(pem_file_path): - return web.json_response({"result": "{} has been uploaded successfully".format(pem_filename)}) - else: - os.remove(pem_file_path) - raise web.HTTPBadRequest(reason="Invalid pem file") + allow_overwrite = data.get('overwrite', '0') # accepted values for overwrite are '0 and 1' - allow_overwrite = data.get('overwrite', '0') if allow_overwrite in ('0', '1'): should_overwrite = True if int(allow_overwrite) == 1 else False else: raise web.HTTPBadRequest(reason="Accepted value for overwrite is 0 or 1") - if not key_file or not cert_file: - raise web.HTTPBadRequest(reason="key or certs file is missing") + if not cert_file: + raise web.HTTPBadRequest(reason="Cert file is missing") - key_filename = key_file.filename cert_filename = cert_file.filename + if cert_filename.endswith('.cert'): + if not key_file: + raise web.HTTPBadRequest(reason="key file is missing") + + cert_valid_extensions = ('.cert', '.json', '.pem') + key_valid_extensions = ('.key', '.pem') + key_filename = None + if key_file: + key_filename = key_file.filename + if not key_filename.endswith(key_valid_extensions): + raise web.HTTPBadRequest(reason="Accepted file extensions are .key 
and .pem for key file") - # accepted extensions are '.key and .cert' - valid_extensions = ('.key', '.cert') - if not cert_filename.endswith(valid_extensions) or not key_filename.endswith(valid_extensions): - raise web.HTTPBadRequest(reason="Accepted file extensions are .key and .cert") + if not cert_filename.endswith(cert_valid_extensions): + raise web.HTTPBadRequest(reason="Accepted file extensions are .cert, .json and .pem for cert file") - # certs and key filename should match - if cert_filename and key_filename: - if cert_filename.split(".")[0] != key_filename.split(".")[0]: - raise web.HTTPBadRequest(reason="key and certs file name should match") + certs_dir = '' + if cert_filename.endswith('.pem'): + certs_dir = _get_certs_dir('/etc/certs/pem') + if cert_filename.endswith('.json'): + certs_dir = _get_certs_dir('/etc/certs/json') - # Get certs directory path - certs_dir = _get_certs_dir() found_files = _find_file(cert_filename, certs_dir) is_found = True if len(found_files) else False if is_found and should_overwrite is False: raise web.HTTPBadRequest(reason="Certificate with the same name already exists. " "To overwrite set the overwrite to 1") - if key_file: - key_file_data = data['key'].file - key_file_content = key_file_data.read() - key_file_path = str(certs_dir) + '/{}'.format(key_filename) - with open(key_file_path, 'wb') as f: - f.write(key_file_content) - + keys_dir = _get_certs_dir('/etc/certs') + found_files = _find_file(key_filename, keys_dir) + is_found = True if len(found_files) else False + if is_found and should_overwrite is False: + raise web.HTTPBadRequest(reason="Key cert with the same name already exists. 
" + "To overwrite set the overwrite to 1") if cert_file: cert_file_data = data['cert'].file cert_file_content = cert_file_data.read() cert_file_path = str(certs_dir) + '/{}'.format(cert_filename) with open(cert_file_path, 'wb') as f: f.write(cert_file_content) + if key_file: + key_file_data = data['key'].file + key_file_content = key_file_data.read() + key_file_path = str(keys_dir) + '/{}'.format(key_filename) + with open(key_file_path, 'wb') as f: + f.write(key_file_content) # in order to bring this new cert usage into effect, make sure to # update config for category rest_api # and reboot - return web.json_response({"result": "{} and {} have been uploaded successfully" - .format(key_filename, cert_filename)}) + msg = "{} has been uploaded successfully".format(cert_filename) + if key_file: + msg = "{} and {} have been uploaded successfully".format(key_filename, cert_filename) + return web.json_response({"result": msg}) async def delete_certificate(request): @@ -189,12 +179,11 @@ async def delete_certificate(request): return web.json_response({'result': msg}) -def _get_certs_dir(): - if _FOGLAMP_DATA: - certs_dir = os.path.expanduser(_FOGLAMP_DATA + '/etc/certs') - else: - certs_dir = os.path.expanduser(_FOGLAMP_ROOT + '/data/etc/certs') - +def _get_certs_dir(_path): + dir_path = _FOGLAMP_DATA + _path if _FOGLAMP_DATA else _FOGLAMP_ROOT + '/data' + _path + if not os.path.exists(dir_path): + os.makedirs(dir_path) + certs_dir = os.path.expanduser(dir_path) return certs_dir From 071e73b1cd1314ae9f007f3cac9b2a65c105576c Mon Sep 17 00:00:00 2001 From: stefano Date: Thu, 9 May 2019 14:10:35 +0200 Subject: [PATCH 105/161] FOGL-2756: created get_platform --- scripts/common/get_platform.sh | 29 +++++++++++++++++++++++++++++ scripts/foglamp | 34 +++++++++++++++++++++------------- 2 files changed, 50 insertions(+), 13 deletions(-) create mode 100755 scripts/common/get_platform.sh diff --git a/scripts/common/get_platform.sh b/scripts/common/get_platform.sh new file mode 100755 
index 0000000000..446fd4ca7c --- /dev/null +++ b/scripts/common/get_platform.sh @@ -0,0 +1,29 @@ +#!/bin/bash + +##-------------------------------------------------------------------- +## Copyright (c) 2019 OSIsoft, LLC +## +## Licensed under the Apache License, Version 2.0 (the "License"); +## you may not use this file except in compliance with the License. +## You may obtain a copy of the License at +## +## http://www.apache.org/licenses/LICENSE-2.0 +## +## Unless required by applicable law or agreed to in writing, software +## distributed under the License is distributed on an "AS IS" BASIS, +## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +## See the License for the specific language governing permissions and +## limitations under the License. +##-------------------------------------------------------------------- + +__author__="Stefano Simonelli" +__version__="1.0" + +# Identifies the platform on which FogLAMP reside +# output : +# not empty - Centos or RedHat +# empty - Debian/Ubuntu +get_platform() { + + (lsb_release -ds 2>/dev/null || cat /etc/*release 2>/dev/null | head -n1 || uname -om) | egrep '(Red Hat|CentOS)' || echo "" +} diff --git a/scripts/foglamp b/scripts/foglamp index 1fd5160e8f..15f1fd12f2 100755 --- a/scripts/foglamp +++ b/scripts/foglamp @@ -19,19 +19,6 @@ set -e #set -x -# RedHat specific -platform=$([ `cat /etc/os-release | grep -c -i -E "centos|red hat"` -eq 0 ] && echo debian || echo redhat) -if [[ $platform == "redhat" ]] -then - - export PATH=/opt/rh/rh-python36/root/usr/bin${PATH:+:${PATH}} - export LD_LIBRARY_PATH=/opt/rh/rh-python36/root/usr/lib64${LD_LIBRARY_PATH:+:${LD_LIBRARY_PATH}} - export MANPATH=/opt/rh/rh-python36/root/usr/share/man:$MANPATH - export PKG_CONFIG_PATH=/opt/rh/rh-python36/root/usr/lib64/pkgconfig${PKG_CONFIG_PATH:+:${PKG_CONFIG_PATH}} - export XDG_DATA_DIRS="/opt/rh/rh-python36/root/usr/share:${XDG_DATA_DIRS:-/usr/local/share:/usr/share}" - -fi - # # This is the startup script for 
foglamp # @@ -47,6 +34,27 @@ if [ -z ${FOGLAMP_ROOT+x} ]; then export FOGLAMP_ROOT fi +# Include common code +source "${FOGLAMP_ROOT}/scripts/common/get_platform.sh" + +PLATFORM=`get_platform` + +# RedHat specific +if [[ "$PLATFORM" != "" ]] +then + # platform Redhat/Centos + + export PATH=/opt/rh/rh-python36/root/usr/bin${PATH:+:${PATH}} + export LD_LIBRARY_PATH=/opt/rh/rh-python36/root/usr/lib64${LD_LIBRARY_PATH:+:${LD_LIBRARY_PATH}} + export MANPATH=/opt/rh/rh-python36/root/usr/share/man:$MANPATH + export PKG_CONFIG_PATH=/opt/rh/rh-python36/root/usr/lib64/pkgconfig${PKG_CONFIG_PATH:+:${PKG_CONFIG_PATH}} + export XDG_DATA_DIRS="/opt/rh/rh-python36/root/usr/share:${XDG_DATA_DIRS:-/usr/local/share:/usr/share}" + +else + # platform Debian/Ubuntu + : +fi + # Check if the default directory exists if [[ ! -d "${FOGLAMP_ROOT}" ]]; then logger -p local0.err -t "foglamp.script.foglamp" "FogLAMP cannot be executed: ${FOGLAMP_ROOT} is not a valid directory." From 9b0aac771a644b19472ac604b201bd1c8507675f Mon Sep 17 00:00:00 2001 From: stefano Date: Thu, 9 May 2019 14:41:50 +0200 Subject: [PATCH 106/161] FOGL-2756: platform id changed in foglamp.service --- extras/scripts/foglamp.service | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/extras/scripts/foglamp.service b/extras/scripts/foglamp.service index f31ff4992d..c7a1d68943 100755 --- a/extras/scripts/foglamp.service +++ b/extras/scripts/foglamp.service @@ -1,14 +1,15 @@ #!/bin/sh -# Debian specific -platform=$([ `cat /etc/os-release | grep -c -i -E "centos|red hat"` -eq 0 ] && echo debian || echo redhat) -if [[ $platform == "debian" ]] -then +PLATFORM=`(lsb_release -ds 2>/dev/null || cat /etc/*release 2>/dev/null | head -n1 || uname -om) | egrep '(Red Hat|CentOS)' || echo ""` + +# RedHat specific +if [ "$PLATFORM" = "" ]; then # kFreeBSD do not accept scripts as interpreters, using #!/bin/sh and sourcing. if [ true != "$INIT_D_SCRIPT_SOURCED" ] ; then set "$0" "$@"; INIT_D_SCRIPT_SOURCED=true . 
/lib/init/init-d-script fi fi + ### BEGIN INIT INFO # Provides: foglamp # Required-Start: $local_fs $remote_fs $syslog $network $time From 2c5350a887d04e2113d3a94c77cc28f368764aab Mon Sep 17 00:00:00 2001 From: ashish-jabble Date: Thu, 9 May 2019 19:10:15 +0530 Subject: [PATCH 107/161] DELETE end point fixes; pending unit tests fixes and recursive stuff --- .../services/core/api/certificate_store.py | 62 ++++++++++++------- 1 file changed, 38 insertions(+), 24 deletions(-) diff --git a/python/foglamp/services/core/api/certificate_store.py b/python/foglamp/services/core/api/certificate_store.py index d58b6e3863..471f6f6a2f 100644 --- a/python/foglamp/services/core/api/certificate_store.py +++ b/python/foglamp/services/core/api/certificate_store.py @@ -38,6 +38,7 @@ async def get_certs(request): keys = [] key_valid_extensions = ('.key', '.pem') for root, dirs, files in os.walk(certs_dir): + # FIXME: recursive duplicate entries for both key and certs if root.endswith('json'): for f in files: if f.endswith('.json'): @@ -141,42 +142,55 @@ async def delete_certificate(request): """ Delete a certificate :Example: - curl -X DELETE http://localhost:8081/foglamp/certificate/foglamp + curl -X DELETE http://localhost:8081/foglamp/certificate/foglamp.pem + curl -X DELETE http://localhost:8081/foglamp/certificate/foglamp.cert?type=cert + curl -X DELETE http://localhost:8081/foglamp/certificate/foglamp.json?type=cert + curl -X DELETE http://localhost:8081/foglamp/certificate/foglamp.pem?type=cert + curl -X DELETE http://localhost:8081/foglamp/certificate/foglamp.key?type=key + curl -X DELETE http://localhost:8081/foglamp/certificate/foglamp.pem?type=key """ cert_name = request.match_info.get('name', None) + valid_extensions = ('.cert', '.json', '.key', '.pem') + if not cert_name.endswith(valid_extensions): + raise web.HTTPBadRequest(reason="Accepted file extensions are {}".format(valid_extensions)) + + certs_dir = _get_certs_dir('/etc/certs/') + is_found = False + dir_path = 
certs_dir + cert_name + if 'type' in request.query and request.query['type'] != '': + _type = request.query['type'] + if _type not in ['cert', 'key']: + raise web.HTTPBadRequest(reason="Only cert and key are allowed for the value of type param") + if os.path.isfile(certs_dir + cert_name): + is_found = True + if _type == 'cert': + if os.path.isfile(certs_dir + 'pem/' + cert_name): + is_found = True + dir_path = certs_dir + 'pem/' + cert_name + if os.path.isfile(certs_dir + 'json/' + cert_name): + is_found = True + dir_path = certs_dir + 'json/' + cert_name + + # FIXME: when type is not given and remove recursively + for root, dirs, files in os.walk(certs_dir): + for file in files: + if cert_name == file: + is_found = True - certs_dir = _get_certs_dir() - cert_file = certs_dir + '/{}.cert'.format(cert_name) - key_file = certs_dir + '/{}.key'.format(cert_name) - - if not os.path.isfile(cert_file) and not os.path.isfile(key_file): + if not is_found: raise web.HTTPNotFound(reason='Certificate with name {} does not exist'.format(cert_name)) # read config # if cert_name is currently set for 'certificateName' in config for 'rest_api' cf_mgr = ConfigurationManager(connect.get_storage_async()) result = await cf_mgr.get_category_item(category_name='rest_api', item_name='certificateName') - if cert_name == result['value']: + if cert_name.split('.')[0] == result['value']: raise web.HTTPConflict(reason='Certificate with name {} is already in use, you can not delete' .format(cert_name)) - msg = '' - cert_file_found_and_removed = False - if os.path.isfile(cert_file): - os.remove(cert_file) - msg = "{}.cert has been deleted successfully".format(cert_name) - cert_file_found_and_removed = True - - key_file_found_and_removed = False - if os.path.isfile(key_file): - os.remove(key_file) - msg = "{}.key has been deleted successfully".format(cert_name) - key_file_found_and_removed = True - - if key_file_found_and_removed and cert_file_found_and_removed: - msg = "{}.key, {}.cert have been 
deleted successfully".format(cert_name, cert_name) - - return web.json_response({'result': msg}) + # Remove file + os.remove(dir_path) + return web.json_response({'result': "{} has been deleted successfully".format(cert_name)}) def _get_certs_dir(_path): From 316ee915d8990e7999d3a235a8112e0ab6162df4 Mon Sep 17 00:00:00 2001 From: ashish-jabble Date: Fri, 10 May 2019 12:57:27 +0530 Subject: [PATCH 108/161] duplicate entries fixes for GET and POST and other code refactoring --- .../services/core/api/certificate_store.py | 61 ++++++++++++------- 1 file changed, 40 insertions(+), 21 deletions(-) diff --git a/python/foglamp/services/core/api/certificate_store.py b/python/foglamp/services/core/api/certificate_store.py index 471f6f6a2f..afd3d1596f 100644 --- a/python/foglamp/services/core/api/certificate_store.py +++ b/python/foglamp/services/core/api/certificate_store.py @@ -8,6 +8,7 @@ from aiohttp import web from foglamp.services.core import connect from foglamp.common.configuration_manager import ConfigurationManager +from foglamp.common.common import _FOGLAMP_ROOT, _FOGLAMP_DATA __author__ = "Ashish Jabble" __copyright__ = "Copyright (c) 2017 OSIsoft, LLC" @@ -15,10 +16,6 @@ __version__ = "${VERSION}" -_FOGLAMP_DATA = os.getenv("FOGLAMP_DATA", default=None) -_FOGLAMP_ROOT = os.getenv("FOGLAMP_ROOT", default='/usr/local/foglamp') - - _help = """ ------------------------------------------------------------------------------- | GET POST | /foglamp/certificate | @@ -38,15 +35,16 @@ async def get_certs(request): keys = [] key_valid_extensions = ('.key', '.pem') for root, dirs, files in os.walk(certs_dir): - # FIXME: recursive duplicate entries for both key and certs if root.endswith('json'): for f in files: if f.endswith('.json'): certs.append(f) + files.remove(f) if root.endswith('pem'): for f in files: if f.endswith('.pem'): certs.append(f) + files.remove(f) for f in files: if f.endswith('.cert'): certs.append(f) @@ -67,7 +65,6 @@ async def upload(request): curl -F 
"key=@filename.key" -F "cert=@filename.cert" -F "overwrite=1" http://localhost:8081/foglamp/certificate """ data = await request.post() - # contains the name of the file in string format key_file = data.get('key') cert_file = data.get('cert') @@ -156,26 +153,47 @@ async def delete_certificate(request): certs_dir = _get_certs_dir('/etc/certs/') is_found = False - dir_path = certs_dir + cert_name + dir_path = [certs_dir + cert_name] if 'type' in request.query and request.query['type'] != '': _type = request.query['type'] if _type not in ['cert', 'key']: raise web.HTTPBadRequest(reason="Only cert and key are allowed for the value of type param") - if os.path.isfile(certs_dir + cert_name): - is_found = True if _type == 'cert': - if os.path.isfile(certs_dir + 'pem/' + cert_name): - is_found = True - dir_path = certs_dir + 'pem/' + cert_name - if os.path.isfile(certs_dir + 'json/' + cert_name): - is_found = True - dir_path = certs_dir + 'json/' + cert_name - - # FIXME: when type is not given and remove recursively - for root, dirs, files in os.walk(certs_dir): - for file in files: - if cert_name == file: + if not cert_name.endswith('.cert'): + if os.path.isfile(certs_dir + 'pem/' + cert_name): + is_found = True + dir_path = [certs_dir + 'pem/' + cert_name] + if os.path.isfile(certs_dir + 'json/' + cert_name): + is_found = True + dir_path = [certs_dir + 'json/' + cert_name] + else: + if os.path.isfile(certs_dir + cert_name): + is_found = True + else: + is_found = False + else: + if os.path.isfile(certs_dir + cert_name): is_found = True + else: + is_found = False + + if 'type' not in request.query: + for root, dirs, files in os.walk(certs_dir): + if root.endswith('json'): + for f in files: + if cert_name == f: + is_found = True + dir_path.append(certs_dir + 'json/' + cert_name) + files.remove(f) + if root.endswith('pem'): + for f in files: + if cert_name == f: + is_found = True + dir_path.append(certs_dir + 'pem/' + cert_name) + files.remove(f) + for f in files: + if 
cert_name == f: + is_found = True if not is_found: raise web.HTTPNotFound(reason='Certificate with name {} does not exist'.format(cert_name)) @@ -189,7 +207,8 @@ async def delete_certificate(request): .format(cert_name)) # Remove file - os.remove(dir_path) + for fp in dir_path: + os.remove(fp) return web.json_response({'result': "{} has been deleted successfully".format(cert_name)}) From b8b3aa32ac35342eee21a145b8bf59538f2bd8ff Mon Sep 17 00:00:00 2001 From: stefano Date: Fri, 10 May 2019 11:03:35 +0200 Subject: [PATCH 109/161] FOGL-2756: improved foglamp start in cas of Centos/RH --- scripts/foglamp | 21 +++++++++++++-------- 1 file changed, 13 insertions(+), 8 deletions(-) diff --git a/scripts/foglamp b/scripts/foglamp index 15f1fd12f2..4a39be43cc 100755 --- a/scripts/foglamp +++ b/scripts/foglamp @@ -42,14 +42,19 @@ PLATFORM=`get_platform` # RedHat specific if [[ "$PLATFORM" != "" ]] then - # platform Redhat/Centos - - export PATH=/opt/rh/rh-python36/root/usr/bin${PATH:+:${PATH}} - export LD_LIBRARY_PATH=/opt/rh/rh-python36/root/usr/lib64${LD_LIBRARY_PATH:+:${LD_LIBRARY_PATH}} - export MANPATH=/opt/rh/rh-python36/root/usr/share/man:$MANPATH - export PKG_CONFIG_PATH=/opt/rh/rh-python36/root/usr/lib64/pkgconfig${PKG_CONFIG_PATH:+:${PKG_CONFIG_PATH}} - export XDG_DATA_DIRS="/opt/rh/rh-python36/root/usr/share:${XDG_DATA_DIRS:-/usr/local/share:/usr/share}" - + # To avoid to stop the execution for any internal error of scl_source + set +e + source scl_source enable rh-python36 + status=$? + + if [[ "$status" != "0" ]] + then + msg_text="ERROR: FogLAMP cannot enable the python environment in RedHat/Centos platform." 
+ logger -p local0.err -t msg_text + echo msg_text + exit 1 + fi + set -e else # platform Debian/Ubuntu : From b608ea0c291fe11a0f9d88e2f0dd4c2629067c3f Mon Sep 17 00:00:00 2001 From: stefano Date: Fri, 10 May 2019 11:05:47 +0200 Subject: [PATCH 110/161] FOGL-2756: comment improved --- scripts/foglamp | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/scripts/foglamp b/scripts/foglamp index 4a39be43cc..7117a5cdda 100755 --- a/scripts/foglamp +++ b/scripts/foglamp @@ -39,9 +39,10 @@ source "${FOGLAMP_ROOT}/scripts/common/get_platform.sh" PLATFORM=`get_platform` -# RedHat specific if [[ "$PLATFORM" != "" ]] then + # platform RedHat/Centos + # To avoid to stop the execution for any internal error of scl_source set +e source scl_source enable rh-python36 From 464c597a0119a31436bbb994bc73e2fbc0456572 Mon Sep 17 00:00:00 2001 From: pintomax Date: Fri, 10 May 2019 11:37:17 +0200 Subject: [PATCH 111/161] =?UTF-8?q?FOGL-2751:=20use=20compiled=20sqlite=20?= =?UTF-8?q?static=20library=20in=20/tmp/foglamp-sqlite3=E2=80=A6=20(#1549)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit FOGL-2751: use compiled sqlite static library if found in /tmp/foglamp-sqlite3-pkg cd /tmp/ git clone https://github.com/foglamp/foglamp-sqlite3-pkg.git cd foglamp-sqlite3-pkg/ ./configure --enable-shared=false --enable-static=true --enable-static-shell CFLAGS="-DSQLITE_ENABLE_JSON1 -DSQLITE_ENABLE_LOAD_EXTENSION -DSQLITE_ENABLE_COLUMN_METADATA -fno-common -fPIC" autoreconf -f -i make cd to foglamp dir make NOTE: requirements.sh changes are for RHEL7 only Other JIRA tasks will address requirements.sh changes for CentOS 7 --- C/plugins/storage/sqlite/CMakeLists.txt | 20 +++--- C/plugins/storage/sqlite/Findsqlite3.cmake | 10 ++- C/plugins/storage/sqlite/common/is_RHEL.sh | 4 -- C/plugins/storage/sqlitememory/CMakeLists.txt | 13 +++- .../storage/sqlitememory/Findsqlite3.cmake | 10 +++ Makefile | 14 ++++- requirements.sh | 63 ++++++++++++++++--- 
scripts/plugins/storage/sqlite.sh | 19 +++--- 8 files changed, 118 insertions(+), 35 deletions(-) delete mode 100755 C/plugins/storage/sqlite/common/is_RHEL.sh diff --git a/C/plugins/storage/sqlite/CMakeLists.txt b/C/plugins/storage/sqlite/CMakeLists.txt index b44437af56..789dfc4e6d 100644 --- a/C/plugins/storage/sqlite/CMakeLists.txt +++ b/C/plugins/storage/sqlite/CMakeLists.txt @@ -6,6 +6,9 @@ set(CMAKE_CXX_FLAGS_DEBUG "-O0 -ggdb") set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11") set(STORAGE_COMMON_LIB storage-common-lib) +# Path of compiled libsqlite3.a and .h files: /tmp/foglamp-sqlite3-pkg/src +set(FOGLAMP_SQLITE3_LIBS "/tmp/foglamp-sqlite3-pkg/src" CACHE INTERNAL "") + # Find source files file(GLOB SOURCES ./common/*.cpp *.cpp) @@ -27,14 +30,17 @@ set_target_properties(${PROJECT_NAME} PROPERTIES SOVERSION 1) set(CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH} "${CMAKE_CURRENT_SOURCE_DIR}") find_package(sqlite3) -# Link with SQLite3 library -execute_process(COMMAND bash "-c" "${CMAKE_CURRENT_SOURCE_DIR}/common/is_RHEL.sh" RESULT_VARIABLE ret) -if(ret EQUAL "0") - message(" Red Hat or CentOS system ") - target_link_libraries(${PROJECT_NAME} /usr/local/lib/libsqlite3.so) +# Use static SQLite3 library +if(EXISTS ${FOGLAMP_SQLITE3_LIBS}) + include_directories(${FOGLAMP_SQLITE3_LIBS}) + target_link_libraries(${PROJECT_NAME} -L"${FOGLAMP_SQLITE3_LIBS}/.libs" -lsqlite3) else() - message(" Not a Red Hat or CentOS system ") - target_link_libraries(${PROJECT_NAME} -lsqlite3) + target_link_libraries(${PROJECT_NAME} -lsqlite3) +endif() + +# Install SQLite3 command line with static library +if(EXISTS ${FOGLAMP_SQLITE3_LIBS}) + install(PROGRAMS ${FOGLAMP_SQLITE3_LIBS}/sqlite3 DESTINATION "foglamp/plugins/storage/${PROJECT_NAME}") endif() # Install library diff --git a/C/plugins/storage/sqlite/Findsqlite3.cmake b/C/plugins/storage/sqlite/Findsqlite3.cmake index c20a28888a..4a7d06b656 100644 --- a/C/plugins/storage/sqlite/Findsqlite3.cmake +++ 
b/C/plugins/storage/sqlite/Findsqlite3.cmake @@ -7,8 +7,14 @@ # SQLITE_VERSION - Library version set(SQLITE_MIN_VERSION "3.11.0") -find_path(SQLITE_INCLUDE_DIR sqlite3.h) -find_library(SQLITE_LIBRARIES NAMES libsqlite3.so) +# Check wether path of compiled libsqlite3.a and .h files exists +if (EXISTS ${FOGLAMP_SQLITE3_LIBS}) + find_path(SQLITE_INCLUDE_DIR sqlite3.h PATHS ${FOGLAMP_SQLITE3_LIBS}) + find_library(SQLITE_LIBRARIES NAMES libsqlite3.a PATHS "${FOGLAMP_SQLITE3_LIBS}/.libs") +else() + find_path(SQLITE_INCLUDE_DIR sqlite3.h) + find_library(SQLITE_LIBRARIES NAMES libsqlite3.so) +endif() if (SQLITE_INCLUDE_DIR AND SQLITE_LIBRARIES) execute_process(COMMAND grep ".*#define.*SQLITE_VERSION " ${SQLITE_INCLUDE_DIR}/sqlite3.h diff --git a/C/plugins/storage/sqlite/common/is_RHEL.sh b/C/plugins/storage/sqlite/common/is_RHEL.sh deleted file mode 100755 index fbb8c48e99..0000000000 --- a/C/plugins/storage/sqlite/common/is_RHEL.sh +++ /dev/null @@ -1,4 +0,0 @@ -#!/bin/sh - -op=$(lsb_release -ds 2>/dev/null || cat /etc/*release 2>/dev/null | head -n1 || uname -om) -echo $op | egrep -q '(Red Hat|CentOS)' diff --git a/C/plugins/storage/sqlitememory/CMakeLists.txt b/C/plugins/storage/sqlitememory/CMakeLists.txt index 7f476d32c4..67f506360d 100644 --- a/C/plugins/storage/sqlitememory/CMakeLists.txt +++ b/C/plugins/storage/sqlitememory/CMakeLists.txt @@ -6,6 +6,9 @@ set(CMAKE_CXX_FLAGS_DEBUG "-O0 -ggdb") set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11") set(STORAGE_COMMON_LIB storage-common-lib) +# Path of compiled libsqlite3.a and .h files: /tmp/foglamp-sqlite3-pkg/src +set(FOGLAMP_SQLITE3_LIBS "/tmp/foglamp-sqlite3-pkg/src" CACHE INTERNAL "") + # Find source files # Add sqlite plugin common files file(GLOB COMMON_SOURCES ../sqlite/common/*.cpp) @@ -35,8 +38,14 @@ find_package(sqlite3) add_definitions(-DSQLITE_SPLIT_READINGS=1) add_definitions(-DPLUGIN_LOG_NAME="SQLite 3 in_memory") -# Link with SQLite3 library -target_link_libraries(${PROJECT_NAME} -lsqlite3) +# Use 
static SQLite3 library +if(EXISTS ${FOGLAMP_SQLITE3_LIBS}) + include_directories(${FOGLAMP_SQLITE3_LIBS}) + target_link_libraries(${PROJECT_NAME} -L"${FOGLAMP_SQLITE3_LIBS}/.libs" -lsqlite3) +else() + # Link with SQLite3 library + target_link_libraries(${PROJECT_NAME} -lsqlite3) +endif() # Install library install(TARGETS ${PROJECT_NAME} DESTINATION foglamp/plugins/storage/${PROJECT_NAME}) diff --git a/C/plugins/storage/sqlitememory/Findsqlite3.cmake b/C/plugins/storage/sqlitememory/Findsqlite3.cmake index c20a28888a..907ef6d375 100644 --- a/C/plugins/storage/sqlitememory/Findsqlite3.cmake +++ b/C/plugins/storage/sqlitememory/Findsqlite3.cmake @@ -10,6 +10,16 @@ set(SQLITE_MIN_VERSION "3.11.0") find_path(SQLITE_INCLUDE_DIR sqlite3.h) find_library(SQLITE_LIBRARIES NAMES libsqlite3.so) +# Check wether path of compiled libsqlite3.a and .h files exists +if (EXISTS ${FOGLAMP_SQLITE3_LIBS}) + find_path(SQLITE_INCLUDE_DIR sqlite3.h PATHS ${FOGLAMP_SQLITE3_LIBS}) + find_library(SQLITE_LIBRARIES NAMES libsqlite3.a PATHS "${FOGLAMP_SQLITE3_LIBS}/.libs") +else() + # Use system defaults + find_path(SQLITE_INCLUDE_DIR sqlite3.h) + find_library(SQLITE_LIBRARIES NAMES libsqlite3.so) +endif() + if (SQLITE_INCLUDE_DIR AND SQLITE_LIBRARIES) execute_process(COMMAND grep ".*#define.*SQLITE_VERSION " ${SQLITE_INCLUDE_DIR}/sqlite3.h COMMAND sed "s/.*\"\\(.*\\)\".*/\\1/" diff --git a/Makefile b/Makefile index 7d0aec6f86..a70855a698 100644 --- a/Makefile +++ b/Makefile @@ -114,14 +114,17 @@ CERTIFICATES_SCRIPT_SRC := scripts/certificates AUTH_CERTIFICATES_SCRIPT_SRC := scripts/auth_certificates PACKAGE_UPDATE_SCRIPT_SRC := scripts/package +# Custom location of SQLite3 library +FOGLAMP_HAS_SQLITE3_PATH := /tmp/foglamp-sqlite3-pkg/src + # EXTRA SCRIPTS EXTRAS_SCRIPTS_SRC_DIR := extras/scripts # FOGBENCH -FOGBENCH_PYTHON_SRC_DIR := extras/python/fogbench +FOGBENCH_PYTHON_SRC_DIR := extras/python/fogbench # FogLAMP Version file -FOGLAMP_VERSION_FILE := VERSION +FOGLAMP_VERSION_FILE := VERSION 
############################################################################### ################################### OTHER VARS ################################ @@ -186,6 +189,7 @@ schema_check : apply_version $(if $(SCHEMA_CHANGE_ERROR),$(error FogLAMP DB schema cannot be performed as pre-install task: $(SCHEMA_CHANGE_ERROR)),) $(if $(SCHEMA_CHANGE_WARNING),$(warning $(SCHEMA_CHANGE_WARNING)),$(info -- FogLAMP DB schema check OK: $(SCHEMA_CHANGE_OUTPUT))) +# # install # Creates a deployment structure in the default destination, /usr/local/foglamp # Destination may be overridden by use of the DESTDIR= directive @@ -217,6 +221,12 @@ generate_selfcertificate: # run make execute makefiles producer by cmake c_build : $(CMAKE_GEN_MAKEFILE) $(CD) $(CMAKE_BUILD_DIR) ; $(MAKE) +# Local copy of sqlite3 command line tool if needed +# Copy the cmd line tool into sqlite plugin dir +ifneq ("$(wildcard $(FOGLAMP_HAS_SQLITE3_PATH))","") + $(info SQLite3 package has been found in $(FOGLAMP_HAS_SQLITE3_PATH)) + $(CP) $(FOGLAMP_HAS_SQLITE3_PATH)/sqlite3 $(CMAKE_PLUGINS_DIR)/storage/sqlite/ +endif # run cmake to generate makefiles # always rerun cmake because: diff --git a/requirements.sh b/requirements.sh index 765c46d86b..23c1dbf465 100755 --- a/requirements.sh +++ b/requirements.sh @@ -17,19 +17,62 @@ ##-------------------------------------------------------------------- ## -## Author: Ashish Jabble +## Author: Ashish Jabble, Massimiliano Pinto ## set -e -sudo apt update -sudo apt -y upgrade +foglamp_location=`pwd` +is_rhel=`(lsb_release -ds 2>/dev/null || cat /etc/*release 2>/dev/null | head -n1 || uname -om) | egrep '(Red Hat|CentOS)' || echo ""` +if [ "${is_rhel}" != "" ]; then + echo "Platform is ${is_rhel}" + sudo yum check-update + sudo yum update -y + sudo yum-config-manager --enable 'Red Hat Enterprise Linux Server 7 RHSCL (RPMs)' + sudo yum install -y @development + sudo yum install -y boost-devel + sudo yum install -y glib2-devel + sudo yum install -y rh-python36 + sudo 
yum install -y rsyslog + sudo yum install -y openssl-devel + sudo yum install -y postgresql-devel + sudo yum install -y wget + sudo yum install -y zlib-devel + sudo yum install -y git + sudo yum install -y cmake + sudo yum install -y libuuid-devel + sudo yum install -y dbus-devel -sudo apt install -y avahi-daemon curl -sudo apt install -y cmake g++ make build-essential autoconf automake uuid-dev -sudo apt install -y libtool libboost-dev libboost-system-dev libboost-thread-dev libpq-dev libssl-dev libz-dev -sudo apt install -y python-dbus python-dev python3-dev python3-pip -sudo apt install -y sqlite3 libsqlite3-dev -sudo apt install -y pkg-config -# sudo apt install -y postgresql + sudo su - < Date: Fri, 10 May 2019 12:08:02 +0200 Subject: [PATCH 112/161] FOGL-2756: improved get platform --- extras/scripts/foglamp.service | 8 +++++--- scripts/common/get_platform.sh | 3 ++- scripts/foglamp | 3 ++- 3 files changed, 9 insertions(+), 5 deletions(-) diff --git a/extras/scripts/foglamp.service b/extras/scripts/foglamp.service index c7a1d68943..7a8c0d17c2 100755 --- a/extras/scripts/foglamp.service +++ b/extras/scripts/foglamp.service @@ -1,9 +1,11 @@ #!/bin/sh -PLATFORM=`(lsb_release -ds 2>/dev/null || cat /etc/*release 2>/dev/null | head -n1 || uname -om) | egrep '(Red Hat|CentOS)' || echo ""` +PLATFORM=`(lsb_release -ds 2>/dev/null || cat /etc/*release 2>/dev/null | head -n1 || uname -om)` +IS_RHEL=`echo $PLATFORM | egrep '(Red Hat|CentOS)' || echo ""` + +if [ "IS_RHEL" = "" ]; then + # Ubuntu/Debian specific -# RedHat specific -if [ "$PLATFORM" = "" ]; then # kFreeBSD do not accept scripts as interpreters, using #!/bin/sh and sourcing. if [ true != "$INIT_D_SCRIPT_SOURCED" ] ; then set "$0" "$@"; INIT_D_SCRIPT_SOURCED=true . 
/lib/init/init-d-script diff --git a/scripts/common/get_platform.sh b/scripts/common/get_platform.sh index 446fd4ca7c..e5102954f0 100755 --- a/scripts/common/get_platform.sh +++ b/scripts/common/get_platform.sh @@ -25,5 +25,6 @@ __version__="1.0" # empty - Debian/Ubuntu get_platform() { - (lsb_release -ds 2>/dev/null || cat /etc/*release 2>/dev/null | head -n1 || uname -om) | egrep '(Red Hat|CentOS)' || echo "" + (lsb_release -ds 2>/dev/null || cat /etc/*release 2>/dev/null | head -n1 || uname -om) + } diff --git a/scripts/foglamp b/scripts/foglamp index 7117a5cdda..06ef47a010 100755 --- a/scripts/foglamp +++ b/scripts/foglamp @@ -38,8 +38,9 @@ fi source "${FOGLAMP_ROOT}/scripts/common/get_platform.sh" PLATFORM=`get_platform` +IS_RHEL=`echo $PLATFORM | egrep '(Red Hat|CentOS)' || echo ""` -if [[ "$PLATFORM" != "" ]] +if [[ "$IS_RHEL" != "" ]] then # platform RedHat/Centos From 316e5222a93b05e42c12f607ceb86b08fc7170bd Mon Sep 17 00:00:00 2001 From: stefano Date: Fri, 10 May 2019 12:09:11 +0200 Subject: [PATCH 113/161] FOGL-2756: improved get platform --- extras/scripts/foglamp.service | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/extras/scripts/foglamp.service b/extras/scripts/foglamp.service index 7a8c0d17c2..6e7bf4d0b2 100755 --- a/extras/scripts/foglamp.service +++ b/extras/scripts/foglamp.service @@ -3,7 +3,7 @@ PLATFORM=`(lsb_release -ds 2>/dev/null || cat /etc/*release 2>/dev/null | head -n1 || uname -om)` IS_RHEL=`echo $PLATFORM | egrep '(Red Hat|CentOS)' || echo ""` -if [ "IS_RHEL" = "" ]; then +if [ "$IS_RHEL" = "" ]; then # Ubuntu/Debian specific # kFreeBSD do not accept scripts as interpreters, using #!/bin/sh and sourcing. 
From de03b461b3222c2d723ac2b5445fc6313ed0f5da Mon Sep 17 00:00:00 2001 From: stefano Date: Fri, 10 May 2019 12:24:43 +0200 Subject: [PATCH 114/161] FOGL-2756: reverted change --- tests/unit/C/README.rst | 3 +++ 1 file changed, 3 insertions(+) diff --git a/tests/unit/C/README.rst b/tests/unit/C/README.rst index e5d7f05b1a..1c0d759939 100644 --- a/tests/unit/C/README.rst +++ b/tests/unit/C/README.rst @@ -9,6 +9,9 @@ Prequisite These tests are written using the Google Test framework. This should be installed on your machine +Ubuntu: +------- + - sudo apt-get install libgtest-dev Unfortunately this does not install the libraries and a manual build set is required From 8f966a952113abdaf50d11f65a1d70a260ecbf46 Mon Sep 17 00:00:00 2001 From: Massimiliano Pinto Date: Fri, 10 May 2019 12:40:28 +0200 Subject: [PATCH 115/161] Detect Centos/RHEL and install the right package Detect Centos/RHEL and install the right package --- requirements.sh | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/requirements.sh b/requirements.sh index 23c1dbf465..a7d142228c 100755 --- a/requirements.sh +++ b/requirements.sh @@ -24,12 +24,19 @@ set -e foglamp_location=`pwd` -is_rhel=`(lsb_release -ds 2>/dev/null || cat /etc/*release 2>/dev/null | head -n1 || uname -om) | egrep '(Red Hat|CentOS)' || echo ""` +is_rhel=`(lsb_release -ds 2>/dev/null || cat /etc/*release 2>/dev/null | head -n1 || uname -om) | egrep -o '(Red Hat|CentOS)' || echo ""` if [ "${is_rhel}" != "" ]; then echo "Platform is ${is_rhel}" + centos=`echo $is_rhel | egrep 'CentOS' || echo ""` sudo yum check-update sudo yum update -y - sudo yum-config-manager --enable 'Red Hat Enterprise Linux Server 7 RHSCL (RPMs)' + if [ "${centos}" = "" ]; then + # RHEL 7 specific + sudo yum-config-manager --enable 'Red Hat Enterprise Linux Server 7 RHSCL (RPMs)' + else + # CentOS 7 specific + sudo yum install -y centos-release-scl-rh + fi sudo yum install -y @development sudo yum install -y boost-devel sudo yum install 
-y glib2-devel From fd90147a4dcb74687160b29f12c7365121f0d237 Mon Sep 17 00:00:00 2001 From: ashish-jabble Date: Fri, 10 May 2019 16:10:36 +0530 Subject: [PATCH 116/161] unit test fixes and other feedback fixes --- python/foglamp/common/web/ssl_wrapper.py | 10 -- .../services/core/api/certificate_store.py | 21 ++- .../services/core/api/certs/foglamp.pem | 30 ++++ .../services/core/api/certs/json/test.json | 7 + .../services/core/api/certs/pem/foglamp.pem | 30 ++++ .../core/api/test_certificate_store.py | 145 ++++++++++-------- 6 files changed, 160 insertions(+), 83 deletions(-) create mode 100644 tests/unit/python/foglamp/services/core/api/certs/foglamp.pem create mode 100644 tests/unit/python/foglamp/services/core/api/certs/json/test.json create mode 100644 tests/unit/python/foglamp/services/core/api/certs/pem/foglamp.pem diff --git a/python/foglamp/common/web/ssl_wrapper.py b/python/foglamp/common/web/ssl_wrapper.py index 19d067c09e..1c6bd31ace 100644 --- a/python/foglamp/common/web/ssl_wrapper.py +++ b/python/foglamp/common/web/ssl_wrapper.py @@ -210,13 +210,3 @@ def set_ca_cert(cls, cert): @classmethod def set_user_cert(cls, cert): cls.user_cert = cert - - @classmethod - def verify_pem(cls, cert_path): - # FIXME: standard validation - with open(cert_path, 'r') as content_file: - content = content_file.read() - if "-----BEGIN CERTIFICATE-----" and "-----END CERTIFICATE-----" \ - "-----BEGIN RSA PRIVATE KEY-----" and "-----END RSA PRIVATE KEY-----" in content: - return True - return False diff --git a/python/foglamp/services/core/api/certificate_store.py b/python/foglamp/services/core/api/certificate_store.py index afd3d1596f..c8f60e24ae 100644 --- a/python/foglamp/services/core/api/certificate_store.py +++ b/python/foglamp/services/core/api/certificate_store.py @@ -95,7 +95,7 @@ async def upload(request): if not cert_filename.endswith(cert_valid_extensions): raise web.HTTPBadRequest(reason="Accepted file extensions are .cert, .json and .pem for cert file") - 
certs_dir = '' + certs_dir = _get_certs_dir('/etc/certs/') if cert_filename.endswith('.pem'): certs_dir = _get_certs_dir('/etc/certs/pem') if cert_filename.endswith('.json'): @@ -106,13 +106,12 @@ async def upload(request): if is_found and should_overwrite is False: raise web.HTTPBadRequest(reason="Certificate with the same name already exists. " "To overwrite set the overwrite to 1") - - keys_dir = _get_certs_dir('/etc/certs') - found_files = _find_file(key_filename, keys_dir) - is_found = True if len(found_files) else False - if is_found and should_overwrite is False: - raise web.HTTPBadRequest(reason="Key cert with the same name already exists. " - "To overwrite set the overwrite to 1") + if key_file: + found_files = _find_file(key_filename, certs_dir) + is_found = True if len(found_files) else False + if is_found and should_overwrite is False: + raise web.HTTPBadRequest(reason="Key cert with the same name already exists. " + "To overwrite set the overwrite to 1") if cert_file: cert_file_data = data['cert'].file cert_file_content = cert_file_data.read() @@ -122,7 +121,7 @@ async def upload(request): if key_file: key_file_data = data['key'].file key_file_content = key_file_data.read() - key_file_path = str(keys_dir) + '/{}'.format(key_filename) + key_file_path = str(certs_dir) + '/{}'.format(key_filename) with open(key_file_path, 'wb') as f: f.write(key_file_content) @@ -140,10 +139,10 @@ async def delete_certificate(request): :Example: curl -X DELETE http://localhost:8081/foglamp/certificate/foglamp.pem - curl -X DELETE http://localhost:8081/foglamp/certificate/foglamp.cert?type=cert + curl -X DELETE http://localhost:8081/foglamp/certificate/foglamp.cert curl -X DELETE http://localhost:8081/foglamp/certificate/foglamp.json?type=cert curl -X DELETE http://localhost:8081/foglamp/certificate/foglamp.pem?type=cert - curl -X DELETE http://localhost:8081/foglamp/certificate/foglamp.key?type=key + curl -X DELETE http://localhost:8081/foglamp/certificate/foglamp.key 
curl -X DELETE http://localhost:8081/foglamp/certificate/foglamp.pem?type=key """ cert_name = request.match_info.get('name', None) diff --git a/tests/unit/python/foglamp/services/core/api/certs/foglamp.pem b/tests/unit/python/foglamp/services/core/api/certs/foglamp.pem new file mode 100644 index 0000000000..d600b3048b --- /dev/null +++ b/tests/unit/python/foglamp/services/core/api/certs/foglamp.pem @@ -0,0 +1,30 @@ +-----BEGIN CERTIFICATE----- +MIICVTCCAb4CCQCcGy+708GE7TANBgkqhkiG9w0BAQsFADBvMQswCQYDVQQGEwJV +UzETMBEGA1UECAwKQ2FsaWZvcm5pYTEQMA4GA1UECgwHT1NJc29mdDEQMA4GA1UE +AwwHZm9nbGFtcDEnMCUGCSqGSIb3DQEJARYYZm9nbGFtcEBnb29nbGVncm91cHMu +Y29tMB4XDTE4MDIwMzEwNTk1MVoXDTE5MDIwMzEwNTk1MVowbzELMAkGA1UEBhMC +VVMxEzARBgNVBAgMCkNhbGlmb3JuaWExEDAOBgNVBAoMB09TSXNvZnQxEDAOBgNV +BAMMB2ZvZ2xhbXAxJzAlBgkqhkiG9w0BCQEWGGZvZ2xhbXBAZ29vZ2xlZ3JvdXBz +LmNvbTCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEAmfRMO86UFZeKX4QiTCNZ ++4Jgrgi8i/DLTckHeihj9lqeom+DlGuVK5k9jY34Ji679E5QEgTazI/quaP4ihfZ +8ZeovJM6oH30aS/kMTCHoFgsCh9qcB5Qx2/nHId+Ficq3V3DlL21BnTjNGnTOKA9 +xH0NodhA9gbL38nAb8vN7bsCAwEAATANBgkqhkiG9w0BAQsFAAOBgQCARZlcWN9W +Fn8f1dExO1J5M3ybEsnUaXmGCFfy+fynkfQMN6Yxc8Lz1W45zUp0xg+qt59pyKGQ +L7qoznpyziMKnIsXoE0HhKzyQVr56wx7j9CS8I1FJd5jTzwrUN1ecgElnz3MLgWY +ikoHYHq4kqdBUo5lI1vLGeh7iyWVv6xo7Q== +-----END CERTIFICATE----- +-----BEGIN RSA PRIVATE KEY----- +MIICXQIBAAKBgQCZ9Ew7zpQVl4pfhCJMI1n7gmCuCLyL8MtNyQd6KGP2Wp6ib4OU +a5UrmT2NjfgmLrv0TlASBNrMj+q5o/iKF9nxl6i8kzqgffRpL+QxMIegWCwKH2pw +HlDHb+cch34WJyrdXcOUvbUGdOM0adM4oD3EfQ2h2ED2BsvfycBvy83tuwIDAQAB +AoGAQ8bdWOcVInxYmrxdLQ/TKXq4fWWzPBiCKHh8Jjdp6qbyd6iIUOsb/Nm103/Q +415nuKJRdd1VRgGF4LxjSsg9VgQObA2aij1Mdb2TzfJbxWiUWHxWI5wDj6orZ+2Q +1cXDns83rUq2HsYImb2o8Yw7pnLJ/j/LMZ+SO8PYPwCkKqECQQDKHLzeHSwJbVtu +Y/3xyruuL356ZP/6I712fnNkVJvACYlcXTMm6uBurbXAyYvZ8dqzmKHG19wbJEQA +MCAoaR2nAkEAwwCEMmmV47vfomWDyqFIbWwoHtcWL9MSKE0i3U7bRYHlNh5CDSiv +RLcKZ9oEUlXTuZ+5CKGgaJ8YsuuIzqE5zQJBAMK8R7nW77wxktvdGivjQ0wEVN/G ++uBfmUhXdAUihGLWjXPiFMwVmKLrWmjjiI04u5k5of49s8Su/5oSxJlLFpcCQF+U 
+kjePj1uny87yyS157mFMAUJUlNhuMdk1LeCh9VJVf/8Nx7xhc79PpLcQosukq/mx +M2XqqxB84wNX9Z+qYQUCQQC8NmVM5T7zvyzSJ54lTy03MDcSnyKXHTPy/8cdirf+ +DUMPr9tcNWChAGAkfONOiGKVt/c2frYrI1N4v6zAaPcE +-----END RSA PRIVATE KEY----- diff --git a/tests/unit/python/foglamp/services/core/api/certs/json/test.json b/tests/unit/python/foglamp/services/core/api/certs/json/test.json new file mode 100644 index 0000000000..8fdb2649cc --- /dev/null +++ b/tests/unit/python/foglamp/services/core/api/certs/json/test.json @@ -0,0 +1,7 @@ +{ + "certificate": "-----BEGIN CERTIFICATE-----\nMIIETDCCAjSgAwIBAgIUTrvpjzgpyt6L9AWj2E0W4ps2woswDQYJKoZIhvcNAQEL\nBQAwKzEpMCcGA1UEAwwgc2VsZnNlcnZlX3Byb2R1Y3Rpb25fcGtpIFJvb3QgQ0Ew\nHhcNMTcwOTI2MDQ1NzM3WhcNMTcxMTIxMDQ1ODA3WjAPMQ0wCwYDVQQDEwR0ZXN0\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAtuWgQ5P9KjpgplOyejAE\nj5pDgSmQ6mZkbqY6gnIIKlw1I4Vulaigmeiir37NcAHtLA9HrpqafKoQqt3RPIFq\nMq2qb728JUNqdkmgp1QRnXdRVqrvGxT3o6XLMmxpkniwL+f3A/qFzuBgDJVltKLn\n1e0O3conPiiGtqaZ70+1lccKkKviLoin13T+27gFFws6dT74znCxT8c/ikXGMja1\nTDEddd+qkXlo4At104Fo7Uhx95JWorSljSTaCQkEeOjX+8SJHkARSrKeGEvkBESp\nXD23oUY9MlxGQnldioLAI5Eu8fRo3PKQUhuFnuoxTr0pO7R60AEe8E0sVU/cE3Ut\nswIDAQABo4GDMIGAMA4GA1UdDwEB/wQEAwIDqDAdBgNVHSUEFjAUBggrBgEFBQcD\nAQYIKwYBBQUHAwIwHQYDVR0OBBYEFLfcxphP+aSe61Mdi8IDP7bBvGXdMB8GA1Ud\nIwQYMBaAFA2X7xP+NiXXNXhJy3UQqocENxRTMA8GA1UdEQQIMAaCBHRlc3QwDQYJ\nKoZIhvcNAQELBQADggIBAH3oLFPSSgubbwhXycm+oTMnEZyUwKfwAjkc2mykDZ/p\nPPrHZKCfMuWNf8mp7mK0K8O2JjBKbUlUUJZgd/8/9d0vLqU7Hf97Xk/8d0Rxwqgd\n2OmdujQpj49NFoAC+jAcGFXASwvGAzWg4ylTi+zvpUbVpLk0hOpYnJFvxEcXj0ab\nul9Mq0hrjarmkPAoDhmWjUQG8EKiJEelIv5r4OuNIDl+N5B3BNU+g8nz4GWJKIbP\n6dEb98GJh0tFqOHoxewVmrCmMnsGfJYJDqLg+CwXHSNS8xYQnuFzcJXQ4j7Kge5P\nCeMB6fizgTiUXFexjbTv6RUk1DfOywtRu7Wus9joTpDILb/WlIUlGvRj2j395BvK\naq5nLcgSpmO46776uobh6MN6se1kmpJ20sjUZWEtJsKODSAv7LA9jsMWhh1SGEWf\nUuQ1hUKHZ2073hgc0InmYGGyTJAnI3mYIbL+ddprK1CpORAH2cruqn9I192sCWNw\npZIxuMCiRUrFWitKEkFwPfmDbVhPQ/ZvxMcdAHXJ+ZQ9RxcanmcBGnlvCjidOBZa\naLN2/Y99M26z+XcYG9rN0fx5Htf4UDENQ8kp8TITmyHdwvqVox/UXcPWzV3
MD7+I\nn0UdA2lqnM2Rv+kg2MGm0u9Y/noZz4IS4YTlfxMbGF212ROcCC9/oQYy321NqBns\n-----END CERTIFICATE-----", + "private_key": "-----BEGIN RSA PRIVATE KEY-----\nMIIEpQIBAAKCAQEAtuWgQ5P9KjpgplOyejAEj5pDgSmQ6mZkbqY6gnIIKlw1I4Vu\nlaigmeiir37NcAHtLA9HrpqafKoQqt3RPIFqMq2qb728JUNqdkmgp1QRnXdRVqrv\nGxT3o6XLMmxpkniwL+f3A/qFzuBgDJVltKLn1e0O3conPiiGtqaZ70+1lccKkKvi\nLoin13T+27gFFws6dT74znCxT8c/ikXGMja1TDEddd+qkXlo4At104Fo7Uhx95JW\norSljSTaCQkEeOjX+8SJHkARSrKeGEvkBESpXD23oUY9MlxGQnldioLAI5Eu8fRo\n3PKQUhuFnuoxTr0pO7R60AEe8E0sVU/cE3UtswIDAQABAoIBAB3kQ6An1K2NIvSs\nIzRTGru5k6TNfVDB8VIgOtnM90atEUY/7YXqLG1bFxOlnr/aoL+ds7J2tB8B0H2M\niUDhSdEEjyF6GgDhFspEWExgsgxRTuriPvfnIl4Nn7sa+tokfW8m8zkkPbBE/Y2w\n8RFnuoo9FzvqaSWAjBvX+LqjBWN4AGHxPcBcZs/H4U7RvdO0etX2Zbpjs62K/KO3\ni3e4MXgGZtj0Vx2LYD/AYSbqEoo1v8/U1AbGmsCTTNc2EwARhyb1zUgO7yc9yft6\nUoAC6pZjxOFsJtwz26jpNdqXz9t1xml3XnNusqHe+hgStQlIL2mgU8qj18q5pqpu\nkehM9LECgYEAxiU9WA7kQTp8hGKTRqrRbcGBsLTGxsYeILFQggtJBOZ5ngOH35Nd\nUIzQ1EjKODFEzGH9qPBBfE6BNdl3naHuYgIS3Uz8FCAwsOZAW6X8tC7VU/ZrwKUA\nF3Rc2iek+J1bdaz5o3hnR2eY/6kVuNHznxqIzK+JuZ7Dq/wEMlAL4gkCgYEA7Eyb\n4uyQFMXfPLiZPn7opNlgmi4i5lNLbPAjJq0dagdP8HbhLBqQThMcyAnu9rJmNm6t\n2Wu8kkKIpcZiGOVzFQvoTWOm6KGU/nIFFH1p6AAz/hvhATFA8HpLe9B7la9T6c5R\nabbtFbUNrHyoieMsIxkrjPo1zVIThLJeIVdoUNsCgYEAwuhKyV4MpSU06rxUhsTs\nsXwRaJLKnSiw5hPFT8ZuE0XrB8YNV52LwvphSRA46sF8HVeevxlmMTK/4wqBoSty\nZDIKAGoD5IAtpTU4xW4nf845xhe1spAb4PZzh5xLqMqQ9tYp0eVUImcDlyjp1x2e\n+TiOrFlXrqE/dOO39Q3MQpECgYEA5plMd4OMh/kiBcvQIOEQf+9zCoODo2od7U3b\nv96pGdPQ+0XIMJYrxUV5jO3EuhMXFH+mQMuW1tT/LWgQS2N/j0ZziTJ6rAMjt7vl\noT1SoQmxs4XZaqR6TzPJfibStBzJsx2Y7aWKcOijU3TDtOxxIj9p9MYowxoZ2iGH\nItp9/okCgYEAh6lbVbf77NArp1FsocQoeZ2ZL1hsOXpmRwpNmePPA6DfjqJyttpH\ngSh8Z0daqMvojStilhwIkEURy9ITuPYoKt2blWQY8RY//H1zFnwKg2AJR5PvlWcT\n0JBxt4cHMYy6jW2Q8/ZTVuttPd+UVIDehTFN6oyWF6FBgKxLO5bSjzc=\n-----END RSA PRIVATE KEY-----", + "issuing_ca": "-----BEGIN 
CERTIFICATE-----\nMIIFJjCCAw6gAwIBAgIUDUnfHPvwqpztM2lJh40lVUmTjV8wDQYJKoZIhvcNAQEL\nBQAwKzEpMCcGA1UEAwwgc2VsZnNlcnZlX3Byb2R1Y3Rpb25fcGtpIFJvb3QgQ0Ew\nHhcNMTcwODI1MDUwNTEzWhcNMjcwODIzMDUwNTQzWjArMSkwJwYDVQQDDCBzZWxm\nc2VydmVfcHJvZHVjdGlvbl9wa2kgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEBBQAD\nggIPADCCAgoCggIBAM5LpBH9Qyg5VjTkdMj61gt72CVIrqE5s9iD+Bpb2hlLnWdb\n52FtcgCxIRca8kJhCYK53dNVmCP8d7LSzogxdIHyzEe5f405ukJVZIbYEYcA4BLK\n3UU322bYJkTTToABwV+XhlHjLhaze9GLo4snCklxAzafWvqR1C0faB2dPtq5WyQi\n/2uCvGHcpqe/ozNvZON6eYkjQpCwHftR0TwVVb435hvJb6FeeV95MgVq/C0pZFG4\nGLgJNj4GK4BtG2wsIDVMMcaoFrSKfKDqyE+4ekvzYP4nDzbYK5XsgH7/7XB9tL7w\nwMVj0J1mR3TbxVTBZyk509F0oXqBcNb6vvybJevhDlkXMQPgxyOmogm6GUQ3beMX\nsRpN5uotnbWaF0MQbgo8YrgQX3BGrLmKRfk9rIMoBKabptDMRw5Df1ouu5D9Jb3b\n3nlelkRXR5qb0R68CM0S78KqVB32NQsLixQ58YUKmcvlQcaIF9cwC28+LYm4sRq/\nV0tCl68K19PmgZT+Qr0Apakw+vlQ8ojvT+/wTVtg+gphuG7Ovv00xRXa/dpoC3Ff\nOktxUmu3bh4YU/IVCT3+YbwB7vyOfKGTwSmVK+s5gt4MDM65zX58xa85psJI8mqP\nCwKGDleglrAIrHxxg2wKrIibiIriSnjJsKqCzpcm9+6V4zewwQFqdfr1R92rAgMB\nAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW\nBBQNl+8T/jYl1zV4Sct1EKqHBDcUUzANBgkqhkiG9w0BAQsFAAOCAgEAYo+vaKzi\nW2YTogGvuDvWnFzDtRa6zfB1UNqUTiacmr9ISqTDGJPOE7o7+5//31yS63/VuPAb\nsskfjtbywGUcjLEoa//vqDUA5VPQSr2MGpqZItt+QQ7eIQPQEt6IaqohmIxvgyDI\nvV35Ld06slZju9IZJdOx5GyRU49ZrhTciNeHBFJbPTzTWw7swjP1Kj13BJ9++YlU\ndHHnJecMgRPXbbFn8cThcIUwhaTEWFhlC7zc4YUpTm8nmHaCLmG8TM7tYLaymHqd\nypMBa3TrGr4+XIgwkWWb9h9+JnlBXc+aq2pJulErzN3raytzv+iTOwcI+YCufgee\nAf25Zzk9t75KIHjSdqu1U/QXiPSgJgr7o2yrtZbeLT+eMHuhCfbuWduipuRgTlUk\na8hvoiFDabCrlJABDYHNO8WMCIqX9qja0crqA1JbPXAEMiYwdtoU+p27CtNupGVE\nQENamacyYD5VhApTnxACwwakMep0jDYQUXUYTeLz6Aj3vVUJl54/3Uqbh6fxKamh\n8xDeb+HjhO5UKDkfAH0qe17qSGGVftMI3YMPCEqrvnnoVl8VHxpvdVjjJoHEEKoE\ne8mrX4Jp9O3xVcGFItMQQzvWc1A47ewqIy6x+bk+0W8fL6+rKd+8U7aRIvC7LFiw\nluvq3QIacuHULtox36A7HFmlYDQ1ozh+tLI=\n-----END CERTIFICATE-----", + "keystorepass": "ibmkey", + "destination_host": "desthost" +} \ No newline at end of file diff --git 
a/tests/unit/python/foglamp/services/core/api/certs/pem/foglamp.pem b/tests/unit/python/foglamp/services/core/api/certs/pem/foglamp.pem new file mode 100644 index 0000000000..d600b3048b --- /dev/null +++ b/tests/unit/python/foglamp/services/core/api/certs/pem/foglamp.pem @@ -0,0 +1,30 @@ +-----BEGIN CERTIFICATE----- +MIICVTCCAb4CCQCcGy+708GE7TANBgkqhkiG9w0BAQsFADBvMQswCQYDVQQGEwJV +UzETMBEGA1UECAwKQ2FsaWZvcm5pYTEQMA4GA1UECgwHT1NJc29mdDEQMA4GA1UE +AwwHZm9nbGFtcDEnMCUGCSqGSIb3DQEJARYYZm9nbGFtcEBnb29nbGVncm91cHMu +Y29tMB4XDTE4MDIwMzEwNTk1MVoXDTE5MDIwMzEwNTk1MVowbzELMAkGA1UEBhMC +VVMxEzARBgNVBAgMCkNhbGlmb3JuaWExEDAOBgNVBAoMB09TSXNvZnQxEDAOBgNV +BAMMB2ZvZ2xhbXAxJzAlBgkqhkiG9w0BCQEWGGZvZ2xhbXBAZ29vZ2xlZ3JvdXBz +LmNvbTCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEAmfRMO86UFZeKX4QiTCNZ ++4Jgrgi8i/DLTckHeihj9lqeom+DlGuVK5k9jY34Ji679E5QEgTazI/quaP4ihfZ +8ZeovJM6oH30aS/kMTCHoFgsCh9qcB5Qx2/nHId+Ficq3V3DlL21BnTjNGnTOKA9 +xH0NodhA9gbL38nAb8vN7bsCAwEAATANBgkqhkiG9w0BAQsFAAOBgQCARZlcWN9W +Fn8f1dExO1J5M3ybEsnUaXmGCFfy+fynkfQMN6Yxc8Lz1W45zUp0xg+qt59pyKGQ +L7qoznpyziMKnIsXoE0HhKzyQVr56wx7j9CS8I1FJd5jTzwrUN1ecgElnz3MLgWY +ikoHYHq4kqdBUo5lI1vLGeh7iyWVv6xo7Q== +-----END CERTIFICATE----- +-----BEGIN RSA PRIVATE KEY----- +MIICXQIBAAKBgQCZ9Ew7zpQVl4pfhCJMI1n7gmCuCLyL8MtNyQd6KGP2Wp6ib4OU +a5UrmT2NjfgmLrv0TlASBNrMj+q5o/iKF9nxl6i8kzqgffRpL+QxMIegWCwKH2pw +HlDHb+cch34WJyrdXcOUvbUGdOM0adM4oD3EfQ2h2ED2BsvfycBvy83tuwIDAQAB +AoGAQ8bdWOcVInxYmrxdLQ/TKXq4fWWzPBiCKHh8Jjdp6qbyd6iIUOsb/Nm103/Q +415nuKJRdd1VRgGF4LxjSsg9VgQObA2aij1Mdb2TzfJbxWiUWHxWI5wDj6orZ+2Q +1cXDns83rUq2HsYImb2o8Yw7pnLJ/j/LMZ+SO8PYPwCkKqECQQDKHLzeHSwJbVtu +Y/3xyruuL356ZP/6I712fnNkVJvACYlcXTMm6uBurbXAyYvZ8dqzmKHG19wbJEQA +MCAoaR2nAkEAwwCEMmmV47vfomWDyqFIbWwoHtcWL9MSKE0i3U7bRYHlNh5CDSiv +RLcKZ9oEUlXTuZ+5CKGgaJ8YsuuIzqE5zQJBAMK8R7nW77wxktvdGivjQ0wEVN/G ++uBfmUhXdAUihGLWjXPiFMwVmKLrWmjjiI04u5k5of49s8Su/5oSxJlLFpcCQF+U +kjePj1uny87yyS157mFMAUJUlNhuMdk1LeCh9VJVf/8Nx7xhc79PpLcQosukq/mx +M2XqqxB84wNX9Z+qYQUCQQC8NmVM5T7zvyzSJ54lTy03MDcSnyKXHTPy/8cdirf+ 
+DUMPr9tcNWChAGAkfONOiGKVt/c2frYrI1N4v6zAaPcE +-----END RSA PRIVATE KEY----- diff --git a/tests/unit/python/foglamp/services/core/api/test_certificate_store.py b/tests/unit/python/foglamp/services/core/api/test_certificate_store.py index e2411180fc..f3a0a66b0d 100644 --- a/tests/unit/python/foglamp/services/core/api/test_certificate_store.py +++ b/tests/unit/python/foglamp/services/core/api/test_certificate_store.py @@ -9,6 +9,7 @@ import pathlib from unittest.mock import MagicMock, patch +from collections import Counter from aiohttp import web import pytest @@ -40,58 +41,45 @@ def certs_path(self): return pathlib.Path(__file__).parent async def test_get_certs(self, client, certs_path): - response_content = [{"cert": "foglamp.cert", "key": "foglamp.key", "pem": "foglamp.pem"}, - {"cert": "server.cert", "key": "", "pem": ""}] + response_content = {'keys': ['foglamp.key', 'rsa_private.pem'], + 'certs': ['foglamp.cert', 'test.json', 'foglamp.pem']} with patch.object(certificate_store, '_get_certs_dir', return_value=certs_path / 'certs'): with patch('os.walk') as mockwalk: - mockwalk.return_value = [ - (certs_path / 'certs', [], ['foglamp.cert', 'foglamp.key', 'foglamp.txt', 'server.cert', 'foglamp.pem']) - ] + mockwalk.return_value = [(str(certs_path / 'certs'), [], ['foglamp.cert']), + (str(certs_path / 'certs/pem'), [], ['foglamp.pem']), + (str(certs_path / 'certs/json'), [], ['test.json']), + (str(certs_path / 'certs'), [], ['foglamp.key', 'rsa_private.pem']) + ] resp = await client.get('/foglamp/certificate') assert 200 == resp.status res = await resp.text() jdict = json.loads(res) - result = jdict["certificates"] - assert 2 == len(result) - assert response_content[0] in result - assert response_content[1] in result + cert = jdict["certs"] + assert 3 == len(cert) + assert Counter(response_content['certs']) == Counter(cert) + key = jdict["keys"] + assert 2 == len(key) + assert Counter(response_content['keys']) == Counter(key) 
mockwalk.assert_called_once_with(certs_path / 'certs') - async def test_get_certs_if_dir_is_empty(self, client, certs_path): - with patch.object(certificate_store, '_get_certs_dir', return_value=certs_path / 'certs'): - with patch('os.walk') as mockwalk: - mockwalk.return_value = [(certs_path / 'certs', [], [])] - resp = await client.get('/foglamp/certificate') - assert 200 == resp.status - result = await resp.text() - json_response = json.loads(result) - assert 0 == len(json_response['certificates']) - assert {'certificates': []} == json_response - mockwalk.assert_called_once_with(certs_path / 'certs') - - async def test_get_certs_if_bad_extension(self, client, certs_path): - with patch.object(certificate_store, '_get_certs_dir', return_value=certs_path / 'certs'): - with patch('os.walk') as mockwalk: - mockwalk.return_value = [(certs_path / 'certs', [], ['foglamp.txt'])] - resp = await client.get('/foglamp/certificate') - assert 200 == resp.status - result = await resp.text() - json_response = json.loads(result) - assert 0 == len(json_response['certificates']) - assert {'certificates': []} == json_response - mockwalk.assert_called_once_with(certs_path / 'certs') - - async def test_get_certs_if_pair_is_missing(self, client, certs_path): - actual_response = {'certificates': [{'key': '', 'cert': 'server.cert', 'pem': ''}]} + @pytest.mark.parametrize("files", [ + [], ['foglamp.txt'], + ]) + async def test_get_bad_certs(self, client, certs_path, files): with patch.object(certificate_store, '_get_certs_dir', return_value=certs_path / 'certs'): with patch('os.walk') as mockwalk: - mockwalk.return_value = [(certs_path / 'certs', [], ['server.cert'])] + mockwalk.return_value = [(str(certs_path / 'certs'), [], files), + (str(certs_path / 'certs/pem'), [], files), + (str(certs_path / 'certs/json'), [], files), + (str(certs_path / 'certs'), [], files) + ] resp = await client.get('/foglamp/certificate') assert 200 == resp.status result = await resp.text() json_response = 
json.loads(result) - assert 1 == len(json_response['certificates']) - assert actual_response == json_response + assert 0 == len(json_response['certs']) + assert 0 == len(json_response['keys']) + assert {'certs': [], 'keys': []} == json_response mockwalk.assert_called_once_with(certs_path / 'certs') async def test_upload(self, client, certs_path): @@ -104,9 +92,24 @@ async def test_upload(self, client, certs_path): result = await resp.text() json_response = json.loads(result) assert 'foglamp.key and foglamp.cert have been uploaded successfully' == json_response['result'] + assert 2 == patch_find_file.call_count + args, kwargs = patch_find_file.call_args_list[0] + assert ('foglamp.cert', certificate_store._get_certs_dir('/certs/')) == args + args, kwargs = patch_find_file.call_args_list[1] + assert ('foglamp.key', certificate_store._get_certs_dir('/certs/')) == args + + async def test_upload_with_cert_only(self, client, certs_path): + files = {'cert': open(str(certs_path / 'certs/foglamp.pem'), 'rb')} + with patch.object(certificate_store, '_get_certs_dir', return_value=certs_path / 'certs/pem'): + with patch.object(certificate_store, '_find_file', return_value=[]) as patch_find_file: + resp = await client.post('/foglamp/certificate', data=files) + assert 200 == resp.status + result = await resp.text() + json_response = json.loads(result) + assert 'foglamp.pem has been uploaded successfully' == json_response['result'] assert 1 == patch_find_file.call_count args, kwargs = patch_find_file.call_args - assert ('foglamp.cert', certificate_store._get_certs_dir()) == args + assert ('foglamp.pem', certificate_store._get_certs_dir('/certs/pem')) == args async def test_file_upload_with_overwrite(self, client, certs_path): files = {'key': open(str(certs_path / 'certs/foglamp.key'), 'rb'), @@ -119,16 +122,11 @@ async def test_file_upload_with_overwrite(self, client, certs_path): result = await resp.text() json_response = json.loads(result) assert 'foglamp.key and foglamp.cert 
have been uploaded successfully' == json_response['result'] - assert 1 == patch_find_file.call_count - args, kwargs = patch_find_file.call_args - assert ('foglamp.cert', certificate_store._get_certs_dir()) == args - - async def test_file_upload_with_different_names(self, client, certs_path): - files = {'key': open(str(certs_path / 'certs/foglamp.key'), 'rb'), - 'cert': open(str(certs_path / 'certs/server.cert'), 'rb')} - resp = await client.post('/foglamp/certificate', data=files) - assert 400 == resp.status - assert 'key and certs file name should match' == resp.reason + assert 2 == patch_find_file.call_count + args, kwargs = patch_find_file.call_args_list[0] + assert ('foglamp.cert', certificate_store._get_certs_dir('/certs/')) == args + args, kwargs = patch_find_file.call_args_list[1] + assert ('foglamp.key', certificate_store._get_certs_dir('/certs/')) == args async def test_bad_key_file_upload(self, client, certs_path): files = {'bad_key': open(str(certs_path / 'certs/foglamp.key'), 'rb'), @@ -136,21 +134,28 @@ async def test_bad_key_file_upload(self, client, certs_path): } resp = await client.post('/foglamp/certificate', data=files) assert 400 == resp.status - assert 'key or certs file is missing' == resp.reason + assert 'key file is missing' == resp.reason async def test_bad_cert_file_upload(self, client, certs_path): files = {'bad_cert': open(str(certs_path / 'certs/foglamp.cert'), 'rb'), 'key': open(str(certs_path / 'certs/foglamp.key'), 'rb')} resp = await client.post('/foglamp/certificate', data=files) assert 400 == resp.status - assert 'key or certs file is missing' == resp.reason + assert 'Cert file is missing' == resp.reason - async def test_bad_extension_file_upload(self, client, certs_path): + async def test_bad_extension_cert_file_upload(self, client, certs_path): files = {'cert': open(str(certs_path / 'certs/foglamp.txt'), 'rb'), 'key': open(str(certs_path / 'certs/foglamp.key'), 'rb')} resp = await client.post('/foglamp/certificate', data=files) 
assert 400 == resp.status - assert 'Accepted file extensions are .key and .cert' == resp.reason + assert 'Accepted file extensions are .cert, .json and .pem for cert file' == resp.reason + + async def test_bad_extension_key_file_upload(self, client, certs_path): + files = {'cert': open(str(certs_path / 'certs/foglamp.cert'), 'rb'), + 'key': open(str(certs_path / 'certs/foglamp.txt'), 'rb')} + resp = await client.post('/foglamp/certificate', data=files) + assert 400 == resp.status + assert 'Accepted file extensions are .key and .pem for key file' == resp.reason @pytest.mark.parametrize("overwrite", ['blah', '2']) async def test_bad_overwrite_file_upload(self, client, certs_path, overwrite): @@ -168,10 +173,11 @@ async def test_upload_with_existing_and_no_overwrite(self, client, certs_path): with patch.object(certificate_store, '_find_file', return_value=["v"]) as patch_file: resp = await client.post('/foglamp/certificate', data=files) assert 400 == resp.status - assert 'Certificate with the same name already exists. To overwrite set the overwrite to 1' == resp.reason + assert 'Certificate with the same name already exists. 
To overwrite set the ' \ + 'overwrite to 1' == resp.reason assert 1 == patch_file.call_count args, kwargs = patch_file.call_args - assert ('foglamp.cert', certificate_store._get_certs_dir()) == args + assert ('foglamp.cert', certificate_store._get_certs_dir('/certs')) == args async def test_exception(self, client): files = {'cert': 'certs/bla.cert', 'key': 'certs/bla.key'} @@ -181,7 +187,9 @@ async def test_exception(self, client): @pytest.mark.parametrize("cert_name, actual_code, actual_reason", [ ('', 404, "Not Found"), - ('blah', 404, "Certificate with name blah does not exist"), + ('root.txt', 400, "Accepted file extensions are ('.cert', '.json', '.key', '.pem')"), + ('root.pem', 404, "Certificate with name root.pem does not exist"), + ('rsa_private.key', 404, "Certificate with name rsa_private.key does not exist"), ]) async def test_bad_delete_cert(self, client, cert_name, actual_code, actual_reason): resp = await client.delete('/foglamp/certificate/{}'.format(cert_name)) @@ -197,14 +205,27 @@ async def async_mock(): with patch('os.path.isfile', return_value=True): with patch.object(connect, 'get_storage_async', return_value=storage_client_mock): with patch.object(c_mgr, 'get_category_item', return_value=async_mock()) as patch_cfg: - resp = await client.delete('/foglamp/certificate/foglamp') + resp = await client.delete('/foglamp/certificate/foglamp.cert') assert 409 == resp.status - assert 'Certificate with name foglamp is already in use, you can not delete' == resp.reason + assert 'Certificate with name foglamp.cert is already in use, you can not delete' == resp.reason assert 1 == patch_cfg.call_count args, kwargs = patch_cfg.call_args assert ({'item_name': 'certificateName', 'category_name': 'rest_api'}) == kwargs - async def test_delete_cert(self, client): + async def test_bad_type_delete_cert(self, client): + resp = await client.delete('/foglamp/certificate/foglamp.key?type=pem') + assert 400 == resp.status + assert 'Only cert and key are allowed for the 
value of type param' == resp.reason + + @pytest.mark.parametrize("cert_name, param", [ + ('server.cert', ''), + ('foglamp.cert', '?type=cert'), + ('foglamp.json', '?type=cert'), + ('foglamp.pem', '?type=cert'), + ('foglamp.key', '?type=key'), + ('rsa_private.pem', '?type=key'), + ]) + async def test_delete_cert(self, client, cert_name, param): async def async_mock(): return {'value': 'test'} @@ -214,9 +235,9 @@ async def async_mock(): with patch.object(c_mgr, 'get_category_item', return_value=async_mock()): with patch('os.path.isfile', return_value=True): with patch('os.remove', return_value=True) as patch_remove: - resp = await client.delete('/foglamp/certificate/foglamp') + resp = await client.delete('/foglamp/certificate/{}{}'.format(cert_name, param)) assert 200 == resp.status result = await resp.text() json_response = json.loads(result) - assert 'foglamp.key, foglamp.cert have been deleted successfully' == json_response['result'] - assert 2 == patch_remove.call_count + assert '{} has been deleted successfully'.format(cert_name) == json_response['result'] + assert 1 == patch_remove.call_count From f31bd202387690b99fc9a5ca87b8d8ecc11a60c9 Mon Sep 17 00:00:00 2001 From: Massimiliano Pinto Date: Fri, 10 May 2019 12:53:10 +0200 Subject: [PATCH 117/161] Add set +e around system updates Add set +e around system updates --- requirements.sh | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/requirements.sh b/requirements.sh index a7d142228c..8370fe172d 100755 --- a/requirements.sh +++ b/requirements.sh @@ -28,8 +28,10 @@ is_rhel=`(lsb_release -ds 2>/dev/null || cat /etc/*release 2>/dev/null | head -n if [ "${is_rhel}" != "" ]; then echo "Platform is ${is_rhel}" centos=`echo $is_rhel | egrep 'CentOS' || echo ""` + set +e sudo yum check-update sudo yum update -y + set -e if [ "${centos}" = "" ]; then # RHEL 7 specific sudo yum-config-manager --enable 'Red Hat Enterprise Linux Server 7 RHSCL (RPMs)' @@ -72,8 +74,10 @@ EOF make cd $foglamp_location else + set +e sudo 
apt update sudo apt -y upgrade + set -e sudo apt install -y avahi-daemon curl sudo apt install -y cmake g++ make build-essential autoconf automake uuid-dev From 16127b291df281706430da21f6a6a488d4a11ff4 Mon Sep 17 00:00:00 2001 From: ashish-jabble Date: Fri, 10 May 2019 17:15:55 +0530 Subject: [PATCH 118/161] delete cert test fixes and other text fixes --- .../services/core/api/certificate_store.py | 2 +- .../core/api/test_certificate_store.py | 24 ++++++++++++++++--- 2 files changed, 22 insertions(+), 4 deletions(-) diff --git a/python/foglamp/services/core/api/certificate_store.py b/python/foglamp/services/core/api/certificate_store.py index c8f60e24ae..27301d0648 100644 --- a/python/foglamp/services/core/api/certificate_store.py +++ b/python/foglamp/services/core/api/certificate_store.py @@ -82,7 +82,7 @@ async def upload(request): cert_filename = cert_file.filename if cert_filename.endswith('.cert'): if not key_file: - raise web.HTTPBadRequest(reason="key file is missing") + raise web.HTTPBadRequest(reason="key file is missing, or upload certificate with .pem or .json extension") cert_valid_extensions = ('.cert', '.json', '.pem') key_valid_extensions = ('.key', '.pem') diff --git a/tests/unit/python/foglamp/services/core/api/test_certificate_store.py b/tests/unit/python/foglamp/services/core/api/test_certificate_store.py index f3a0a66b0d..06b2997207 100644 --- a/tests/unit/python/foglamp/services/core/api/test_certificate_store.py +++ b/tests/unit/python/foglamp/services/core/api/test_certificate_store.py @@ -134,7 +134,7 @@ async def test_bad_key_file_upload(self, client, certs_path): } resp = await client.post('/foglamp/certificate', data=files) assert 400 == resp.status - assert 'key file is missing' == resp.reason + assert 'key file is missing, or upload certificate with .pem or .json extension' == resp.reason async def test_bad_cert_file_upload(self, client, certs_path): files = {'bad_cert': open(str(certs_path / 'certs/foglamp.cert'), 'rb'), @@ -218,14 
+218,13 @@ async def test_bad_type_delete_cert(self, client): assert 'Only cert and key are allowed for the value of type param' == resp.reason @pytest.mark.parametrize("cert_name, param", [ - ('server.cert', ''), ('foglamp.cert', '?type=cert'), ('foglamp.json', '?type=cert'), ('foglamp.pem', '?type=cert'), ('foglamp.key', '?type=key'), ('rsa_private.pem', '?type=key'), ]) - async def test_delete_cert(self, client, cert_name, param): + async def test_delete_cert_with_type(self, client, cert_name, param): async def async_mock(): return {'value': 'test'} @@ -241,3 +240,22 @@ async def async_mock(): json_response = json.loads(result) assert '{} has been deleted successfully'.format(cert_name) == json_response['result'] assert 1 == patch_remove.call_count + + async def test_delete_cert(self, client, certs_path, cert_name='server.cert'): + async def async_mock(): + return {'value': 'test'} + + storage_client_mock = MagicMock(StorageClientAsync) + c_mgr = ConfigurationManager(storage_client_mock) + with patch.object(certificate_store, '_get_certs_dir', return_value=str(certs_path / 'certs') + '/'): + with patch('os.walk') as mockwalk: + mockwalk.return_value = [(str(certs_path / 'certs'), [], [cert_name])] + with patch.object(connect, 'get_storage_async', return_value=storage_client_mock): + with patch.object(c_mgr, 'get_category_item', return_value=async_mock()): + with patch('os.remove', return_value=True) as patch_remove: + resp = await client.delete('/foglamp/certificate/{}'.format(cert_name)) + assert 200 == resp.status + result = await resp.text() + json_response = json.loads(result) + assert '{} has been deleted successfully'.format(cert_name) == json_response['result'] + assert 1 == patch_remove.call_count From d93deb7c9ba4feb877746a994402fa960507fc0d Mon Sep 17 00:00:00 2001 From: Vaibhav Singhal Date: Fri, 10 May 2019 18:11:04 +0530 Subject: [PATCH 119/161] Fixes for FOGL-2751 leftovers --- requirements.sh | 77 +++++++++++++++++++++++-------------------------- 
1 file changed, 36 insertions(+), 41 deletions(-) diff --git a/requirements.sh b/requirements.sh index 8370fe172d..afe5a24a35 100755 --- a/requirements.sh +++ b/requirements.sh @@ -17,47 +17,44 @@ ##-------------------------------------------------------------------- ## -## Author: Ashish Jabble, Massimiliano Pinto +## Author: Ashish Jabble, Massimiliano Pinto, Vaibhav Singhal ## set -e foglamp_location=`pwd` -is_rhel=`(lsb_release -ds 2>/dev/null || cat /etc/*release 2>/dev/null | head -n1 || uname -om) | egrep -o '(Red Hat|CentOS)' || echo ""` -if [ "${is_rhel}" != "" ]; then - echo "Platform is ${is_rhel}" - centos=`echo $is_rhel | egrep 'CentOS' || echo ""` - set +e - sudo yum check-update - sudo yum update -y - set -e - if [ "${centos}" = "" ]; then - # RHEL 7 specific - sudo yum-config-manager --enable 'Red Hat Enterprise Linux Server 7 RHSCL (RPMs)' +os_name=`(grep -o '^NAME=.*' /etc/os-release | cut -f2 -d\" | sed 's/"//g')` +os_version=`(grep -o '^VERSION_ID=.*' /etc/os-release | cut -f2 -d\" | sed 's/"//g')` +echo "Platform is ${os_name}, Version: ${os_version}" + +if [[ ( $os_name == *"Red Hat"* || $os_name == *"CentOS"* ) && $os_version == *"7"* ]]; then + if [[ $os_name == *"Red Hat"* ]]; then + yum-config-manager --enable 'Red Hat Enterprise Linux Server 7 RHSCL (RPMs)' + yum install -y @development else - # CentOS 7 specific - sudo yum install -y centos-release-scl-rh + yum groupinstall "Development tools" -y + yum install -y centos-release-scl fi - sudo yum install -y @development - sudo yum install -y boost-devel - sudo yum install -y glib2-devel - sudo yum install -y rh-python36 - sudo yum install -y rsyslog - sudo yum install -y openssl-devel - sudo yum install -y postgresql-devel - sudo yum install -y wget - sudo yum install -y zlib-devel - sudo yum install -y git - sudo yum install -y cmake - sudo yum install -y libuuid-devel - sudo yum install -y dbus-devel + yum install -y boost-devel + yum install -y glib2-devel + yum install -y rh-python36 
+ yum install -y rsyslog + yum install -y openssl-devel + yum install -y postgresql-devel + yum install -y wget + yum install -y zlib-devel + yum install -y git + yum install -y cmake + yum install -y libuuid-devel + yum install -y dbus-devel + echo "source scl_source enable rh-python36" >> /home/${SUDO_USER}/.bashrc - sudo su - </dev/null; then + apt install -y avahi-daemon curl + apt install -y cmake g++ make build-essential autoconf automake uuid-dev + apt install -y libtool libboost-dev libboost-system-dev libboost-thread-dev libpq-dev libssl-dev libz-dev + apt install -y python-dbus python-dev python3-dev python3-pip + apt install -y sqlite3 libsqlite3-dev + apt install -y pkg-config # sudo apt install -y postgresql +else + echo "Requirements cannot be automatically installed, please refer README.rst to install requirements manually" fi From 2de250405503de2cb0c8af65521bbbb82057ba3d Mon Sep 17 00:00:00 2001 From: stefano Date: Mon, 13 May 2019 10:14:25 +0200 Subject: [PATCH 120/161] FOGL-2813: improved the checking of the plaform in the foglamp script --- scripts/foglamp | 43 ++++++++++++++++++++++++++----------------- 1 file changed, 26 insertions(+), 17 deletions(-) diff --git a/scripts/foglamp b/scripts/foglamp index 06ef47a010..c8354d4a29 100755 --- a/scripts/foglamp +++ b/scripts/foglamp @@ -34,6 +34,25 @@ if [ -z ${FOGLAMP_ROOT+x} ]; then export FOGLAMP_ROOT fi +# Check if the default directory exists +if [[ ! -d "${FOGLAMP_ROOT}" ]]; then + logger -p local0.err -t "foglamp.script.foglamp" "FogLAMP cannot be executed: ${FOGLAMP_ROOT} is not a valid directory." + echo "FogLAMP cannot be executed: ${FOGLAMP_ROOT} is not a valid directory." + echo "Create the enviroment variable FOGLAMP_ROOT before using FogLAMP." + echo "Specify the base directory for FogLAMP and set the variable with:" + echo "export FOGLAMP_ROOT=" + exit 1 +fi + +if [[ ! 
-e "${FOGLAMP_ROOT}/scripts/common/get_platform.sh" ]]; then + + msg_text="ERROR: FogLAMP not properly installed in the dir :${FOGLAMP_ROOT}:" + echo $msg_text + logger -p local0.err $msg_text + + exit 1 +fi + # Include common code source "${FOGLAMP_ROOT}/scripts/common/get_platform.sh" @@ -52,8 +71,8 @@ then if [[ "$status" != "0" ]] then msg_text="ERROR: FogLAMP cannot enable the python environment in RedHat/Centos platform." - logger -p local0.err -t msg_text - echo msg_text + logger -p local0.err $msg_text + echo $msg_text exit 1 fi set -e @@ -62,16 +81,6 @@ else : fi -# Check if the default directory exists -if [[ ! -d "${FOGLAMP_ROOT}" ]]; then - logger -p local0.err -t "foglamp.script.foglamp" "FogLAMP cannot be executed: ${FOGLAMP_ROOT} is not a valid directory." - echo "FogLAMP cannot be executed: ${FOGLAMP_ROOT} is not a valid directory." - echo "Create the enviroment variable FOGLAMP_ROOT before using FogLAMP." - echo "Specify the base directory for FogLAMP and set the variable with:" - echo "export FOGLAMP_ROOT=" - exit 1 -fi - # Check/set LD_LIBRARY_PATH libPathSet=0 libdir=${FOGLAMP_ROOT}/lib; [ -d ${libdir} ] && LD_LIBRARY_PATH=$(echo $LD_LIBRARY_PATH | sed "s|${libdir}||g") && export LD_LIBRARY_PATH=${libdir}:${LD_LIBRARY_PATH} && libPathSet=1 @@ -109,10 +118,10 @@ foglamp_reset() { # Check the storage management script check_storage_management_script "be reset" - # We could have made it easier here, we will improve it later. + # We could have made it easier here, we will improve it later. # For now, check the status of faoglamp, since the server must be down result=`foglamp_status "silent"` - + if [[ $result != "2" ]]; then foglamp_log "info" "FogLAMP appears to be running and it cannot be reset. Stop FogLAMP first." "all" "pretty" exit 0 @@ -229,7 +238,7 @@ foglamp_start() { foglamp_stop() { result=`foglamp_status "silent"` - + if [[ $result = "2" ]]; then foglamp_log "info" "It looks like FogLAMP is not running." 
"all" "pretty" exit 0 @@ -433,7 +442,7 @@ foglamp_status() { foglamp_log "info" "FogLAMP v${FOGLAMP_VERSION} running${safe_mode}." "outonly" "pretty" foglamp_log "info" "FogLAMP Uptime: ${uptime_sec} seconds." "outonly" "pretty" foglamp_log "info" "FogLAMP records: ${record_read} read, ${record_sent} sent, ${record_purged} purged." "outonly" "pretty" - + foglamp_log "info" "FogLAMP ${req_auth} authentication." "outonly" "pretty" # Show Services @@ -634,7 +643,7 @@ case "$1" in foglamp_status ;; version) - foglamp_print_version + foglamp_print_version ;; help) foglamp_help From 3c47d2475cfcf6ff641616d003e2faccf5f8a526 Mon Sep 17 00:00:00 2001 From: Stefano Date: Tue, 14 May 2019 12:17:00 +0200 Subject: [PATCH 121/161] FOGL-2815: adapt FogLAMP readme files to reference RedHat/Centos --- REDHAT.rst | 109 +++++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 109 insertions(+) create mode 100644 REDHAT.rst diff --git a/REDHAT.rst b/REDHAT.rst new file mode 100644 index 0000000000..0eea26fd39 --- /dev/null +++ b/REDHAT.rst @@ -0,0 +1,109 @@ +.. |br| raw:: html + +
+ +******************************************* +Building and using FogLAMP on RedHat/CentOS +******************************************* + +FogLAMP can be build and installed on Red Hat and CentOS. +It is tested on : +:: + Red Hat 7.6 + CentOS 7.6-1810 +|br| + +You may follow the instructions in the README file to build, +install and run FogLAMP on Red Hat or CentOS. + +****************************************************** +Install FogLAMP on RedHat/CentOS using the RPM package +****************************************************** + +The FogLAMP RPM can is available in the download page of the +documentation avaiable at : + +https://foglamp.readthedocs.io/en/master/92_downloads.html + +The RPM can also be created using the repository available at : +https://github.com/foglamp/foglamp-pkg +using the make_rpm scipt and following the instruction in README.rst. + + +Red Hat +======= + +:: + sudo yum-config-manager --enable 'Red Hat Enterprise Linux Server 7 RHSCL (RPMs)' + sudo yum -y localinstall ~/foglamp-1.5.2-0.00.x86_64.rpm +|br| + +CentOS +====== + +:: + sudo yum install -y centos-release-scl-rh + sudo yum -y localinstall ~/foglamp-1.5.2-0.00.x86_64.rpm +|br| + + + +FogLAMP requires the use of Python 3.5 in order to support the +asynchronous IO mechanisms used by FogLAMP. Earlier Raspberry Pi Raspbian +distributions support Python 3.4 as the latest version of Python. +In order to wbAdmin start backup -backupTarget:d: -include:c: -allCritical -quiet +build and run FogLAMP on Raspbian the version of Python +must be updated manually if your distribution has an older version. + +**NOTE**: These steps must be executed *in addition* to what is described in the README file when you install FogLAMP on Raspbian. + +Check your Python version by running the command +:: + python3 --version +|br| + +If your version is less than 3.5 then follow the instructions below to update +your Python version. 
+ +Install and update the build tools required for Python to be built +:: + sudo apt-get update + sudo apt-get install build-essential tk-dev + sudo apt-get install libncurses5-dev libncursesw5-dev libreadline6-dev + sudo apt-get install libdb5.3-dev libgdbm-dev libsqlite3-dev libssl-dev + sudo apt-get install libbz2-dev libexpat1-dev liblzma-dev zlib1g-dev +|br| + +Now build and install the new version of Python +:: + wget https://www.python.org/ftp/python/3.5.2/Python-3.5.2.tgz + tar zxvf Python-3.5.2.tgz + cd Python-3.5.2 + ./configure + make + sudo make install +|br| + +Confirm the Python version +:: + python3 --version + pip3 --version +|br| + +These should both return a version number as 3.5, if not then check which +python3 and pip3 you are running and replace these with the newly +built versions. This may be caused by the newly built version being +installed in /usr/local/bin and the existing python3 and pip3 being +in /usr/bin. If this is the case then remove the /usr/bin versions +:: + sudo rm /usr/bin/python3 /usr/bin/pip3 +|br| + +You may also link tothe new version if you wish +:: + sudo ln -s /usr/bin/python3 /usr/local/bin/python3 + sudo ln -s /usr/bin/pip3 /usr/local/bin/pip3 +|br| +Once python3.5 has been installed you may follow the instructions +in the README file to build, install and run FogLAMP on Raspberry +Pi using the Raspbian distribution. From c94c1aa24507d757fd53b0b96bf8fea6c16b924f Mon Sep 17 00:00:00 2001 From: Stefano Date: Tue, 14 May 2019 12:36:21 +0200 Subject: [PATCH 122/161] FOGL-2815: update --- REDHAT.rst | 21 ++++++++++----------- 1 file changed, 10 insertions(+), 11 deletions(-) diff --git a/REDHAT.rst b/REDHAT.rst index 0eea26fd39..5a5377108c 100644 --- a/REDHAT.rst +++ b/REDHAT.rst @@ -8,10 +8,8 @@ Building and using FogLAMP on RedHat/CentOS FogLAMP can be build and installed on Red Hat and CentOS. 
It is tested on : -:: - Red Hat 7.6 - CentOS 7.6-1810 -|br| +- Red Hat 7.6 +- CentOS 7.6-1810 You may follow the instructions in the README file to build, install and run FogLAMP on Red Hat or CentOS. @@ -21,29 +19,30 @@ Install FogLAMP on RedHat/CentOS using the RPM package ****************************************************** The FogLAMP RPM can is available in the download page of the -documentation avaiable at : +documentation available at : https://foglamp.readthedocs.io/en/master/92_downloads.html -The RPM can also be created using the repository available at : +The RPM can also be created through the repository : https://github.com/foglamp/foglamp-pkg -using the make_rpm scipt and following the instruction in README.rst. + +using the make_rpm script and following the instruction in README.rst. Red Hat ======= :: - sudo yum-config-manager --enable 'Red Hat Enterprise Linux Server 7 RHSCL (RPMs)' - sudo yum -y localinstall ~/foglamp-1.5.2-0.00.x86_64.rpm + sudo yum-config-manager --enable 'Red Hat Enterprise Linux Server 7 RHSCL (RPMs)' + sudo yum -y localinstall ~/foglamp-1.5.2-0.00.x86_64.rpm |br| CentOS ====== :: - sudo yum install -y centos-release-scl-rh - sudo yum -y localinstall ~/foglamp-1.5.2-0.00.x86_64.rpm + sudo yum install -y centos-release-scl-rh + sudo yum -y localinstall ~/foglamp-1.5.2-0.00.x86_64.rpm |br| From b51167314425719a7897f19c9e65a69feadd9d9a Mon Sep 17 00:00:00 2001 From: Stefano Date: Tue, 14 May 2019 12:38:28 +0200 Subject: [PATCH 123/161] FOGL-2815: update --- REDHAT.rst | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/REDHAT.rst b/REDHAT.rst index 5a5377108c..6d634d43d1 100644 --- a/REDHAT.rst +++ b/REDHAT.rst @@ -7,7 +7,8 @@ Building and using FogLAMP on RedHat/CentOS ******************************************* FogLAMP can be build and installed on Red Hat and CentOS. 
-It is tested on : +It is tested against: + - Red Hat 7.6 - CentOS 7.6-1810 @@ -18,7 +19,7 @@ install and run FogLAMP on Red Hat or CentOS. Install FogLAMP on RedHat/CentOS using the RPM package ****************************************************** -The FogLAMP RPM can is available in the download page of the +The FogLAMP RPM is available in the download page of the documentation available at : https://foglamp.readthedocs.io/en/master/92_downloads.html @@ -26,7 +27,7 @@ https://foglamp.readthedocs.io/en/master/92_downloads.html The RPM can also be created through the repository : https://github.com/foglamp/foglamp-pkg -using the make_rpm script and following the instruction in README.rst. +using the make_rpm script and following the instruction in the README.rst. Red Hat From 09c6a09200435ed322d398efb4fcbe4f01d94aeb Mon Sep 17 00:00:00 2001 From: Stefano Date: Tue, 14 May 2019 12:45:47 +0200 Subject: [PATCH 124/161] FOGL-2815: update --- REDHAT.rst | 96 ++++++++++++------------------------------------------ 1 file changed, 20 insertions(+), 76 deletions(-) diff --git a/REDHAT.rst b/REDHAT.rst index 6d634d43d1..5300fc79e5 100644 --- a/REDHAT.rst +++ b/REDHAT.rst @@ -1,12 +1,18 @@ .. |br| raw:: html +.. Links +.. _download page: https://foglamp.readthedocs.io/en/master/92_downloads.html +.. _repository: https://github.com/foglamp/foglamp-pkg + +
******************************************* Building and using FogLAMP on RedHat/CentOS ******************************************* -FogLAMP can be build and installed on Red Hat and CentOS. +FogLAMP can be build and installed on Red Hat or CentOS. + It is tested against: - Red Hat 7.6 @@ -15,95 +21,33 @@ It is tested against: You may follow the instructions in the README file to build, install and run FogLAMP on Red Hat or CentOS. -****************************************************** -Install FogLAMP on RedHat/CentOS using the RPM package -****************************************************** - -The FogLAMP RPM is available in the download page of the -documentation available at : +******************************************************* +Install FogLAMP on Red Hat/CentOS using the RPM package +******************************************************* -https://foglamp.readthedocs.io/en/master/92_downloads.html +The FogLAMP RPM is available in the download page of the documentation available at `download page`_ -The RPM can also be created through the repository : -https://github.com/foglamp/foglamp-pkg +The RPM can also be created through the `repository`_ using the make_rpm script and following the instruction in the README.rst. -using the make_rpm script and following the instruction in the README.rst. +Installation on Red Hat +======================= -Red Hat -======= +A Red Hat package should be installed before the FogLAMP RPM, follow the instructions : :: sudo yum-config-manager --enable 'Red Hat Enterprise Linux Server 7 RHSCL (RPMs)' sudo yum -y localinstall ~/foglamp-1.5.2-0.00.x86_64.rpm -|br| - -CentOS -====== - -:: - sudo yum install -y centos-release-scl-rh - sudo yum -y localinstall ~/foglamp-1.5.2-0.00.x86_64.rpm -|br| - - - -FogLAMP requires the use of Python 3.5 in order to support the -asynchronous IO mechanisms used by FogLAMP. Earlier Raspberry Pi Raspbian -distributions support Python 3.4 as the latest version of Python. 
-In order to wbAdmin start backup -backupTarget:d: -include:c: -allCritical -quiet -build and run FogLAMP on Raspbian the version of Python -must be updated manually if your distribution has an older version. -**NOTE**: These steps must be executed *in addition* to what is described in the README file when you install FogLAMP on Raspbian. - -Check your Python version by running the command -:: - python3 --version |br| -If your version is less than 3.5 then follow the instructions below to update -your Python version. - -Install and update the build tools required for Python to be built -:: - sudo apt-get update - sudo apt-get install build-essential tk-dev - sudo apt-get install libncurses5-dev libncursesw5-dev libreadline6-dev - sudo apt-get install libdb5.3-dev libgdbm-dev libsqlite3-dev libssl-dev - sudo apt-get install libbz2-dev libexpat1-dev liblzma-dev zlib1g-dev -|br| - -Now build and install the new version of Python -:: - wget https://www.python.org/ftp/python/3.5.2/Python-3.5.2.tgz - tar zxvf Python-3.5.2.tgz - cd Python-3.5.2 - ./configure - make - sudo make install -|br| +Installation on CentOS +====================== -Confirm the Python version -:: - python3 --version - pip3 --version -|br| +A CentOS package should be installed before the FogLAMP RPM, follow the instructions : -These should both return a version number as 3.5, if not then check which -python3 and pip3 you are running and replace these with the newly -built versions. This may be caused by the newly built version being -installed in /usr/local/bin and the existing python3 and pip3 being -in /usr/bin. 
If this is the case then remove the /usr/bin versions :: - sudo rm /usr/bin/python3 /usr/bin/pip3 -|br| + sudo yum install -y centos-release-scl-rh + sudo yum -y localinstall ~/foglamp-1.5.2-0.00.x86_64.rpm -You may also link tothe new version if you wish -:: - sudo ln -s /usr/bin/python3 /usr/local/bin/python3 - sudo ln -s /usr/bin/pip3 /usr/local/bin/pip3 |br| -Once python3.5 has been installed you may follow the instructions -in the README file to build, install and run FogLAMP on Raspberry -Pi using the Raspbian distribution. From cdb29a3008dd006ca94b16005142bd1b43e4d38f Mon Sep 17 00:00:00 2001 From: Stefano Date: Tue, 14 May 2019 12:50:28 +0200 Subject: [PATCH 125/161] FOGL-2815: update --- REDHAT.rst | 5 ----- 1 file changed, 5 deletions(-) diff --git a/REDHAT.rst b/REDHAT.rst index 5300fc79e5..d7557f2b5e 100644 --- a/REDHAT.rst +++ b/REDHAT.rst @@ -34,20 +34,15 @@ Installation on Red Hat ======================= A Red Hat package should be installed before the FogLAMP RPM, follow the instructions : - :: sudo yum-config-manager --enable 'Red Hat Enterprise Linux Server 7 RHSCL (RPMs)' sudo yum -y localinstall ~/foglamp-1.5.2-0.00.x86_64.rpm -|br| Installation on CentOS ====================== A CentOS package should be installed before the FogLAMP RPM, follow the instructions : - :: sudo yum install -y centos-release-scl-rh sudo yum -y localinstall ~/foglamp-1.5.2-0.00.x86_64.rpm - -|br| From 2924e301495c10155289911fb8033806fdb2b47d Mon Sep 17 00:00:00 2001 From: Stefano Date: Tue, 14 May 2019 12:54:15 +0200 Subject: [PATCH 126/161] FOGL-2815: update --- REDHAT.rst | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/REDHAT.rst b/REDHAT.rst index d7557f2b5e..be51d2b5f1 100644 --- a/REDHAT.rst +++ b/REDHAT.rst @@ -2,7 +2,7 @@ .. Links .. _download page: https://foglamp.readthedocs.io/en/master/92_downloads.html -.. _repository: https://github.com/foglamp/foglamp-pkg +.. _foglamp-pkg: https://github.com/foglamp/foglamp-pkg
@@ -25,9 +25,9 @@ install and run FogLAMP on Red Hat or CentOS. Install FogLAMP on Red Hat/CentOS using the RPM package ******************************************************* -The FogLAMP RPM is available in the download page of the documentation available at `download page`_ +The FogLAMP RPM is available in the download page of the documentation available at `download page`_. -The RPM can also be created through the `repository`_ using the make_rpm script and following the instruction in the README.rst. +The RPM can also be created from the FogLAMP sources through the repository `foglamp-pkg`_ using the make_rpm script and following the instruction in the README.rst. Installation on Red Hat From 52aab62d7989a0cdf4bb5d5ae78bbc63f73b8705 Mon Sep 17 00:00:00 2001 From: Stefano Date: Tue, 14 May 2019 12:55:31 +0200 Subject: [PATCH 127/161] FOGL-2815: update --- REDHAT.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/REDHAT.rst b/REDHAT.rst index be51d2b5f1..bc08f353dc 100644 --- a/REDHAT.rst +++ b/REDHAT.rst @@ -1,12 +1,12 @@ .. |br| raw:: html +
+ .. Links .. _download page: https://foglamp.readthedocs.io/en/master/92_downloads.html .. _foglamp-pkg: https://github.com/foglamp/foglamp-pkg -
- ******************************************* Building and using FogLAMP on RedHat/CentOS ******************************************* From 40c9f9a4baeae10d29e33dd3b1b174e4701804c4 Mon Sep 17 00:00:00 2001 From: Stefano Date: Tue, 14 May 2019 12:58:03 +0200 Subject: [PATCH 128/161] FOGL-2815: update --- REDHAT.rst | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/REDHAT.rst b/REDHAT.rst index bc08f353dc..e95282a30d 100644 --- a/REDHAT.rst +++ b/REDHAT.rst @@ -11,9 +11,7 @@ Building and using FogLAMP on RedHat/CentOS ******************************************* -FogLAMP can be build and installed on Red Hat or CentOS. - -It is tested against: +FogLAMP can be build or installed on Red Hat or CentOS, it is tested against: - Red Hat 7.6 - CentOS 7.6-1810 From 56d360402543ee614c0e578e9dca2f4b1b23d881 Mon Sep 17 00:00:00 2001 From: Stefano Date: Tue, 14 May 2019 12:58:53 +0200 Subject: [PATCH 129/161] FOGL-2815: update --- REDHAT.rst | 1 - 1 file changed, 1 deletion(-) diff --git a/REDHAT.rst b/REDHAT.rst index e95282a30d..1f5d1a3cd7 100644 --- a/REDHAT.rst +++ b/REDHAT.rst @@ -6,7 +6,6 @@ .. _download page: https://foglamp.readthedocs.io/en/master/92_downloads.html .. _foglamp-pkg: https://github.com/foglamp/foglamp-pkg - ******************************************* Building and using FogLAMP on RedHat/CentOS ******************************************* From aa6462f8ec3880ea577b745af9f8595237e645a9 Mon Sep 17 00:00:00 2001 From: Stefano Date: Tue, 14 May 2019 14:27:15 +0200 Subject: [PATCH 130/161] FOGL-2815: update --- README.rst | 25 +++++++++++++++++++++++++ REDHAT.rst | 2 +- 2 files changed, 26 insertions(+), 1 deletion(-) diff --git a/README.rst b/README.rst index 4878628ee7..ba0072501b 100644 --- a/README.rst +++ b/README.rst @@ -69,6 +69,24 @@ FogLAMP is currently based on C/C++ and Python code. 
The packages needed to buil - sqlite3 - libsqlite3-dev + +Linux distributions +------------------- + +FogLAMP can be build or installed in one of the following Linux distributions : + +- Ubuntu +- Raspbian +- Red Hat +- CentOS + +The packages needed to build FogLAMP can be installed manually or automatically +using the `requirements.sh `_ script. + + +Build on Ubuntu +--------------- + On Ubuntu-based Linux distributions the packages can be installed with given `requirements.sh `_ or manual *apt-get*: :: apt-get install avahi-daemon curl @@ -80,6 +98,13 @@ On Ubuntu-based Linux distributions the packages can be installed with given `re You may need to use *sudo* to allow *apt-get* to install packages dependent upon your access rights. +Build on Red Hat/CentOS +----------------------- +On Red Hat and CentOS distributions the required packages can be installed with given `requirements.sh `_: +:: + ./requirements.sh + +You may need to use *sudo* upon your access rights. Build ----- diff --git a/REDHAT.rst b/REDHAT.rst index 1f5d1a3cd7..0dc41b66d2 100644 --- a/REDHAT.rst +++ b/REDHAT.rst @@ -10,7 +10,7 @@ Building and using FogLAMP on RedHat/CentOS ******************************************* -FogLAMP can be build or installed on Red Hat or CentOS, it is tested against: +FogLAMP can be build or installed on Red Hat or CentOS, it is currently tested against: - Red Hat 7.6 - CentOS 7.6-1810 From b250857ded120e1e96b71532138d623c814744de Mon Sep 17 00:00:00 2001 From: Stefano Date: Tue, 14 May 2019 14:38:34 +0200 Subject: [PATCH 131/161] FOGL-2815: update --- README.rst | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/README.rst b/README.rst index ba0072501b..d1c90d9603 100644 --- a/README.rst +++ b/README.rst @@ -84,8 +84,8 @@ The packages needed to build FogLAMP can be installed manually or automatically using the `requirements.sh `_ script. 
-Build on Ubuntu ---------------- +Install the requirements on Ubuntu +---------------------------------- On Ubuntu-based Linux distributions the packages can be installed with given `requirements.sh `_ or manual *apt-get*: :: @@ -98,11 +98,11 @@ On Ubuntu-based Linux distributions the packages can be installed with given `re You may need to use *sudo* to allow *apt-get* to install packages dependent upon your access rights. -Build on Red Hat/CentOS ------------------------ -On Red Hat and CentOS distributions the required packages can be installed with given `requirements.sh `_: +Install the requirements on Red Hat/CentOS +------------------------------------------ +On Red Hat and CentOS distributions the required packages can be installed automatically with given `requirements.sh `_: :: - ./requirements.sh + sh ./requirements.sh You may need to use *sudo* upon your access rights. @@ -112,8 +112,8 @@ Build To build FogLAMP run the command ``make`` in the top level directory. This will compile all the components that need to be compiled and will also create a runable structure of the Python code components of FogLAMP. **NOTE:** - - *The GCC compiler version 5.4 available in Ubuntu 16.04 LTS raises warnings. This is a known bug of the compiler and it can be ignored.* + - *openssl toolkit is a requirement if we want to use https based REST client and certificate based authentication.* Once the *make* has completed you can decide to test FogLAMP from your development environment or you can install it. 
From 048f7faef2cb1cda52be501a486d5fb874e359f3 Mon Sep 17 00:00:00 2001 From: Stefano Date: Tue, 14 May 2019 14:39:15 +0200 Subject: [PATCH 132/161] FOGL-2815: update --- README.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.rst b/README.rst index d1c90d9603..abd43ed616 100644 --- a/README.rst +++ b/README.rst @@ -84,7 +84,7 @@ The packages needed to build FogLAMP can be installed manually or automatically using the `requirements.sh `_ script. -Install the requirements on Ubuntu +Install the prerequisites on Ubuntu ---------------------------------- On Ubuntu-based Linux distributions the packages can be installed with given `requirements.sh `_ or manual *apt-get*: @@ -98,7 +98,7 @@ On Ubuntu-based Linux distributions the packages can be installed with given `re You may need to use *sudo* to allow *apt-get* to install packages dependent upon your access rights. -Install the requirements on Red Hat/CentOS +Install the prerequisites on Red Hat/CentOS ------------------------------------------ On Red Hat and CentOS distributions the required packages can be installed automatically with given `requirements.sh `_: :: From 706a55c1b63cab4bf3a8c018398ccbf009134531 Mon Sep 17 00:00:00 2001 From: Stefano Date: Tue, 14 May 2019 14:39:58 +0200 Subject: [PATCH 133/161] FOGL-2815: update --- README.rst | 4 ---- 1 file changed, 4 deletions(-) diff --git a/README.rst b/README.rst index abd43ed616..ff03eacdf6 100644 --- a/README.rst +++ b/README.rst @@ -80,10 +80,6 @@ FogLAMP can be build or installed in one of the following Linux distributions : - Red Hat - CentOS -The packages needed to build FogLAMP can be installed manually or automatically -using the `requirements.sh `_ script. 
- - Install the prerequisites on Ubuntu ---------------------------------- From 646222a7aea910d19c6a3a3736862cfbdbbb6884 Mon Sep 17 00:00:00 2001 From: Stefano Date: Tue, 14 May 2019 14:40:30 +0200 Subject: [PATCH 134/161] FOGL-2815: update --- README.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/README.rst b/README.rst index ff03eacdf6..233d20130e 100644 --- a/README.rst +++ b/README.rst @@ -108,6 +108,7 @@ Build To build FogLAMP run the command ``make`` in the top level directory. This will compile all the components that need to be compiled and will also create a runable structure of the Python code components of FogLAMP. **NOTE:** + - *The GCC compiler version 5.4 available in Ubuntu 16.04 LTS raises warnings. This is a known bug of the compiler and it can be ignored.* - *openssl toolkit is a requirement if we want to use https based REST client and certificate based authentication.* From 061bf41e78d93d5745cc3d5bb648db9c23171e5c Mon Sep 17 00:00:00 2001 From: Stefano Date: Tue, 14 May 2019 14:41:20 +0200 Subject: [PATCH 135/161] FOGL-2815: update --- README.rst | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/README.rst b/README.rst index 233d20130e..14f5b51e2a 100644 --- a/README.rst +++ b/README.rst @@ -81,7 +81,7 @@ FogLAMP can be build or installed in one of the following Linux distributions : - CentOS Install the prerequisites on Ubuntu ----------------------------------- +----------------------------------- On Ubuntu-based Linux distributions the packages can be installed with given `requirements.sh `_ or manual *apt-get*: :: @@ -95,7 +95,8 @@ On Ubuntu-based Linux distributions the packages can be installed with given `re You may need to use *sudo* to allow *apt-get* to install packages dependent upon your access rights. 
Install the prerequisites on Red Hat/CentOS ------------------------------------------- +------------------------------------------- + On Red Hat and CentOS distributions the required packages can be installed automatically with given `requirements.sh `_: :: sh ./requirements.sh From d90a1b63a0488258292c1c5f6ba0dc038d24a9d1 Mon Sep 17 00:00:00 2001 From: pintomax Date: Tue, 14 May 2019 16:16:39 +0200 Subject: [PATCH 136/161] Initial commit of a check for segfault test (#1554) Added test which traps SIGSEGV --- tests/unit/C/common/main.cpp | 4 ++-- tests/unit/C/common/test_config_category.cpp | 10 ++++++++++ 2 files changed, 12 insertions(+), 2 deletions(-) diff --git a/tests/unit/C/common/main.cpp b/tests/unit/C/common/main.cpp index 84c639c2c3..5a7062ecf5 100644 --- a/tests/unit/C/common/main.cpp +++ b/tests/unit/C/common/main.cpp @@ -8,9 +8,9 @@ using namespace std; int main(int argc, char **argv) { testing::InitGoogleTest(&argc, argv); - testing::GTEST_FLAG(repeat) = 2000; + testing::GTEST_FLAG(repeat) = 500; testing::GTEST_FLAG(shuffle) = true; - testing::GTEST_FLAG(break_on_failure) = true; + testing::GTEST_FLAG(death_test_style) = "threadsafe"; return RUN_ALL_TESTS(); } diff --git a/tests/unit/C/common/test_config_category.cpp b/tests/unit/C/common/test_config_category.cpp index 7bdddfc368..c0b332bcb1 100644 --- a/tests/unit/C/common/test_config_category.cpp +++ b/tests/unit/C/common/test_config_category.cpp @@ -599,3 +599,13 @@ TEST(CategoryTest, categoryValues) ASSERT_EQ(true, complex.getValue("plugin").compare("PI_Server_V2") == 0); ASSERT_EQ(true, complex.getValue("OMFMaxRetry").compare("3") == 0); } + +/** + * Check segfault + */ +TEST(CategoryTest, minMaxCheckSegFault) +{ +ASSERT_DEATH({ + raise(SIGSEGV); + }, ""); +} From 3681c36340df9c9c0c772fa98fe8a3341fe4e93d Mon Sep 17 00:00:00 2001 From: stefano Date: Tue, 14 May 2019 17:14:27 +0200 Subject: [PATCH 137/161] FOGL-2815: text fixed --- README.rst | 8 ++++---- 1 file changed, 4 insertions(+), 4 
deletions(-) diff --git a/README.rst b/README.rst index 14f5b51e2a..ed8df2ac44 100644 --- a/README.rst +++ b/README.rst @@ -73,12 +73,12 @@ FogLAMP is currently based on C/C++ and Python code. The packages needed to buil Linux distributions ------------------- -FogLAMP can be build or installed in one of the following Linux distributions : +FogLAMP can be built or installed in one of the following Linux distributions : -- Ubuntu +- Ubuntu 16.04 and Ubuntu 18.04 - Raspbian -- Red Hat -- CentOS +- Red Hat 7.6 +- CentOS 7.6 Install the prerequisites on Ubuntu ----------------------------------- From 1f906fb5e41b4f570ce86532e27831c0eddb1174 Mon Sep 17 00:00:00 2001 From: Praveen Garg Date: Wed, 15 May 2019 16:10:19 +0530 Subject: [PATCH 138/161] correct minimum python version is 3.5.3 --- RASPBIAN.rst | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/RASPBIAN.rst b/RASPBIAN.rst index 4eb45d8656..a60b1f70f9 100644 --- a/RASPBIAN.rst +++ b/RASPBIAN.rst @@ -6,7 +6,7 @@ Building and using FogLAMP on Raspbian ************************************** -FogLAMP requires the use of Python 3.5 in order to support the +FogLAMP requires the use of Python 3.5.3+ in order to support the asynchronous IO mechanisms used by FogLAMP. Earlier Raspberry Pi Raspbian distributions support Python 3.4 as the latest version of Python. In order to build and run FogLAMP on Raspbian the version of Python @@ -19,7 +19,7 @@ Check your Python version by running the command python3 --version |br| -If your version is less than 3.5 then follow the instructions below to update +If your version is less than 3.5.3 then follow the instructions below to update your Python version. 
Install and update the build tools required for Python to be built @@ -33,9 +33,9 @@ Install and update the build tools required for Python to be built Now build and install the new version of Python :: - wget https://www.python.org/ftp/python/3.5.2/Python-3.5.2.tgz - tar zxvf Python-3.5.2.tgz - cd Python-3.5.2 + wget https://www.python.org/ftp/python/3.5.3/Python-3.5.3.tgz + tar zxvf Python-3.5.3.tgz + cd Python-3.5.3 ./configure make sudo make install @@ -47,7 +47,7 @@ Confirm the Python version pip3 --version |br| -These should both return a version number as 3.5, if not then check which +These should both return a version number as 3.5.3+, if not then check which python3 and pip3 you are running and replace these with the newly built versions. This may be caused by the newly built version being installed in /usr/local/bin and the existing python3 and pip3 being From b72a238587550e8a41c6b9c65e9c341d9cadc24a Mon Sep 17 00:00:00 2001 From: ashish-jabble Date: Wed, 15 May 2019 19:48:05 +0530 Subject: [PATCH 139/161] requirements.sh run without sh as per new changes in --- tests/system/python/scripts/install_c_plugin | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/system/python/scripts/install_c_plugin b/tests/system/python/scripts/install_c_plugin index 3c210ccd25..abdc7d913d 100755 --- a/tests/system/python/scripts/install_c_plugin +++ b/tests/system/python/scripts/install_c_plugin @@ -43,7 +43,7 @@ clone_repo () { install_requirement (){ req_file=$(find /tmp/${REPO_NAME} -name requirement*.sh) - [[ ! -z "${req_file}" ]] && sh ${req_file} || echo "No external dependency needed for ${PLUGIN_NAME} plugin." + [[ ! -z "${req_file}" ]] && ./${req_file} || echo "No external dependency needed for ${PLUGIN_NAME} plugin." 
} install_binary_file () { From 3bc303d83e4cf1c53c37900fe110633908182f27 Mon Sep 17 00:00:00 2001 From: ashish-jabble Date: Thu, 16 May 2019 12:02:04 +0530 Subject: [PATCH 140/161] minor fix --- tests/system/python/scripts/install_c_plugin | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/system/python/scripts/install_c_plugin b/tests/system/python/scripts/install_c_plugin index abdc7d913d..0559e782d6 100755 --- a/tests/system/python/scripts/install_c_plugin +++ b/tests/system/python/scripts/install_c_plugin @@ -43,7 +43,7 @@ clone_repo () { install_requirement (){ req_file=$(find /tmp/${REPO_NAME} -name requirement*.sh) - [[ ! -z "${req_file}" ]] && ./${req_file} || echo "No external dependency needed for ${PLUGIN_NAME} plugin." + [[ ! -z "${req_file}" ]] && ${req_file} || echo "No external dependency needed for ${PLUGIN_NAME} plugin." } install_binary_file () { From 5dd55db809bf1bfb85bce3746abb76c0d09f54e5 Mon Sep 17 00:00:00 2001 From: pintomax Date: Fri, 17 May 2019 09:10:39 +0200 Subject: [PATCH 141/161] Fix unit tests in RHEL 7 (#1552) Fix unit tests in RHEL 7 --- tests/unit/C/cmake_sqlite/CMakeLists.txt | 12 +++++++++++- tests/unit/C/cmake_sqlite/Findsqlite3.cmake | 10 ++++++++-- .../sqlitememory/{tests.cpp => sqlmem_tests.cpp} | 0 3 files changed, 19 insertions(+), 3 deletions(-) rename tests/unit/C/plugins/storage/sqlitememory/{tests.cpp => sqlmem_tests.cpp} (100%) diff --git a/tests/unit/C/cmake_sqlite/CMakeLists.txt b/tests/unit/C/cmake_sqlite/CMakeLists.txt index 9c47264f6a..7563a7c7d5 100644 --- a/tests/unit/C/cmake_sqlite/CMakeLists.txt +++ b/tests/unit/C/cmake_sqlite/CMakeLists.txt @@ -6,6 +6,9 @@ set(CMAKE_CXX_FLAGS "-std=c++11 -O3") set(STORAGE_COMMON_LIB storage-common-lib) +# Path of compiled libsqlite3.a and .h files: /tmp/foglamp-sqlite3-pkg/src +set(FOGLAMP_SQLITE3_LIBS "/tmp/foglamp-sqlite3-pkg/src" CACHE INTERNAL "") + ## sqlitememory plugin include_directories(../../../../C/thirdparty/rapidjson/include) 
include_directories(../../../../C/common/include) @@ -31,6 +34,13 @@ add_library(${PROJECT_NAME} SHARED ${SOURCES} ${COMMON_SOURCES}) add_definitions(-DSQLITE_SPLIT_READINGS=1) add_definitions(-DPLUGIN_LOG_NAME="SQLite 3 in_memory") -target_link_libraries(${PROJECT_NAME} -lsqlite3) +if(EXISTS ${FOGLAMP_SQLITE3_LIBS}) + include_directories(${FOGLAMP_SQLITE3_LIBS}) + target_link_libraries(${PROJECT_NAME} -L"${FOGLAMP_SQLITE3_LIBS}/.libs" -lsqlite3) +else() + + target_link_libraries(${PROJECT_NAME} -lsqlite3) +endif() + target_link_libraries(${PROJECT_NAME} ${STORAGE_COMMON_LIB}) set_target_properties(${PROJECT_NAME} PROPERTIES SOVERSION 1) diff --git a/tests/unit/C/cmake_sqlite/Findsqlite3.cmake b/tests/unit/C/cmake_sqlite/Findsqlite3.cmake index c20a28888a..4a7d06b656 100644 --- a/tests/unit/C/cmake_sqlite/Findsqlite3.cmake +++ b/tests/unit/C/cmake_sqlite/Findsqlite3.cmake @@ -7,8 +7,14 @@ # SQLITE_VERSION - Library version set(SQLITE_MIN_VERSION "3.11.0") -find_path(SQLITE_INCLUDE_DIR sqlite3.h) -find_library(SQLITE_LIBRARIES NAMES libsqlite3.so) +# Check wether path of compiled libsqlite3.a and .h files exists +if (EXISTS ${FOGLAMP_SQLITE3_LIBS}) + find_path(SQLITE_INCLUDE_DIR sqlite3.h PATHS ${FOGLAMP_SQLITE3_LIBS}) + find_library(SQLITE_LIBRARIES NAMES libsqlite3.a PATHS "${FOGLAMP_SQLITE3_LIBS}/.libs") +else() + find_path(SQLITE_INCLUDE_DIR sqlite3.h) + find_library(SQLITE_LIBRARIES NAMES libsqlite3.so) +endif() if (SQLITE_INCLUDE_DIR AND SQLITE_LIBRARIES) execute_process(COMMAND grep ".*#define.*SQLITE_VERSION " ${SQLITE_INCLUDE_DIR}/sqlite3.h diff --git a/tests/unit/C/plugins/storage/sqlitememory/tests.cpp b/tests/unit/C/plugins/storage/sqlitememory/sqlmem_tests.cpp similarity index 100% rename from tests/unit/C/plugins/storage/sqlitememory/tests.cpp rename to tests/unit/C/plugins/storage/sqlitememory/sqlmem_tests.cpp From b78869ee4c8d54972ebde74bf9d13ed9ef522d5b Mon Sep 17 00:00:00 2001 From: amarendra-dianomic Date: Fri, 17 May 2019 18:13:18 +0530 Subject: 
[PATCH 142/161] FOGL-2641 - Service announcer modified to use zeroconf lib instead avahi/dbus lib --- .../services/common/service_announcer.py | 77 +++++++++---------- python/foglamp/services/core/server.py | 11 +-- 2 files changed, 43 insertions(+), 45 deletions(-) diff --git a/python/foglamp/services/common/service_announcer.py b/python/foglamp/services/common/service_announcer.py index c69ea94dba..c86add95e8 100644 --- a/python/foglamp/services/common/service_announcer.py +++ b/python/foglamp/services/common/service_announcer.py @@ -6,52 +6,49 @@ """Common FoglampMicroservice Class""" -import foglamp.services.common.avahi as avahi + +import socket +from zeroconf import ServiceInfo, Zeroconf + from foglamp.common import logger -import dbus -__author__ = "Mark Riddoch" + +__author__ = "Mark Riddoch, Amarendra K Sinha" __copyright__ = "Copyright (c) 2017 OSIsoft, LLC" __license__ = "Apache 2.0" __version__ = "${VERSION}" _LOGGER = logger.setup(__name__) + class ServiceAnnouncer: - _service_name = None - """ The name of the service to advertise """ - - _group = None - """ The Avahi group """ - - def __init__(self, name, service, port, txt): - try: - bus = dbus.SystemBus() - server = dbus.Interface(bus.get_object(avahi.DBUS_NAME, avahi.DBUS_PATH_SERVER), avahi.DBUS_INTERFACE_SERVER) - self._group = dbus.Interface(bus.get_object(avahi.DBUS_NAME, server.EntryGroupNew()), - avahi.DBUS_INTERFACE_ENTRY_GROUP) - - self._service_name = name - index = 1 - while True: - try: - self._group.AddService(avahi.IF_UNSPEC, avahi.PROTO_INET, 0, self._service_name, service, '', '', port, - avahi.string_array_to_txt_array(txt)) - except dbus.DBusException: # name collision -> rename - index += 1 - self._service_name = '%s #%s' % (name, str(index)) - else: - break - - self._group.Commit() - except Exception: - _LOGGER.error("Avahi not available, continuing without service discovery available") - - @property - def get_service_name(self): - return self._service_name - - def 
unregister(self): - if self._group is not None: - self._group.Reset() - self._group = None + def __init__(self, sname, stype, port, txt): + host_name = socket.gethostname() + host = socket.gethostbyname(host_name) + service_name = "_" + sname.lower() + "._tcp.local." + desc = {'path': '/~paulsm/'} # TODO: Change + _LOGGER.error(">>>>>>>>>>>>>>>>> %s", service_name) + """Create a service description. + type_: fully qualified service type name + name: fully qualified service name + address: IP address as unsigned short, network byte order + port: port that the service runs on + weight: weight of the service + priority: priority of the service + properties: dictionary of properties (or a string holding the + bytes for the text field) + server: fully qualified name for service host (defaults to name) + host_ttl: ttl used for A/SRV records + other_ttl: ttl used for PTR/TXT records""" + info = ServiceInfo( + "_foglamp-manage._tcp.local.", + host_name + "._foglamp-manage._tcp.local.", + socket.inet_aton(host), + port, + 0, + 0, + desc, + "foglamp.local.", + ) + zeroconf = Zeroconf() + zeroconf.register_service(info) diff --git a/python/foglamp/services/core/server.py b/python/foglamp/services/core/server.py index ccc212fd0d..db2da24755 100755 --- a/python/foglamp/services/core/server.py +++ b/python/foglamp/services/core/server.py @@ -719,7 +719,7 @@ def _start_core(cls, loop=None): # to allow other microservices to find FogLAMP loop.run_until_complete(cls.service_config()) _logger.info('Announce management API service') - cls.management_announcer = ServiceAnnouncer('core.{}'.format(cls._service_name), cls._MANAGEMENT_SERVICE, cls.core_management_port, + cls.management_announcer = ServiceAnnouncer(cls._service_name, cls._MANAGEMENT_SERVICE, cls.core_management_port, ['The FogLAMP Core REST API']) cls.service_server, cls.service_server_handler = cls._start_app(loop, cls.service_app, host, cls.rest_server_port, ssl_ctx=ssl_ctx) @@ -732,10 +732,11 @@ def _start_core(cls, 
loop=None): address, service_server_port) # All services are up so now we can advertise the Admin and User REST API's - cls.admin_announcer = ServiceAnnouncer(cls._service_name, cls._ADMIN_API_SERVICE, service_server_port, - [cls._service_description]) - cls.user_announcer = ServiceAnnouncer(cls._service_name, cls._USER_API_SERVICE, service_server_port, - [cls._service_description]) + # cls.admin_announcer = ServiceAnnouncer(cls._service_name, cls._ADMIN_API_SERVICE, service_server_port, + # [cls._service_description]) + # cls.user_announcer = ServiceAnnouncer(cls._service_name, cls._USER_API_SERVICE, service_server_port, + # [cls._service_description]) + # register core # a service with 2 web server instance, # registering now only when service_port is ready to listen the request From 67c9d14f81273749f3eaf73f740bb119881984c3 Mon Sep 17 00:00:00 2001 From: amarendra-dianomic Date: Fri, 17 May 2019 18:34:46 +0530 Subject: [PATCH 143/161] More refactoring/fixes --- .../foglamp/services/common/service_announcer.py | 14 +++++++++++++- python/requirements.txt | 3 +++ 2 files changed, 16 insertions(+), 1 deletion(-) diff --git a/python/foglamp/services/common/service_announcer.py b/python/foglamp/services/common/service_announcer.py index c86add95e8..9b481e0263 100644 --- a/python/foglamp/services/common/service_announcer.py +++ b/python/foglamp/services/common/service_announcer.py @@ -24,7 +24,7 @@ class ServiceAnnouncer: def __init__(self, sname, stype, port, txt): host_name = socket.gethostname() - host = socket.gethostbyname(host_name) + host = self.get_ip() service_name = "_" + sname.lower() + "._tcp.local." 
desc = {'path': '/~paulsm/'} # TODO: Change _LOGGER.error(">>>>>>>>>>>>>>>>> %s", service_name) @@ -52,3 +52,15 @@ def __init__(self, sname, stype, port, txt): ) zeroconf = Zeroconf() zeroconf.register_service(info) + + def get_ip(self): + s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) + try: + # doesn't even have to be reachable + s.connect(('10.255.255.255', 1)) + IP = s.getsockname()[0] + except: + IP = '127.0.0.1' + finally: + s.close() + return IP diff --git a/python/requirements.txt b/python/requirements.txt index d73941ebb0..b82b883351 100644 --- a/python/requirements.txt +++ b/python/requirements.txt @@ -6,3 +6,6 @@ pyjwt==1.6.4 # Transformation of data, Apply JqFilter pyjq==2.3.1 + +# Foglamp discovery +zeroconf==0.22.0 From f080c119e59cab3ebbd5db520210d497d2a446ad Mon Sep 17 00:00:00 2001 From: amarendra-dianomic Date: Fri, 17 May 2019 22:01:16 +0530 Subject: [PATCH 144/161] Feedback changes --- .../foglamp/services/common/service_announcer.py | 11 +++++------ python/foglamp/services/core/server.py | 16 ++++++++-------- 2 files changed, 13 insertions(+), 14 deletions(-) diff --git a/python/foglamp/services/common/service_announcer.py b/python/foglamp/services/common/service_announcer.py index 9b481e0263..68cad772be 100644 --- a/python/foglamp/services/common/service_announcer.py +++ b/python/foglamp/services/common/service_announcer.py @@ -25,9 +25,8 @@ class ServiceAnnouncer: def __init__(self, sname, stype, port, txt): host_name = socket.gethostname() host = self.get_ip() - service_name = "_" + sname.lower() + "._tcp.local." - desc = {'path': '/~paulsm/'} # TODO: Change - _LOGGER.error(">>>>>>>>>>>>>>>>> %s", service_name) + service_name = "{}_{}.{}".format(host_name, sname, stype) + desc = {'serviceDescription': 'dummy' if txt[0] is None else txt[0]} """Create a service description. 
type_: fully qualified service type name name: fully qualified service name @@ -41,14 +40,14 @@ def __init__(self, sname, stype, port, txt): host_ttl: ttl used for A/SRV records other_ttl: ttl used for PTR/TXT records""" info = ServiceInfo( - "_foglamp-manage._tcp.local.", - host_name + "._foglamp-manage._tcp.local.", + stype, + service_name, socket.inet_aton(host), port, 0, 0, desc, - "foglamp.local.", + "{}.local.".format(host_name), ) zeroconf = Zeroconf() zeroconf.register_service(info) diff --git a/python/foglamp/services/core/server.py b/python/foglamp/services/core/server.py index db2da24755..26f915a9a7 100755 --- a/python/foglamp/services/core/server.py +++ b/python/foglamp/services/core/server.py @@ -100,13 +100,13 @@ class Server: } } - _MANAGEMENT_SERVICE = '_foglamp-manage._tcp' + _MANAGEMENT_SERVICE = '_foglamp-manage._tcp.local.' """ The management service we advertise """ - _ADMIN_API_SERVICE = '_foglamp-admin._tcp' + _ADMIN_API_SERVICE = '_foglamp-admin._tcp.local.' """ The admin REST service we advertise """ - _USER_API_SERVICE = '_foglamp-user._tcp' + _USER_API_SERVICE = '_foglamp-user._tcp.local.' 
""" The user REST service we advertise """ admin_announcer = None @@ -719,7 +719,7 @@ def _start_core(cls, loop=None): # to allow other microservices to find FogLAMP loop.run_until_complete(cls.service_config()) _logger.info('Announce management API service') - cls.management_announcer = ServiceAnnouncer(cls._service_name, cls._MANAGEMENT_SERVICE, cls.core_management_port, + cls.management_announcer = ServiceAnnouncer("core_{}".format(cls._service_name), cls._MANAGEMENT_SERVICE, cls.core_management_port, ['The FogLAMP Core REST API']) cls.service_server, cls.service_server_handler = cls._start_app(loop, cls.service_app, host, cls.rest_server_port, ssl_ctx=ssl_ctx) @@ -732,10 +732,10 @@ def _start_core(cls, loop=None): address, service_server_port) # All services are up so now we can advertise the Admin and User REST API's - # cls.admin_announcer = ServiceAnnouncer(cls._service_name, cls._ADMIN_API_SERVICE, service_server_port, - # [cls._service_description]) - # cls.user_announcer = ServiceAnnouncer(cls._service_name, cls._USER_API_SERVICE, service_server_port, - # [cls._service_description]) + cls.admin_announcer = ServiceAnnouncer(cls._service_name, cls._ADMIN_API_SERVICE, service_server_port, + [cls._service_description]) + cls.user_announcer = ServiceAnnouncer(cls._service_name, cls._USER_API_SERVICE, service_server_port, + [cls._service_description]) # register core # a service with 2 web server instance, From 5c532c2201cf26d11f1ba944adce6c633c58364b Mon Sep 17 00:00:00 2001 From: amarendra-dianomic Date: Sat, 18 May 2019 18:47:42 +0530 Subject: [PATCH 145/161] More fixes and refactoring --- README.rst | 3 +- docs/building_foglamp/building_foglamp.rst | 3 +- python/foglamp/services/common/avahi.py | 112 ------------------ .../services/common/service_announcer.py | 7 +- python/foglamp/services/core/server.py | 6 +- .../foglamp/services/common/test_avahi.py | 64 ---------- 6 files changed, 9 insertions(+), 186 deletions(-) delete mode 100644 
python/foglamp/services/common/avahi.py delete mode 100644 tests/unit/python/foglamp/services/common/test_avahi.py diff --git a/README.rst b/README.rst index 4878628ee7..334c50a139 100644 --- a/README.rst +++ b/README.rst @@ -61,7 +61,6 @@ FogLAMP is currently based on C/C++ and Python code. The packages needed to buil - libz-dev - make - postgresql -- python-dbus - python3-pip - python-dev - python3-dev @@ -74,7 +73,7 @@ On Ubuntu-based Linux distributions the packages can be installed with given `re apt-get install avahi-daemon curl apt-get install cmake g++ make build-essential autoconf automake uuid-dev apt-get install libtool libboost-dev libboost-system-dev libboost-thread-dev libpq-dev libssl-dev libz-dev - apt-get install python-dbus python-dev python3-dev python3-pip + apt-get install python-dev python3-dev python3-pip apt-get install postgresql apt-get install sqlite3 libsqlite3-dev diff --git a/docs/building_foglamp/building_foglamp.rst b/docs/building_foglamp/building_foglamp.rst index 5dd689059d..4578302e71 100644 --- a/docs/building_foglamp/building_foglamp.rst +++ b/docs/building_foglamp/building_foglamp.rst @@ -87,7 +87,6 @@ FogLAMP is currently based on C/C++ and Python code. The packages needed to buil - libz-dev - make - postgresql -- python3-dbus - python3-dev - python3-pip - python3-setuptools @@ -116,7 +115,7 @@ FogLAMP is currently based on C/C++ and Python code. The packages needed to buil Building dependency tree ... $ - $ sudo apt-get install python3-dev python3-pip python3-dbus python3-setuptools + $ sudo apt-get install python3-dev python3-pip python3-setuptools Reading package lists... Done Building dependency tree ... diff --git a/python/foglamp/services/common/avahi.py b/python/foglamp/services/common/avahi.py deleted file mode 100644 index 7b4502937a..0000000000 --- a/python/foglamp/services/common/avahi.py +++ /dev/null @@ -1,112 +0,0 @@ -# This file is part of avahi. 
-# -# avahi is free software; you can redistribute it and/or modify it -# under the terms of the GNU Lesser General Public License as -# published by the Free Software Foundation; either version 2 of the -# License, or (at your option) any later version. -# -# avahi is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY -# or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public -# License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with avahi; if not, write to the Free Software -# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 -# USA. - -# Some definitions matching those in avahi-common/defs.h - -import dbus - -SERVER_INVALID, SERVER_REGISTERING, SERVER_RUNNING, SERVER_COLLISION, SERVER_FAILURE = range(0, 5) - -ENTRY_GROUP_UNCOMMITED, ENTRY_GROUP_REGISTERING, ENTRY_GROUP_ESTABLISHED, ENTRY_GROUP_COLLISION, ENTRY_GROUP_FAILURE = range(0, 5) - -DOMAIN_BROWSER_BROWSE, DOMAIN_BROWSER_BROWSE_DEFAULT, DOMAIN_BROWSER_REGISTER, DOMAIN_BROWSER_REGISTER_DEFAULT, DOMAIN_BROWSER_BROWSE_LEGACY = range(0, 5) - -PROTO_UNSPEC, PROTO_INET, PROTO_INET6 = -1, 0, 1 - -IF_UNSPEC = -1 - -PUBLISH_UNIQUE = 1 -PUBLISH_NO_PROBE = 2 -PUBLISH_NO_ANNOUNCE = 4 -PUBLISH_ALLOW_MULTIPLE = 8 -PUBLISH_NO_REVERSE = 16 -PUBLISH_NO_COOKIE = 32 -PUBLISH_UPDATE = 64 -PUBLISH_USE_WIDE_AREA = 128 -PUBLISH_USE_MULTICAST = 256 - -LOOKUP_USE_WIDE_AREA = 1 -LOOKUP_USE_MULTICAST = 2 -LOOKUP_NO_TXT = 4 -LOOKUP_NO_ADDRESS = 8 - -LOOKUP_RESULT_CACHED = 1 -LOOKUP_RESULT_WIDE_AREA = 2 -LOOKUP_RESULT_MULTICAST = 4 -LOOKUP_RESULT_LOCAL = 8 -LOOKUP_RESULT_OUR_OWN = 16 -LOOKUP_RESULT_STATIC = 32 - -SERVICE_COOKIE = "org.freedesktop.Avahi.cookie" -SERVICE_COOKIE_INVALID = 0 - -DBUS_NAME = "org.freedesktop.Avahi" -DBUS_INTERFACE_SERVER = DBUS_NAME + ".Server" -DBUS_PATH_SERVER = "/" -DBUS_INTERFACE_ENTRY_GROUP = DBUS_NAME + ".EntryGroup" 
-DBUS_INTERFACE_DOMAIN_BROWSER = DBUS_NAME + ".DomainBrowser" -DBUS_INTERFACE_SERVICE_TYPE_BROWSER = DBUS_NAME + ".ServiceTypeBrowser" -DBUS_INTERFACE_SERVICE_BROWSER = DBUS_NAME + ".ServiceBrowser" -DBUS_INTERFACE_ADDRESS_RESOLVER = DBUS_NAME + ".AddressResolver" -DBUS_INTERFACE_HOST_NAME_RESOLVER = DBUS_NAME + ".HostNameResolver" -DBUS_INTERFACE_SERVICE_RESOLVER = DBUS_NAME + ".ServiceResolver" -DBUS_INTERFACE_RECORD_BROWSER = DBUS_NAME + ".RecordBrowser" - -def byte_array_to_string(s): - r = "" - - for c in s: - - if c >= 32 and c < 127: - r += "%c" % c - else: - r += "." - - return r - -def txt_array_to_string_array(t): - l = [] - - for s in t: - l.append(byte_array_to_string(s)) - - return l - - -def string_to_byte_array(s): - r = [] - - for c in s: - r.append(dbus.Byte(ord(c))) - - return r - -def string_array_to_txt_array(t): - l = [] - - for s in t: - l.append(string_to_byte_array(s)) - - return l - -def dict_to_txt_array(txt_dict): - l = [] - - for k,v in txt_dict.items(): - l.append(string_to_byte_array("%s=%s" % (k,v))) - - return l diff --git a/python/foglamp/services/common/service_announcer.py b/python/foglamp/services/common/service_announcer.py index 68cad772be..08abd11c31 100644 --- a/python/foglamp/services/common/service_announcer.py +++ b/python/foglamp/services/common/service_announcer.py @@ -25,8 +25,9 @@ class ServiceAnnouncer: def __init__(self, sname, stype, port, txt): host_name = socket.gethostname() host = self.get_ip() - service_name = "{}_{}.{}".format(host_name, sname, stype) - desc = {'serviceDescription': 'dummy' if txt[0] is None else txt[0]} + service_name = "{}_{}.{}".format(sname, host.split(".")[3], stype) + desc_txt = txt[0] if isinstance(txt, list) and txt[0] is not None else 'FogLAMP Service' + desc = {'serviceDescription': desc_txt} """Create a service description. 
type_: fully qualified service type name name: fully qualified service name @@ -50,7 +51,7 @@ def __init__(self, sname, stype, port, txt): "{}.local.".format(host_name), ) zeroconf = Zeroconf() - zeroconf.register_service(info) + zeroconf.register_service(info, allow_name_change=True) def get_ip(self): s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) diff --git a/python/foglamp/services/core/server.py b/python/foglamp/services/core/server.py index 26f915a9a7..a659a2d368 100755 --- a/python/foglamp/services/core/server.py +++ b/python/foglamp/services/core/server.py @@ -719,7 +719,7 @@ def _start_core(cls, loop=None): # to allow other microservices to find FogLAMP loop.run_until_complete(cls.service_config()) _logger.info('Announce management API service') - cls.management_announcer = ServiceAnnouncer("core_{}".format(cls._service_name), cls._MANAGEMENT_SERVICE, cls.core_management_port, + cls.management_announcer = ServiceAnnouncer("core", cls._MANAGEMENT_SERVICE, cls.core_management_port, ['The FogLAMP Core REST API']) cls.service_server, cls.service_server_handler = cls._start_app(loop, cls.service_app, host, cls.rest_server_port, ssl_ctx=ssl_ctx) @@ -732,9 +732,9 @@ def _start_core(cls, loop=None): address, service_server_port) # All services are up so now we can advertise the Admin and User REST API's - cls.admin_announcer = ServiceAnnouncer(cls._service_name, cls._ADMIN_API_SERVICE, service_server_port, + cls.admin_announcer = ServiceAnnouncer("admin", cls._ADMIN_API_SERVICE, service_server_port, [cls._service_description]) - cls.user_announcer = ServiceAnnouncer(cls._service_name, cls._USER_API_SERVICE, service_server_port, + cls.user_announcer = ServiceAnnouncer("user", cls._USER_API_SERVICE, service_server_port, [cls._service_description]) # register core diff --git a/tests/unit/python/foglamp/services/common/test_avahi.py b/tests/unit/python/foglamp/services/common/test_avahi.py deleted file mode 100644 index 9a4e7f3dd7..0000000000 --- 
a/tests/unit/python/foglamp/services/common/test_avahi.py +++ /dev/null @@ -1,64 +0,0 @@ -# -*- coding: utf-8 -*- - -# FOGLAMP_BEGIN -# See: http://foglamp.readthedocs.io/ -# FOGLAMP_END - -import pytest -import foglamp.services.common.avahi as avahi - -def test_byte_array_to_string(): - array = [104,101,108,108,111] - str = avahi.byte_array_to_string(array) - assert str == 'hello' - -def test_byte_array_to_string_unprintable(): - array = [104,101,108,108,111,12] - str = avahi.byte_array_to_string(array) - assert str == 'hello.' - -def test_txt_array_string_array(): - a1 = [104,101,108,108,111] - a2 = [104,101,108,108,111] - strs = avahi.txt_array_to_string_array([a1, a2]) - assert strs[0] == 'hello' - assert strs[1] == 'hello' - -def test_string_to_byte_array(): - array = avahi.string_to_byte_array('hello') - assert array[0] == 104 - assert array[1] == 101 - assert array[2] == 108 - assert array[3] == 108 - assert array[4] == 111 - -def test_string_array_to_txt_array(): - arrays = avahi.string_array_to_txt_array(['hello','hello']) - array = arrays[0] - assert array[0] == 104 - assert array[1] == 101 - assert array[2] == 108 - assert array[3] == 108 - assert array[4] == 111 - array = arrays[1] - assert array[0] == 104 - assert array[1] == 101 - assert array[2] == 108 - assert array[3] == 108 - assert array[4] == 111 - -def test_dict_to_txt_array(): - dict = { "hello" : "world" } - arrays = avahi.dict_to_txt_array(dict) - array = arrays[0] - assert array[0] == 104 - assert array[1] == 101 - assert array[2] == 108 - assert array[3] == 108 - assert array[4] == 111 - assert array[5] == 61 - assert array[6] == 119 - assert array[7] == 111 - assert array[8] == 114 - assert array[9] == 108 - assert array[10] == 100 From 29294d5d1c09b9815754a03765ad416125111272 Mon Sep 17 00:00:00 2001 From: Praveen Garg Date: Sat, 18 May 2019 23:34:14 +0530 Subject: [PATCH 146/161] name and desc fixes for service and readme cleanup for dep --- README.rst | 2 +- 
docs/building_foglamp/building_foglamp.rst | 50 +------------------ .../services/common/service_announcer.py | 25 ++++++---- python/foglamp/services/core/server.py | 6 +-- requirements.sh | 8 ++- 5 files changed, 22 insertions(+), 69 deletions(-) diff --git a/README.rst b/README.rst index 334c50a139..55f01d5641 100644 --- a/README.rst +++ b/README.rst @@ -70,7 +70,7 @@ FogLAMP is currently based on C/C++ and Python code. The packages needed to buil On Ubuntu-based Linux distributions the packages can be installed with given `requirements.sh `_ or manual *apt-get*: :: - apt-get install avahi-daemon curl + apt-get install curl apt-get install cmake g++ make build-essential autoconf automake uuid-dev apt-get install libtool libboost-dev libboost-system-dev libboost-thread-dev libpq-dev libssl-dev libz-dev apt-get install python-dev python3-dev python3-pip diff --git a/docs/building_foglamp/building_foglamp.rst b/docs/building_foglamp/building_foglamp.rst index 4578302e71..b9c42258ae 100644 --- a/docs/building_foglamp/building_foglamp.rst +++ b/docs/building_foglamp/building_foglamp.rst @@ -45,7 +45,6 @@ General Requirements This version of FogLAMP requires the following software to be installed in the same environment: -- **Avahi 0.6.32+** - **Python 3.5.3+** - **PostgreSQL 9.5+** - **SQLite 3.11+** @@ -71,7 +70,6 @@ FogLAMP is currently based on C/C++ and Python code. The packages needed to buil - autoconf - automake -- avahi-daemon - build-essential - cmake - curl @@ -100,7 +98,7 @@ FogLAMP is currently based on C/C++ and Python code. The packages needed to buil ... All packages are up-to-date. $ - $ sudo apt-get install avahi-daemon curl git cmake g++ make build-essential autoconf automake + $ sudo apt-get install curl git cmake g++ make build-essential autoconf automake Reading package lists... Done Building dependency tree ... 
@@ -641,52 +639,6 @@ First, clone the Github repository with the usual command: |br| ``git clone http We need to apply these changes to *C/plugins/storage/postgres/CMakeLists.txt*: - Replace |br| ``include_directories(../../../thirdparty/rapidjson/include /usr/include/postgresql)`` |br| with: |br| ``include_directories(../../../thirdparty/rapidjson/include /usr/pgsql-9.6/include)`` |br| ``link_directories(/usr/pgsql-9.6/lib)`` |br| -- Replace the content of *python/foglamp/services/common/service_announcer.py* with this code: - -.. code-block:: python - - # -*- coding: utf-8 -*- - # FOGLAMP_BEGIN - # See: http://foglamp.readthedocs.io/ - # FOGLAMP_END - """Common FoglampMicroservice Class""" - - import foglamp.services.common.avahi as avahi - from foglamp.common import logger - - _LOGGER = logger.setup(__name__) - - class ServiceAnnouncer: - _service_name = None - """ The name of the service to advertise """ - - _group = None - """ The Avahi group """ - - def __init__(self, name, service, port, txt): - - self._service_name = name - _LOGGER.error("Avahi not available, continuing without service discovery available") - - @property - def get_service_name(self): - return self._service_name - - def unregister(self): - if self._group is not None: - self._group.Reset() - self._group = None - -Finally, in *python/foglamp/services/common/avahi.py*, comment these lines: - -.. code-block:: python - - # import dbus - - <<< In the function string_to_byte_array(s) comment: >>> - # for c in s: - # r.append(dbus.Byte(ord(c))) - You are now ready to execute the ``make`` command, as described here_. 
diff --git a/python/foglamp/services/common/service_announcer.py b/python/foglamp/services/common/service_announcer.py index 08abd11c31..d4457600c8 100644 --- a/python/foglamp/services/common/service_announcer.py +++ b/python/foglamp/services/common/service_announcer.py @@ -25,10 +25,15 @@ class ServiceAnnouncer: def __init__(self, sname, stype, port, txt): host_name = socket.gethostname() host = self.get_ip() - service_name = "{}_{}.{}".format(sname, host.split(".")[3], stype) - desc_txt = txt[0] if isinstance(txt, list) and txt[0] is not None else 'FogLAMP Service' - desc = {'serviceDescription': desc_txt} - """Create a service description. + service_name = "{}.{}".format(sname, stype) + desc_txt = 'FogLAMP Service' + if isinstance(txt, list): + try: + desc_txt = txt[0] + except: + pass + desc = {'description': desc_txt} + """ Create a service description. type_: fully qualified service type name name: fully qualified service name address: IP address as unsigned short, network byte order @@ -45,10 +50,8 @@ def __init__(self, sname, stype, port, txt): service_name, socket.inet_aton(host), port, - 0, - 0, - desc, - "{}.local.".format(host_name), + properties=desc, + server="{}.local.".format(host_name) ) zeroconf = Zeroconf() zeroconf.register_service(info, allow_name_change=True) @@ -58,9 +61,9 @@ def get_ip(self): try: # doesn't even have to be reachable s.connect(('10.255.255.255', 1)) - IP = s.getsockname()[0] + ip = s.getsockname()[0] except: - IP = '127.0.0.1' + ip = '127.0.0.1' finally: s.close() - return IP + return ip diff --git a/python/foglamp/services/core/server.py b/python/foglamp/services/core/server.py index a659a2d368..80836f17fd 100755 --- a/python/foglamp/services/core/server.py +++ b/python/foglamp/services/core/server.py @@ -719,7 +719,7 @@ def _start_core(cls, loop=None): # to allow other microservices to find FogLAMP loop.run_until_complete(cls.service_config()) _logger.info('Announce management API service') - cls.management_announcer = 
ServiceAnnouncer("core", cls._MANAGEMENT_SERVICE, cls.core_management_port, + cls.management_announcer = ServiceAnnouncer("core-{}".format(cls._service_name), cls._MANAGEMENT_SERVICE, cls.core_management_port, ['The FogLAMP Core REST API']) cls.service_server, cls.service_server_handler = cls._start_app(loop, cls.service_app, host, cls.rest_server_port, ssl_ctx=ssl_ctx) @@ -732,9 +732,9 @@ def _start_core(cls, loop=None): address, service_server_port) # All services are up so now we can advertise the Admin and User REST API's - cls.admin_announcer = ServiceAnnouncer("admin", cls._ADMIN_API_SERVICE, service_server_port, + cls.admin_announcer = ServiceAnnouncer(cls._service_name, cls._ADMIN_API_SERVICE, service_server_port, [cls._service_description]) - cls.user_announcer = ServiceAnnouncer("user", cls._USER_API_SERVICE, service_server_port, + cls.user_announcer = ServiceAnnouncer(cls._service_name, cls._USER_API_SERVICE, service_server_port, [cls._service_description]) # register core diff --git a/requirements.sh b/requirements.sh index afe5a24a35..51eb04136b 100755 --- a/requirements.sh +++ b/requirements.sh @@ -47,12 +47,10 @@ if [[ ( $os_name == *"Red Hat"* || $os_name == *"CentOS"* ) && $os_version == * yum install -y git yum install -y cmake yum install -y libuuid-devel - yum install -y dbus-devel echo "source scl_source enable rh-python36" >> /home/${SUDO_USER}/.bashrc su - </dev/null; then - apt install -y avahi-daemon curl + apt install -y curl apt install -y cmake g++ make build-essential autoconf automake uuid-dev apt install -y libtool libboost-dev libboost-system-dev libboost-thread-dev libpq-dev libssl-dev libz-dev - apt install -y python-dbus python-dev python3-dev python3-pip + apt install -y python-dev python3-dev python3-pip apt install -y sqlite3 libsqlite3-dev apt install -y pkg-config # sudo apt install -y postgresql From 5b7efdd2595a63bc3def3d9fe109cc4a853d95ae Mon Sep 17 00:00:00 2001 From: stefano Date: Mon, 20 May 2019 09:48:26 +0200 Subject: 
[PATCH 147/161] FOGL-2815: doc update --- REDHAT.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/REDHAT.rst b/REDHAT.rst index 0dc41b66d2..26f31a1849 100644 --- a/REDHAT.rst +++ b/REDHAT.rst @@ -3,7 +3,7 @@
.. Links -.. _download page: https://foglamp.readthedocs.io/en/master/92_downloads.html +.. _download page: http://dianomic.com/download-packages .. _foglamp-pkg: https://github.com/foglamp/foglamp-pkg ******************************************* @@ -13,7 +13,7 @@ Building and using FogLAMP on RedHat/CentOS FogLAMP can be build or installed on Red Hat or CentOS, it is currently tested against: - Red Hat 7.6 -- CentOS 7.6-1810 +- CentOS 7.6 You may follow the instructions in the README file to build, install and run FogLAMP on Red Hat or CentOS. From 341800b19505cc2935ca75fa16f63c6b784dc48a Mon Sep 17 00:00:00 2001 From: stefano Date: Mon, 20 May 2019 10:08:40 +0200 Subject: [PATCH 148/161] FOGL-2815: doc update --- README.rst | 2 +- RASPBIAN.rst => docs/RASPBIAN.rst | 0 REDHAT.rst => docs/REDHAT.rst | 0 docs/index.rst | 2 ++ requirements.sh | 2 +- 5 files changed, 4 insertions(+), 2 deletions(-) rename RASPBIAN.rst => docs/RASPBIAN.rst (100%) rename REDHAT.rst => docs/REDHAT.rst (100%) diff --git a/README.rst b/README.rst index ed8df2ac44..6e38b72eaa 100644 --- a/README.rst +++ b/README.rst @@ -99,7 +99,7 @@ Install the prerequisites on Red Hat/CentOS On Red Hat and CentOS distributions the required packages can be installed automatically with given `requirements.sh `_: :: - sh ./requirements.sh + ./requirements.sh You may need to use *sudo* upon your access rights. diff --git a/RASPBIAN.rst b/docs/RASPBIAN.rst similarity index 100% rename from RASPBIAN.rst rename to docs/RASPBIAN.rst diff --git a/REDHAT.rst b/docs/REDHAT.rst similarity index 100% rename from REDHAT.rst rename to docs/REDHAT.rst diff --git a/docs/index.rst b/docs/index.rst index 93a8d773fa..d63cf88365 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -16,4 +16,6 @@ Welcome to FogLAMP's documentation! 
building_foglamp/index 91_version_history 92_downloads + RASPBIAN + REDHAT diff --git a/requirements.sh b/requirements.sh index afe5a24a35..e3a1335398 100755 --- a/requirements.sh +++ b/requirements.sh @@ -1,4 +1,4 @@ -#!//usr/bin/env bash +#!/usr/bin/env bash ##-------------------------------------------------------------------- ## Copyright (c) 2019 Dianomic Systems From 7fa7b0258f66e8b574f3d5e791d0379ba9af17a0 Mon Sep 17 00:00:00 2001 From: Praveen Garg Date: Mon, 20 May 2019 14:31:05 +0530 Subject: [PATCH 149/161] avahi daemon requirement restored --- README.rst | 2 +- docs/building_foglamp/building_foglamp.rst | 4 +++- requirements.sh | 6 +----- 3 files changed, 5 insertions(+), 7 deletions(-) diff --git a/README.rst b/README.rst index 55f01d5641..334c50a139 100644 --- a/README.rst +++ b/README.rst @@ -70,7 +70,7 @@ FogLAMP is currently based on C/C++ and Python code. The packages needed to buil On Ubuntu-based Linux distributions the packages can be installed with given `requirements.sh `_ or manual *apt-get*: :: - apt-get install curl + apt-get install avahi-daemon curl apt-get install cmake g++ make build-essential autoconf automake uuid-dev apt-get install libtool libboost-dev libboost-system-dev libboost-thread-dev libpq-dev libssl-dev libz-dev apt-get install python-dev python3-dev python3-pip diff --git a/docs/building_foglamp/building_foglamp.rst b/docs/building_foglamp/building_foglamp.rst index b9c42258ae..a5fcfac359 100644 --- a/docs/building_foglamp/building_foglamp.rst +++ b/docs/building_foglamp/building_foglamp.rst @@ -45,6 +45,7 @@ General Requirements This version of FogLAMP requires the following software to be installed in the same environment: +- **Avahi 0.6.32+** - **Python 3.5.3+** - **PostgreSQL 9.5+** - **SQLite 3.11+** @@ -70,6 +71,7 @@ FogLAMP is currently based on C/C++ and Python code. 
The packages needed to buil - autoconf - automake +- avahi-daemon - build-essential - cmake - curl @@ -98,7 +100,7 @@ FogLAMP is currently based on C/C++ and Python code. The packages needed to buil ... All packages are up-to-date. $ - $ sudo apt-get install curl git cmake g++ make build-essential autoconf automake + $ sudo apt-get install avahi-daemon curl git cmake g++ make build-essential autoconf automake Reading package lists... Done Building dependency tree ... diff --git a/requirements.sh b/requirements.sh index 51eb04136b..f71a26af89 100755 --- a/requirements.sh +++ b/requirements.sh @@ -48,10 +48,6 @@ if [[ ( $os_name == *"Red Hat"* || $os_name == *"CentOS"* ) && $os_version == * yum install -y cmake yum install -y libuuid-devel echo "source scl_source enable rh-python36" >> /home/${SUDO_USER}/.bashrc - - su - </dev/null; then - apt install -y curl + apt install -y avahi-daemon curl apt install -y cmake g++ make build-essential autoconf automake uuid-dev apt install -y libtool libboost-dev libboost-system-dev libboost-thread-dev libpq-dev libssl-dev libz-dev apt install -y python-dev python3-dev python3-pip From d858d58cfe75b87ec04ac6c48bc32abc77798627 Mon Sep 17 00:00:00 2001 From: amarendra-dianomic Date: Tue, 21 May 2019 12:06:44 +0530 Subject: [PATCH 150/161] Service announcer zeroconf cache refreshed on service add --- python/foglamp/services/common/service_announcer.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/python/foglamp/services/common/service_announcer.py b/python/foglamp/services/common/service_announcer.py index d4457600c8..1c13bdd5a4 100644 --- a/python/foglamp/services/common/service_announcer.py +++ b/python/foglamp/services/common/service_announcer.py @@ -8,7 +8,7 @@ import socket -from zeroconf import ServiceInfo, Zeroconf +from zeroconf import ServiceInfo, ServiceBrowser, ServiceStateChange, Zeroconf from foglamp.common import logger @@ -54,6 +54,8 @@ def __init__(self, sname, stype, port, txt): 
server="{}.local.".format(host_name) ) zeroconf = Zeroconf() + # Refresh zeroconf cache + browser = ServiceBrowser(zeroconf, stype, handlers=[self.on_service_state_change]) zeroconf.register_service(info, allow_name_change=True) def get_ip(self): @@ -67,3 +69,7 @@ def get_ip(self): finally: s.close() return ip + + def on_service_state_change(self, zeroconf: Zeroconf, service_type: str, name: str, state_change: ServiceStateChange) -> None: + if state_change is ServiceStateChange.Added: + info = zeroconf.get_service_info(service_type, name) From 569d20d483a903c12d563baca14752e2f7dbc576 Mon Sep 17 00:00:00 2001 From: pintomax Date: Tue, 21 May 2019 10:31:03 +0200 Subject: [PATCH 151/161] FOGL-2227: Review feedback (#1563) FOGL-2227: Review feedback --- C/tasks/north/sending_process/sending.cpp | 2 ++ 1 file changed, 2 insertions(+) diff --git a/C/tasks/north/sending_process/sending.cpp b/C/tasks/north/sending_process/sending.cpp index e5d2d96749..ca4197e873 100644 --- a/C/tasks/north/sending_process/sending.cpp +++ b/C/tasks/north/sending_process/sending.cpp @@ -205,9 +205,11 @@ SendingProcess::SendingProcess(int argc, char** argv) : FogLampProcess(argc, arg ConfigCategory config = this->fetchConfiguration(sendingDefaultConfig, m_plugin_name); +#if VERBOSE_LOG m_logger->debug("%s - stream-id :%d:", LOG_SERVICE_NAME.c_str(), m_stream_id); +#endif // Checks if stream-id is undefined, it allocates a new one in the case if (m_stream_id == 0) { From b0af85088120fa9c5846d713ba2ff830415a3d96 Mon Sep 17 00:00:00 2001 From: Praveen Garg Date: Tue, 21 May 2019 14:56:28 +0530 Subject: [PATCH 152/161] picking correct dir path --- .../services/core/api/certificate_store.py | 105 +++++++++--------- 1 file changed, 53 insertions(+), 52 deletions(-) diff --git a/python/foglamp/services/core/api/certificate_store.py b/python/foglamp/services/core/api/certificate_store.py index 27301d0648..257fe88401 100644 --- a/python/foglamp/services/core/api/certificate_store.py +++ 
b/python/foglamp/services/core/api/certificate_store.py @@ -84,8 +84,9 @@ async def upload(request): if not key_file: raise web.HTTPBadRequest(reason="key file is missing, or upload certificate with .pem or .json extension") - cert_valid_extensions = ('.cert', '.json', '.pem') key_valid_extensions = ('.key', '.pem') + cert_valid_extensions = ('.cert', '.json', '.pem') + key_filename = None if key_file: key_filename = key_file.filename @@ -101,17 +102,15 @@ async def upload(request): if cert_filename.endswith('.json'): certs_dir = _get_certs_dir('/etc/certs/json') - found_files = _find_file(cert_filename, certs_dir) - is_found = True if len(found_files) else False + is_found = True if len(_find_file(cert_filename, certs_dir)) else False if is_found and should_overwrite is False: - raise web.HTTPBadRequest(reason="Certificate with the same name already exists. " - "To overwrite set the overwrite to 1") + raise web.HTTPBadRequest(reason="Certificate with the same name already exists! " + "To overwrite, set the overwrite flag") if key_file: - found_files = _find_file(key_filename, certs_dir) - is_found = True if len(found_files) else False - if is_found and should_overwrite is False: + key_file_found = True if len(_find_file(key_filename, _get_certs_dir('/etc/certs/'))) else False + if key_file_found and should_overwrite is False: raise web.HTTPBadRequest(reason="Key cert with the same name already exists. 
" - "To overwrite set the overwrite to 1") + "To overwrite, set the overwrite flag") if cert_file: cert_file_data = data['cert'].file cert_file_content = cert_file_data.read() @@ -121,7 +120,7 @@ async def upload(request): if key_file: key_file_data = data['key'].file key_file_content = key_file_data.read() - key_file_path = str(certs_dir) + '/{}'.format(key_filename) + key_file_path = str(_get_certs_dir('/etc/certs/')) + '/{}'.format(key_filename) with open(key_file_path, 'wb') as f: f.write(key_file_content) @@ -145,70 +144,73 @@ async def delete_certificate(request): curl -X DELETE http://localhost:8081/foglamp/certificate/foglamp.key curl -X DELETE http://localhost:8081/foglamp/certificate/foglamp.pem?type=key """ - cert_name = request.match_info.get('name', None) + file_name = request.match_info.get('name', None) + valid_extensions = ('.cert', '.json', '.key', '.pem') - if not cert_name.endswith(valid_extensions): + if not file_name.endswith(valid_extensions): raise web.HTTPBadRequest(reason="Accepted file extensions are {}".format(valid_extensions)) - certs_dir = _get_certs_dir('/etc/certs/') - is_found = False - dir_path = [certs_dir + cert_name] + # check if cert_name is currently set for 'certificateName' in config for 'rest_api' + cf_mgr = ConfigurationManager(connect.get_storage_async()) + result = await cf_mgr.get_category_item(category_name='rest_api', item_name='certificateName') + if file_name.split('.')[0] == result['value']: + raise web.HTTPConflict(reason='Certificate with name {} is already in use, you can not delete' + .format(file_name)) + + _type = None if 'type' in request.query and request.query['type'] != '': _type = request.query['type'] if _type not in ['cert', 'key']: raise web.HTTPBadRequest(reason="Only cert and key are allowed for the value of type param") - if _type == 'cert': - if not cert_name.endswith('.cert'): - if os.path.isfile(certs_dir + 'pem/' + cert_name): - is_found = True - dir_path = [certs_dir + 'pem/' + cert_name] - if 
os.path.isfile(certs_dir + 'json/' + cert_name): - is_found = True - dir_path = [certs_dir + 'json/' + cert_name] - else: - if os.path.isfile(certs_dir + cert_name): - is_found = True - else: - is_found = False + + certs_dir = _get_certs_dir('/etc/certs/') + is_found = False + cert_path = list() + + if _type and _type == 'cert': + if not file_name.endswith('.cert'): + if os.path.isfile(certs_dir + 'pem/' + file_name): + is_found = True + cert_path = [certs_dir + 'pem/' + file_name] + if os.path.isfile(certs_dir + 'json/' + file_name): + is_found = True + cert_path = [certs_dir + 'json/' + file_name] else: - if os.path.isfile(certs_dir + cert_name): + if os.path.isfile(certs_dir + file_name): is_found = True - else: - is_found = False + cert_path = [certs_dir + file_name] - if 'type' not in request.query: + if _type and _type == 'key': + if os.path.isfile(certs_dir + file_name): + is_found = True + cert_path = [certs_dir + file_name] + + if _type is None: for root, dirs, files in os.walk(certs_dir): if root.endswith('json'): for f in files: - if cert_name == f: + if file_name == f: is_found = True - dir_path.append(certs_dir + 'json/' + cert_name) + cert_path.append(certs_dir + 'json/' + file_name) files.remove(f) if root.endswith('pem'): for f in files: - if cert_name == f: + if file_name == f: is_found = True - dir_path.append(certs_dir + 'pem/' + cert_name) + cert_path.append(certs_dir + 'pem/' + file_name) files.remove(f) for f in files: - if cert_name == f: + if file_name == f: is_found = True + cert_path.append(certs_dir + file_name) if not is_found: - raise web.HTTPNotFound(reason='Certificate with name {} does not exist'.format(cert_name)) - - # read config - # if cert_name is currently set for 'certificateName' in config for 'rest_api' - cf_mgr = ConfigurationManager(connect.get_storage_async()) - result = await cf_mgr.get_category_item(category_name='rest_api', item_name='certificateName') - if cert_name.split('.')[0] == result['value']: - raise 
web.HTTPConflict(reason='Certificate with name {} is already in use, you can not delete' - .format(cert_name)) + raise web.HTTPNotFound(reason='Certificate with name {} does not exist'.format(file_name)) # Remove file - for fp in dir_path: + for fp in cert_path: os.remove(fp) - return web.json_response({'result': "{} has been deleted successfully".format(cert_name)}) + return web.json_response({'result': "{} has been deleted successfully".format(file_name)}) def _get_certs_dir(_path): @@ -220,9 +222,8 @@ def _get_certs_dir(_path): def _find_file(name, path): - result = [] + fl = list() for root, dirs, files in os.walk(path): if name in files: - result.append(os.path.join(root, name)) - - return result + fl.append(os.path.join(root, name)) + return fl From 43abec08b00ea729f4d633e3113d13fa4257f642 Mon Sep 17 00:00:00 2001 From: Praveen Garg Date: Tue, 21 May 2019 16:15:47 +0530 Subject: [PATCH 153/161] get cert fixes --- .../services/core/api/certificate_store.py | 28 +++++++++++-------- 1 file changed, 16 insertions(+), 12 deletions(-) diff --git a/python/foglamp/services/core/api/certificate_store.py b/python/foglamp/services/core/api/certificate_store.py index 257fe88401..f53bbbb763 100644 --- a/python/foglamp/services/core/api/certificate_store.py +++ b/python/foglamp/services/core/api/certificate_store.py @@ -5,7 +5,9 @@ # FOGLAMP_END import os + from aiohttp import web + from foglamp.services.core import connect from foglamp.common.configuration_manager import ConfigurationManager from foglamp.common.common import _FOGLAMP_ROOT, _FOGLAMP_DATA @@ -30,26 +32,28 @@ async def get_certs(request): :Example: curl -X GET http://localhost:8081/foglamp/certificate """ - certs_dir = _get_certs_dir('/etc/certs') certs = [] keys = [] + key_valid_extensions = ('.key', '.pem') - for root, dirs, files in os.walk(certs_dir): - if root.endswith('json'): - for f in files: - if f.endswith('.json'): - certs.append(f) - files.remove(f) - if root.endswith('pem'): - for f in files: - 
if f.endswith('.pem'): - certs.append(f) - files.remove(f) + certs_root_dir = _get_certs_dir('/etc/certs') + for root, dirs, files in os.walk(certs_root_dir): for f in files: if f.endswith('.cert'): certs.append(f) if f.endswith(key_valid_extensions): keys.append(f) + + json_certs_path = _get_certs_dir('/etc/certs/json') + json_cert_files = os.listdir(json_certs_path) + json_certs = [f for f in json_cert_files if f.endswith('.json')] + certs += json_certs + + pem_certs_path = _get_certs_dir('/etc/certs/pem') + pem_cert_files = os.listdir(pem_certs_path) + pem_certs = [f for f in pem_cert_files if f.endswith('.pem')] + certs += pem_certs + return web.json_response({"certs": certs, "keys": keys}) From 3b6b8c8ffeec15e9794ae93461566c91553180d8 Mon Sep 17 00:00:00 2001 From: pintomax Date: Tue, 21 May 2019 13:15:45 +0200 Subject: [PATCH 154/161] FOGL-2841: Fix C++ unit tests compilation in Centos/RHEL (#1564) FOGL-2841: Fix C++ unit tests compilation in Centos/RHEL --- tests/unit/C/plugins/storage/sqlitememory/CMakeLists.txt | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/tests/unit/C/plugins/storage/sqlitememory/CMakeLists.txt b/tests/unit/C/plugins/storage/sqlitememory/CMakeLists.txt index 958004a7a6..7c0c4d222b 100644 --- a/tests/unit/C/plugins/storage/sqlitememory/CMakeLists.txt +++ b/tests/unit/C/plugins/storage/sqlitememory/CMakeLists.txt @@ -30,6 +30,13 @@ file(GLOB COMMON_SOURCES ../sqlite/common/*.cpp) file(GLOB COMMON_SOURCES ../sqlitememory/*.cpp) file(GLOB test_sources tests.cpp) +# Check for SQLite3 source tree in specific location +set(FOGLAMP_SQLITE3_LIBS "/tmp/foglamp-sqlite3-pkg/src" CACHE INTERNAL "") +if(EXISTS ${FOGLAMP_SQLITE3_LIBS}) + message(STATUS "Using SLITE3 source files in ${FOGLAMP_SQLITE3_LIBS}") + include_directories(${FOGLAMP_SQLITE3_LIBS}) +endif() + # Exe creation link_directories( ${PROJECT_BINARY_DIR}/../../../../lib From 1e6595c75637fc5ca44ca4c51e78d8faedae276e Mon Sep 17 00:00:00 2001 From: Stefano Simonelli Date: Tue, 21 
May 2019 15:00:54 +0200 Subject: [PATCH 155/161] FOGL-2815: text update --- README.rst | 5 +++-- docs/REDHAT.rst | 6 +++--- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/README.rst b/README.rst index c25582f9c1..8f805e79e9 100644 --- a/README.rst +++ b/README.rst @@ -98,9 +98,10 @@ Install the prerequisites on Red Hat/CentOS On Red Hat and CentOS distributions the required packages can be installed automatically with given `requirements.sh `_: :: - ./requirements.sh + sudo ./requirements.sh + +You should run this as a user with *sudo* access rights. -You may need to use *sudo* upon your access rights. Build ----- diff --git a/docs/REDHAT.rst b/docs/REDHAT.rst index 26f31a1849..2de578a04e 100644 --- a/docs/REDHAT.rst +++ b/docs/REDHAT.rst @@ -10,7 +10,7 @@ Building and using FogLAMP on RedHat/CentOS ******************************************* -FogLAMP can be build or installed on Red Hat or CentOS, it is currently tested against: +FogLAMP can be built or installed on Red Hat or CentOS, it is currently tested against: - Red Hat 7.6 - CentOS 7.6 @@ -30,7 +30,7 @@ The RPM can also be created from the FogLAMP sources through the repository `fog Installation on Red Hat ======================= -A Red Hat package should be installed before the FogLAMP RPM, follow the instructions : +It is necessary to install a Red Hat package before FogLAMP can be installed successfully. The installation sequence is as follows: :: sudo yum-config-manager --enable 'Red Hat Enterprise Linux Server 7 RHSCL (RPMs)' sudo yum -y localinstall ~/foglamp-1.5.2-0.00.x86_64.rpm @@ -39,7 +39,7 @@ A Red Hat package should be installed before the FogLAMP RPM, follow the instruc Installation on CentOS ====================== -A CentOS package should be installed before the FogLAMP RPM, follow the instructions : +It is necessary to install a CentOS package before FogLAMP can be installed successfully. 
The installation sequence is as follows: :: sudo yum install -y centos-release-scl-rh sudo yum -y localinstall ~/foglamp-1.5.2-0.00.x86_64.rpm From 1c6e2553a9e3221ab9a27bea3ae47a435e668917 Mon Sep 17 00:00:00 2001 From: Praveen Garg Date: Tue, 21 May 2019 19:41:05 +0530 Subject: [PATCH 156/161] get iterator in top level vs nested directories --- python/foglamp/services/core/api/certificate_store.py | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/python/foglamp/services/core/api/certificate_store.py b/python/foglamp/services/core/api/certificate_store.py index f53bbbb763..8844eeba30 100644 --- a/python/foglamp/services/core/api/certificate_store.py +++ b/python/foglamp/services/core/api/certificate_store.py @@ -38,11 +38,12 @@ async def get_certs(request): key_valid_extensions = ('.key', '.pem') certs_root_dir = _get_certs_dir('/etc/certs') for root, dirs, files in os.walk(certs_root_dir): - for f in files: - if f.endswith('.cert'): - certs.append(f) - if f.endswith(key_valid_extensions): - keys.append(f) + if not root.endswith(("pem", "json")): + for f in files: + if f.endswith('.cert'): + certs.append(f) + if f.endswith(key_valid_extensions): + keys.append(f) json_certs_path = _get_certs_dir('/etc/certs/json') json_cert_files = os.listdir(json_certs_path) From b60a38fd6b18bb0161481c1bb0d7bea564838aba Mon Sep 17 00:00:00 2001 From: pintomax Date: Tue, 21 May 2019 18:03:13 +0200 Subject: [PATCH 157/161] FOGL-2780: Update OMF north plugin to handle array data (#1561) FOGL-2780: Update OMF north plugin to handle array data Any unsupported OMF data type will be stripped out by the OMF plugin and not sent to the PI Server. 
Unit tests updated --- C/plugins/common/include/omf.h | 15 +- C/plugins/common/omf.cpp | 139 ++++++++++++++-- C/plugins/north/PI_Server_V2/plugin.cpp | 20 ++- tests/unit/C/plugins/common/main.cpp | 1 - .../C/plugins/common/test_omf_translation.cpp | 154 ++++++++++++++++++ 5 files changed, 297 insertions(+), 32 deletions(-) diff --git a/C/plugins/common/include/omf.h b/C/plugins/common/include/omf.h index 14622105d8..8eabe711d3 100644 --- a/C/plugins/common/include/omf.h +++ b/C/plugins/common/include/omf.h @@ -17,10 +17,11 @@ #include #define TYPE_ID_DEFAULT 1 -#define FAKE_ASSET_KEY "_default_start_id_" -#define OMF_TYPE_STRING "string" -#define OMF_TYPE_INTEGER "integer" -#define OMF_TYPE_FLOAT "number" +#define FAKE_ASSET_KEY "_default_start_id_" +#define OMF_TYPE_STRING "string" +#define OMF_TYPE_INTEGER "integer" +#define OMF_TYPE_FLOAT "number" +#define OMF_TYPE_UNSUPPORTED "unsupported" /** * Per asset dataTypes @@ -202,13 +203,15 @@ class OMF std::map m_formatTypes { {OMF_TYPE_STRING, ""}, {OMF_TYPE_INTEGER,"int64"}, - {OMF_TYPE_FLOAT, "float64"} + {OMF_TYPE_FLOAT, "float64"}, + {OMF_TYPE_UNSUPPORTED, "unsupported"} }; // Vector with OMF_TYPES const std::vector omfTypes = { OMF_TYPE_STRING, OMF_TYPE_FLOAT, // Forces the creation of float also for integer numbers - OMF_TYPE_FLOAT }; + OMF_TYPE_FLOAT, + OMF_TYPE_UNSUPPORTED}; // HTTP Sender interface HttpSender& m_sender; bool m_lastError; diff --git a/C/plugins/common/omf.cpp b/C/plugins/common/omf.cpp index 8117fe5bb4..4af6284afe 100644 --- a/C/plugins/common/omf.cpp +++ b/C/plugins/common/omf.cpp @@ -22,18 +22,23 @@ using namespace std; using namespace rapidjson; +static bool isTypeSupported(DatapointValue& dataPoint); + /** * OMFData constructor */ OMFData::OMFData(const Reading& reading, const long typeId) { + string outData; + // Convert reading data into the OMF JSON string - m_value.append("{\"containerid\": \"" + to_string(typeId) + "measurement_"); - m_value.append(reading.getAssetName() + "\", 
\"values\": [{"); + outData.append("{\"containerid\": \"" + to_string(typeId) + "measurement_"); + outData.append(reading.getAssetName() + "\", \"values\": [{"); // Get reading data const vector data = reading.getReadingData(); + unsigned long skipDatapoints = 0; /** * This loop creates: @@ -41,15 +46,30 @@ OMFData::OMFData(const Reading& reading, const long typeId) */ for (vector::const_iterator it = data.begin(); it != data.end(); ++it) { - // Add datapoint Name - m_value.append("\"" + (*it)->getName() + "\": " + (*it)->getData().toString()); - m_value.append(", "); + if (!isTypeSupported((*it)->getData())) + { + skipDatapoints++;; + continue; + } + else + { + // Add datapoint Name + outData.append("\"" + (*it)->getName() + "\": " + (*it)->getData().toString()); + outData.append(", "); + } } // Append Z to getAssetDateTime(FMT_STANDARD) - m_value.append("\"Time\": \"" + reading.getAssetDateUserTime(Reading::FMT_STANDARD) + "Z" + "\""); + outData.append("\"Time\": \"" + reading.getAssetDateUserTime(Reading::FMT_STANDARD) + "Z" + "\""); - m_value.append("}]}"); + outData.append("}]}"); + + // Append all, some or no datapoins + if (!skipDatapoints || + skipDatapoints < data.size()) + { + m_value.append(outData); + } } /** @@ -175,6 +195,16 @@ bool OMF::sendDataTypes(const Reading& row) // Create data for Type message string typeData = OMF::createTypeData(row); + // If Datatyope in Reading row is not supported, just return true + if (typeData.empty()) + { + return true; + } + else + { + // TODO: ADD LOG + } + // Build an HTTPS POST with 'resType' headers // and 'typeData' JSON payload // Then get HTTPS POST ret code and return 0 to client on error @@ -416,6 +446,7 @@ uint32_t OMF::sendToServer(const vector& readings, // Used for logging string json_not_compressed; + bool pendingSeparator = false; ostringstream jsonData; jsonData << "["; @@ -467,8 +498,12 @@ uint32_t OMF::sendToServer(const vector& readings, } // Add into JSON string the OMF transformed Reading data - 
jsonData << OMFData(**elem, typeId).OMFdataVal() << - (elem < (readings.end() - 1 ) ? ", " : ""); + string outData = OMFData(**elem, typeId).OMFdataVal(); + if (!outData.empty()) + { + jsonData << (pendingSeparator ? ", " : "") << outData; + pendingSeparator = true; + } } // Remove all assets supersetDataPoints @@ -832,6 +867,7 @@ const std::string OMF::createTypeData(const Reading& reading) const * 'string' for STRING */ + bool ret = true; const vector data = reading.getReadingData(); /** @@ -840,9 +876,22 @@ const std::string OMF::createTypeData(const Reading& reading) const */ for (vector::const_iterator it = data.begin(); it != data.end(); ++it) { - string omfType = omfTypes[((*it)->getData()).getType()]; + string omfType; + if (!isTypeSupported( (*it)->getData())) + { + omfType = OMF_TYPE_UNSUPPORTED; + } + else + { + omfType = omfTypes[((*it)->getData()).getType()]; + } string format = OMF::getFormatType(omfType); - + if (format.compare(OMF_TYPE_UNSUPPORTED) == 0) + { + //TO DO: ADD LOG + ret = false; + continue; + } // Add datapoint Name tData.append("\"" + (*it)->getName() + "\""); tData.append(": {\"type\": \""); @@ -870,8 +919,17 @@ const std::string OMF::createTypeData(const Reading& reading) const tData.append("\" }]"); - // Return JSON string - return tData; + // Check we have to return empty data or not + if (!ret && data.size() == 1) + { + // TODO: ADD LOGGING + return string(""); + } + else + { + // Return JSON string + return tData; + } } /** @@ -1223,9 +1281,18 @@ void OMF::setMapObjectTypes(const vector& readings, for (vector::const_iterator it = data.begin(); it != data.end(); ++it) - { - string omfType = omfTypes[((*it)->getData()).getType()]; + { + string omfType; + if (!isTypeSupported((*it)->getData())) + { + omfType = OMF_TYPE_UNSUPPORTED; + } + else + { + omfType = omfTypes[((*it)->getData()).getType()]; + } string datapointName = (*it)->getName(); + auto itr = readingAllDataPoints.find(assetName); // Asset not found in the map if (itr == 
readingAllDataPoints.end()) @@ -1291,6 +1358,12 @@ void OMF::setMapObjectTypes(const vector& readings, DatapointValue vString("v_str"); values.push_back(new Datapoint((*dp).first, vString)); } + else if ((*dp).second.compare(OMF_TYPE_UNSUPPORTED) == 0) + { + std::vector vData = {0}; + DatapointValue vArray(vData); + values.push_back(new Datapoint((*dp).first, vArray)); + } } // Add the superset Reading data with fake values @@ -1458,7 +1531,8 @@ bool OMF::setCreatedTypes(const Reading& row) const vector data = row.getReadingData(); types.append("{"); for (vector::const_iterator it = data.begin(); - it != data.end(); + (it != data.end() && + isTypeSupported((*it)->getData())); ++it) { if (it != data.begin()) @@ -1466,7 +1540,17 @@ bool OMF::setCreatedTypes(const Reading& row) types.append(", "); } - string omfType = omfTypes[((*it)->getData()).getType()]; + string omfType; + if (!isTypeSupported((*it)->getData())) + { + omfType = OMF_TYPE_UNSUPPORTED; + continue; + } + else + { + omfType = omfTypes[((*it)->getData()).getType()]; + } + string format = OMF::getFormatType(omfType); // Add datapoint Name @@ -1569,3 +1653,24 @@ bool OMF::getCreatedTypes(const string& key) } return ret; } + +/** + * Check whether input Datapoint type is supported by OMF class + * + * @param dataPoint Input data + * @return True is fupported, false otherwise + */ + +static bool isTypeSupported(DatapointValue& dataPoint) +{ + if (dataPoint.getType() == DatapointValue::DatapointTag::T_FLOAT_ARRAY || + dataPoint.getType() == DatapointValue::DatapointTag::T_DP_DICT || + dataPoint.getType() == DatapointValue::DatapointTag::T_DP_LIST) + { + return false; + } + else + { + return true; + } +} diff --git a/C/plugins/north/PI_Server_V2/plugin.cpp b/C/plugins/north/PI_Server_V2/plugin.cpp index 3cb79cc7cc..dd6ee2445e 100644 --- a/C/plugins/north/PI_Server_V2/plugin.cpp +++ b/C/plugins/north/PI_Server_V2/plugin.cpp @@ -463,21 +463,25 @@ string saveSentDataTypes(CONNECTOR_INFO* connInfo) unsigned long 
tSize = connInfo->assetsDataTypes.size(); if (tSize) { - // DataTypes map is not empty + + // Prepare output data (skip empty data types) newData << "\"" << SENT_TYPES_KEY << "\" : ["; + + bool pendingSeparator = false; for (auto it = connInfo->assetsDataTypes.begin(); it != connInfo->assetsDataTypes.end(); ++it) { - if (it != connInfo->assetsDataTypes.begin()) + if (((*it).second).types.compare("{}") != 0) { - newData << ", "; + newData << (pendingSeparator ? ", " : ""); + newData << "{\"" << (*it).first << "\" : {\"" << TYPE_ID_KEY << + "\": " << to_string(((*it).second).typeId); + newData << ", \"" << DATA_KEY << "\": " << + (((*it).second).types.empty() ? "{}" : ((*it).second).types) << + "}}"; + pendingSeparator = true; } - newData << "{\"" << (*it).first << "\" : {\"" << TYPE_ID_KEY << - "\": " << to_string(((*it).second).typeId); - newData << ", \"" << DATA_KEY << "\": " << - (((*it).second).types.empty() ? "{}" : ((*it).second).types) << - "}}"; } tSize = connInfo->assetsDataTypes.size(); diff --git a/tests/unit/C/plugins/common/main.cpp b/tests/unit/C/plugins/common/main.cpp index dd846efa7c..10f8070614 100644 --- a/tests/unit/C/plugins/common/main.cpp +++ b/tests/unit/C/plugins/common/main.cpp @@ -21,7 +21,6 @@ int main(int argc, char **argv) { testing::GTEST_FLAG(repeat) = 5000; testing::GTEST_FLAG(shuffle) = true; - testing::GTEST_FLAG(break_on_failure) = true; return RUN_ALL_TESTS(); } diff --git a/tests/unit/C/plugins/common/test_omf_translation.cpp b/tests/unit/C/plugins/common/test_omf_translation.cpp index 3cd5b7cb44..6602220d76 100644 --- a/tests/unit/C/plugins/common/test_omf_translation.cpp +++ b/tests/unit/C/plugins/common/test_omf_translation.cpp @@ -64,6 +64,78 @@ const char *readings_with_different_datapoints = R"( } )"; +// 3 readings JSON text with unsupported data types (array) +const char *all_readings_with_unsupported_datapoints_types = R"( + { + "count" : 4, "rows" : [ + { + "id": 1, "asset_code": "A", + "read_key": 
"5b3be500-ff95-41ae-b5a4-cc99d08bef4a", + "reading": { "lux": [45204.524] }, + "user_ts": "2018-06-11 14:00:08.532958", + "ts": "2018-06-12 14:47:18.872708" + }, + { + "id": 2, "asset_code": "B", + "read_key": "5b3be50c-ff95-41ae-b5a4-cc99d08bef4a", + "reading": { "temp": [87], "label" : [1] }, + "user_ts": "2018-08-21 14:00:09.32958", + "ts": "2018-08-22 14:48:18.72708" + }, + { + "id": 3, "asset_code": "C", + "read_key": "5b3be50c-ff95-41ae-b5a4-cc99d08bef4a", + "reading": { "temp": [23.2], "label" : [5] }, + "user_ts": "2018-08-21 14:00:09.32958", + "ts": "2018-08-22 15:48:18.72708" + } + ] + } +)"; + +// 5 readings JSON text with unsupported data types (array) +const char *readings_with_unsupported_datapoints_types = R"( + { + "count" : 4, "rows" : [ + { + "id": 1, "asset_code": "A", + "read_key": "5b3be500-ff95-41ae-b5a4-cc99d08bef4a", + "reading": { "lux": [45204.524] }, + "user_ts": "2018-06-11 14:00:08.532958", + "ts": "2018-06-12 14:47:18.872708" + }, + { + "id": 2, "asset_code": "B", + "read_key": "5b3be50c-ff95-41ae-b5a4-cc99d08bef4a", + "reading": { "temp": 87, "label" : [1] }, + "user_ts": "2018-08-21 14:00:09.32958", + "ts": "2018-08-22 14:48:18.72708" + }, + { + "id": 3, "asset_code": "C", + "read_key": "5b3be50c-ff95-41ae-b5a4-cc99d08bef4a", + "reading": { "temp": [23.2], "label" : [5] }, + "user_ts": "2018-08-21 14:00:09.32958", + "ts": "2018-08-22 15:48:18.72708" + }, + { + "id": 3, "asset_code": "D", + "read_key": "5b3be50c-ff95-41ae-b5a4-cc99d08bef4a", + "reading": { "temp": 23.2, "label" : 5 }, + "user_ts": "2018-08-21 14:00:09.32958", + "ts": "2018-08-22 15:48:18.72708" + }, + { + "id": 3, "asset_code": "E", + "read_key": "5b3be50c-ff95-41ae-b5a4-cc99d08bef4a", + "reading": { "temp": [23.2], "label" : [5] }, + "user_ts": "2018-08-21 14:00:09.32958", + "ts": "2018-08-22 15:48:18.72708" + } + ] + } +)"; + // 2 readings translated to OMF JSON text const string two_translated_readings = "[{\"containerid\": \"" + to_string(TYPE_ID) + \ @@ -173,3 
+245,85 @@ TEST(OMF_transation, SuperSet) // Superset map is empty ASSERT_EQ(0, superSetDataPoints.size()); } + +// Compare translated readings with a provided JSON value +TEST(OMF_transation, AllReadingsWithUnsupportedTypes) +{ + // Build a ReadingSet from JSON + ReadingSet readingSet(all_readings_with_unsupported_datapoints_types); + + ostringstream jsonData; + jsonData << "["; + + bool pendingSeparator = false; + // Iterate over Readings via readingSet.getAllReadings() + for (auto elem = readingSet.getAllReadings().begin(); + elem != readingSet.getAllReadings().end(); + ++elem) + { + string rData = OMFData(**elem, TYPE_ID).OMFdataVal(); + // Add into JSON string the OMF transformed Reading data + if (!rData.empty()) + { + jsonData << (pendingSeparator ? ", " : "") << rData; + pendingSeparator = true; + } + } + + jsonData << "]"; + + Document doc; + doc.Parse(jsonData.str().c_str()); + if (doc.HasParseError()) + { + ASSERT_FALSE(true); + } + else + { + // JSON is an array + ASSERT_TRUE(doc.IsArray()); + // Array size is 1 + ASSERT_EQ(doc.Size(), 0); + } +} + +// Compare translated readings with a provided JSON value +TEST(OMF_transation, ReadingsWithUnsupportedTypes) +{ + // Build a ReadingSet from JSON + ReadingSet readingSet(readings_with_unsupported_datapoints_types); + + ostringstream jsonData; + jsonData << "["; + + bool pendingSeparator = false; + // Iterate over Readings via readingSet.getAllReadings() + for (auto elem = readingSet.getAllReadings().begin(); + elem != readingSet.getAllReadings().end(); + ++elem) + { + string rData = OMFData(**elem, TYPE_ID).OMFdataVal(); + // Add into JSON string the OMF transformed Reading data + if (!rData.empty()) + { + jsonData << (pendingSeparator ? 
", " : "") << rData; + pendingSeparator = true; + } + } + + jsonData << "]"; + + Document doc; + doc.Parse(jsonData.str().c_str()); + if (doc.HasParseError()) + { + ASSERT_FALSE(true); + } + else + { + // JSON is an array + ASSERT_TRUE(doc.IsArray()); + // Array size is 1 + ASSERT_EQ(doc.Size(), 2); + } +} From 59a9d453bb804e86dcdbbc3a0a667e4f0d001bad Mon Sep 17 00:00:00 2001 From: ashish-jabble Date: Wed, 22 May 2019 12:58:21 +0530 Subject: [PATCH 158/161] unit test fixes for cert store --- .../core/api/test_certificate_store.py | 82 +++++++++++++------ 1 file changed, 56 insertions(+), 26 deletions(-) diff --git a/tests/unit/python/foglamp/services/core/api/test_certificate_store.py b/tests/unit/python/foglamp/services/core/api/test_certificate_store.py index 06b2997207..af4c6ebceb 100644 --- a/tests/unit/python/foglamp/services/core/api/test_certificate_store.py +++ b/tests/unit/python/foglamp/services/core/api/test_certificate_store.py @@ -43,43 +43,52 @@ def certs_path(self): async def test_get_certs(self, client, certs_path): response_content = {'keys': ['foglamp.key', 'rsa_private.pem'], 'certs': ['foglamp.cert', 'test.json', 'foglamp.pem']} - with patch.object(certificate_store, '_get_certs_dir', return_value=certs_path / 'certs'): + with patch.object(certificate_store, '_get_certs_dir', side_effect=[certs_path / 'certs', + certs_path / 'json', certs_path / 'pem']): with patch('os.walk') as mockwalk: mockwalk.return_value = [(str(certs_path / 'certs'), [], ['foglamp.cert']), (str(certs_path / 'certs/pem'), [], ['foglamp.pem']), (str(certs_path / 'certs/json'), [], ['test.json']), (str(certs_path / 'certs'), [], ['foglamp.key', 'rsa_private.pem']) ] - resp = await client.get('/foglamp/certificate') - assert 200 == resp.status - res = await resp.text() - jdict = json.loads(res) - cert = jdict["certs"] - assert 3 == len(cert) - assert Counter(response_content['certs']) == Counter(cert) - key = jdict["keys"] - assert 2 == len(key) - assert 
Counter(response_content['keys']) == Counter(key) + with patch('os.listdir') as mocked_listdir: + mocked_listdir.return_value = ['test.json', 'foglamp.pem'] + resp = await client.get('/foglamp/certificate') + assert 200 == resp.status + res = await resp.text() + jdict = json.loads(res) + cert = jdict["certs"] + assert 3 == len(cert) + assert Counter(response_content['certs']) == Counter(cert) + key = jdict["keys"] + assert 2 == len(key) + assert Counter(response_content['keys']) == Counter(key) + assert 2 == mocked_listdir.call_count mockwalk.assert_called_once_with(certs_path / 'certs') @pytest.mark.parametrize("files", [ [], ['foglamp.txt'], ]) async def test_get_bad_certs(self, client, certs_path, files): - with patch.object(certificate_store, '_get_certs_dir', return_value=certs_path / 'certs'): + with patch.object(certificate_store, '_get_certs_dir', side_effect=[certs_path / 'certs', + certs_path / 'json', certs_path / 'pem']): + with patch('os.walk') as mockwalk: mockwalk.return_value = [(str(certs_path / 'certs'), [], files), (str(certs_path / 'certs/pem'), [], files), (str(certs_path / 'certs/json'), [], files), (str(certs_path / 'certs'), [], files) ] - resp = await client.get('/foglamp/certificate') - assert 200 == resp.status - result = await resp.text() - json_response = json.loads(result) - assert 0 == len(json_response['certs']) - assert 0 == len(json_response['keys']) - assert {'certs': [], 'keys': []} == json_response + with patch('os.listdir') as mocked_listdir: + mocked_listdir.return_value = [] + resp = await client.get('/foglamp/certificate') + assert 200 == resp.status + result = await resp.text() + json_response = json.loads(result) + assert 0 == len(json_response['certs']) + assert 0 == len(json_response['keys']) + assert {'certs': [], 'keys': []} == json_response + assert 2 == mocked_listdir.call_count mockwalk.assert_called_once_with(certs_path / 'certs') async def test_upload(self, client, certs_path): @@ -173,8 +182,8 @@ async def 
test_upload_with_existing_and_no_overwrite(self, client, certs_path): with patch.object(certificate_store, '_find_file', return_value=["v"]) as patch_file: resp = await client.post('/foglamp/certificate', data=files) assert 400 == resp.status - assert 'Certificate with the same name already exists. To overwrite set the ' \ - 'overwrite to 1' == resp.reason + assert 'Certificate with the same name already exists! To overwrite, set the ' \ + 'overwrite flag' == resp.reason assert 1 == patch_file.call_count args, kwargs = patch_file.call_args assert ('foglamp.cert', certificate_store._get_certs_dir('/certs')) == args @@ -186,11 +195,25 @@ async def test_exception(self, client): assert 'Internal Server Error' == resp.reason @pytest.mark.parametrize("cert_name, actual_code, actual_reason", [ - ('', 404, "Not Found"), - ('root.txt', 400, "Accepted file extensions are ('.cert', '.json', '.key', '.pem')"), ('root.pem', 404, "Certificate with name root.pem does not exist"), ('rsa_private.key', 404, "Certificate with name rsa_private.key does not exist"), ]) + async def test_bad_delete_cert_with_invalid_filename(self, client, cert_name, actual_code, actual_reason): + async def async_mock(): + return {'value': 'foglamp'} + storage_client_mock = MagicMock(StorageClientAsync) + c_mgr = ConfigurationManager(storage_client_mock) + with patch.object(connect, 'get_storage_async', return_value=storage_client_mock): + with patch.object(c_mgr, 'get_category_item', return_value=async_mock()) as patch_cfg: + resp = await client.delete('/foglamp/certificate/{}'.format(cert_name)) + assert actual_code == resp.status + assert actual_reason == resp.reason + assert 1 == patch_cfg.call_count + + @pytest.mark.parametrize("cert_name, actual_code, actual_reason", [ + ('', 404, "Not Found",), + ('root.txt', 400, "Accepted file extensions are ('.cert', '.json', '.key', '.pem')"), + ]) async def test_bad_delete_cert(self, client, cert_name, actual_code, actual_reason): resp = await 
client.delete('/foglamp/certificate/{}'.format(cert_name)) assert actual_code == resp.status @@ -213,9 +236,16 @@ async def async_mock(): assert ({'item_name': 'certificateName', 'category_name': 'rest_api'}) == kwargs async def test_bad_type_delete_cert(self, client): - resp = await client.delete('/foglamp/certificate/foglamp.key?type=pem') - assert 400 == resp.status - assert 'Only cert and key are allowed for the value of type param' == resp.reason + async def async_mock(): + return {'value': 'foglamp'} + storage_client_mock = MagicMock(StorageClientAsync) + c_mgr = ConfigurationManager(storage_client_mock) + with patch.object(connect, 'get_storage_async', return_value=storage_client_mock): + with patch.object(c_mgr, 'get_category_item', return_value=async_mock()) as patch_cfg: + resp = await client.delete('/foglamp/certificate/server.cert?type=pem') + assert 400 == resp.status + assert 'Only cert and key are allowed for the value of type param' == resp.reason + assert 1 == patch_cfg.call_count @pytest.mark.parametrize("cert_name, param", [ ('foglamp.cert', '?type=cert'), From d81f8380b9a320d5748004f21963a3c5eade818e Mon Sep 17 00:00:00 2001 From: dianomicbot Date: Wed, 22 May 2019 11:21:59 +0000 Subject: [PATCH 159/161] VERSION changed --- VERSION | 2 +- docs/91_version_history.rst | 83 +++++++++++++++++++++++++++++++++++++ 2 files changed, 84 insertions(+), 1 deletion(-) diff --git a/VERSION b/VERSION index 484652bf89..f92d454bbe 100755 --- a/VERSION +++ b/VERSION @@ -1,2 +1,2 @@ -foglamp_version=1.5.2 +foglamp_version=1.6.0 foglamp_schema=27 diff --git a/docs/91_version_history.rst b/docs/91_version_history.rst index b1d52ba155..0e391af2e2 100644 --- a/docs/91_version_history.rst +++ b/docs/91_version_history.rst @@ -25,6 +25,89 @@ Version History FogLAMP v1 ========== +v1.6.0 +------- + +Release Date: 2019-05-22 + +- **FogLAMP Core** + + - New Features: + + - + - + - + - + - + - + - The scope of the FogLAMP certificate store has been widen to allow it to 
store .pem certificates and keys for accessing cloud functions. + - + - + - The creation of a Docker container for FogLAMP has been added to the packaging options for FogLAMP in this version of FogLAMP. + - + - Red Hat Enterprise Linux packages have been made available from this release of FogLAMp onwards. These packages include all the applicable plugins and notification service for FogLAMP. + - + - + - + - + - + - + - + - + - The FogLAMP API now supports the creation of configuration snapshots which can be used to create configuration checkpoints that be used to checkpoint and rollback configuration changes. + - + - + - The FogLAMP administration API has been extended to allow the installation of new plugins via that API. + - + - + - + - Set, unset optional configuration attributes + + + - Bug Fix: + + - + - + - + - + - + - + - + - + - + - + - + - A bug that prevents multiple FogLAMP's on the same network being discoverable via multicast DNS lookup has been fixed. + - + + +- **GUI** + + - New Features: + + - Improvements have been made to the FogLAMP Graphical User Interface to allow more control of the time periods displayed in the graphs of asset values. + - The FogLAMP Graphical User Interface now has the ability to show sets of graphs over a time period for data such as the spectrum analysis produced but the Fast Fourier transform filter. + - + - + - The FogLAMP Graphical User Interface is now available as an RPM file that may be installed on Red Hat Enterprise Linux or CentOS. + - + - + - + - + - + - + - Improvements have been made to the appearance of dropdown and other elements with the FogLAMP Graphical User Interface. + + + - Bug Fix: + + - + - + - Some improvements to screen layout in the FogLAMP Graphical User Interface have been made in order to improve the look and reduce the screen space used in some of the screens. 
+ - + - + + v1.5.2 ------- From b8d09c8af04293de9288f1dad33d1760339a5344 Mon Sep 17 00:00:00 2001 From: Vaibhav Singhal Date: Wed, 22 May 2019 17:11:51 +0530 Subject: [PATCH 160/161] Docs modified --- docs/91_version_history.rst | 88 ++++++++++++++----------------------- 1 file changed, 32 insertions(+), 56 deletions(-) diff --git a/docs/91_version_history.rst b/docs/91_version_history.rst index 0e391af2e2..43daba871a 100644 --- a/docs/91_version_history.rst +++ b/docs/91_version_history.rst @@ -34,78 +34,54 @@ Release Date: 2019-05-22 - New Features: - - - - - - - - - - - - - The scope of the FogLAMP certificate store has been widen to allow it to store .pem certificates and keys for accessing cloud functions. - - - - - The creation of a Docker container for FogLAMP has been added to the packaging options for FogLAMP in this version of FogLAMP. - - - - Red Hat Enterprise Linux packages have been made available from this release of FogLAMp onwards. These packages include all the applicable plugins and notification service for FogLAMP. - - - - - - - - - - - - - - - - - - The FogLAMP API now supports the creation of configuration snapshots which can be used to create configuration checkpoints that be used to checkpoint and rollback configuration changes. - - - - - - The FogLAMP administration API has been extended to allow the installation of new plugins via that API. - - - - - - - - Set, unset optional configuration attributes - + - Red Hat Enterprise Linux packages have been made available from this release of FogLAMP onwards. These packages include all the applicable plugins and notification service for FogLAMP. + - The FogLAMP API now supports the creation of configuration snapshots which can be used to create configuration checkpoints and rollback configuration changes. + - The FogLAMP administration API has been extended to allow the installation of new plugins via API. 
+ - - Bug Fix: + - Improvements/Bug Fix: - - - - - - - - - - - - - - - - - - - - - - - A bug that prevents multiple FogLAMP's on the same network being discoverable via multicast DNS lookup has been fixed. - - + - Set, unset optional configuration attributes - **GUI** - New Features: - - - Improvements have been made to the FogLAMP Graphical User Interface to allow more control of the time periods displayed in the graphs of asset values. + - The FogLAMP Graphical User Interface now has the ability to show sets of graphs over a time period for data such as the spectrum analysis produced but the Fast Fourier transform filter. - - - - - The FogLAMP Graphical User Interface is now available as an RPM file that may be installed on Red Hat Enterprise Linux or CentOS. - - - - - - - - - - - - - - Improvements have been made to the appearance of dropdown and other elements with the FogLAMP Graphical User Interface. - - Bug Fix: + - Improvements/Bug Fix: - - - - + - Improvements have been made to the FogLAMP Graphical User Interface to allow more control of the time periods displayed in the graphs of asset values. - Some improvements to screen layout in the FogLAMP Graphical User Interface have been made in order to improve the look and reduce the screen space used in some of the screens. - - - - + - Improvements have been made to the appearance of dropdown and other elements with the FogLAMP Graphical User Interface. + + +- **Plugins** + + - New Features: + - A new threshold filter has been added that can be used to block onward transmission of data until a configured expression evaluates too true. + - The Modbus RTU/TCP south plugin is now available on CentOS 7.6 and RHEL 7.6. + - A new north plugin has been added to allow data to be sent the Google Cloud Platform IoT Core interface. + - The FFT filter now has an option to output raw frequency spectra. Note this can not be accepted into all north bound systems. + - Changed the release status of the FFT filter plugin. 
+ - Added the ability in the modbus plugin to define multiple registers that create composite values. For example two 16 bit registers can be put together to make one 32 bit value. This is does using an array of register values in a modbus map, e.g. {"name":"rpm","slave":1,"register":[33,34],"scale":0.1,"offset":0}. Register 33 contains the low 16 its of the RPM and register 34 the high 16 bits of the RPM. + - Addition of a new Notification Delivery plugin to send notifications to a Google Hangouts chatroom. + - A new plugin has been created that uses machine learning based on Google's TensorFlow technology to classify image data and populate derived information the north side systems. The current TensorFlow model in use will recognise hard written digits and populate those digits. This plugins is currently a proof of concept for machine learning. + + + - Improvements/Bug Fix: + - Removal of unnecessary include directive from Modbus-C plugin. + - Improved error reporting for the modbus-c plugin and added documentation on the configuration of the plugin. + - Improved the subscription handling in the OPCUA south plugin. + - Stability improvements have been made to the notification service, these related to the handling of dynamic reconfigurations of the notifications. + - Removed erroneous default for script configuration option in Python35 notification delivery plugin. + - Corrected description of the enable configuration item. 
v1.5.2 From 1dffabd76725c7a1ccfb455b1430ebb101ebd9ef Mon Sep 17 00:00:00 2001 From: Praveen Garg Date: Thu, 23 May 2019 12:58:44 +0530 Subject: [PATCH 161/161] FOGL-2847 syslog file path fixes for RHEL/ CentOS (#1567) * syslog file path fixes, RHEL/ CentOS * added note for Red Hat syslog permissions issue --- docs/REDHAT.rst | 6 ++++++ python/foglamp/services/core/api/support.py | 6 ++++++ python/foglamp/services/core/support.py | 4 ++++ 3 files changed, 16 insertions(+) diff --git a/docs/REDHAT.rst b/docs/REDHAT.rst index 2de578a04e..af044e9f43 100644 --- a/docs/REDHAT.rst +++ b/docs/REDHAT.rst @@ -43,3 +43,9 @@ It is necessary to install a CentOS package before FogLAMP can be installed succ :: sudo yum install -y centos-release-scl-rh sudo yum -y localinstall ~/foglamp-1.5.2-0.00.x86_64.rpm + +.. note:: + By default, /var/log/messages are created with read-write permissions for ‘root’ user only. + Make sure to set the correct READ permissions. + + `sudo chmod 644 /var/log/messages` diff --git a/python/foglamp/services/core/api/support.py b/python/foglamp/services/core/api/support.py index 1e2e5eaf8d..4ebd5c676b 100644 --- a/python/foglamp/services/core/api/support.py +++ b/python/foglamp/services/core/api/support.py @@ -5,6 +5,7 @@ # FOGLAMP_END import os +import platform import subprocess from pathlib import Path from aiohttp import web @@ -18,7 +19,12 @@ _FOGLAMP_DATA = os.getenv("FOGLAMP_DATA", default=None) _FOGLAMP_ROOT = os.getenv("FOGLAMP_ROOT", default='/usr/local/foglamp') + _SYSLOG_FILE = '/var/log/syslog' + +if ('centos' in platform.platform()) or ('redhat' in platform.platform()): + _SYSLOG_FILE = '/var/log/messages' + __DEFAULT_LIMIT = 20 __DEFAULT_OFFSET = 0 __DEFAULT_LOG_SOURCE = 'FogLAMP' diff --git a/python/foglamp/services/core/support.py b/python/foglamp/services/core/support.py index 9ce1772f35..f0478d0322 100644 --- a/python/foglamp/services/core/support.py +++ b/python/foglamp/services/core/support.py @@ -8,6 +8,7 @@ """ import datetime 
+import platform import os from os.path import basename import glob @@ -30,6 +31,9 @@ _NO_OF_FILES_TO_RETAIN = 3 _SYSLOG_FILE = '/var/log/syslog' +if ('centos' in platform.platform()) or ('redhat' in platform.platform()): + _SYSLOG_FILE = '/var/log/messages' + class SupportBuilder: