From 4652b2b5169e02dc2f172126801e359997f4569d Mon Sep 17 00:00:00 2001
From: phylax2020
Date: Sun, 3 Jul 2022 21:28:19 +0200
Subject: [PATCH 1/4] Missing string conversion of value in log_data function.

---
 cbpi/controller/log_file_controller.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/cbpi/controller/log_file_controller.py b/cbpi/controller/log_file_controller.py
index d835982..fac4491 100644
--- a/cbpi/controller/log_file_controller.py
+++ b/cbpi/controller/log_file_controller.py
@@ -42,7 +42,7 @@ class LogController:
                 self.datalogger[name] = data_logger
 
         formatted_time = strftime("%Y-%m-%d %H:%M:%S", localtime())
-        self.datalogger[name].info("%s,%s" % (formatted_time, value))
+        self.datalogger[name].info("%s,%s" % (formatted_time, str(value)))
         if self.influxdb == "Yes":
             self.influxdbcloud = self.cbpi.config.get("INFLUXDBCLOUD", "No")
             self.influxdbaddr = self.cbpi.config.get("INFLUXDBADDR", None)

From e95237eef660db30d25310fd99384d776a5c2159 Mon Sep 17 00:00:00 2001
From: phylax2020
Date: Sat, 9 Jul 2022 22:23:26 +0200
Subject: [PATCH 2/4] Limit log data transfer so that it cannot exceed
 2 * max_rows = 1000 rows, by keeping only every nth row. This keeps the
 user interface responsive.

---
 cbpi/controller/log_file_controller.py | 25 ++++++++++++++++++-------
 1 file changed, 18 insertions(+), 7 deletions(-)

diff --git a/cbpi/controller/log_file_controller.py b/cbpi/controller/log_file_controller.py
index fac4491..8c59da7 100644
--- a/cbpi/controller/log_file_controller.py
+++ b/cbpi/controller/log_file_controller.py
@@ -116,28 +116,36 @@ class LogController:
 
         for name in names:
             # get all log names
             all_filenames = glob.glob('./logs/sensor_%s.log*' % name)
-
             # concat all logs
             df = pd.concat([pd.read_csv(f, parse_dates=True, date_parser=dateparse, index_col='DateTime', names=['DateTime', name], header=None) for f in all_filenames])
             logging.info("Read all files for {}".format(names))
             # resample if rate provided
-            if sample_rate is not None:
-                df = df[name].resample(sample_rate).max()
-                logging.info("Sampled now for {}".format(names))
-            df = df.dropna()
+            # if sample_rate is not None:
+            #     df = df[name].resample(sample_rate).max()
+            #     logging.info("Sampled now for {}".format(names))
+            df = df[name].dropna()
+            # take every nth row so that total number of rows does not exceed max_rows * 2
+            max_rows = 500
+            total_rows = df.shape[0]
+            if (total_rows > 0) and (total_rows > max_rows):
+                nth = int(total_rows/max_rows)
+                if nth > 1:
+                    df = df.iloc[::nth]
+
             if result is None:
                 result = df
             else:
                 result = pd.merge(result, df, how='outer', left_index=True, right_index=True)
 
         data = {"time": df.index.tolist()}
-
         if len(names) > 1:
             for name in names:
                 data[name] = result[name].interpolate(limit_direction='both', limit=10).tolist()
         else:
             data[name] = result.interpolate().tolist()
+        logging.info("Send Log for {}".format(names))
+
         return data
 
     async def get_data2(self, ids) -> dict:
@@ -146,7 +154,10 @@ class LogController:
         result = dict()
 
         for id in ids:
-            df = pd.read_csv("./logs/sensor_%s.log" % id, parse_dates=True, date_parser=dateparse, index_col='DateTime', names=['DateTime',"Values"], header=None)
+            # df = pd.read_csv("./logs/sensor_%s.log" % id, parse_dates=True, date_parser=dateparse, index_col='DateTime', names=['DateTime',"Values"], header=None)
+            # concat all logs
+            all_filenames = glob.glob('./logs/sensor_%s.log*' % id)
+            df = pd.concat([pd.read_csv(f, parse_dates=True, date_parser=dateparse, index_col='DateTime', names=['DateTime', 'Values'], header=None) for f in all_filenames])
             result[id] = {"time": df.index.astype(str).tolist(), "value":df.Values.tolist()}
 
         return result
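
Note on the downsampling introduced in PATCH 2/4: with nth = int(total_rows / max_rows) and the slice df.iloc[::nth], the number of surviving rows is ceil(total_rows / nth), which stays below 2 * max_rows because nth is rounded down. A minimal standalone sketch of the technique (the function name and the sample data are illustrative only, not CraftBeerPi code):

    import numpy as np
    import pandas as pd

    def downsample_nth(series: pd.Series, max_rows: int = 500) -> pd.Series:
        # Keep only every nth row so the result stays below 2 * max_rows.
        total_rows = series.shape[0]
        if total_rows > max_rows:
            nth = int(total_rows / max_rows)  # floor division, nth >= 1
            if nth > 1:
                series = series.iloc[::nth]   # keeps rows 0, nth, 2*nth, ...
        return series

    # Example: 72000 rows (a 10 Hz sensor for 2 hours) -> nth = 144 -> 500 rows kept.
    idx = pd.date_range("2022-07-09", periods=72000, freq="100ms")
    print(downsample_nth(pd.Series(np.random.rand(72000), index=idx)).size)

Unlike resampling, this slicing needs no time arithmetic, which is why it works even when no sample_rate is supplied by the client.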
From 8621888d8156b0dbec134fb0de6788c591802a59 Mon Sep 17 00:00:00 2001
From: phylax2020
Date: Fri, 15 Jul 2022 21:47:23 +0200
Subject: [PATCH 3/4] Allow changing log file size and log file backup count
 in settings.

The default log file size is now 131072 bytes (was 1 MB). The dashboard
can become blocked when its charts have to load several megabytes of
accumulated log data, so the log files are kept small; the chart refresh
interval should also be longer than roughly 10 seconds so that chart
plots cannot block the user interface.

In get_data (dashboard charts) the pandas resample call is reactivated,
and in get_data2 (analytics chart) it is added, to further reduce the
amount of data transferred to the clients.

In log_data, max_bytes and backup_count must be converted to int because
changing these settings in the settings dialog stores them as strings.

---
 cbpi/config/config.json                | 16 +++++++++++++++-
 cbpi/controller/log_file_controller.py | 15 +++++++++------
 2 files changed, 24 insertions(+), 7 deletions(-)

diff --git a/cbpi/config/config.json b/cbpi/config/config.json
index 56f98c5..cdb9b15 100644
--- a/cbpi/config/config.json
+++ b/cbpi/config/config.json
@@ -144,5 +144,19 @@
         "options": null,
         "type": "step",
         "value": "NotificationStep"
-    }
+    },
+    "SENSOR_LOG_BACKUP_COUNT": {
+        "description": "Max. number of backup logs",
+        "name": "SENSOR_LOG_BACKUP_COUNT",
+        "options": null,
+        "type": "number",
+        "value": 3
+    },
+    "SENSOR_LOG_MAX_BYTES": {
+        "description": "Max. number of bytes in sensor logs",
+        "name": "SENSOR_LOG_MAX_BYTES",
+        "options": null,
+        "type": "number",
+        "value": "100000"
+    }
 }

diff --git a/cbpi/controller/log_file_controller.py b/cbpi/controller/log_file_controller.py
index 8c59da7..993a1f5 100644
--- a/cbpi/controller/log_file_controller.py
+++ b/cbpi/controller/log_file_controller.py
@@ -31,8 +31,8 @@ class LogController:
         self.influxdb = self.cbpi.config.get("INFLUXDB", "No")
         if self.logfiles == "Yes":
             if name not in self.datalogger:
-                max_bytes = self.cbpi.config.get("SENSOR_LOG_MAX_BYTES", 1048576)
-                backup_count = self.cbpi.config.get("SENSOR_LOG_BACKUP_COUNT", 3)
+                max_bytes = int(self.cbpi.config.get("SENSOR_LOG_MAX_BYTES", 131072))
+                backup_count = int(self.cbpi.config.get("SENSOR_LOG_BACKUP_COUNT", 3))
 
                 data_logger = logging.getLogger('cbpi.sensor.%s' % name)
                 data_logger.propagate = False
@@ -120,10 +120,10 @@ class LogController:
             df = pd.concat([pd.read_csv(f, parse_dates=True, date_parser=dateparse, index_col='DateTime', names=['DateTime', name], header=None) for f in all_filenames])
             logging.info("Read all files for {}".format(names))
             # resample if rate provided
-            # if sample_rate is not None:
-            #     df = df[name].resample(sample_rate).max()
-            #     logging.info("Sampled now for {}".format(names))
-            df = df[name].dropna()
+            if sample_rate is not None:
+                df = df[name].resample(sample_rate).max()
+                logging.info("Sampled now for {}".format(names))
+            df = df.dropna()
             # take every nth row so that total number of rows does not exceed max_rows * 2
             max_rows = 500
             total_rows = df.shape[0]
@@ -138,6 +138,7 @@ class LogController:
             result = pd.merge(result, df, how='outer', left_index=True, right_index=True)
 
         data = {"time": df.index.tolist()}
+
         if len(names) > 1:
             for name in names:
                 data[name] = result[name].interpolate(limit_direction='both', limit=10).tolist()
@@ -158,6 +159,8 @@ class LogController:
             # concat all logs
             all_filenames = glob.glob('./logs/sensor_%s.log*' % id)
             df = pd.concat([pd.read_csv(f, parse_dates=True, date_parser=dateparse, index_col='DateTime', names=['DateTime', 'Values'], header=None) for f in all_filenames])
+            df = df.resample('60s').max()
+            df = df.dropna()
             result[id] = {"time": df.index.astype(str).tolist(), "value":df.Values.tolist()}
 
         return result
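
For context on PATCH 3/4: the int() conversions matter because a value edited in the settings dialog can come back as a string (note the "100000" stored in config.json above), while Python's rotating log handler expects integers. Below is a minimal sketch of how max_bytes and backup_count feed a per-sensor logger; the RotatingFileHandler wiring is an assumption based on the logger and file names visible in the diffs, not the verbatim CraftBeerPi code:

    import logging
    from logging.handlers import RotatingFileHandler

    def make_sensor_logger(name, max_bytes, backup_count):
        # One rotating log per sensor: sensor_<name>.log plus up to backup_count
        # rotated files sensor_<name>.log.1 ... .log.N -- which is why the
        # readers glob for './logs/sensor_%s.log*'.
        data_logger = logging.getLogger('cbpi.sensor.%s' % name)
        data_logger.propagate = False
        handler = RotatingFileHandler('./logs/sensor_%s.log' % name,
                                      maxBytes=max_bytes, backupCount=backup_count)
        data_logger.addHandler(handler)
        return data_logger

    # Values read from the settings store may be strings, hence int():
    logger = make_sensor_logger("fermenter", int("100000"), int("3"))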
From abe87b4218aeb1a941995e681c2673875e6805f4 Mon Sep 17 00:00:00 2001
From: phylax2020
Date: Tue, 19 Jul 2022 09:39:27 +0200
Subject: [PATCH 4/4] Move the log file size and backup log file count
 settings from config.json into the ConfigUpdate extension.

---
 cbpi/config/config.json                 | 16 +---------------
 cbpi/extension/ConfigUpdate/__init__.py | 22 ++++++++++++++++++++--
 2 files changed, 21 insertions(+), 17 deletions(-)

diff --git a/cbpi/config/config.json b/cbpi/config/config.json
index cdb9b15..56f98c5 100644
--- a/cbpi/config/config.json
+++ b/cbpi/config/config.json
@@ -144,19 +144,5 @@
         "options": null,
         "type": "step",
         "value": "NotificationStep"
-    },
-    "SENSOR_LOG_BACKUP_COUNT": {
-        "description": "Max. number of backup logs",
-        "name": "SENSOR_LOG_BACKUP_COUNT",
-        "options": null,
-        "type": "number",
-        "value": 3
-    },
-    "SENSOR_LOG_MAX_BYTES": {
-        "description": "Max. number of bytes in sensor logs",
-        "name": "SENSOR_LOG_MAX_BYTES",
-        "options": null,
-        "type": "number",
-        "value": "100000"
-    }
+    }
 }

diff --git a/cbpi/extension/ConfigUpdate/__init__.py b/cbpi/extension/ConfigUpdate/__init__.py
index e5dc9ac..77d9677 100644
--- a/cbpi/extension/ConfigUpdate/__init__.py
+++ b/cbpi/extension/ConfigUpdate/__init__.py
@@ -47,8 +47,9 @@ class ConfigUpdate(CBPiExtension):
         influxdbcloud = self.cbpi.config.get("INFLUXDBCLOUD", None)
         mqttupdate = self.cbpi.config.get("MQTTUpdate", None)
         PRESSURE_UNIT = self.cbpi.config.get("PRESSURE_UNIT", None)
-
-
+        SENSOR_LOG_BACKUP_COUNT = self.cbpi.config.get("SENSOR_LOG_BACKUP_COUNT", None)
+        SENSOR_LOG_MAX_BYTES = self.cbpi.config.get("SENSOR_LOG_MAX_BYTES", None)
+
         if boil_temp is None:
             logger.info("INIT Boil Temp Setting")
             try:
@@ -285,6 +286,23 @@ class ConfigUpdate(CBPiExtension):
                                                                                   {"label": "PSI", "value": "PSI"}])
             except:
                 logger.warning('Unable to update config')
+
+        # check if SENSOR_LOG_BACKUP_COUNT exists in config
+        if SENSOR_LOG_BACKUP_COUNT is None:
+            logger.info("INIT SENSOR_LOG_BACKUP_COUNT")
+            try:
+                await self.cbpi.config.add("SENSOR_LOG_BACKUP_COUNT", 3, ConfigType.NUMBER, "Max. number of backup logs")
+            except:
+                logger.warning('Unable to update database')
+
+        # check if SENSOR_LOG_MAX_BYTES exists in config
+        if SENSOR_LOG_MAX_BYTES is None:
+            logger.info("INIT SENSOR_LOG_MAX_BYTES")
+            try:
+                await self.cbpi.config.add("SENSOR_LOG_MAX_BYTES", 100000, ConfigType.NUMBER, "Max. number of bytes in sensor logs")
+            except:
+                logger.warning('Unable to update database')
+
 
 def setup(cbpi):
     cbpi.plugin.register("ConfigUpdate", ConfigUpdate)
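
The read path in get_data2 pairs with the rotation settings registered above: it globs sensor_<id>.log* so rotated backups are included, concatenates them, and resamples to one value per minute before shipping the series to the analytics chart. A condensed standalone sketch of that pattern (the dateparse format is taken from the strftime call in PATCH 1/4; the function name is illustrative):

    import glob
    import pandas as pd

    def read_sensor_log(id):
        dateparse = lambda dates: pd.to_datetime(dates, format='%Y-%m-%d %H:%M:%S')
        # Read the current log and all rotated backups (.log, .log.1, ...).
        all_filenames = glob.glob('./logs/sensor_%s.log*' % id)
        df = pd.concat([pd.read_csv(f, parse_dates=True, date_parser=dateparse,
                                    index_col='DateTime', names=['DateTime', 'Values'],
                                    header=None) for f in all_filenames])
        df = df.resample('60s').max()  # one value per minute is plenty for a chart
        df = df.dropna()
        return {"time": df.index.astype(str).tolist(), "value": df.Values.tolist()}

Resampling after the concat also absorbs the fact that the globbed files are not read in chronological order, since the 60-second bins are computed from the DateTime index rather than from row positions.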