From 4652b2b5169e02dc2f172126801e359997f4569d Mon Sep 17 00:00:00 2001 From: phylax2020 Date: Sun, 3 Jul 2022 21:28:19 +0200 Subject: [PATCH 1/2] Missing string conversion of value in log_data function. --- cbpi/controller/log_file_controller.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cbpi/controller/log_file_controller.py b/cbpi/controller/log_file_controller.py index d835982..fac4491 100644 --- a/cbpi/controller/log_file_controller.py +++ b/cbpi/controller/log_file_controller.py @@ -42,7 +42,7 @@ class LogController: self.datalogger[name] = data_logger formatted_time = strftime("%Y-%m-%d %H:%M:%S", localtime()) - self.datalogger[name].info("%s,%s" % (formatted_time, value)) + self.datalogger[name].info("%s,%s" % (formatted_time, str(value))) if self.influxdb == "Yes": self.influxdbcloud = self.cbpi.config.get("INFLUXDBCLOUD", "No") self.influxdbaddr = self.cbpi.config.get("INFLUXDBADDR", None) From e95237eef660db30d25310fd99384d776a5c2159 Mon Sep 17 00:00:00 2001 From: phylax2020 Date: Sat, 9 Jul 2022 22:23:26 +0200 Subject: [PATCH 2/2] Limit log data transfer so that it does not exceed 2 times max_rows (= 1000 rows) by keeping only every nth row. This keeps the user interface operable. 
--- cbpi/controller/log_file_controller.py | 25 ++++++++++++++++++------- 1 file changed, 18 insertions(+), 7 deletions(-) diff --git a/cbpi/controller/log_file_controller.py b/cbpi/controller/log_file_controller.py index fac4491..8c59da7 100644 --- a/cbpi/controller/log_file_controller.py +++ b/cbpi/controller/log_file_controller.py @@ -116,28 +116,36 @@ class LogController: for name in names: # get all log names all_filenames = glob.glob('./logs/sensor_%s.log*' % name) - # concat all logs df = pd.concat([pd.read_csv(f, parse_dates=True, date_parser=dateparse, index_col='DateTime', names=['DateTime', name], header=None) for f in all_filenames]) logging.info("Read all files for {}".format(names)) # resample if rate provided - if sample_rate is not None: - df = df[name].resample(sample_rate).max() - logging.info("Sampled now for {}".format(names)) - df = df.dropna() + # if sample_rate is not None: + # df = df[name].resample(sample_rate).max() + # logging.info("Sampled now for {}".format(names)) + df = df[name].dropna() + # take every nth row so that total number of rows does not exceed max_rows * 2 + max_rows = 500 + total_rows = df.shape[0] + if (total_rows > 0) and (total_rows > max_rows): + nth = int(total_rows/max_rows) + if nth > 1: + df = df.iloc[::nth] + if result is None: result = df else: result = pd.merge(result, df, how='outer', left_index=True, right_index=True) data = {"time": df.index.tolist()} - if len(names) > 1: for name in names: data[name] = result[name].interpolate(limit_direction='both', limit=10).tolist() else: data[name] = result.interpolate().tolist() + logging.info("Send Log for {}".format(names)) + return data async def get_data2(self, ids) -> dict: @@ -146,7 +154,10 @@ class LogController: result = dict() for id in ids: - df = pd.read_csv("./logs/sensor_%s.log" % id, parse_dates=True, date_parser=dateparse, index_col='DateTime', names=['DateTime',"Values"], header=None) + # df = pd.read_csv("./logs/sensor_%s.log" % id, parse_dates=True, 
date_parser=dateparse, index_col='DateTime', names=['DateTime',"Values"], header=None) + # concat all logs + all_filenames = glob.glob('./logs/sensor_%s.log*' % id) + df = pd.concat([pd.read_csv(f, parse_dates=True, date_parser=dateparse, index_col='DateTime', names=['DateTime', 'Values'], header=None) for f in all_filenames]) result[id] = {"time": df.index.astype(str).tolist(), "value":df.Values.tolist()} return result