From 8621888d8156b0dbec134fb0de6788c591802a59 Mon Sep 17 00:00:00 2001
From: phylax2020
Date: Fri, 15 Jul 2022 21:47:23 +0200
Subject: [PATCH 1/8] Allow changing log file size and log file backup count
 in settings.

The default log file size is now 131072 bytes (128 KiB; it was 1 MB
before). A dashboard with charts can become blocked if the accumulated
log file data grows to more than a few megabytes, and the chart refresh
rate should be longer than about 10 seconds so that plotting the charts
cannot block the user interface. In get_data (used by the dashboard
charts) the pandas resample call is reactivated, and in get_data2 (used
by the analytics chart) it is added, to further reduce the amount of
data transferred to the clients. In log_data, max_bytes and
backup_count must be converted to int because the values can come back
as strings once these settings have been changed in the settings
dialog.
---
 cbpi/config/config.json                 | 16 +++++++++++++++-
 cbpi/controller/log_file_controller.py  | 15 +++++++++------
 2 files changed, 24 insertions(+), 7 deletions(-)

diff --git a/cbpi/config/config.json b/cbpi/config/config.json
index 56f98c5..cdb9b15 100644
--- a/cbpi/config/config.json
+++ b/cbpi/config/config.json
@@ -144,5 +144,19 @@
         "options": null,
         "type": "step",
         "value": "NotificationStep"
-    }
+    },
+    "SENSOR_LOG_BACKUP_COUNT": {
+        "description": "Max. number of backup logs",
+        "name": "SENSOR_LOG_BACKUP_COUNT",
+        "options": null,
+        "type": "number",
+        "value": 3
+    },
+    "SENSOR_LOG_MAX_BYTES": {
+        "description": "Max. number of bytes in sensor logs",
+        "name": "SENSOR_LOG_MAX_BYTES",
+        "options": null,
+        "type": "number",
+        "value": "100000"
+    }
 }
diff --git a/cbpi/controller/log_file_controller.py b/cbpi/controller/log_file_controller.py
index 8c59da7..993a1f5 100644
--- a/cbpi/controller/log_file_controller.py
+++ b/cbpi/controller/log_file_controller.py
@@ -31,8 +31,8 @@ class LogController:
         self.influxdb = self.cbpi.config.get("INFLUXDB", "No")
         if self.logfiles == "Yes":
             if name not in self.datalogger:
-                max_bytes = self.cbpi.config.get("SENSOR_LOG_MAX_BYTES", 1048576)
-                backup_count = self.cbpi.config.get("SENSOR_LOG_BACKUP_COUNT", 3)
+                max_bytes = int(self.cbpi.config.get("SENSOR_LOG_MAX_BYTES", 131072))
+                backup_count = int(self.cbpi.config.get("SENSOR_LOG_BACKUP_COUNT", 3))
 
                 data_logger = logging.getLogger('cbpi.sensor.%s' % name)
                 data_logger.propagate = False
@@ -120,10 +120,10 @@ class LogController:
         df = pd.concat([pd.read_csv(f, parse_dates=True, date_parser=dateparse, index_col='DateTime', names=['DateTime', name], header=None) for f in all_filenames])
         logging.info("Read all files for {}".format(names))
         # resample if rate provided
-        # if sample_rate is not None:
-        #     df = df[name].resample(sample_rate).max()
-        #     logging.info("Sampled now for {}".format(names))
-        df = df[name].dropna()
+        if sample_rate is not None:
+            df = df[name].resample(sample_rate).max()
+            logging.info("Sampled now for {}".format(names))
+        df = df.dropna()
         # take every nth row so that total number of rows does not exceed max_rows * 2
         max_rows = 500
         total_rows = df.shape[0]
@@ -138,6 +138,7 @@ class LogController:
             result = pd.merge(result, df, how='outer', left_index=True, right_index=True)
 
         data = {"time": df.index.tolist()}
+
         if len(names) > 1:
             for name in names:
                 data[name] = result[name].interpolate(limit_direction='both', limit=10).tolist()
@@ -158,6 +159,8 @@ class LogController:
             # concat all logs
             all_filenames = glob.glob('./logs/sensor_%s.log*' % id)
             df = pd.concat([pd.read_csv(f, parse_dates=True, date_parser=dateparse, index_col='DateTime', names=['DateTime', 'Values'], header=None) for f in all_filenames])
+            df = df.resample('60s').max()
+            df = df.dropna()
             result[id] = {"time": df.index.astype(str).tolist(), "value":df.Values.tolist()}
 
         return result
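For reference, the data reduction this patch activates in get_data has two stages: resample the concatenated logs to one value per sample_rate bucket, then keep only every nth row so the payload sent to the dashboard stays near max_rows points. Below is a minimal, self-contained sketch of those two stages; the synthetic series is illustrative only and not part of the patch.

    import numpy as np
    import pandas as pd

    # Synthetic stand-in for the concatenated sensor CSV logs: one reading per second.
    idx = pd.date_range("2022-07-15 12:00:00", periods=360000, freq="s")
    series = pd.Series(np.random.default_rng(0).normal(20.0, 0.5, len(idx)), index=idx)

    # Stage 1: one value per bucket, as in df[name].resample(sample_rate).max().
    series = series.resample("60s").max().dropna()

    # Stage 2: decimate to every nth row, mirroring df.iloc[::nth]; the result
    # stays below roughly 2 * max_rows points.
    max_rows = 500
    total_rows = series.shape[0]
    if total_rows > max_rows:
        nth = int(total_rows / max_rows)
        if nth > 1:
            series = series.iloc[::nth]

    print(series.shape[0])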
From abe87b4218aeb1a941995e681c2673875e6805f4 Mon Sep 17 00:00:00 2001
From: phylax2020
Date: Tue, 19 Jul 2022 09:39:27 +0200
Subject: [PATCH 2/8] Logfile size and backup logfile count in config.json
---
 cbpi/config/config.json                 | 16 +---------------
 cbpi/extension/ConfigUpdate/__init__.py | 22 ++++++++++++++++++++--
 2 files changed, 21 insertions(+), 17 deletions(-)

diff --git a/cbpi/config/config.json b/cbpi/config/config.json
index cdb9b15..56f98c5 100644
--- a/cbpi/config/config.json
+++ b/cbpi/config/config.json
@@ -144,19 +144,5 @@
         "options": null,
         "type": "step",
         "value": "NotificationStep"
-    },
-    "SENSOR_LOG_BACKUP_COUNT": {
-        "description": "Max. number of backup logs",
-        "name": "SENSOR_LOG_BACKUP_COUNT",
-        "options": null,
-        "type": "number",
-        "value": 3
-    },
-    "SENSOR_LOG_MAX_BYTES": {
-        "description": "Max. number of bytes in sensor logs",
-        "name": "SENSOR_LOG_MAX_BYTES",
-        "options": null,
-        "type": "number",
-        "value": "100000"
-    }
+    }
 }
diff --git a/cbpi/extension/ConfigUpdate/__init__.py b/cbpi/extension/ConfigUpdate/__init__.py
index e5dc9ac..77d9677 100644
--- a/cbpi/extension/ConfigUpdate/__init__.py
+++ b/cbpi/extension/ConfigUpdate/__init__.py
@@ -47,8 +47,9 @@ class ConfigUpdate(CBPiExtension):
         influxdbcloud = self.cbpi.config.get("INFLUXDBCLOUD", None)
         mqttupdate = self.cbpi.config.get("MQTTUpdate", None)
         PRESSURE_UNIT = self.cbpi.config.get("PRESSURE_UNIT", None)
-
-
+        SENSOR_LOG_BACKUP_COUNT = self.cbpi.config.get("SENSOR_LOG_BACKUP_COUNT", None)
+        SENSOR_LOG_MAX_BYTES = self.cbpi.config.get("SENSOR_LOG_MAX_BYTES", None)
+
         if boil_temp is None:
             logger.info("INIT Boil Temp Setting")
             try:
@@ -285,6 +286,23 @@ class ConfigUpdate(CBPiExtension):
                                                {"label": "PSI", "value": "PSI"}])
             except:
                 logger.warning('Unable to update config')
+
+        # check if SENSOR_LOG_BACKUP_COUNT exists in config
+        if SENSOR_LOG_BACKUP_COUNT is None:
+            logger.info("INIT SENSOR_LOG_BACKUP_COUNT")
+            try:
+                await self.cbpi.config.add("SENSOR_LOG_BACKUP_COUNT", 3, ConfigType.NUMBER, "Max. number of backup logs")
+            except:
+                logger.warning('Unable to update database')
+
+        # check if SENSOR_LOG_MAX_BYTES exists in config
+        if SENSOR_LOG_MAX_BYTES is None:
+            logger.info("INIT SENSOR_LOG_MAX_BYTES")
+            try:
+                await self.cbpi.config.add("SENSOR_LOG_MAX_BYTES", 100000, ConfigType.NUMBER, "Max. number of bytes in sensor logs")
+            except:
+                logger.warning('Unable to update database')
+
 
 def setup(cbpi):
     cbpi.plugin.register("ConfigUpdate", ConfigUpdate)
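The controller reads these two settings back with a defensive int() cast because, as the patch-1 message notes, a value edited in the settings dialog can come back from the config store as a string. A hedged sketch of the wiring between the settings and the rotating log handler; make_sensor_logger is a hypothetical helper name, not part of the patch:

    import logging
    from logging.handlers import RotatingFileHandler

    def make_sensor_logger(name, raw_max_bytes="100000", raw_backup_count="3"):
        # The raw values may be strings after a settings-dialog edit, hence int().
        max_bytes = int(raw_max_bytes)
        backup_count = int(raw_backup_count)
        logger = logging.getLogger('cbpi.sensor.%s' % name)
        logger.propagate = False
        logger.setLevel(logging.DEBUG)
        # Rotate sensor_<name>.log once it reaches max_bytes, keeping
        # backup_count rotated files (sensor_<name>.log.1, .log.2, ...).
        # The ./logs directory is assumed to exist, as in the controller.
        handler = RotatingFileHandler('./logs/sensor_%s.log' % name,
                                      maxBytes=max_bytes, backupCount=backup_count)
        logger.addHandler(handler)
        return logger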
number of bytes in sensor logs") + except: + logger.warning('Unable to update database') + def setup(cbpi): cbpi.plugin.register("ConfigUpdate", ConfigUpdate) From 49b50678410f0a4b3ecd8af33135643611432ee5 Mon Sep 17 00:00:00 2001 From: Alexander Vollkopf <43980694+avollkopf@users.noreply.github.com> Date: Tue, 19 Jul 2022 17:36:37 +0200 Subject: [PATCH 3/8] Revert "Allow changing logfile size and logfile backup count" --- cbpi/controller/log_file_controller.py | 26 ++++++------------------- cbpi/extension/ConfigUpdate/__init__.py | 22 ++------------------- 2 files changed, 8 insertions(+), 40 deletions(-) diff --git a/cbpi/controller/log_file_controller.py b/cbpi/controller/log_file_controller.py index 993a1f5..d835982 100644 --- a/cbpi/controller/log_file_controller.py +++ b/cbpi/controller/log_file_controller.py @@ -31,8 +31,8 @@ class LogController: self.influxdb = self.cbpi.config.get("INFLUXDB", "No") if self.logfiles == "Yes": if name not in self.datalogger: - max_bytes = int(self.cbpi.config.get("SENSOR_LOG_MAX_BYTES", 131072)) - backup_count = int(self.cbpi.config.get("SENSOR_LOG_BACKUP_COUNT", 3)) + max_bytes = self.cbpi.config.get("SENSOR_LOG_MAX_BYTES", 1048576) + backup_count = self.cbpi.config.get("SENSOR_LOG_BACKUP_COUNT", 3) data_logger = logging.getLogger('cbpi.sensor.%s' % name) data_logger.propagate = False @@ -42,7 +42,7 @@ class LogController: self.datalogger[name] = data_logger formatted_time = strftime("%Y-%m-%d %H:%M:%S", localtime()) - self.datalogger[name].info("%s,%s" % (formatted_time, str(value))) + self.datalogger[name].info("%s,%s" % (formatted_time, value)) if self.influxdb == "Yes": self.influxdbcloud = self.cbpi.config.get("INFLUXDBCLOUD", "No") self.influxdbaddr = self.cbpi.config.get("INFLUXDBADDR", None) @@ -116,6 +116,7 @@ class LogController: for name in names: # get all log names all_filenames = glob.glob('./logs/sensor_%s.log*' % name) + # concat all logs df = pd.concat([pd.read_csv(f, parse_dates=True, date_parser=dateparse, index_col='DateTime', names=['DateTime', name], header=None) for f in all_filenames]) logging.info("Read all files for {}".format(names)) @@ -124,29 +125,19 @@ class LogController: df = df[name].resample(sample_rate).max() logging.info("Sampled now for {}".format(names)) df = df.dropna() - # take every nth row so that total number of rows does not exceed max_rows * 2 - max_rows = 500 - total_rows = df.shape[0] - if (total_rows > 0) and (total_rows > max_rows): - nth = int(total_rows/max_rows) - if nth > 1: - df = df.iloc[::nth] - if result is None: result = df else: result = pd.merge(result, df, how='outer', left_index=True, right_index=True) data = {"time": df.index.tolist()} - + if len(names) > 1: for name in names: data[name] = result[name].interpolate(limit_direction='both', limit=10).tolist() else: data[name] = result.interpolate().tolist() - logging.info("Send Log for {}".format(names)) - return data async def get_data2(self, ids) -> dict: @@ -155,12 +146,7 @@ class LogController: result = dict() for id in ids: - # df = pd.read_csv("./logs/sensor_%s.log" % id, parse_dates=True, date_parser=dateparse, index_col='DateTime', names=['DateTime',"Values"], header=None) - # concat all logs - all_filenames = glob.glob('./logs/sensor_%s.log*' % id) - df = pd.concat([pd.read_csv(f, parse_dates=True, date_parser=dateparse, index_col='DateTime', names=['DateTime', 'Values'], header=None) for f in all_filenames]) - df = df.resample('60s').max() - df = df.dropna() + df = pd.read_csv("./logs/sensor_%s.log" % id, parse_dates=True, 
From c85b929b7551b0f1e1b17a2385e7a92b19e09fbc Mon Sep 17 00:00:00 2001
From: phylax2020
Date: Tue, 19 Jul 2022 18:27:10 +0200
Subject: [PATCH 4/8] Annotations for logfile maximum size revised
---
 cbpi/extension/ConfigUpdate/__init__.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/cbpi/extension/ConfigUpdate/__init__.py b/cbpi/extension/ConfigUpdate/__init__.py
index 77d9677..9e78ae3 100644
--- a/cbpi/extension/ConfigUpdate/__init__.py
+++ b/cbpi/extension/ConfigUpdate/__init__.py
@@ -297,9 +297,9 @@ class ConfigUpdate(CBPiExtension):
 
         # check if SENSOR_LOG_MAX_BYTES exists in config
         if SENSOR_LOG_MAX_BYTES is None:
-            logger.info("INIT SENSOR_LOG_MAX_BYTES")
+            logger.info("Init maximum size of sensor logfiles")
             try:
-                await self.cbpi.config.add("SENSOR_LOG_MAX_BYTES", 100000, ConfigType.NUMBER, "Max. number of bytes in sensor logs")
+                await self.cbpi.config.add("SENSOR_LOG_MAX_BYTES", 100000, ConfigType.NUMBER, "Max. number of bytes in sensor logfiles")
             except:
                 logger.warning('Unable to update database')
 
number of bytes in sensor logfiles") except: logger.warning('Unable to update database') From 313f9d4b398006f95ec51fb417b8694c4d494f73 Mon Sep 17 00:00:00 2001 From: phylax2020 Date: Tue, 19 Jul 2022 20:28:36 +0200 Subject: [PATCH 5/8] Set default logfile size to 100000 bytes --- cbpi/controller/log_file_controller.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cbpi/controller/log_file_controller.py b/cbpi/controller/log_file_controller.py index 993a1f5..968929f 100644 --- a/cbpi/controller/log_file_controller.py +++ b/cbpi/controller/log_file_controller.py @@ -31,7 +31,7 @@ class LogController: self.influxdb = self.cbpi.config.get("INFLUXDB", "No") if self.logfiles == "Yes": if name not in self.datalogger: - max_bytes = int(self.cbpi.config.get("SENSOR_LOG_MAX_BYTES", 131072)) + max_bytes = int(self.cbpi.config.get("SENSOR_LOG_MAX_BYTES", 100000)) backup_count = int(self.cbpi.config.get("SENSOR_LOG_BACKUP_COUNT", 3)) data_logger = logging.getLogger('cbpi.sensor.%s' % name) From 752d9a27b64fb8b95b16321ab1c8bd34dfe42340 Mon Sep 17 00:00:00 2001 From: phylax2020 Date: Tue, 19 Jul 2022 20:45:26 +0200 Subject: [PATCH 6/8] delete log_file_controller.py. will be later restored --- cbpi/controller/log_file_controller.py | 224 ------------------------- 1 file changed, 224 deletions(-) delete mode 100644 cbpi/controller/log_file_controller.py diff --git a/cbpi/controller/log_file_controller.py b/cbpi/controller/log_file_controller.py deleted file mode 100644 index 968929f..0000000 --- a/cbpi/controller/log_file_controller.py +++ /dev/null @@ -1,224 +0,0 @@ -import datetime -import glob -import logging -import os -from logging.handlers import RotatingFileHandler -from time import strftime, localtime -import pandas as pd -import zipfile -import base64 -import urllib3 -from cbpi.api import * -from cbpi.api.config import ConfigType -from cbpi.api.base import CBPiBase -import asyncio - - -class LogController: - - def __init__(self, cbpi): - ''' - - :param cbpi: craftbeerpi object - ''' - self.cbpi = cbpi - self.logger = logging.getLogger(__name__) - self.configuration = False - self.datalogger = {} - - def log_data(self, name: str, value: str) -> None: - self.logfiles = self.cbpi.config.get("CSVLOGFILES", "Yes") - self.influxdb = self.cbpi.config.get("INFLUXDB", "No") - if self.logfiles == "Yes": - if name not in self.datalogger: - max_bytes = int(self.cbpi.config.get("SENSOR_LOG_MAX_BYTES", 100000)) - backup_count = int(self.cbpi.config.get("SENSOR_LOG_BACKUP_COUNT", 3)) - - data_logger = logging.getLogger('cbpi.sensor.%s' % name) - data_logger.propagate = False - data_logger.setLevel(logging.DEBUG) - handler = RotatingFileHandler('./logs/sensor_%s.log' % name, maxBytes=max_bytes, backupCount=backup_count) - data_logger.addHandler(handler) - self.datalogger[name] = data_logger - - formatted_time = strftime("%Y-%m-%d %H:%M:%S", localtime()) - self.datalogger[name].info("%s,%s" % (formatted_time, str(value))) - if self.influxdb == "Yes": - self.influxdbcloud = self.cbpi.config.get("INFLUXDBCLOUD", "No") - self.influxdbaddr = self.cbpi.config.get("INFLUXDBADDR", None) - self.influxdbport = self.cbpi.config.get("INFLUXDBPORT", None) - self.influxdbname = self.cbpi.config.get("INFLUXDBNAME", None) - self.influxdbuser = self.cbpi.config.get("INFLUXDBUSER", None) - self.influxdbpwd = self.cbpi.config.get("INFLUXDBPWD", None) - - id = name - try: - chars = {'ö':'oe','ä':'ae','ü':'ue','Ö':'Oe','Ä':'Ae','Ü':'Ue'} - sensor=self.cbpi.sensor.find_by_id(name) - if sensor is not None: - 
From 752d9a27b64fb8b95b16321ab1c8bd34dfe42340 Mon Sep 17 00:00:00 2001
From: phylax2020
Date: Tue, 19 Jul 2022 20:45:26 +0200
Subject: [PATCH 6/8] delete log_file_controller.py. will be later restored
---
 cbpi/controller/log_file_controller.py | 224 -------------------------
 1 file changed, 224 deletions(-)
 delete mode 100644 cbpi/controller/log_file_controller.py

diff --git a/cbpi/controller/log_file_controller.py b/cbpi/controller/log_file_controller.py
deleted file mode 100644
index 968929f..0000000
--- a/cbpi/controller/log_file_controller.py
+++ /dev/null
@@ -1,224 +0,0 @@
-import datetime
-import glob
-import logging
-import os
-from logging.handlers import RotatingFileHandler
-from time import strftime, localtime
-import pandas as pd
-import zipfile
-import base64
-import urllib3
-from cbpi.api import *
-from cbpi.api.config import ConfigType
-from cbpi.api.base import CBPiBase
-import asyncio
-
-
-class LogController:
-
-    def __init__(self, cbpi):
-        '''
-
-        :param cbpi: craftbeerpi object
-        '''
-        self.cbpi = cbpi
-        self.logger = logging.getLogger(__name__)
-        self.configuration = False
-        self.datalogger = {}
-
-    def log_data(self, name: str, value: str) -> None:
-        self.logfiles = self.cbpi.config.get("CSVLOGFILES", "Yes")
-        self.influxdb = self.cbpi.config.get("INFLUXDB", "No")
-        if self.logfiles == "Yes":
-            if name not in self.datalogger:
-                max_bytes = int(self.cbpi.config.get("SENSOR_LOG_MAX_BYTES", 100000))
-                backup_count = int(self.cbpi.config.get("SENSOR_LOG_BACKUP_COUNT", 3))
-
-                data_logger = logging.getLogger('cbpi.sensor.%s' % name)
-                data_logger.propagate = False
-                data_logger.setLevel(logging.DEBUG)
-                handler = RotatingFileHandler('./logs/sensor_%s.log' % name, maxBytes=max_bytes, backupCount=backup_count)
-                data_logger.addHandler(handler)
-                self.datalogger[name] = data_logger
-
-            formatted_time = strftime("%Y-%m-%d %H:%M:%S", localtime())
-            self.datalogger[name].info("%s,%s" % (formatted_time, str(value)))
-        if self.influxdb == "Yes":
-            self.influxdbcloud = self.cbpi.config.get("INFLUXDBCLOUD", "No")
-            self.influxdbaddr = self.cbpi.config.get("INFLUXDBADDR", None)
-            self.influxdbport = self.cbpi.config.get("INFLUXDBPORT", None)
-            self.influxdbname = self.cbpi.config.get("INFLUXDBNAME", None)
-            self.influxdbuser = self.cbpi.config.get("INFLUXDBUSER", None)
-            self.influxdbpwd = self.cbpi.config.get("INFLUXDBPWD", None)
-
-            id = name
-            try:
-                chars = {'ö':'oe','ä':'ae','ü':'ue','Ö':'Oe','Ä':'Ae','Ü':'Ue'}
-                sensor=self.cbpi.sensor.find_by_id(name)
-                if sensor is not None:
-                    itemname=sensor.name.replace(" ", "_")
-                    for char in chars:
-                        itemname = itemname.replace(char,chars[char])
-                    out="measurement,source=" + itemname + ",itemID=" + str(id) + " value="+str(value)
-            except Exception as e:
-                logging.error("InfluxDB ID Error: {}".format(e))
-
-            if self.influxdbcloud == "Yes":
-                self.influxdburl="https://" + self.influxdbaddr + "/api/v2/write?org=" + self.influxdbuser + "&bucket=" + self.influxdbname + "&precision=s"
-                try:
-                    header = {'User-Agent': name, 'Authorization': "Token {}".format(self.influxdbpwd)}
-                    http = urllib3.PoolManager()
-                    req = http.request('POST',self.influxdburl, body=out, headers = header)
-                except Exception as e:
-                    logging.error("InfluxDB cloud write Error: {}".format(e))
-
-            else:
-                self.base64string = base64.b64encode(('%s:%s' % (self.influxdbuser,self.influxdbpwd)).encode())
-                self.influxdburl='http://' + self.influxdbaddr + ':' + str(self.influxdbport) + '/write?db=' + self.influxdbname
-                try:
-                    header = {'User-Agent': name, 'Content-Type': 'application/x-www-form-urlencoded','Authorization': 'Basic %s' % self.base64string.decode('utf-8')}
-                    http = urllib3.PoolManager()
-                    req = http.request('POST',self.influxdburl, body=out, headers = header)
-                except Exception as e:
-                    logging.error("InfluxDB write Error: {}".format(e))
-
-
-
-    async def get_data(self, names, sample_rate='60s'):
-        logging.info("Start Log for {}".format(names))
-        '''
-        :param names: name as string or list of names as string
-        :param sample_rate: rate for resampling the data
-        :return:
-        '''
-        # make string to array
-        if isinstance(names, list) is False:
-            names = [names]
-
-        # remove duplicates
-        names = set(names)
-
-
-        result = None
-
-        def dateparse(time_in_secs):
-            '''
-            Internal helper for date parsing
-            :param time_in_secs:
-            :return:
-            '''
-            return datetime.datetime.strptime(time_in_secs, '%Y-%m-%d %H:%M:%S')
-
-        def datetime_to_str(o):
-            if isinstance(o, datetime.datetime):
-                return o.__str__()
-
-        for name in names:
-            # get all log names
-            all_filenames = glob.glob('./logs/sensor_%s.log*' % name)
-            # concat all logs
-            df = pd.concat([pd.read_csv(f, parse_dates=True, date_parser=dateparse, index_col='DateTime', names=['DateTime', name], header=None) for f in all_filenames])
-            logging.info("Read all files for {}".format(names))
-            # resample if rate provided
-            if sample_rate is not None:
-                df = df[name].resample(sample_rate).max()
-                logging.info("Sampled now for {}".format(names))
-            df = df.dropna()
-            # take every nth row so that total number of rows does not exceed max_rows * 2
-            max_rows = 500
-            total_rows = df.shape[0]
-            if (total_rows > 0) and (total_rows > max_rows):
-                nth = int(total_rows/max_rows)
-                if nth > 1:
-                    df = df.iloc[::nth]
-
-            if result is None:
-                result = df
-            else:
-                result = pd.merge(result, df, how='outer', left_index=True, right_index=True)
-
-        data = {"time": df.index.tolist()}
-
-        if len(names) > 1:
-            for name in names:
-                data[name] = result[name].interpolate(limit_direction='both', limit=10).tolist()
-        else:
-            data[name] = result.interpolate().tolist()
-
-        logging.info("Send Log for {}".format(names))
-
-        return data
-
-    async def get_data2(self, ids) -> dict:
-        def dateparse(time_in_secs):
-            return datetime.datetime.strptime(time_in_secs, '%Y-%m-%d %H:%M:%S')
-
-        result = dict()
-        for id in ids:
-            # df = pd.read_csv("./logs/sensor_%s.log" % id, parse_dates=True, date_parser=dateparse, index_col='DateTime', names=['DateTime',"Values"], header=None)
-            # concat all logs
-            all_filenames = glob.glob('./logs/sensor_%s.log*' % id)
-            df = pd.concat([pd.read_csv(f, parse_dates=True, date_parser=dateparse, index_col='DateTime', names=['DateTime', 'Values'], header=None) for f in all_filenames])
-            df = df.resample('60s').max()
-            df = df.dropna()
-            result[id] = {"time": df.index.astype(str).tolist(), "value":df.Values.tolist()}
-        return result
-
-
-
-    def get_logfile_names(self, name:str ) -> list:
-        '''
-        Get all log file names
-        :param name: log name as string. pattern /logs/sensor_%s.log*
-        :return: list of log file names
-        '''
-
-        return [os.path.basename(x) for x in glob.glob('./logs/sensor_%s.log*' % name)]
-
-    def clear_log(self, name:str ) -> str:
-
-        all_filenames = glob.glob('./logs/sensor_%s.log*' % name)
-        for f in all_filenames:
-            os.remove(f)
-
-        if name in self.datalogger:
-            del self.datalogger[name]
-
-
-    def get_all_zip_file_names(self, name: str) -> list:
-
-        '''
-        Return a list of all zip file names
-        :param name:
-        :return:
-        '''
-
-        return [os.path.basename(x) for x in glob.glob('./logs/*-sensor-%s.zip' % name)]
-
-    def clear_zip(self, name:str ) -> None:
-        """
-        clear all zip files for a sensor
-        :param name: sensor name
-        :return: None
-        """
-
-        all_filenames = glob.glob('./logs/*-sensor-%s.zip' % name)
-        for f in all_filenames:
-            os.remove(f)
-
-    def zip_log_data(self, name: str) -> str:
-        """
-        :param name: sensor name
-        :return: zip_file_name
-        """
-
-        formatted_time = strftime("%Y-%m-%d-%H_%M_%S", localtime())
-        file_name = './logs/%s-sensor-%s.zip' % (formatted_time, name)
-        zip = zipfile.ZipFile(file_name, 'w', zipfile.ZIP_DEFLATED)
-        all_filenames = glob.glob('./logs/sensor_%s.log*' % name)
-        for f in all_filenames:
-            zip.write(os.path.join(f))
-        zip.close()
-        return os.path.basename(file_name)
-
From ff0ba76cc0188eaccae42d4bd5be1a6fed7c16be Mon Sep 17 00:00:00 2001
From: phylax2020
Date: Tue, 19 Jul 2022 20:47:11 +0200
Subject: [PATCH 7/8] log_file_controller added again
---
 cbpi/controller/log_file_controller.py | 224 +++++++++++++++++++++++++
 1 file changed, 224 insertions(+)
 create mode 100644 cbpi/controller/log_file_controller.py

diff --git a/cbpi/controller/log_file_controller.py b/cbpi/controller/log_file_controller.py
new file mode 100644
index 0000000..968929f
--- /dev/null
+++ b/cbpi/controller/log_file_controller.py
@@ -0,0 +1,224 @@
+import datetime
+import glob
+import logging
+import os
+from logging.handlers import RotatingFileHandler
+from time import strftime, localtime
+import pandas as pd
+import zipfile
+import base64
+import urllib3
+from cbpi.api import *
+from cbpi.api.config import ConfigType
+from cbpi.api.base import CBPiBase
+import asyncio
+
+
+class LogController:
+
+    def __init__(self, cbpi):
+        '''
+
+        :param cbpi: craftbeerpi object
+        '''
+        self.cbpi = cbpi
+        self.logger = logging.getLogger(__name__)
+        self.configuration = False
+        self.datalogger = {}
+
+    def log_data(self, name: str, value: str) -> None:
+        self.logfiles = self.cbpi.config.get("CSVLOGFILES", "Yes")
+        self.influxdb = self.cbpi.config.get("INFLUXDB", "No")
+        if self.logfiles == "Yes":
+            if name not in self.datalogger:
+                max_bytes = int(self.cbpi.config.get("SENSOR_LOG_MAX_BYTES", 100000))
+                backup_count = int(self.cbpi.config.get("SENSOR_LOG_BACKUP_COUNT", 3))
+
+                data_logger = logging.getLogger('cbpi.sensor.%s' % name)
+                data_logger.propagate = False
+                data_logger.setLevel(logging.DEBUG)
+                handler = RotatingFileHandler('./logs/sensor_%s.log' % name, maxBytes=max_bytes, backupCount=backup_count)
+                data_logger.addHandler(handler)
+                self.datalogger[name] = data_logger
+
+            formatted_time = strftime("%Y-%m-%d %H:%M:%S", localtime())
+            self.datalogger[name].info("%s,%s" % (formatted_time, str(value)))
+        if self.influxdb == "Yes":
+            self.influxdbcloud = self.cbpi.config.get("INFLUXDBCLOUD", "No")
+            self.influxdbaddr = self.cbpi.config.get("INFLUXDBADDR", None)
+            self.influxdbport = self.cbpi.config.get("INFLUXDBPORT", None)
+            self.influxdbname = self.cbpi.config.get("INFLUXDBNAME", None)
+            self.influxdbuser = self.cbpi.config.get("INFLUXDBUSER", None)
+            self.influxdbpwd = self.cbpi.config.get("INFLUXDBPWD", None)
+
+            id = name
+            try:
+                chars = {'ö':'oe','ä':'ae','ü':'ue','Ö':'Oe','Ä':'Ae','Ü':'Ue'}
+                sensor=self.cbpi.sensor.find_by_id(name)
+                if sensor is not None:
+                    itemname=sensor.name.replace(" ", "_")
+                    for char in chars:
+                        itemname = itemname.replace(char,chars[char])
+                    out="measurement,source=" + itemname + ",itemID=" + str(id) + " value="+str(value)
+            except Exception as e:
+                logging.error("InfluxDB ID Error: {}".format(e))
+
+            if self.influxdbcloud == "Yes":
+                self.influxdburl="https://" + self.influxdbaddr + "/api/v2/write?org=" + self.influxdbuser + "&bucket=" + self.influxdbname + "&precision=s"
+                try:
+                    header = {'User-Agent': name, 'Authorization': "Token {}".format(self.influxdbpwd)}
+                    http = urllib3.PoolManager()
+                    req = http.request('POST',self.influxdburl, body=out, headers = header)
+                except Exception as e:
+                    logging.error("InfluxDB cloud write Error: {}".format(e))
+
+            else:
+                self.base64string = base64.b64encode(('%s:%s' % (self.influxdbuser,self.influxdbpwd)).encode())
+                self.influxdburl='http://' + self.influxdbaddr + ':' + str(self.influxdbport) + '/write?db=' + self.influxdbname
+                try:
+                    header = {'User-Agent': name, 'Content-Type': 'application/x-www-form-urlencoded','Authorization': 'Basic %s' % self.base64string.decode('utf-8')}
+                    http = urllib3.PoolManager()
+                    req = http.request('POST',self.influxdburl, body=out, headers = header)
+                except Exception as e:
+                    logging.error("InfluxDB write Error: {}".format(e))
+
+
+
+    async def get_data(self, names, sample_rate='60s'):
+        logging.info("Start Log for {}".format(names))
+        '''
+        :param names: name as string or list of names as string
+        :param sample_rate: rate for resampling the data
+        :return:
+        '''
+        # make string to array
+        if isinstance(names, list) is False:
+            names = [names]
+
+        # remove duplicates
+        names = set(names)
+
+
+        result = None
+
+        def dateparse(time_in_secs):
+            '''
+            Internal helper for date parsing
+            :param time_in_secs:
+            :return:
+            '''
+            return datetime.datetime.strptime(time_in_secs, '%Y-%m-%d %H:%M:%S')
+
+        def datetime_to_str(o):
+            if isinstance(o, datetime.datetime):
+                return o.__str__()
+
+        for name in names:
+            # get all log names
+            all_filenames = glob.glob('./logs/sensor_%s.log*' % name)
+            # concat all logs
+            df = pd.concat([pd.read_csv(f, parse_dates=True, date_parser=dateparse, index_col='DateTime', names=['DateTime', name], header=None) for f in all_filenames])
+            logging.info("Read all files for {}".format(names))
+            # resample if rate provided
+            if sample_rate is not None:
+                df = df[name].resample(sample_rate).max()
+                logging.info("Sampled now for {}".format(names))
+            df = df.dropna()
+            # take every nth row so that total number of rows does not exceed max_rows * 2
+            max_rows = 500
+            total_rows = df.shape[0]
+            if (total_rows > 0) and (total_rows > max_rows):
+                nth = int(total_rows/max_rows)
+                if nth > 1:
+                    df = df.iloc[::nth]
+
+            if result is None:
+                result = df
+            else:
+                result = pd.merge(result, df, how='outer', left_index=True, right_index=True)
+
+        data = {"time": df.index.tolist()}
+
+        if len(names) > 1:
+            for name in names:
+                data[name] = result[name].interpolate(limit_direction='both', limit=10).tolist()
+        else:
+            data[name] = result.interpolate().tolist()
+
+        logging.info("Send Log for {}".format(names))
+
+        return data
+
+    async def get_data2(self, ids) -> dict:
+        def dateparse(time_in_secs):
+            return datetime.datetime.strptime(time_in_secs, '%Y-%m-%d %H:%M:%S')
+
+        result = dict()
+        for id in ids:
+            # df = pd.read_csv("./logs/sensor_%s.log" % id, parse_dates=True, date_parser=dateparse, index_col='DateTime', names=['DateTime',"Values"], header=None)
+            # concat all logs
+            all_filenames = glob.glob('./logs/sensor_%s.log*' % id)
+            df = pd.concat([pd.read_csv(f, parse_dates=True, date_parser=dateparse, index_col='DateTime', names=['DateTime', 'Values'], header=None) for f in all_filenames])
+            df = df.resample('60s').max()
+            df = df.dropna()
+            result[id] = {"time": df.index.astype(str).tolist(), "value":df.Values.tolist()}
+        return result
+
+
+
+    def get_logfile_names(self, name:str ) -> list:
+        '''
+        Get all log file names
+        :param name: log name as string. pattern /logs/sensor_%s.log*
+        :return: list of log file names
+        '''
+
+        return [os.path.basename(x) for x in glob.glob('./logs/sensor_%s.log*' % name)]
+
+    def clear_log(self, name:str ) -> str:
+
+        all_filenames = glob.glob('./logs/sensor_%s.log*' % name)
+        for f in all_filenames:
+            os.remove(f)
+
+        if name in self.datalogger:
+            del self.datalogger[name]
+
+
+    def get_all_zip_file_names(self, name: str) -> list:
+
+        '''
+        Return a list of all zip file names
+        :param name:
+        :return:
+        '''
+
+        return [os.path.basename(x) for x in glob.glob('./logs/*-sensor-%s.zip' % name)]
+
+    def clear_zip(self, name:str ) -> None:
+        """
+        clear all zip files for a sensor
+        :param name: sensor name
+        :return: None
+        """
+
+        all_filenames = glob.glob('./logs/*-sensor-%s.zip' % name)
+        for f in all_filenames:
+            os.remove(f)
+
+    def zip_log_data(self, name: str) -> str:
+        """
+        :param name: sensor name
+        :return: zip_file_name
+        """
+
+        formatted_time = strftime("%Y-%m-%d-%H_%M_%S", localtime())
+        file_name = './logs/%s-sensor-%s.zip' % (formatted_time, name)
+        zip = zipfile.ZipFile(file_name, 'w', zipfile.ZIP_DEFLATED)
+        all_filenames = glob.glob('./logs/sensor_%s.log*' % name)
+        for f in all_filenames:
+            zip.write(os.path.join(f))
+        zip.close()
+        return os.path.basename(file_name)
+
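The re-added controller keeps the new get_data2 behaviour: instead of reading only the active log, it globs for the active file plus all rotated backups, concatenates them, and resamples to one-minute maxima. A standalone sketch of that read path, assuming the same CSV layout the controller writes and a pandas version that still supports date_parser, as the controller itself does; read_sensor_history is a hypothetical name:

    import datetime
    import glob
    import pandas as pd

    def read_sensor_history(sensor_id):
        # sensor_<id>.log plus rotated backups sensor_<id>.log.1, .log.2, ...
        files = glob.glob('./logs/sensor_%s.log*' % sensor_id)

        def dateparse(s):
            return datetime.datetime.strptime(s, '%Y-%m-%d %H:%M:%S')

        frames = [pd.read_csv(f, parse_dates=True, date_parser=dateparse,
                              index_col='DateTime', names=['DateTime', 'Values'],
                              header=None) for f in files]
        df = pd.concat(frames)
        # Backups are concatenated out of chronological order; resampling to
        # one-minute maxima groups by time bin and returns a sorted index.
        return df.resample('60s').max().dropna()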
From 16190124d0e608baec14b44a98696b7721a39f5f Mon Sep 17 00:00:00 2001
From: phylax2020
Date: Tue, 19 Jul 2022 21:03:17 +0200
Subject: [PATCH 8/8] hopefully this commit works!
---
 cbpi/controller/log_file_controller.py  | 27 ++++++++++++++++---------
 cbpi/extension/ConfigUpdate/__init__.py |  3 ++-
 2 files changed, 20 insertions(+), 10 deletions(-)

diff --git a/cbpi/controller/log_file_controller.py b/cbpi/controller/log_file_controller.py
index bf10cb2..968929f 100644
--- a/cbpi/controller/log_file_controller.py
+++ b/cbpi/controller/log_file_controller.py
@@ -31,13 +31,8 @@ class LogController:
         self.influxdb = self.cbpi.config.get("INFLUXDB", "No")
         if self.logfiles == "Yes":
             if name not in self.datalogger:
-<<<<<<< HEAD
                 max_bytes = int(self.cbpi.config.get("SENSOR_LOG_MAX_BYTES", 100000))
                 backup_count = int(self.cbpi.config.get("SENSOR_LOG_BACKUP_COUNT", 3))
-=======
-                max_bytes = self.cbpi.config.get("SENSOR_LOG_MAX_BYTES", 1048576)
-                backup_count = self.cbpi.config.get("SENSOR_LOG_BACKUP_COUNT", 3)
->>>>>>> 5ea4160b4f6b7a7c759323cc01b0e40a05bca4b9
 
                 data_logger = logging.getLogger('cbpi.sensor.%s' % name)
                 data_logger.propagate = False
@@ -47,7 +42,7 @@ class LogController:
                 self.datalogger[name] = data_logger
 
             formatted_time = strftime("%Y-%m-%d %H:%M:%S", localtime())
-            self.datalogger[name].info("%s,%s" % (formatted_time, value))
+            self.datalogger[name].info("%s,%s" % (formatted_time, str(value)))
         if self.influxdb == "Yes":
             self.influxdbcloud = self.cbpi.config.get("INFLUXDBCLOUD", "No")
             self.influxdbaddr = self.cbpi.config.get("INFLUXDBADDR", None)
@@ -121,7 +116,6 @@ class LogController:
         for name in names:
             # get all log names
             all_filenames = glob.glob('./logs/sensor_%s.log*' % name)
-            # concat all logs
             df = pd.concat([pd.read_csv(f, parse_dates=True, date_parser=dateparse, index_col='DateTime', names=['DateTime', name], header=None) for f in all_filenames])
             logging.info("Read all files for {}".format(names))
             # resample if rate provided
@@ -130,19 +124,29 @@ class LogController:
             if sample_rate is not None:
                 df = df[name].resample(sample_rate).max()
                 logging.info("Sampled now for {}".format(names))
             df = df.dropna()
+            # take every nth row so that total number of rows does not exceed max_rows * 2
+            max_rows = 500
+            total_rows = df.shape[0]
+            if (total_rows > 0) and (total_rows > max_rows):
+                nth = int(total_rows/max_rows)
+                if nth > 1:
+                    df = df.iloc[::nth]
+
             if result is None:
                 result = df
             else:
                 result = pd.merge(result, df, how='outer', left_index=True, right_index=True)
 
         data = {"time": df.index.tolist()}
-        
+
         if len(names) > 1:
             for name in names:
                 data[name] = result[name].interpolate(limit_direction='both', limit=10).tolist()
         else:
             data[name] = result.interpolate().tolist()
+        logging.info("Send Log for {}".format(names))
+
         return data
 
     async def get_data2(self, ids) -> dict:
@@ -151,7 +155,12 @@ class LogController:
         def dateparse(time_in_secs):
             return datetime.datetime.strptime(time_in_secs, '%Y-%m-%d %H:%M:%S')
 
         result = dict()
         for id in ids:
-            df = pd.read_csv("./logs/sensor_%s.log" % id, parse_dates=True, date_parser=dateparse, index_col='DateTime', names=['DateTime',"Values"], header=None)
+            # df = pd.read_csv("./logs/sensor_%s.log" % id, parse_dates=True, date_parser=dateparse, index_col='DateTime', names=['DateTime',"Values"], header=None)
+            # concat all logs
+            all_filenames = glob.glob('./logs/sensor_%s.log*' % id)
+            df = pd.concat([pd.read_csv(f, parse_dates=True, date_parser=dateparse, index_col='DateTime', names=['DateTime', 'Values'], header=None) for f in all_filenames])
+            df = df.resample('60s').max()
+            df = df.dropna()
             result[id] = {"time": df.index.astype(str).tolist(), "value":df.Values.tolist()}
         return result
 
diff --git a/cbpi/extension/ConfigUpdate/__init__.py b/cbpi/extension/ConfigUpdate/__init__.py
index 8380e75..67ac417 100644
--- a/cbpi/extension/ConfigUpdate/__init__.py
+++ b/cbpi/extension/ConfigUpdate/__init__.py
@@ -47,7 +47,8 @@ class ConfigUpdate(CBPiExtension):
         influxdbcloud = self.cbpi.config.get("INFLUXDBCLOUD", None)
         mqttupdate = self.cbpi.config.get("MQTTUpdate", None)
         PRESSURE_UNIT = self.cbpi.config.get("PRESSURE_UNIT", None)
-
+        SENSOR_LOG_BACKUP_COUNT = self.cbpi.config.get("SENSOR_LOG_BACKUP_COUNT", None)
+        SENSOR_LOG_MAX_BYTES = self.cbpi.config.get("SENSOR_LOG_MAX_BYTES", None)
 
         if boil_temp is None:
             logger.info("INIT Boil Temp Setting")