Mirror of https://github.com/PiBrewing/craftbeerpi4.git, synced 2024-11-25 16:38:36 +01:00
Allow changing the log file size and the log file backup count in the settings. The default log file size is now 131072 bytes (was 1 MB). The dashboard with charts can become blocked if the accumulated size of the log file data exceeds a few megabytes. The chart refresh rate should also be set to more than about 10 seconds so that chart plotting cannot block the user interface. In the functions get_data (for dashboard charts) and get_data2 (for the chart in analytics), the pandas resample function is reactivated and added, respectively, to further reduce the amount of data transferred to the clients. In the function log_data, max_bytes and backup_count must be converted to int, because these settings are stored as strings when they are changed in the settings dialog.
parent e95237eef6
commit 8621888d81
2 changed files with 24 additions and 7 deletions
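
For illustration, here is a minimal, self-contained sketch of the resampling idea described in the commit message, using synthetic data (the column name, sample rate and values below are illustrative, not the actual CraftBeerPi call sites):

    import pandas as pd

    # Synthetic sensor log: one reading per second for an hour (3600 rows).
    idx = pd.date_range("2024-01-01 12:00:00", periods=3600, freq="s")
    df = pd.DataFrame({"Temp": 20 + idx.second / 10.0}, index=idx)

    # Downsample to one value per minute, keeping the maximum of each bucket,
    # as the reactivated resample call in get_data does with sample_rate.
    reduced = df["Temp"].resample("60s").max().dropna()

    print(len(df), "->", len(reduced))  # 3600 -> 60 points sent to the chart

Resampling with max() keeps peaks visible in the chart while cutting the number of points that have to be serialized and plotted by the dashboard.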
@@ -144,5 +144,19 @@
         "options": null,
         "type": "step",
         "value": "NotificationStep"
-    }
+    },
+    "SENSOR_LOG_BACKUP_COUNT": {
+        "description": "Max. number of backup logs",
+        "name": "SENSOR_LOG_BACKUP_COUNT",
+        "options": null,
+        "type": "number",
+        "value": 3
+    },
+    "SENSOR_LOG_MAX_BYTES": {
+        "description": "Max. number of bytes in sensor logs",
+        "name": "SENSOR_LOG_MAX_BYTES",
+        "options": null,
+        "type": "number",
+        "value": "100000"
+    }
 }
@@ -31,8 +31,8 @@ class LogController:
         self.influxdb = self.cbpi.config.get("INFLUXDB", "No")
         if self.logfiles == "Yes":
             if name not in self.datalogger:
-                max_bytes = self.cbpi.config.get("SENSOR_LOG_MAX_BYTES", 1048576)
-                backup_count = self.cbpi.config.get("SENSOR_LOG_BACKUP_COUNT", 3)
+                max_bytes = int(self.cbpi.config.get("SENSOR_LOG_MAX_BYTES", 131072))
+                backup_count = int(self.cbpi.config.get("SENSOR_LOG_BACKUP_COUNT", 3))

                 data_logger = logging.getLogger('cbpi.sensor.%s' % name)
                 data_logger.propagate = False
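
The converted integers are presumably handed to Python's logging.handlers.RotatingFileHandler, which requires numeric maxBytes/backupCount values (comparing a string against the file size would raise a TypeError). A minimal sketch under that assumption; the logger and file names below are illustrative, not the exact CraftBeerPi code:

    import logging
    from logging.handlers import RotatingFileHandler

    # Values as they come back from the settings dialog (strings), converted to int.
    max_bytes = int("131072")
    backup_count = int("3")

    data_logger = logging.getLogger('cbpi.sensor.%s' % "example")  # illustrative sensor name
    data_logger.propagate = False
    data_logger.setLevel(logging.INFO)

    handler = RotatingFileHandler("sensor_example.log",
                                  maxBytes=max_bytes,
                                  backupCount=backup_count)
    data_logger.addHandler(handler)

    # Values are appended until ~128 KiB, then the file rolls over to
    # sensor_example.log.1 ... sensor_example.log.3 and older data is discarded.
    data_logger.info("21.5")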
@@ -120,10 +120,10 @@ class LogController:
             df = pd.concat([pd.read_csv(f, parse_dates=True, date_parser=dateparse, index_col='DateTime', names=['DateTime', name], header=None) for f in all_filenames])
             logging.info("Read all files for {}".format(names))
             # resample if rate provided
-            # if sample_rate is not None:
-            #     df = df[name].resample(sample_rate).max()
-            #     logging.info("Sampled now for {}".format(names))
-            df = df[name].dropna()
+            if sample_rate is not None:
+                df = df[name].resample(sample_rate).max()
+                logging.info("Sampled now for {}".format(names))
+            df = df.dropna()
             # take every nth row so that total number of rows does not exceed max_rows * 2
             max_rows = 500
             total_rows = df.shape[0]
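
The every-nth-row thinning referenced by the max_rows context lines is not part of this hunk; the following is only a sketch of that common technique with synthetic data (the exact CraftBeerPi implementation below this hunk may differ):

    import math
    import pandas as pd

    max_rows = 500
    s = pd.Series(range(2400),
                  index=pd.date_range("2024-01-01", periods=2400, freq="min"))

    total_rows = s.shape[0]
    if total_rows > max_rows * 2:
        # keep only every nth row so the result does not exceed max_rows * 2 points
        n = math.ceil(total_rows / (max_rows * 2))
        s = s.iloc[::n]

    print(total_rows, "->", s.shape[0])  # 2400 -> 800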
@@ -138,6 +138,7 @@ class LogController:
             result = pd.merge(result, df, how='outer', left_index=True, right_index=True)

         data = {"time": df.index.tolist()}

         if len(names) > 1:
             for name in names:
                 data[name] = result[name].interpolate(limit_direction='both', limit=10).tolist()
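
For reference, a small self-contained sketch of the outer merge plus limited interpolation used above to align several sensors on one time axis (the two series and their timestamps are synthetic):

    import pandas as pd

    a = pd.Series([20.0, 21.0, 22.0], name="Sensor1",
                  index=pd.to_datetime(["2024-01-01 12:00", "2024-01-01 12:02", "2024-01-01 12:04"]))
    b = pd.Series([60.0, 62.0], name="Sensor2",
                  index=pd.to_datetime(["2024-01-01 12:00", "2024-01-01 12:04"]))

    # The outer merge keeps every timestamp from both sensors; Sensor2 then has
    # a gap at 12:02 which interpolate() fills (61.0), limited to 10 consecutive
    # missing values and allowed in both directions, as in the line above.
    result = pd.merge(a, b, how='outer', left_index=True, right_index=True)
    for name in ("Sensor1", "Sensor2"):
        print(name, result[name].interpolate(limit_direction='both', limit=10).tolist())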
@@ -158,6 +159,8 @@ class LogController:
             # concat all logs
             all_filenames = glob.glob('./logs/sensor_%s.log*' % id)
             df = pd.concat([pd.read_csv(f, parse_dates=True, date_parser=dateparse, index_col='DateTime', names=['DateTime', 'Values'], header=None) for f in all_filenames])
+            df = df.resample('60s').max()
+            df = df.dropna()
             result[id] = {"time": df.index.astype(str).tolist(), "value":df.Values.tolist()}
         return result
