mirror of
https://github.com/PiBrewing/craftbeerpi4.git
synced 2024-11-21 22:48:16 +01:00
moves CSV Sensor logging into core extension
This commit is contained in:
parent
4eebb17291
commit
384a8d5422
5 changed files with 60 additions and 27 deletions
|
@ -47,32 +47,13 @@ class LogController:
|
|||
asyncio.create_task(method(self.cbpi, id, value, formatted_time, name))
|
||||
|
||||
def log_data(self, id: str, value: str) -> None:
|
||||
# check which default log targets are enabled:
|
||||
self.logfiles = self.cbpi.config.get("CSVLOGFILES", "Yes")
|
||||
formatted_time = strftime("%Y-%m-%d %H:%M:%S", localtime())
|
||||
# ^^ both legacy log targets should probably be implemented as a core plugin each using the hook instead
|
||||
|
||||
# CSV target:
|
||||
if self.logfiles == "Yes":
|
||||
if id not in self.datalogger:
|
||||
max_bytes = int(self.cbpi.config.get("SENSOR_LOG_MAX_BYTES", 100000))
|
||||
backup_count = int(self.cbpi.config.get("SENSOR_LOG_BACKUP_COUNT", 3))
|
||||
|
||||
data_logger = logging.getLogger('cbpi.sensor.%s' % id)
|
||||
data_logger.propagate = False
|
||||
data_logger.setLevel(logging.DEBUG)
|
||||
handler = RotatingFileHandler(os.path.join(self.logsFolderPath, f"sensor_{id}.log"), maxBytes=max_bytes, backupCount=backup_count)
|
||||
data_logger.addHandler(handler)
|
||||
self.datalogger[id] = data_logger
|
||||
|
||||
self.datalogger[id].info("%s,%s" % (formatted_time, str(value)))
|
||||
|
||||
# all plugin targets:
|
||||
if self.sensor_data_listeners: # true if there are listeners
|
||||
try:
|
||||
sensor=self.cbpi.sensor.find_by_id(id)
|
||||
if sensor is not None:
|
||||
name = sensor.name.replace(" ", "_")
|
||||
formatted_time = strftime("%Y-%m-%d %H:%M:%S", localtime())
|
||||
asyncio.create_task(self._call_sensor_data_listeners(id, value, formatted_time, name))
|
||||
except Exception as e:
|
||||
logging.error("sensor logging listener exception: {}".format(e))
|
||||
|
@ -171,10 +152,6 @@ class LogController:
|
|||
def clear_log(self, name:str ) -> str:
|
||||
all_filenames = glob.glob(os.path.join(self.logsFolderPath, f"sensor_{name}.log*"))
|
||||
|
||||
if name in self.datalogger:
|
||||
self.datalogger[name].removeHandler(self.datalogger[name].handlers[0])
|
||||
del self.datalogger[name]
|
||||
|
||||
for f in all_filenames:
|
||||
try:
|
||||
os.remove(f)
|
||||
|
|
|
@ -205,7 +205,7 @@ class ConfigUpdate(CBPiExtension):
|
|||
if logfiles is None:
|
||||
logger.info("INIT CSV logfiles")
|
||||
try:
|
||||
await self.cbpi.config.add("CSVLOGFILES", "Yes", ConfigType.SELECT, "Write sensor data to csv logfiles",
|
||||
await self.cbpi.config.add("CSVLOGFILES", "Yes", ConfigType.SELECT, "Write sensor data to csv logfiles (enabling requires restart)",
|
||||
[{"label": "Yes", "value": "Yes"},
|
||||
{"label": "No", "value": "No"}])
|
||||
except:
|
||||
|
@ -215,7 +215,7 @@ class ConfigUpdate(CBPiExtension):
|
|||
if influxdb is None:
|
||||
logger.info("INIT Influxdb")
|
||||
try:
|
||||
await self.cbpi.config.add("INFLUXDB", "No", ConfigType.SELECT, "Write sensor data to influxdb",
|
||||
await self.cbpi.config.add("INFLUXDB", "No", ConfigType.SELECT, "Write sensor data to influxdb (enabling requires restart)",
|
||||
[{"label": "Yes", "value": "Yes"},
|
||||
{"label": "No", "value": "No"}])
|
||||
except:
|
||||
|
|
52
cbpi/extension/SensorLogTarget_CSV/__init__.py
Normal file
52
cbpi/extension/SensorLogTarget_CSV/__init__.py
Normal file
|
@ -0,0 +1,52 @@
|
|||
|
||||
# -*- coding: utf-8 -*-
|
||||
import os
|
||||
from logging.handlers import RotatingFileHandler
|
||||
import logging
|
||||
from unittest.mock import MagicMock, patch
|
||||
import asyncio
|
||||
import random
|
||||
from cbpi.api import *
|
||||
from cbpi.api.config import ConfigType
|
||||
import urllib3
|
||||
import base64
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
class SensorLogTargetCSV(CBPiExtension):
    """Core extension that writes sensor readings to rotating CSV log files.

    On startup (unless CSVLOGFILES is "No") it registers itself as a
    sensor-data listener on the central log controller; the listener
    callback appends one "<formatted_time>,<value>" row per reading to a
    per-sensor rotating file ``sensor_<id>.log``.
    """

    def __init__(self, cbpi):  # called from cbpi on start
        self.cbpi = cbpi
        # Cache of per-sensor-id logging.Logger objects so handlers are
        # only created once per sensor.
        self.datalogger = {}
        # FIX: logsFolderPath is read by log_data_to_CSV() below but was
        # never set on this extension (it used to be an attribute of the
        # LogController before the CSV target was moved here). Reuse the
        # log controller's folder so the CSV files keep landing in the
        # same place. NOTE(review): attribute name taken from the old
        # LogController code — confirm against its implementation.
        self.logsFolderPath = self.cbpi.log.logsFolderPath
        self.logfiles = self.cbpi.config.get("CSVLOGFILES", "Yes")
        if self.logfiles == "No":
            return  # never run()
        self._task = asyncio.create_task(self.run())  # one time run() only

    async def run(self):  # called by __init__ once on start if CSV is enabled
        """Subscribe the CSV writer as a sensor-data listener."""
        self.listener_ID = self.cbpi.log.add_sensor_data_listener(self.log_data_to_CSV)
        logger.info("CSV sensor log target listener ID: {}".format(self.listener_ID))

    async def log_data_to_CSV(self, cbpi, id:str, value:str, formatted_time, name):  # called by log_data() hook from the log file controller
        """Append one CSV row for sensor `id`, lazily creating its rotating logger.

        Re-reads CSVLOGFILES on every call so the target can be disabled
        on the fly without a restart.
        """
        self.logfiles = self.cbpi.config.get("CSVLOGFILES", "Yes")
        if self.logfiles == "No":
            # We intentionally do not unsubscribe the listener here because then we had no way of resubscribing it without a restart of cbpi.
            # As long as cbpi was STARTED with CSVLOGFILES set to Yes this function is still subscribed, so changes can be made on the fly,
            # but after initially enabling this logging target a restart is required.
            return
        if id not in self.datalogger:
            max_bytes = int(self.cbpi.config.get("SENSOR_LOG_MAX_BYTES", 100000))
            backup_count = int(self.cbpi.config.get("SENSOR_LOG_BACKUP_COUNT", 3))

            data_logger = logging.getLogger('cbpi.sensor.%s' % id)
            data_logger.propagate = False  # keep raw CSV rows out of the root logger
            data_logger.setLevel(logging.DEBUG)
            handler = RotatingFileHandler(os.path.join(self.logsFolderPath, f"sensor_{id}.log"), maxBytes=max_bytes, backupCount=backup_count)
            data_logger.addHandler(handler)
            self.datalogger[id] = data_logger

        self.datalogger[id].info("%s,%s" % (formatted_time, str(value)))
|
||||
|
||||
def setup(cbpi):
    """Plugin-loader entry point: register the CSV sensor-log extension.

    :param cbpi: the craftbeerpi core object whose plugin registry is used
    """
    cbpi.plugin.register("SensorLogTargetCSV", SensorLogTargetCSV)
|
3
cbpi/extension/SensorLogTarget_CSV/config.yaml
Normal file
3
cbpi/extension/SensorLogTarget_CSV/config.yaml
Normal file
|
@ -0,0 +1,3 @@
|
|||
name: SensorLogTargetCSV
|
||||
version: 4
|
||||
active: true
|
|
@ -39,6 +39,7 @@ class SensorLogTargetInfluxDB(CBPiExtension):
|
|||
if self.influxdb == "No":
|
||||
# We intentionally do not unsubscribe the listener here because then we had no way of resubscribing it without a restart of cbpi
|
||||
# as long as cbpi was STARTED with INFLUXDB set to Yes this function is still subscribed, so changes can be made on the fly.
|
||||
# but after initially enabling this logging target a restart is required.
|
||||
return
|
||||
self.influxdbcloud = self.cbpi.config.get("INFLUXDBCLOUD", "No")
|
||||
self.influxdbaddr = self.cbpi.config.get("INFLUXDBADDR", None)
|
||||
|
|
Loading…
Reference in a new issue