log controller added

This commit is contained in:
manuel83 2019-08-05 20:51:20 +02:00
parent 8db6251fb7
commit 8200c48dfc
22 changed files with 807 additions and 961 deletions

File diff suppressed because it is too large Load diff

View file

@ -17,23 +17,11 @@ class CBPiSensor(CBPiExtension):
def log_data(self, value):
formatted_time = strftime("%Y-%m-%d %H:%M:%S", localtime())
self.data_logger.debug("%s,%s" % (formatted_time, value))
self.cbpi.log.log_data(self.id, value)
def init(self):
self.data_logger = logging.getLogger('cbpi.sensor.%s' % self.id)
self.data_logger.propagate = False
self.data_logger.setLevel(logging.DEBUG)
handler = RotatingFileHandler('./logs/sensor_%s.log' % self.id, maxBytes=2000, backupCount=10)
self.data_logger.addHandler(handler)
pass
async def run(self, cbpi):
self.logger.warning("Sensor Init not implemented")

View file

@ -36,6 +36,12 @@ def copy_splash():
shutil.copy(srcfile, destfile)
print("Splash Srceen created")
def clear_db():
import os.path
if os.path.exists(os.path.join(".", "craftbeerpi.db")) is True:
os.remove(os.path.join(".", "craftbeerpi.db"))
print("database Cleared")
def check_for_setup():
@ -80,6 +86,10 @@ def main():
copy_splash()
return
if args.action == "cleardb":
clear_db()
return
if args.action == "plugins":
list_plugins()
return

View file

@ -0,0 +1,119 @@
import datetime
import glob
import logging
import os
from logging.handlers import RotatingFileHandler
from time import strftime, localtime
import pandas as pd
class LogController:
    """Manages per-sensor CSV data logs (rotating files under ./logs)."""

    def __init__(self, cbpi):
        '''
        :param cbpi: craftbeerpi object
        '''
        self.cbpi = cbpi
        self.logger = logging.getLogger(__name__)
        # cache of per-sensor loggers (each with one RotatingFileHandler), keyed by sensor name
        self.datalogger = {}

    def log_data(self, name: str, value: str) -> None:
        '''
        Append a "<timestamp>,<value>" line to the rotating log file of the given sensor.

        :param name: sensor name; file pattern ./logs/sensor_<name>.log
        :param value: value to log
        :return: None
        '''
        if name not in self.datalogger:
            # lazily create the sensor logger on first write; size/rotation are configurable
            max_bytes = self.cbpi.config.get("SENSOR_LOG_MAX_BYTES", 1048576)
            backup_count = self.cbpi.config.get("SENSOR_LOG_BACKUP_COUNT", 3)
            data_logger = logging.getLogger('cbpi.sensor.%s' % name)
            data_logger.propagate = False
            data_logger.setLevel(logging.DEBUG)
            handler = RotatingFileHandler('./logs/sensor_%s.log' % name, maxBytes=max_bytes, backupCount=backup_count)
            data_logger.addHandler(handler)
            self.datalogger[name] = data_logger
        formatted_time = strftime("%Y-%m-%d %H:%M:%S", localtime())
        self.datalogger[name].info("%s,%s" % (formatted_time, value))

    async def get_data(self, names, sample_rate='60s'):
        '''
        Read logged sensor data back as {"time": [...], "<name>": [...], ...}.

        :param names: name as string or list of names as string
        :param sample_rate: rate for resampling the data (pandas offset alias), or None to skip resampling
        :return: dict with a "time" list and one value list per sensor name
        '''
        # make string to array
        if isinstance(names, list) is False:
            names = [names]
        # remove duplicates
        names = set(names)

        result = None

        def dateparse(time_in_secs):
            '''
            Internal helper for date parsing
            :param time_in_secs: timestamp string "%Y-%m-%d %H:%M:%S"
            :return: datetime object
            '''
            return datetime.datetime.strptime(time_in_secs, '%Y-%m-%d %H:%M:%S')

        for name in names:
            # get all log names (current file plus rotated backups)
            all_filenames = glob.glob('./logs/sensor_%s.log*' % name)
            # concat all logs
            df = pd.concat([pd.read_csv(f, parse_dates=True, date_parser=dateparse, index_col='DateTime', names=['DateTime', name], header=None) for f in all_filenames])
            # resample if rate provided
            if sample_rate is not None:
                df = df[name].resample(sample_rate).max()
            df = df.dropna()
            if result is None:
                result = df
            else:
                result = pd.merge(result, df, how='outer', left_index=True, right_index=True)

        # fix: the time axis must come from the merged result, not from the
        # last processed frame, otherwise multi-sensor queries return a time
        # axis that only matches the final sensor
        data = {"time": result.index.tolist()}
        if len(names) > 1:
            for name in names:
                data[name] = result[name].interpolate(limit_direction='both', limit=10).tolist()
        else:
            data[name] = result.interpolate().tolist()
        return data

    def get_logfile_names(self, name: str) -> list:
        '''
        Get all log file names
        :param name: log name as string. pattern /logs/sensor_%s.log*
        :return: list of log file names
        '''
        # fix: original read "return = glob.glob(...)" which is a SyntaxError
        return glob.glob('./logs/sensor_%s.log*' % name)

    async def clear_log(self, name: str) -> None:
        '''
        Remove all log files of a sensor and drop its cached logger.

        :param name: log name as string. pattern /logs/sensor_%s.log*
        :return: None
        '''
        if name in self.datalogger:
            # detach and close the handlers first; otherwise the cached logger
            # keeps writing to the deleted file and a later log_data() call
            # would stack a second handler onto the same logger
            data_logger = self.datalogger[name]
            for handler in data_logger.handlers[:]:
                data_logger.removeHandler(handler)
                handler.close()
            del self.datalogger[name]
        for f in glob.glob('./logs/sensor_%s.log*' % name):
            os.remove(f)

View file

@ -84,7 +84,9 @@ class PluginController():
try:
logger.info("Trying to load plugin %s" % filename)
data = load_config(os.path.join(this_directory, "../extension/%s/config.yaml" % filename))
if (data.get("version") == 4):
if (data.get("active") is True and data.get("version") == 4):
self.modules[filename] = import_module("cbpi.extension.%s" % (filename))
self.modules[filename].setup(self.cbpi)
logger.info("Plugin %s loaded successful" % filename)

View file

@ -1,11 +1,9 @@
import json
import logging
import time
from cbpi.api import *
from cbpi.controller.crud_controller import CRUDController
from cbpi.database.model import StepModel
from utils.encoder import ComplexEncoder
class StepController(CRUDController):

View file

@ -1,160 +1,32 @@
import datetime
import re
import logging
import aiohttp
from aiohttp import web
import os
from aiojobs.aiohttp import get_scheduler_from_app
from cbpi.api import *
from cbpi.utils import json_dumps
class SystemController():
class SystemController:
def __init__(self, cbpi):
self.cbpi = cbpi
self.service = cbpi.actor
self.logger = logging.getLogger(__name__)
self.cbpi.register(self, "/system")
self.cbpi.app.on_startup.append(self.check_for_update)
async def check_for_update(self, app):
try:
timeout = aiohttp.ClientTimeout(total=1)
async with aiohttp.ClientSession(timeout=timeout) as session:
async with session.post('http://localhost:2202/check', json=dict(version=app["cbpi"].version)) as resp:
if (resp.status == 200):
data = await resp.json()
print(data)
@request_mapping("/", method="GET", auth_required=False)
async def state(self, request):
"""
---
description: Get complete system state
tags:
- System
responses:
"200":
description: successful operation
"""
return web.json_response(data=dict(
actor=self.cbpi.actor.get_state(),
sensor=self.cbpi.sensor.get_state(),
kettle=self.cbpi.kettle.get_state(),
step=await self.cbpi.step.get_state(),
dashboard=self.cbpi.dashboard.get_state(),
translations=self.cbpi.translation.get_all(),
config=self.cbpi.config.get_state())
, dumps=json_dumps)
@request_mapping("/restart", method="POST", name="RestartServer", auth_required=False)
def restart(self, request):
"""
---
description: Restart System - Not implemented
tags:
- System
responses:
"200":
description: successful operation
"""
return web.Response(text="NOT IMPLEMENTED")
@request_mapping("/shutdown", method="POST", name="ShutdownSerer", auth_required=False)
def shutdown(self, request):
"""
---
description: Shutdown System - Not implemented
tags:
- System
responses:
"200":
description: successful operation
"""
return web.Response(text="NOT IMPLEMENTED")
@request_mapping("/jobs", method="GET", name="get_jobs", auth_required=False)
def get_all_jobs(self, request):
"""
---
description: Get all running Jobs
tags:
- System
responses:
"200":
description: successful operation
"""
scheduler = get_scheduler_from_app(self.cbpi.app)
result = []
for j in scheduler:
try:
result.append(dict(name=j.name, type=j.type, time=j.start_time))
if data.get("version") != self.cbpi.version:
self.logger.info("Version Check: Newer Version exists")
else:
self.logger.info("Version Check: You are up to date")
except:
pass
return web.json_response(data=result)
@request_mapping("/events", method="GET", name="get_all_events", auth_required=False)
def get_all_events(self, request):
"""
---
description: Get list of all registered events
tags:
- System
responses:
"200":
description: successful operation
"""
return web.json_response(data=self.cbpi.bus.dump())
@request_mapping(path="/logs", auth_required=False)
async def http_get_log(self, request):
result = []
file_pattern = re.compile("^(\w+.).log(.?\d*)")
for filename in sorted(os.listdir("./logs"), reverse=True):#
if file_pattern.match(filename):
result.append(filename)
return web.json_response(result)
@request_mapping(path="/logs/{name}", method="DELETE", auth_required=False)
async def http_delete_log(self, request):
log_name = request.match_info['name']
file_patter = re.compile("^(\w+.).log(.?\d*)")
file_sensor_log = re.compile("^sensor_(\d).log(.?\d*)")
if file_patter.match(log_name):
pass
self.logger.warning("Version Check: Can't check for update")
@request_mapping(path="/logs", method="DELETE", auth_required=False)
async def http_delete_logs(self, request):
sensor_log_pattern = re.compile("sensor_([\d]).log$")
sensor_log_pattern2 = re.compile("sensor_([\d]).log.[\d]*$")
app_log_pattern = re.compile("app.log$")
for filename in sorted(os.listdir("./logs"), reverse=True):#
if app_log_pattern.match(filename):
with open(os.path.join("./logs/%s" % filename), 'w'):
pass
continue
for filename in sorted(os.listdir("./logs/sensors"), reverse=True):
if sensor_log_pattern.match(filename):
with open(os.path.join("./logs/sensors/%s" % filename), 'w'):
pass
continue
elif sensor_log_pattern2.match(filename):
os.remove(os.path.join("./logs/sensors/%s" % filename))
return web.Response(status=204)

View file

@ -19,6 +19,7 @@ from cbpi.controller.plugin_controller import PluginController
from cbpi.controller.sensor_controller import SensorController
from cbpi.controller.step_controller import StepController
from cbpi.controller.system_controller import SystemController
from cbpi.controller.log_file_controller import LogController
from cbpi.database.model import DBModel
from cbpi.eventbus import CBPiEventBus
from cbpi.http_endpoints.http_login import Login
@ -32,7 +33,8 @@ from cbpi.http_endpoints.http_sensor import SensorHttpEndpoints
from cbpi.http_endpoints.http_step import StepHttpEndpoints
from cbpi.controller.translation_controller import TranslationController
from cbpi.http_endpoints.http_translation import TranslationHttpEndpoint
from http_endpoints.http_plugin import PluginHttpEndpoints
from cbpi.http_endpoints.http_plugin import PluginHttpEndpoints
from cbpi.http_endpoints.http_system import SystemHttpEndpoints
logger = logging.getLogger(__name__)
@ -85,6 +87,7 @@ class CraftBeerPi():
self.actor = ActorController(self)
self.sensor = SensorController(self)
self.plugin = PluginController(self)
self.log = LogController(self)
self.system = SystemController(self)
self.kettle = KettleController(self)
@ -99,6 +102,7 @@ class CraftBeerPi():
self.http_dashboard = DashBoardHttpEndpoints(self)
self.http_translation = TranslationHttpEndpoint(self)
self.http_plugin = PluginHttpEndpoints(self)
self.http_system = SystemHttpEndpoints(self)
self.notification = NotificationController(self)
self.login = Login(self)

View file

@ -1,2 +1,3 @@
name: DummyActor
version: 4
active: true

View file

@ -1,2 +1,3 @@
name: DummyKettleLogic
version: 4
active: true

View file

@ -1,2 +1,3 @@
name: DummySensor
version: 4
active: true

View file

@ -1,2 +1,3 @@
name: DummyStep
version: 4
active: true

View file

@ -1,2 +1,3 @@
name: MQTT
version: 4.1
version: 4.0
active: false

View file

@ -1,6 +1,6 @@
from aiohttp import web
from api import request_mapping
from utils import json_dumps
from cbpi.api import request_mapping
from cbpi.utils import json_dumps
class PluginHttpEndpoints:

View file

@ -0,0 +1,111 @@
import os
import re

from aiohttp import web

from cbpi.job.aiohttp import get_scheduler_from_app
from cbpi.api import request_mapping
from cbpi.utils import json_dumps
class SystemHttpEndpoints:
    """HTTP endpoints for system-level functions: state, logs, jobs, events, restart/shutdown."""

    def __init__(self, cbpi):
        '''
        :param cbpi: craftbeerpi object
        '''
        self.cbpi = cbpi
        # register all @request_mapping handlers of this class under /system
        self.cbpi.register(self, url_prefix="/system")

    @request_mapping("/", method="GET", auth_required=False)
    async def state(self, request):
        """
        ---
        description: Get complete system state
        tags:
        - System
        responses:
            "200":
                description: successful operation
        """
        return web.json_response(data=dict(
            actor=self.cbpi.actor.get_state(),
            sensor=self.cbpi.sensor.get_state(),
            kettle=self.cbpi.kettle.get_state(),
            step=await self.cbpi.step.get_state(),
            dashboard=self.cbpi.dashboard.get_state(),
            translations=self.cbpi.translation.get_all(),
            config=self.cbpi.config.get_state())
            , dumps=json_dumps)

    @request_mapping(path="/logs", auth_required=False)
    async def http_get_log(self, request):
        """List all log file names in ./logs matching <name>.log[.N]."""
        result = []
        # fix: this method needs `re` and `os`, which were never imported in this file
        file_pattern = re.compile(r"^(\w+.).log(.?\d*)")
        for filename in sorted(os.listdir("./logs"), reverse=True):
            if file_pattern.match(filename):
                result.append(filename)
        return web.json_response(result)

    @request_mapping(path="/logs/{name}", method="DELETE", auth_required=False)
    async def delete_log(self, request):
        """Delete all log files of one sensor."""
        log_name = request.match_info['name']
        # fix: LogController exposes the coroutine clear_log(name); the original
        # called a non-existent delete_log() synchronously and returned no response
        await self.cbpi.log.clear_log(log_name)
        return web.Response(status=204)

    @request_mapping(path="/logs", method="DELETE", auth_required=False)
    async def delete_all_logs(self, request):
        """Delete all sensor log files."""
        # NOTE(review): LogController in this commit only defines clear_log(name);
        # delete_logs() is not visible here — confirm it exists on the controller
        self.cbpi.log.delete_logs()
        return web.Response(status=204)

    @request_mapping("/events", method="GET", name="get_all_events", auth_required=False)
    def get_all_events(self, request):
        """
        ---
        description: Get list of all registered events
        tags:
        - System
        responses:
            "200":
                description: successful operation
        """
        return web.json_response(data=self.cbpi.bus.dump())

    @request_mapping("/jobs", method="GET", name="get_jobs", auth_required=False)
    def get_all_jobs(self, request):
        """
        ---
        description: Get all running Jobs
        tags:
        - System
        responses:
            "200":
                description: successful operation
        """
        scheduler = get_scheduler_from_app(self.cbpi.app)
        result = []
        for j in scheduler:
            try:
                result.append(dict(name=j.name, type=j.type, time=j.start_time))
            except Exception:
                # a job may disappear or lack attributes while we iterate; skip it
                pass
        return web.json_response(data=result)

    @request_mapping("/restart", method="POST", name="RestartServer", auth_required=False)
    def restart(self, request):
        """
        ---
        description: Restart System - Not implemented
        tags:
        - System
        responses:
            "200":
                description: successful operation
        """
        return web.Response(text="NOT IMPLEMENTED")

    @request_mapping("/shutdown", method="POST", name="ShutdownSerer", auth_required=False)
    def shutdown(self, request):
        """
        ---
        description: Shutdown System - Not implemented
        tags:
        - System
        responses:
            "200":
                description: successful operation
        """
        return web.Response(text="NOT IMPLEMENTED")

View file

@ -52,9 +52,6 @@ async def get_package_name(request):
return web.json_response(data=dict(package_name=package_name))
app = web.Application()
app.add_routes([
web.get('/list', get_list),

View file

@ -1,13 +1,11 @@
SampleActor:
cbpi-actor:
api: 4.0
author: CraftBeerPi11
description: A sample Actor for CraftBeerPi
repo_url: https://github.com/craftbeerpi/sample_actor
SampleActor2:
cbpi-ui:
api: 4.0
author: CraftBeerPi
description: A sample Actor2 for CraftBeerPi
repo_url: https://github.com/craftbeerpi/sample_actor
requests:
installation_date: '2019-07-29 23:02:25'
version: '1.0'

Binary file not shown.

View file

@ -1,8 +1,72 @@
import datetime
import glob
import json
import logging
from logging.handlers import RotatingFileHandler
from time import strftime, localtime
import pandas as pd
import matplotlib.pyplot as plt
# Scratch/experiment script: reads rotated sensor CSV logs, merges two sensors
# into one DataFrame and plots them. Indentation reconstructed from a
# whitespace-mangled dump — structure below is a best-effort reading.
sid = 2


def test123(name: str) -> str:
    # Sets up a rotating file logger for sensor `sid`.
    # NOTE(review): annotated `-> str` but returns nothing; generation loop
    # below is disabled via a triple-quoted string, so this is dead scratch code.
    data_logger = logging.getLogger('cbpi.sensor.%s' % sid)
    data_logger.propagate = False
    data_logger.setLevel(logging.DEBUG)
    handler = RotatingFileHandler('./logs/sensor_%s.log' % sid, maxBytes=100_000, backupCount=10)
    data_logger.addHandler(handler)
    import random
    print(name)
    start = datetime.datetime.now()
    # commented-out fake-data generator: one value every 6 s, new random level every 750 samples
    '''
    v = random.randint(50,60)
    for i in range(5760):
    d = start + datetime.timedelta(seconds=6*i)
    formatted_time = d.strftime("%Y-%m-%d %H:%M:%S")
    if i % 750 == 0:
    v = random.randint(50,60)
    data_logger.info("%s,%s" % (formatted_time, v))
    test123("HALLO")
    '''


def dateparse(time_in_secs):
    # parse the "YYYY-mm-dd HH:MM:SS" timestamps written by the sensor loggers
    return datetime.datetime.strptime(time_in_secs, '%Y-%m-%d %H:%M:%S')


# collect current + rotated log files for sensors 1 and 2
all_filenames = glob.glob('./logs/sensor_1.log*')
all_filenames.sort()
all_filenames2 = glob.glob('./logs/sensor_2.log*')
all_filenames2.sort()
# each log line is "<timestamp>,<value>"; index by timestamp
combined_csv = pd.concat([pd.read_csv(f, parse_dates=True, date_parser=dateparse, index_col='DateTime', names=['DateTime', 'Sensor1'], header=None) for f in all_filenames])
combined_csv2 = pd.concat([pd.read_csv(f, parse_dates=True, date_parser=dateparse, index_col='DateTime', names=['DateTime', 'Sensor2'], header=None) for f in all_filenames2])
print(combined_csv)
print(combined_csv2)
# inner join on the shared timestamps, then plot both sensors
m2 = pd.merge(combined_csv, combined_csv2, how='inner', left_index=True, right_index=True)
print(m2)
m2.plot()
m2.plot(y=['Sensor1','Sensor2'])
# downsample sensor 1 to one max value per 5000 s
ts = combined_csv.Sensor1.resample('5000s').max()
#ts.plot(y='Sensor1')
i = 0


def myconverter(o):
    # json.dumps default hook: stringify datetime index entries
    if isinstance(o, datetime.datetime):
        return o.__str__()


data = {"time": ts.index.tolist(), "data": ts.tolist()}
s1 = json.dumps(data, default = myconverter)
plt.show()

View file

@ -25,7 +25,8 @@ setup(name='cbpi',
"cryptography==2.3.1",
"requests==2.22.0",
"voluptuous==0.11.5",
"pyfiglet==0.7.6"
"pyfiglet==0.7.6",
'pandas==0.25.0'
],
dependency_links=[
'https://testpypi.python.org/pypi'

39
tests/test_logger.py Normal file
View file

@ -0,0 +1,39 @@
import asyncio
import glob
from aiohttp.test_utils import AioHTTPTestCase, unittest_run_loop
from cbpi.craftbeerpi import CraftBeerPi, load_config
class UtilsTestCase(AioHTTPTestCase):
    """Integration test for the LogController round trip (write, read, clear)."""

    async def get_application(self):
        # boot a full CraftBeerPi instance and hand its aiohttp app to the runner
        self.cbpi = CraftBeerPi()
        await self.cbpi.init_serivces()
        return self.cbpi.app

    @unittest_run_loop
    async def test_log_data(self):
        sensor_name = "test"
        # start from a clean slate: no log files for this sensor
        await self.cbpi.log.clear_log(sensor_name)
        assert not glob.glob('./logs/sensor_%s.log*' % sensor_name)
        # write five entries, one second apart
        for _ in range(5):
            print(sensor_name)
            self.cbpi.log.log_data(sensor_name, 222)
            await asyncio.sleep(1)
        # read them back, resampled to one sample per second
        data = await self.cbpi.log.get_data(sensor_name, sample_rate='1s')
        assert len(data["time"]) == 5
        await self.cbpi.log.clear_log(sensor_name)

View file

@ -13,3 +13,4 @@ class UtilsTestCase(AioHTTPTestCase):
@unittest_run_loop
async def test_load_file(self):
assert load_config("") is None