2018-06-01 23:01:31 +02:00
|
|
|
# pylint: disable=wrong-import-position
|
2018-05-21 16:40:22 +02:00
|
|
|
from __future__ import print_function
|
|
|
|
|
|
|
|
import codecs
|
2019-03-03 16:50:06 +01:00
|
|
|
import collections
|
2019-04-24 17:08:05 +02:00
|
|
|
import functools
|
2019-03-16 22:24:26 +01:00
|
|
|
import hashlib
|
2018-06-07 20:47:06 +02:00
|
|
|
import hmac
|
2018-05-21 16:40:22 +02:00
|
|
|
import json
|
|
|
|
import logging
|
2019-03-03 16:50:06 +01:00
|
|
|
import multiprocessing
|
2018-05-21 16:40:22 +02:00
|
|
|
import os
|
2019-03-16 22:24:26 +01:00
|
|
|
import shutil
|
2018-05-21 16:40:22 +02:00
|
|
|
import subprocess
|
2018-12-05 21:22:06 +01:00
|
|
|
import threading
|
|
|
|
|
|
|
|
import tornado
|
|
|
|
import tornado.concurrent
|
|
|
|
import tornado.gen
|
2019-02-13 16:54:02 +01:00
|
|
|
import tornado.httpserver
|
2018-12-05 21:22:06 +01:00
|
|
|
import tornado.ioloop
|
|
|
|
import tornado.iostream
|
|
|
|
from tornado.log import access_log
|
2019-02-13 16:54:02 +01:00
|
|
|
import tornado.netutil
|
2018-12-05 21:22:06 +01:00
|
|
|
import tornado.process
|
|
|
|
import tornado.web
|
|
|
|
import tornado.websocket
|
|
|
|
|
2019-02-13 16:54:02 +01:00
|
|
|
from esphome import const
|
|
|
|
from esphome.__main__ import get_serial_ports
|
2019-03-03 16:50:06 +01:00
|
|
|
from esphome.helpers import mkdir_p, get_bool_env, run_system_command
|
2019-03-31 11:04:41 +02:00
|
|
|
from esphome.py_compat import IS_PY2, decode_text
|
2019-02-13 16:54:02 +01:00
|
|
|
from esphome.storage_json import EsphomeStorageJSON, StorageJSON, \
|
2019-03-16 22:24:26 +01:00
|
|
|
esphome_storage_path, ext_storage_path, trash_storage_path
|
2019-02-13 16:54:02 +01:00
|
|
|
from esphome.util import shlex_quote
|
2018-06-01 22:49:04 +02:00
|
|
|
|
2018-12-05 21:22:06 +01:00
|
|
|
# pylint: disable=unused-import, wrong-import-order
|
|
|
|
from typing import Optional # noqa
|
2018-05-21 16:40:22 +02:00
|
|
|
|
2019-02-13 16:54:02 +01:00
|
|
|
from esphome.zeroconf import DashboardStatus, Zeroconf
|
2019-02-10 16:57:34 +01:00
|
|
|
|
2018-06-02 22:22:20 +02:00
|
|
|
_LOGGER = logging.getLogger(__name__)
|
2019-04-24 17:08:05 +02:00
|
|
|
|
|
|
|
|
|
|
|
class DashboardSettings(object):
    """Runtime configuration for the dashboard web server.

    Populated once from the CLI arguments via parse_args() and then read
    by the request handlers (authentication, config directory paths).
    """

    def __init__(self):
        self.config_dir = ''         # Directory containing the YAML configs
        self.password_digest = ''    # HMAC digest of the dashboard password
        self.using_password = False  # True when a password was given on the CLI
        self.on_hassio = False       # True when running as a Hass.io add-on
        self.cookie_secret = None    # Secret for tornado's secure cookies

    def parse_args(self, args):
        """Initialize the settings from parsed command line arguments."""
        self.on_hassio = args.hassio
        if not self.on_hassio:
            self.using_password = bool(args.password)
        if self.using_password:
            # Pass digestmod explicitly: hmac.new() defaulted to MD5 on
            # Python 2, but Python 3.8+ raises TypeError without it.
            if IS_PY2:
                self.password_digest = hmac.new(args.password,
                                                digestmod=hashlib.md5).digest()
            else:
                self.password_digest = hmac.new(args.password.encode(),
                                                digestmod=hashlib.md5).digest()
        self.config_dir = args.configuration

    @property
    def relative_url(self):
        """URL prefix under which the dashboard is served (default '/')."""
        return os.getenv('ESPHOME_DASHBOARD_RELATIVE_URL', '/')

    @property
    def status_use_ping(self):
        """True when node status should be checked with ICMP ping, not mDNS."""
        return get_bool_env('ESPHOME_DASHBOARD_USE_PING')

    @property
    def using_hassio_auth(self):
        """True when credentials must be validated against the Hass.io API."""
        if not self.on_hassio:
            return False
        return not get_bool_env('DISABLE_HA_AUTHENTICATION')

    @property
    def using_auth(self):
        """True when any form of authentication is enabled."""
        return self.using_password or self.using_hassio_auth

    def check_password(self, password):
        """Return True if *password* matches; always True when auth is off."""
        if not self.using_auth:
            return True

        # Same explicit digestmod as in parse_args (see comment there).
        if IS_PY2:
            password = hmac.new(password, digestmod=hashlib.md5).digest()
        else:
            password = hmac.new(password.encode(), digestmod=hashlib.md5).digest()
        # compare_digest avoids leaking match information through timing.
        return hmac.compare_digest(self.password_digest, password)

    def rel_path(self, *args):
        """Join *args* onto the configuration directory."""
        return os.path.join(self.config_dir, *args)

    def list_yaml_files(self):
        """Return sorted YAML file names in the config dir.

        Hidden files and secrets.yaml are excluded.
        """
        files = []
        for file in os.listdir(self.config_dir):
            if not file.endswith('.yaml'):
                continue
            if file.startswith('.'):
                continue
            if file == 'secrets.yaml':
                continue
            files.append(file)
        files.sort()
        return files
|
|
|
|
|
|
|
|
|
|
|
|
# Global settings instance; populated in start_web_server() via parse_args().
settings = DashboardSettings()

# Secure-cookie value marking an authenticated session. tornado's
# get_secure_cookie returns bytes on Python 3 and str on Python 2, so the
# comparison constant differs per interpreter.
if IS_PY2:
    cookie_authenticated_yes = 'yes'
else:
    cookie_authenticated_yes = b'yes'
|
|
|
|
|
2018-06-07 20:47:06 +02:00
|
|
|
|
2019-03-03 16:50:06 +01:00
|
|
|
def template_args():
    """Return the keyword arguments shared by all rendered templates."""
    version = const.__version__
    # Beta builds link to the beta documentation site.
    docs_link = 'https://beta.esphome.io/' if 'b' in version else 'https://esphome.io/'
    return {
        'version': version,
        'docs_link': docs_link,
        'get_static_file_url': get_static_file_url,
        'relative_url': settings.relative_url,
        'streamer_mode': get_bool_env('ESPHOME_STREAMER_MODE'),
    }
|
|
|
|
|
|
|
|
|
2019-03-16 22:24:26 +01:00
|
|
|
def authenticated(func):
    """Decorator that redirects unauthenticated requests to the login page."""
    @functools.wraps(func)
    def decorator(self, *args, **kwargs):
        if is_authenticated(self):
            return func(self, *args, **kwargs)
        self.redirect('./login')
        return None
    return decorator
|
|
|
|
|
|
|
|
|
2019-04-22 21:56:30 +02:00
|
|
|
def is_authenticated(request_handler):
    """Return True if this request may access the dashboard."""
    if settings.on_hassio:
        # Handle ingress - disable auth on ingress port
        # X-Hassio-Ingress is automatically stripped on the non-ingress server in nginx
        ingress = request_handler.request.headers.get('X-Hassio-Ingress', 'NO')
        if str(ingress) == 'YES':
            return True
    if not settings.using_auth:
        return True
    return request_handler.get_secure_cookie('authenticated') == cookie_authenticated_yes
|
|
|
|
|
|
|
|
|
2019-03-16 22:24:26 +01:00
|
|
|
def bind_config(func):
    """Decorator resolving the ``configuration`` request argument.

    Reads the ``configuration`` GET/POST argument, rejects values that could
    escape the config directory (responds HTTP 500) and passes the validated
    name to the wrapped handler as the ``configuration`` keyword argument.
    """
    # functools.wraps keeps the handler's name/docstring intact, consistent
    # with the authenticated() decorator in this file.
    @functools.wraps(func)
    def decorator(self, *args, **kwargs):
        configuration = self.get_argument('configuration')
        if not is_allowed(configuration):
            self.set_status(500)
            return None
        kwargs = kwargs.copy()
        kwargs['configuration'] = configuration
        return func(self, *args, **kwargs)
    return decorator
|
|
|
|
|
|
|
|
|
2018-06-07 20:47:06 +02:00
|
|
|
# pylint: disable=abstract-method
class BaseHandler(tornado.web.RequestHandler):
    """Common base class for all dashboard HTTP request handlers."""
    pass
|
|
|
|
|
|
|
|
|
|
|
|
def websocket_class(cls):
    """Class decorator collecting methods tagged by @websocket_method.

    Scans the class body for functions carrying a ``_message_handler``
    attribute and registers them in ``cls._message_handlers``, keyed by
    the message type name.
    """
    # pylint: disable=protected-access
    if not hasattr(cls, '_message_handlers'):
        cls._message_handlers = {}

    # .items() works on both Python 2 and 3; dict.iteritems() does not
    # exist on Python 3 and crashed there.
    for _, method in cls.__dict__.items():
        if hasattr(method, "_message_handler"):
            cls._message_handlers[method._message_handler] = method

    return cls
|
|
|
|
|
|
|
|
|
|
|
|
def websocket_method(name):
    """Decorator factory tagging a method as the handler for message type *name*."""
    def wrap(fn):
        # pylint: disable=protected-access
        setattr(fn, '_message_handler', name)
        return fn
    return wrap
|
2018-05-21 16:40:22 +02:00
|
|
|
|
2018-06-01 22:49:04 +02:00
|
|
|
|
2018-05-21 16:40:22 +02:00
|
|
|
# pylint: disable=abstract-method, arguments-differ
@websocket_class
class EsphomeCommandWebSocket(tornado.websocket.WebSocketHandler):
    """Base websocket handler that streams an esphome subprocess.

    Subclasses implement build_command(). A 'spawn' message starts the
    process, 'stdin' messages forward input to it, and stdout lines plus
    the final exit code are sent back to the client as JSON events.
    """

    def __init__(self, application, request, **kwargs):
        super(EsphomeCommandWebSocket, self).__init__(application, request, **kwargs)
        # Subprocess handle; stays None until a 'spawn' message arrives.
        self._proc = None
        # Set once the websocket is closed, so _proc_on_exit does not try
        # to write to a dead connection.
        self._is_closed = False

    @authenticated
    def on_message(self, message):
        # Messages are always JSON, 500 when not
        json_message = json.loads(message)
        type_ = json_message['type']
        # pylint: disable=no-member
        handlers = type(self)._message_handlers
        if type_ not in handlers:
            _LOGGER.warning("Requested unknown message type %s", type_)
            return

        handlers[type_](self, json_message)

    @websocket_method('spawn')
    def handle_spawn(self, json_message):
        if self._proc is not None:
            # spawn can only be called once
            return
        command = self.build_command(json_message)
        _LOGGER.info(u"Running command '%s'", ' '.join(shlex_quote(x) for x in command))
        # stderr is merged into stdout so the client sees a single stream.
        self._proc = tornado.process.Subprocess(command,
                                                stdout=tornado.process.Subprocess.STREAM,
                                                stderr=subprocess.STDOUT,
                                                stdin=tornado.process.Subprocess.STREAM)
        self._proc.set_exit_callback(self._proc_on_exit)
        tornado.ioloop.IOLoop.current().spawn_callback(self._redirect_stdout)

    @property
    def is_process_active(self):
        # True while the spawned process exists and has not exited yet.
        return self._proc is not None and self._proc.returncode is None

    @websocket_method('stdin')
    def handle_stdin(self, json_message):
        if not self.is_process_active:
            return
        data = json_message['data']
        data = codecs.encode(data, 'utf8', 'replace')
        _LOGGER.debug("< stdin: %s", data)
        self._proc.stdin.write(data)

    @tornado.gen.coroutine
    def _redirect_stdout(self):
        # read_until_regex needs a str pattern on Python 2 and bytes on Python 3.
        if IS_PY2:
            reg = '[\n\r]'
        else:
            reg = b'[\n\r]'

        # Forward each stdout line to the client until the stream closes.
        while True:
            try:
                data = yield self._proc.stdout.read_until_regex(reg)
            except tornado.iostream.StreamClosedError:
                break
            data = codecs.decode(data, 'utf8', 'replace')

            _LOGGER.debug("> stdout: %s", data)
            self.write_message({'event': 'line', 'data': data})

    def _proc_on_exit(self, returncode):
        if not self._is_closed:
            # Check if the proc was not forcibly closed
            _LOGGER.info("Process exited with return code %s", returncode)
            self.write_message({'event': 'exit', 'code': returncode})

    def on_close(self):
        # Check if proc exists (if 'start' has been run)
        if self.is_process_active:
            _LOGGER.debug("Terminating process")
            self._proc.proc.terminate()
        # Shutdown proc on WS close
        self._is_closed = True

    def build_command(self, json_message):
        # Subclasses return the esphome CLI invocation (list of argv strings).
        raise NotImplementedError
|
|
|
|
|
|
|
|
|
2019-02-13 16:54:02 +01:00
|
|
|
class EsphomeLogsHandler(EsphomeCommandWebSocket):
    """Websocket streaming `esphome logs` for one configuration."""

    def build_command(self, json_message):
        config = settings.rel_path(json_message['configuration'])
        serial_port = json_message["port"]
        return ["esphome", "--dashboard", config, "logs",
                '--serial-port', serial_port]
|
2018-05-21 16:40:22 +02:00
|
|
|
|
|
|
|
|
2019-04-22 21:56:30 +02:00
|
|
|
class EsphomeUploadHandler(EsphomeCommandWebSocket):
    """Websocket running `esphome run` to upload a configuration."""

    def build_command(self, json_message):
        config = settings.rel_path(json_message['configuration'])
        upload_port = json_message["port"]
        return ["esphome", "--dashboard", config, "run",
                '--upload-port', upload_port]
|
2018-05-21 16:40:22 +02:00
|
|
|
|
|
|
|
|
2019-02-13 16:54:02 +01:00
|
|
|
class EsphomeCompileHandler(EsphomeCommandWebSocket):
    """Websocket running `esphome compile` for one configuration."""

    def build_command(self, json_message):
        config = settings.rel_path(json_message['configuration'])
        return ["esphome", "--dashboard", config, "compile"]
|
2018-05-21 16:40:22 +02:00
|
|
|
|
|
|
|
|
2019-02-13 16:54:02 +01:00
|
|
|
class EsphomeValidateHandler(EsphomeCommandWebSocket):
    """Websocket running `esphome config` to validate a configuration."""

    def build_command(self, json_message):
        config = settings.rel_path(json_message['configuration'])
        return ["esphome", "--dashboard", config, "config"]
|
2018-06-03 12:16:43 +02:00
|
|
|
|
|
|
|
|
2019-02-13 16:54:02 +01:00
|
|
|
class EsphomeCleanMqttHandler(EsphomeCommandWebSocket):
    """Websocket running `esphome clean-mqtt` for one configuration."""

    def build_command(self, json_message):
        config = settings.rel_path(json_message['configuration'])
        return ["esphome", "--dashboard", config, "clean-mqtt"]
|
2018-10-04 19:01:02 +02:00
|
|
|
|
|
|
|
|
2019-02-13 16:54:02 +01:00
|
|
|
class EsphomeCleanHandler(EsphomeCommandWebSocket):
    """Websocket running `esphome clean` for one configuration."""

    def build_command(self, json_message):
        config = settings.rel_path(json_message['configuration'])
        return ["esphome", "--dashboard", config, "clean"]
|
2018-10-14 18:52:21 +02:00
|
|
|
|
|
|
|
|
2019-04-22 21:56:30 +02:00
|
|
|
class EsphomeVscodeHandler(EsphomeCommandWebSocket):
    """Websocket feeding the VSCode extension's validation stream."""

    def build_command(self, json_message):
        # 'dummy' fills the configuration slot; vscode mode reads its
        # requests from stdin instead of a fixed config file.
        return ["esphome", "--dashboard", "-q", 'dummy', "vscode"]
|
2018-11-03 14:08:31 +01:00
|
|
|
|
|
|
|
|
2019-05-11 11:41:09 +02:00
|
|
|
class EsphomeAceEditorHandler(EsphomeCommandWebSocket):
    """Websocket backing the Ace editor's live validation."""

    def build_command(self, json_message):
        return ["esphome", "--dashboard", "-q", settings.config_dir, "vscode", "--ace"]
|
|
|
|
|
|
|
|
|
2018-06-07 20:47:06 +02:00
|
|
|
class SerialPortRequestHandler(BaseHandler):
    """Returns the available serial ports as a JSON list."""

    @authenticated
    def get(self):
        data = []
        for port, desc in get_serial_ports():
            if port == '/dev/ttyAMA0':
                desc = 'UART pins on GPIO header'
            split_desc = desc.split(' - ')
            if len(split_desc) == 2 and split_desc[0] == split_desc[1]:
                # Some serial ports repeat their values
                desc = split_desc[0]
            data.append({'port': port, 'desc': desc})
        # OTA is always offered as a pseudo-port.
        data.append({'port': 'OTA', 'desc': 'Over-The-Air'})
        data.sort(key=lambda x: x['port'], reverse=True)
        self.write(json.dumps(data))
|
2018-05-21 16:40:22 +02:00
|
|
|
|
|
|
|
|
2018-06-07 20:47:06 +02:00
|
|
|
class WizardRequestHandler(BaseHandler):
    """Creates a new configuration from the setup wizard form."""

    @authenticated
    def post(self):
        # wizard is imported on demand rather than at module level.
        from esphome import wizard

        # Each form field may arrive as a list of values; join them into a
        # single unicode string per argument name.
        kwargs = {k: u''.join(decode_text(x) for x in v) for k, v in self.request.arguments.items()}
        destination = settings.rel_path(kwargs['name'] + u'.yaml')
        wizard.wizard_write(path=destination, **kwargs)
        # begin=True makes the index page start the first-use flow.
        self.redirect('./?begin=True')
|
2018-05-21 16:40:22 +02:00
|
|
|
|
|
|
|
|
2018-06-07 20:47:06 +02:00
|
|
|
class DownloadBinaryRequestHandler(BaseHandler):
    """Serves the compiled firmware binary of a configuration as a download."""

    @authenticated
    @bind_config
    def get(self, configuration=None):
        # pylint: disable=no-value-for-parameter
        storage_path = ext_storage_path(settings.config_dir, configuration)
        storage_json = StorageJSON.load(storage_path)
        if storage_json is None:
            # No storage record — the configuration was never compiled.
            self.send_error()
            return

        path = storage_json.firmware_bin_path
        self.set_header('Content-Type', 'application/octet-stream')
        filename = '{}.bin'.format(storage_json.name)
        self.set_header("Content-Disposition", 'attachment; filename="{}"'.format(filename))
        # Stream in 16 KiB chunks instead of loading the binary into memory.
        with open(path, 'rb') as f:
            while True:
                data = f.read(16384)
                if not data:
                    break
                self.write(data)
        self.finish()
|
|
|
|
|
|
|
|
|
2018-12-05 21:22:06 +01:00
|
|
|
def _list_dashboard_entries():
    """Return a DashboardEntry for every YAML file in the config directory."""
    return [DashboardEntry(filename) for filename in settings.list_yaml_files()]
|
|
|
|
|
|
|
|
|
|
|
|
class DashboardEntry(object):
    """One dashboard card: a YAML configuration plus its cached storage info."""

    def __init__(self, filename):
        self.filename = filename
        self._storage = None
        self._loaded_storage = False

    @property
    def full_path(self):  # type: () -> str
        """Absolute path of the YAML file."""
        return os.path.join(settings.config_dir, self.filename)

    @property
    def storage(self):  # type: () -> Optional[StorageJSON]
        """Storage JSON for this config, loaded lazily (None when missing)."""
        if self._loaded_storage:
            return self._storage
        self._storage = StorageJSON.load(ext_storage_path(settings.config_dir, self.filename))
        self._loaded_storage = True
        return self._storage

    @property
    def address(self):
        return None if self.storage is None else self.storage.address

    @property
    def name(self):
        if self.storage is not None:
            return self.storage.name
        # Fall back to the file name without the .yaml suffix.
        return self.filename[:-len('.yaml')]

    @property
    def esp_platform(self):
        return None if self.storage is None else self.storage.esp_platform

    @property
    def board(self):
        return None if self.storage is None else self.storage.board

    @property
    def update_available(self):
        # Without storage info we cannot tell, so assume an update exists.
        if self.storage is None:
            return True
        return self.update_old != self.update_new

    @property
    def update_old(self):
        if self.storage is None:
            return ''
        return self.storage.esphome_version or ''

    @property
    def update_new(self):
        return const.__version__
|
|
|
|
|
|
|
|
|
2018-06-07 20:47:06 +02:00
|
|
|
class MainRequestHandler(BaseHandler):
    """Renders the dashboard index page."""

    @authenticated
    def get(self):
        begin = bool(self.get_argument('begin', False))
        self.render("templates/index.html",
                    entries=_list_dashboard_entries(),
                    begin=begin,
                    **template_args())
|
2018-12-05 21:22:06 +01:00
|
|
|
|
|
|
|
|
2019-03-03 16:50:06 +01:00
|
|
|
def _ping_func(filename, address):
    """Ping *address* once; return (filename, reachable) for the pool callback."""
    # Windows ping uses -n for the packet count, everything else -c.
    count_flag = '-n' if os.name == 'nt' else '-c'
    rc, _, _ = run_system_command('ping', count_flag, '1', address)
    return filename, rc == 0
|
|
|
|
|
|
|
|
|
|
|
|
class MDNSStatusThread(threading.Thread):
    """Background thread resolving node online status via mDNS (zeroconf)."""

    def run(self):
        zc = Zeroconf()

        def on_update(dat):
            # Merge the latest resolution results into the shared dict that
            # PingRequestHandler serves to the frontend.
            for key, b in dat.items():
                PING_RESULT[key] = b

        stat = DashboardStatus(zc, on_update)
        stat.start()
        while not STOP_EVENT.is_set():
            entries = _list_dashboard_entries()
            # Query each node under <name>.local.
            stat.request_query({entry.filename: entry.name + '.local.' for entry in entries})

            # Block until the frontend asks for another status poll.
            PING_REQUEST.wait()
            PING_REQUEST.clear()
        stat.stop()
        stat.join()
        zc.close()
|
2018-12-05 21:22:06 +01:00
|
|
|
|
|
|
|
|
2019-03-03 16:50:06 +01:00
|
|
|
class PingStatusThread(threading.Thread):
    """Background thread checking node online status with ICMP ping.

    Used instead of MDNSStatusThread when status_use_ping is enabled.
    """

    def run(self):
        # Worker pool so unreachable hosts are pinged in parallel.
        pool = multiprocessing.Pool(processes=8)
        while not STOP_EVENT.is_set():
            # Only do pings if somebody has the dashboard open

            def callback(ret):
                # ret is the (filename, reachable) tuple from _ping_func.
                PING_RESULT[ret[0]] = ret[1]

            entries = _list_dashboard_entries()
            queue = collections.deque()
            for entry in entries:
                if entry.address is None:
                    # No known address; report status as unknown.
                    PING_RESULT[entry.filename] = None
                    continue

                result = pool.apply_async(_ping_func, (entry.filename, entry.address),
                                          callback=callback)
                queue.append(result)

            # Drain outstanding pings, checking for shutdown in between.
            while queue:
                item = queue[0]
                if item.ready():
                    queue.popleft()
                    continue

                try:
                    item.get(0.1)
                except OSError:
                    # ping not installed
                    pass
                except multiprocessing.TimeoutError:
                    # Still running; re-check readiness on the next pass.
                    pass

                if STOP_EVENT.is_set():
                    pool.terminate()
                    return

            # Wait for the next poll request from the frontend.
            PING_REQUEST.wait()
            PING_REQUEST.clear()
|
|
|
|
|
|
|
|
|
2018-12-05 21:22:06 +01:00
|
|
|
class PingRequestHandler(BaseHandler):
    """Returns the cached status results and triggers a new status poll."""

    @authenticated
    def get(self):
        # Wake the status thread, then serve whatever results are cached;
        # fresh results show up on a later request.
        PING_REQUEST.set()
        self.write(json.dumps(PING_RESULT))
|
|
|
|
|
|
|
|
|
|
|
|
def is_allowed(configuration):
    """Return True if *configuration* is a bare file name without path parts.

    The value comes straight from a request argument; rejecting anything
    containing a path separator prevents escaping the config directory.
    """
    if os.path.sep in configuration:
        return False
    # On Windows '/' (os.path.altsep) is also a valid separator and the
    # old sep-only check let it through.
    if os.path.altsep is not None and os.path.altsep in configuration:
        return False
    return True
|
|
|
|
|
|
|
|
|
|
|
|
class EditRequestHandler(BaseHandler):
    """Reads (GET) and writes (POST) the raw contents of a configuration file."""

    @authenticated
    @bind_config
    def get(self, configuration=None):
        # pylint: disable=no-value-for-parameter
        with open(settings.rel_path(configuration), 'r') as f:
            self.write(f.read())

    @authenticated
    @bind_config
    def post(self, configuration=None):
        # pylint: disable=no-value-for-parameter
        with open(settings.rel_path(configuration), 'wb') as f:
            f.write(self.request.body)
        self.set_status(200)
|
2019-03-16 22:24:26 +01:00
|
|
|
|
|
|
|
|
|
|
|
class DeleteRequestHandler(BaseHandler):
    """Moves a configuration to the trash directory and removes its build folder."""

    @authenticated
    @bind_config
    def post(self, configuration=None):
        config_file = settings.rel_path(configuration)
        storage_path = ext_storage_path(settings.config_dir, configuration)
        storage_json = StorageJSON.load(storage_path)
        if storage_json is None:
            self.set_status(500)
            return

        name = storage_json.name
        trash_path = trash_storage_path(settings.config_dir)
        mkdir_p(trash_path)
        shutil.move(config_file, os.path.join(trash_path, configuration))

        # Delete build folder (if exists)
        build_folder = os.path.join(settings.config_dir, name)
        if os.path.isdir(build_folder):
            # Bug fix: rmtree's second positional parameter is ignore_errors,
            # not a destination path; the old call passed a path there, which
            # merely evaluated truthy. Keep the ignore-errors behavior but
            # state it explicitly, and guard on the folder actually existing
            # instead of the always-true 'is not None' check.
            shutil.rmtree(build_folder, ignore_errors=True)
|
|
|
|
|
|
|
|
|
|
|
|
class UndoDeleteRequestHandler(BaseHandler):
    """Restores a configuration previously moved into the trash directory."""

    @authenticated
    @bind_config
    def post(self, configuration=None):
        trash_path = trash_storage_path(settings.config_dir)
        destination = settings.rel_path(configuration)
        shutil.move(os.path.join(trash_path, configuration), destination)
|
2018-12-05 21:22:06 +01:00
|
|
|
|
|
|
|
|
|
|
|
# Shared state between the status threads and PingRequestHandler:
# filename -> True (online) / False (offline) / None (unknown).
PING_RESULT = {}  # type: dict
# Set to ask the status thread to stop (checked in its main loop).
STOP_EVENT = threading.Event()
# Set by PingRequestHandler to request a fresh status poll.
PING_REQUEST = threading.Event()
|
2018-05-21 16:40:22 +02:00
|
|
|
|
|
|
|
|
2018-06-07 20:47:06 +02:00
|
|
|
class LoginHandler(BaseHandler):
    """Serves the login form and validates credentials.

    Two modes: plain password auth (--password) and Hass.io auth, where
    credentials are validated against the supervisor's /auth endpoint.
    """

    def get(self):
        if settings.using_hassio_auth:
            self.render_hassio_login()
            return
        # Minimal inline form for the plain-password mode.
        self.write('<html><body><form action="./login" method="post">'
                   'Password: <input type="password" name="password">'
                   '<input type="submit" value="Sign in">'
                   '</form></body></html>')

    def render_hassio_login(self, error=None):
        self.render("templates/login.html", error=error, **template_args())

    def post_hassio_login(self):
        import requests

        # HASSIO_TOKEN authorizes this add-on against the supervisor API.
        headers = {
            'X-HASSIO-KEY': os.getenv('HASSIO_TOKEN'),
        }
        data = {
            'username': str(self.get_argument('username', '')),
            'password': str(self.get_argument('password', ''))
        }
        try:
            req = requests.post('http://hassio/auth', headers=headers, data=data)
            if req.status_code == 200:
                self.set_secure_cookie("authenticated", cookie_authenticated_yes)
                self.redirect('/')
                return
        except Exception as err:  # pylint: disable=broad-except
            _LOGGER.warning("Error during Hass.io auth request: %s", err)
            self.set_status(500)
            self.render_hassio_login(error="Internal server error")
            return
        # Supervisor rejected the credentials (non-200 response).
        self.set_status(401)
        self.render_hassio_login(error="Invalid username or password")

    def post(self):
        if settings.using_hassio_auth:
            self.post_hassio_login()
            return

        password = str(self.get_argument("password", ''))
        if settings.check_password(password):
            self.set_secure_cookie("authenticated", cookie_authenticated_yes)
        # Redirect regardless; without the cookie the next page bounces
        # back to the login form.
        self.redirect("/")
|
|
|
|
|
|
|
|
|
2018-12-24 14:29:11 +01:00
|
|
|
_STATIC_FILE_HASHES = {}
|
|
|
|
|
|
|
|
|
|
|
|
def get_static_file_url(name):
|
|
|
|
static_path = os.path.join(os.path.dirname(__file__), 'static')
|
|
|
|
if name in _STATIC_FILE_HASHES:
|
|
|
|
hash_ = _STATIC_FILE_HASHES[name]
|
|
|
|
else:
|
|
|
|
path = os.path.join(static_path, name)
|
|
|
|
with open(path, 'rb') as f_handle:
|
2019-03-16 22:24:26 +01:00
|
|
|
hash_ = hashlib.md5(f_handle.read()).hexdigest()[:8]
|
2018-12-24 14:29:11 +01:00
|
|
|
_STATIC_FILE_HASHES[name] = hash_
|
2019-04-24 17:08:05 +02:00
|
|
|
return u'./static/{}?hash={}'.format(name, hash_)
|
2018-12-24 14:29:11 +01:00
|
|
|
|
|
|
|
|
2018-06-03 11:18:53 +02:00
|
|
|
def make_app(debug=False):
    """Build the tornado Application with all dashboard routes.

    :param debug: Enables tornado debug mode and disables static caching.
    """
    def log_function(handler):
        # Custom access log: suppress the noisy periodic polling endpoints
        # (serial-ports, ping) unless debugging.
        if handler.get_status() < 400:
            log_method = access_log.info

            if isinstance(handler, SerialPortRequestHandler) and not debug:
                return
            if isinstance(handler, PingRequestHandler) and not debug:
                return
        elif handler.get_status() < 500:
            log_method = access_log.warning
        else:
            log_method = access_log.error

        request_time = 1000.0 * handler.request.request_time()
        # pylint: disable=protected-access
        log_method("%d %s %.2fms", handler.get_status(),
                   handler._request_summary(), request_time)

    class StaticFileHandler(tornado.web.StaticFileHandler):
        def set_extra_headers(self, path):
            # In debug mode always serve fresh static files.
            if debug:
                self.set_header('Cache-Control', 'no-store, no-cache, must-revalidate, max-age=0')

    static_path = os.path.join(os.path.dirname(__file__), 'static')
    app_settings = {
        'debug': debug,
        'cookie_secret': settings.cookie_secret,
        'log_function': log_function,
        'websocket_ping_interval': 30.0,
    }
    # All routes carry the configurable relative URL prefix so the dashboard
    # can be served behind a path-rewriting proxy.
    rel = settings.relative_url
    app = tornado.web.Application([
        (rel + "", MainRequestHandler),
        (rel + "login", LoginHandler),
        (rel + "logs", EsphomeLogsHandler),
        (rel + "upload", EsphomeUploadHandler),
        (rel + "compile", EsphomeCompileHandler),
        (rel + "validate", EsphomeValidateHandler),
        (rel + "clean-mqtt", EsphomeCleanMqttHandler),
        (rel + "clean", EsphomeCleanHandler),
        (rel + "vscode", EsphomeVscodeHandler),
        (rel + "ace", EsphomeAceEditorHandler),
        (rel + "edit", EditRequestHandler),
        (rel + "download.bin", DownloadBinaryRequestHandler),
        (rel + "serial-ports", SerialPortRequestHandler),
        (rel + "ping", PingRequestHandler),
        (rel + "delete", DeleteRequestHandler),
        (rel + "undo-delete", UndoDeleteRequestHandler),
        (rel + "wizard.html", WizardRequestHandler),
        (rel + r"static/(.*)", StaticFileHandler, {'path': static_path}),
    ], **app_settings)

    if debug:
        # Force static file hashes to be recomputed after a code reload.
        _STATIC_FILE_HASHES.clear()

    return app
|
|
|
|
|
|
|
|
|
2018-05-21 16:40:22 +02:00
|
|
|
def start_web_server(args):
    """Entry point: configure, start and run the dashboard web server.

    Blocks in the tornado IO loop until interrupted with Ctrl+C.
    """
    settings.parse_args(args)
    mkdir_p(settings.rel_path(".esphome"))

    if settings.using_auth:
        # Load (or create) the persistent cookie secret so sessions
        # survive dashboard restarts.
        path = esphome_storage_path(settings.config_dir)
        storage = EsphomeStorageJSON.load(path)
        if storage is None:
            storage = EsphomeStorageJSON.get_default()
            storage.save(path)
        settings.cookie_secret = storage.cookie_secret

    app = make_app(args.verbose)
    if args.socket is not None:
        _LOGGER.info("Starting dashboard web server on unix socket %s and configuration dir %s...",
                     args.socket, settings.config_dir)
        server = tornado.httpserver.HTTPServer(app)
        socket = tornado.netutil.bind_unix_socket(args.socket, mode=0o666)
        server.add_socket(socket)
    else:
        _LOGGER.info("Starting dashboard web server on port %s and configuration dir %s...",
                     args.port, settings.config_dir)
        app.listen(args.port)

        if args.open_ui:
            import webbrowser

            webbrowser.open('localhost:{}'.format(args.port))

    # Start the background status thread (ping- or mDNS-based).
    if settings.status_use_ping:
        status_thread = PingStatusThread()
    else:
        status_thread = MDNSStatusThread()
    status_thread.start()
    try:
        tornado.ioloop.IOLoop.current().start()
    except KeyboardInterrupt:
        _LOGGER.info("Shutting down...")
        # Wake and join the status thread, then clean up the unix socket.
        STOP_EVENT.set()
        PING_REQUEST.set()
        status_thread.join()
        if args.socket is not None:
            os.remove(args.socket)
|