Merge pull request #46 from craftbeerpi/master

Align with upstream
Alexander Vollkopf 2022-03-21 11:59:46 +01:00 committed by GitHub
commit bdc534e12c
4162 changed files with 1256 additions and 1598568 deletions


@ -61,7 +61,7 @@ jobs:
if [[ $GITHUB_REF_NAME == master ]] || [[ $GITHUB_REF_NAME == main ]]; then
# when building master/main use :latest tag and the version number
# from the cbpi/__init__.py file
VERSION=$(grep -o -E "(([0-9]{1,2}[.]?){3}[0-9]+)" cbpi/__init__.py)
VERSION=$(grep -o -E "(([0-9]{1,2}[.]?){2,3}[0-9]+)" cbpi/__init__.py)
LATEST_IMAGE=${{ env.image-name }}:latest
BUILD_CACHE_IMAGE_NAME=${LATEST_IMAGE}
TAGS="${LATEST_IMAGE},${{ env.image-name }}:v${VERSION}"

.gitignore (vendored): 2 changes

@ -10,7 +10,9 @@ venv
cbpi/extension/ui
node_modules
.DS_STORE
.DS_Store
.vscode
.venv*
.DS_Store
.vscode/
config/*


@ -1,13 +1,13 @@
# CraftBeerPi 4
[![Build](https://github.com/avollkopf/craftbeerpi4/actions/workflows/build.yml/badge.svg)](https://github.com/avollkopf/craftbeerpi4/actions/workflows/build.yml)
[![GitHub license](https://img.shields.io/github/license/avollkopf/craftbeerpi4)](https://github.com/avollkopf/craftbeerpi4/blob/master/LICENSE)
![GitHub issues](https://img.shields.io/github/issues-raw/Manuel83/craftbeerpi4)
[![Build](https://github.com/craftbeerpi/craftbeerpi4/actions/workflows/build.yml/badge.svg)](https://github.com/craftbeerpi/craftbeerpi4/actions/workflows/build.yml)
[![GitHub license](https://img.shields.io/github/license/craftbeerpi/craftbeerpi4)](https://github.com/craftbeerpi/craftbeerpi4/blob/master/LICENSE)
![GitHub issues](https://img.shields.io/github/issues-raw/craftbeerpi/craftbeerpi4)
![PyPI](https://img.shields.io/pypi/v/cbpi)
![Happy Brewing](https://img.shields.io/badge/CraftBeerPi%204-Happy%20Brewing-%23FBB117)
<p align="center">
<img src="https://github.com/Manuel83/craftbeerpi4-ui/blob/main/cbpi4ui/public/logo192.png?raw=true" alt="CraftBeerPi Logo"/>
<img src="https://github.com/craftbeerpi/craftbeerpi4-ui/blob/main/cbpi4ui/public/logo192.png?raw=true" alt="CraftBeerPi Logo"/>
</p>
CraftBeerPi 4 is an open source software solution to control the brewing and
@ -24,4 +24,4 @@ You can find a list of available plugins [here](https://openbrewing.gitbook.io/c
## 🧑‍🤝‍🧑 Contributors
Thanks to all the people who have contributed
[![contributors](https://contributors-img.web.app/image?repo=avollkopf/craftbeerpi4)](https://github.com/avollkopf/craftbeerpi4/graphs/contributors)
[![contributors](https://contributors-img.web.app/image?repo=craftbeerpi/craftbeerpi4)](https://github.com/craftbeerpi/craftbeerpi4/graphs/contributors)

BIN cbpi/.DS_Store (vendored): binary file not shown.


@ -1 +1,2 @@
__version__ = "4.0.1.17"
__version__ = "4.0.3.a2"


@ -129,8 +129,9 @@ class Fermenter:
heater: Actor = None
cooler: Actor = None
brewname: str = None
description : str = None
props: Props = Props()
target_temp: int = 0
target_temp: float = 0
type: str = None
steps: List[Step]= field(default_factory=list)
instance: str = None
@ -150,7 +151,7 @@ class Fermenter:
state = False
steps = list(map(lambda item: item.to_dict(), self.steps))
return dict(id=self.id, name=self.name, state=state, sensor=self.sensor, heater=self.heater, cooler=self.cooler, brewname=self.brewname, props=self.props.to_dict() if self.props is not None else None, target_temp=self.target_temp, type=self.type, steps=steps)
return dict(id=self.id, name=self.name, state=state, sensor=self.sensor, heater=self.heater, cooler=self.cooler, brewname=self.brewname, description=self.description, props=self.props.to_dict() if self.props is not None else None, target_temp=self.target_temp, type=self.type, steps=steps)
@dataclass
@ -161,13 +162,15 @@ class FermenterStep:
props: Props = Props()
type: str = None
status: StepState = StepState.INITIAL
endtime: int = 0 # endtime if step is active and timer is running
instance: str = None
step: dict = None
def __str__(self):
return "name={} props={}, type={}, instance={}".format(self.name, self.props, self.type, self.instance)
def to_dict(self):
msg = self.instance.summary if self.instance is not None else ""
return dict(id=self.id, name=self.name, state_text=msg, type=self.type, status=self.status.value, props=self.props.to_dict())
return dict(id=self.id, name=self.name, state_text=msg, type=self.type, status=self.status.value, endtime=self.endtime, props=self.props.to_dict())
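For reference, a self-contained sketch of the serialization change, with plain str/dict stand-ins for the StepState and Props helpers (StepSketch is an invented name): endtime stays 0 while no timer runs and holds a unix timestamp otherwise, and it now rides along in every step dict pushed to the UI.

from dataclasses import dataclass, field

@dataclass
class StepSketch:
    # trimmed stand-in for FermenterStep, just enough to show the payload shape
    id: str = None
    name: str = None
    status: str = "I"        # StepState value in the real class
    endtime: int = 0         # unix timestamp while the step timer is running, else 0
    props: dict = field(default_factory=dict)

    def to_dict(self):
        return dict(id=self.id, name=self.name, status=self.status,
                    endtime=self.endtime, props=self.props)

print(StepSketch(id="abc", name="Primary", endtime=0).to_dict())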


@ -1,6 +1,7 @@
import asyncio
import logging
from abc import abstractmethod
import cbpi
from cbpi.api.base import CBPiBase
@ -77,7 +78,7 @@ class CBPiStep(CBPiBase):
self.task.cancel()
await self.task
except Exception as e:
self.logger.error(e)
logging.error(e)
async def reset(self):
pass
@ -116,8 +117,98 @@ class CBPiStep(CBPiBase):
def __str__(self):
return "name={} props={}, type={}".format(self.name, self.props, self.__class__.__name__)
class CBPiFermentationStep(CBPiStep):
class CBPiFermentationStep(CBPiBase):
def __init__(self, cbpi, fermenter, step, props, on_done) -> None:
self.fermenter = fermenter
super().__init__(cbpi, step.id, step.name, props, on_done)
self.name = step.get("name")
self.cbpi = cbpi
self.id = step.get("id")
self.timer = None
self._done_callback = on_done
self.props = props
self.endtime = int(step.get("endtime"))
self.cancel_reason: StepResult = None
self.summary = ""
self.task = None
self.running: bool = False
self.logger = logging.getLogger(__name__)
self.step = step
self.update_key="fermenterstepupdate"
def _done(self, task):
if self._done_callback is not None:
try:
result = task.result()
logging.info(result)
logging.info(self.fermenter.id)
fermenter=self.fermenter.id
self._done_callback(self, result, fermenter)
except Exception as e:
self.logger.error(e)
async def start(self):
self.logger.info("Start {}".format(self.name))
self.running = True
self.task = asyncio.create_task(self._run())
self.task.add_done_callback(self._done)
async def next(self, fermenter=None):
if fermenter is None:
self.running = False
self.cancel_reason = StepResult.NEXT
self.task.cancel()
await self.task
else:
await self.cbpi.fermenter.next(fermenter)
async def stop(self):
try:
self.running = False
if self.task is not None and self.task.done() is False:
self.cancel_reason = StepResult.STOP
logging.info(self.cancel_reason)
self.task.cancel()
await self.task
except Exception as e:
logging.error(e)
async def reset(self):
pass
async def on_props_update(self, props):
self.props = {**self.props, **props}
async def update_endtime(self):
await self.cbpi.fermenter.update_endtime(self.fermenter.id, self.id, self.endtime)
async def save_props(self):
self.cbpi.fermenter.save()
async def push_update(self):
self.cbpi.fermenter.push_update(self.update_key)
async def on_start(self):
pass
async def on_stop(self):
pass
async def _run(self):
try:
await self.on_start()
await self.run()
self.cancel_reason = StepResult.DONE
except asyncio.CancelledError as e:
pass
finally:
await self.on_stop()
return self.cancel_reason
@abstractmethod
async def run(self):
pass
def __str__(self):
return "name={} props={}, type={}".format(self.name, self.props, self.__class__.__name__)


@ -64,8 +64,14 @@ class Timer(object):
@classmethod
def format_time(cls, time):
pattern = '{0:02d}:{1:02d}:{2:02d}'
pattern_h = '{0:02d}:{1:02d}:{2:02d}'
pattern_d = '{0:02d}D {1:02d}:{2:02d}:{3:02d}'
seconds = time % 60
minutes = math.floor(time / 60) % 60
hours = math.floor(time / 3600)
return pattern.format(hours, minutes, seconds)
hours = math.floor(time / 3600) % 24
days = math.floor(time / 86400)
if days != 0:
remaining_time = pattern_d.format(days, hours, minutes, seconds)
else:
remaining_time = pattern_h.format(hours, minutes, seconds)
return remaining_time
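Pulled out of the class for a quick check, the updated formatting logic behaves like this (same arithmetic as above; the sample values are arbitrary):

import math

def format_time(seconds_total):
    pattern_h = '{0:02d}:{1:02d}:{2:02d}'
    pattern_d = '{0:02d}D {1:02d}:{2:02d}:{3:02d}'
    seconds = seconds_total % 60
    minutes = math.floor(seconds_total / 60) % 60
    hours = math.floor(seconds_total / 3600) % 24
    days = math.floor(seconds_total / 86400)
    if days != 0:
        return pattern_d.format(days, hours, minutes, seconds)
    return pattern_h.format(hours, minutes, seconds)

print(format_time(3661))              # 01:01:01
print(format_time(3 * 86400 + 3661))  # 03D 01:01:01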


@ -12,15 +12,19 @@ from cbpi.craftbeerpi import CraftBeerPi
import os
import platform
import pathlib
import pkgutil
import shutil
import yaml
import click
from subprocess import call
import zipfile
from importlib_metadata import version, metadata
from colorama import Fore, Back, Style
from importlib import import_module
import importlib
from jinja2 import Template
from importlib_metadata import metadata, version
from tabulate import tabulate
from PyInquirer import prompt, print_json
def create_config_file():
if os.path.exists(os.path.join(".", 'config', "config.yaml")) is False:
@ -82,6 +86,7 @@ def create_home_folder_structure():
pathlib.Path(os.path.join(".", 'config/dashboard')).mkdir(parents=True, exist_ok=True)
pathlib.Path(os.path.join(".", 'config/dashboard/widgets')).mkdir(parents=True, exist_ok=True)
pathlib.Path(os.path.join(".", 'config/recipes')).mkdir(parents=True, exist_ok=True)
pathlib.Path(os.path.join(".", 'config/fermenterrecipes')).mkdir(parents=True, exist_ok=True)
pathlib.Path(os.path.join(".", 'config/upload')).mkdir(parents=True, exist_ok=True)
print("Folder created")
@ -114,7 +119,7 @@ def clear_db():
import os.path
if os.path.exists(os.path.join(".", "craftbeerpi.db")) is True:
os.remove(os.path.join(".", "craftbeerpi.db"))
print("database Cleared")
print("database cleared")
def recursive_chown(path, owner, group):
for dirpath, dirnames, filenames in os.walk(path):
@ -135,6 +140,12 @@ def check_for_setup():
print("Please run 'cbpi setup' before starting the server ")
print("***************************************************")
return False
# if os.path.exists(os.path.join(".", "config", "fermenterrecipes")) is False:
# print("***************************************************")
# print("CraftBeerPi fermenterrecipes folder not found: %s" % os.path.join(".", "config/fermenterrecipes"))
# print("Please run 'cbpi setup' before starting the server ")
# print("***************************************************")
# return False
backupfile = os.path.join(".", "restored_config.zip")
if os.path.exists(os.path.join(backupfile)) is True:
print("***************************************************")
@ -164,7 +175,7 @@ def check_for_setup():
zip.extractall(output_path)
zip.close()
if system != "Windows":
print("Changing owner and group of config folder recursively to {}:{}".format(owner,group))
print(f"Changing owner and group of config folder recursively to {owner}:{group}")
recursive_chown(output_path, owner, group)
print("Removing backup file")
os.remove(backupfile)
@ -178,156 +189,45 @@ def check_for_setup():
else:
return True
def plugins_add(package_name):
if package_name is None:
print("Pleaes provide a plugin Name")
return
if package_name == 'autostart':
print("Add craftbeerpi.service to systemd")
try:
if os.path.exists(os.path.join("/etc/systemd/system","craftbeerpi.service")) is False:
srcfile = os.path.join(".", "config", "craftbeerpi.service")
destfile = os.path.join("/etc/systemd/system")
shutil.copy(srcfile, destfile)
print("Copied craftbeerpi.service to /etc/systemd/system")
os.system('systemctl enable craftbeerpi.service')
print('Enabled craftbeerpi service')
os.system('systemctl start craftbeerpi.service')
print('Started craftbeerpi.service')
else:
print("craftbeerpi.service is already located in /etc/systemd/system")
except Exception as e:
print(e)
return
return
if package_name == 'chromium':
print("Add chromium.desktop to /etc/xdg/autostart/")
try:
if os.path.exists(os.path.join("/etc/xdg/autostart/","chromium.desktop")) is False:
srcfile = os.path.join(".", "config", "chromium.desktop")
destfile = os.path.join("/etc/xdg/autostart/")
shutil.copy(srcfile, destfile)
print("Copied chromium.desktop to /etc/xdg/autostart/")
else:
print("chromium.desktop is already located in /etc/xdg/autostart/")
except Exception as e:
print(e)
return
return
installation = True
try:
try:
p_metadata= metadata(package_name)
p_name=p_metadata['Name']
#if p_name != package_name:
# print("Error. Package name {} does not exist. Did you mean {}".format(package_name,p_name))
# installation = False
except Exception as e:
print("Error. Package {} cannot be found in installed packages".format(package_name))
installation = False
if installation:
with open(os.path.join(".", 'config', "config.yaml"), 'rt') as f:
data = yaml.load(f, Loader=yaml.FullLoader)
if package_name in data["plugins"]:
print("")
print("Plugin {} already active".format(package_name))
print("")
return
data["plugins"].append(package_name)
with open(os.path.join(".", 'config', "config.yaml"), 'w') as outfile:
yaml.dump(data, outfile, default_flow_style=False)
print("")
print("Plugin {} activated".format(package_name))
print("")
except Exception as e:
print(e)
pass
def plugin_remove(package_name):
if package_name is None:
print("Pleaes provide a plugin Name")
return
if package_name == 'autostart':
print("Remove craftbeerpi.service from systemd")
try:
status = os.popen('systemctl list-units --type=service --state=running | grep craftbeerpi.service').read()
if status.find("craftbeerpi.service") != -1:
os.system('systemctl stop craftbeerpi.service')
print('Stopped craftbeerpi service')
os.system('systemctl disable craftbeerpi.service')
print('Removed craftbeerpi.service as service')
else:
print('craftbeerpi.service service is not running')
if os.path.exists(os.path.join("/etc/systemd/system","craftbeerpi.service")) is True:
os.remove(os.path.join("/etc/systemd/system","craftbeerpi.service"))
print("Deleted craftbeerpi.service from /etc/systemd/system")
else:
print("craftbeerpi.service is not located in /etc/systemd/system")
except Exception as e:
print(e)
return
return
if package_name == 'chromium':
print("Remove chromium.desktop from /etc/xdg/autostart/")
try:
if os.path.exists(os.path.join("/etc/xdg/autostart/","chromium.desktop")) is True:
os.remove(os.path.join("/etc/xdg/autostart/","chromium.desktop"))
print("Deleted chromium.desktop from /etc/xdg/autostart/")
else:
print("chromium.desktop is not located in /etc/xdg/autostart/")
except Exception as e:
print(e)
return
return
try:
with open(os.path.join(".", 'config', "config.yaml"), 'rt') as f:
data = yaml.load(f, Loader=yaml.FullLoader)
data["plugins"] = list(filter(lambda k: package_name not in k, data["plugins"]))
with open(os.path.join(".", 'config', "config.yaml"), 'w') as outfile:
yaml.dump(data, outfile, default_flow_style=False)
print("")
print("Plugin {} deactivated".format(package_name))
print("")
except Exception as e:
print(e)
pass
def plugins_list():
print("--------------------------------------")
print("List of active plugins")
try:
with open(os.path.join(".", 'config', "config.yaml"), 'rt') as f:
data = yaml.load(f, Loader=yaml.FullLoader)
result = []
print("")
print(Fore.LIGHTYELLOW_EX,"List of active plugins", Style.RESET_ALL)
print("")
discovered_plugins = {
name: importlib.import_module(name)
for finder, name, ispkg
in pkgutil.iter_modules()
if name.startswith('cbpi') and len(name) > 4
}
for key, module in discovered_plugins.items():
for p in data["plugins"]:
try:
p_metadata= metadata(p)
p_Homepage= p_metadata['Home-page']
p_version = p_metadata['Version']
p_Author = p_metadata['Author']
print("- ({})\t{}".format(p_version,p))
except Exception as e:
print (e)
pass
except Exception as e:
print(e)
pass
print("--------------------------------------")
try:
meta = metadata(key)
result.append(dict(Name=meta["Name"], Version=meta["Version"], Author=meta["Author"], Homepage=meta["Home-page"], Summary=meta["Summary"]))
except Exception as e:
print(e)
print(Fore.LIGHTGREEN_EX, tabulate(result, headers="keys"), Style.RESET_ALL)
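The listing (and the loader in plugin_controller.py further down) now discovers plugins by import name rather than reading the plugins list from config.yaml. A self-contained sketch of that discovery idiom; discover_cbpi_plugins is an invented helper name:

import importlib
import pkgutil
from importlib.metadata import metadata

def discover_cbpi_plugins():
    # every importable top-level module whose name starts with 'cbpi'
    # (and is longer than plain 'cbpi') is treated as a plugin candidate
    return {
        name: importlib.import_module(name)
        for _, name, _ in pkgutil.iter_modules()
        if name.startswith("cbpi") and len(name) > 4
    }

for name in discover_cbpi_plugins():
    try:
        print(name, metadata(name)["Version"])
    except Exception:
        # the distribution name can differ from the import name; skip those here
        pass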
def plugin_create(name):
def plugin_create():
print("Plugin Creation")
print("")
questions = [
{
'type': 'input',
'name': 'name',
'message': 'Plugin Name:',
}
]
answers = prompt(questions)
name = "cbpi4_" + answers["name"]
if os.path.exists(os.path.join(".", name)) is True:
print("Cant create Plugin. Folder {} already exists ".format(name))
return
@ -361,24 +261,34 @@ def plugin_create(name):
fh.write(outputText)
TEMPLATE_FILE = os.path.join("/", name, "config.yaml")
operatingsystem = str(platform.system()).lower()
if operatingsystem.startswith("win"):
TEMPLATE_FILE=str(TEMPLATE_FILE).replace('\\','/')
template = templateEnv.get_template(TEMPLATE_FILE)
outputText = template.render(name=name)
with open(os.path.join(".", name, name, "config.yaml"), "w") as fh:
fh.write(outputText)
print("")
print("")
print(
"Plugin {} created! See https://craftbeerpi.gitbook.io/craftbeerpi4/development how to run your plugin ".format(
name))
print("Plugin {}{}{} created! ".format(Fore.LIGHTGREEN_EX, name, Style.RESET_ALL) )
print("")
print("Happy Development! Cheers")
print("Developer Documentation: https://openbrewing.gitbook.io/craftbeerpi4_support/readme/development")
print("")
print("Happy developing! Cheers")
print("")
print("")
@click.group()
def main():
print("---------------------")
print("Welcome to CBPi")
print("---------------------")
level = logging.INFO
logging.basicConfig(level=level, format='%(asctime)s - %(levelname)s - %(name)s - %(message)s')
pass
@ -406,9 +316,10 @@ def onewire(list, setup):
@click.command()
def start():
'''Lets go brewing'''
if check_for_setup() is False:
return
print("START")
print("Starting up CraftBeerPi ...")
cbpi = CraftBeerPi()
cbpi.start()
@ -420,31 +331,105 @@ def plugins():
return
@click.command()
@click.argument('name')
def add(name):
'''Activate Plugin, autostart or chromium '''
plugins_add(name)
@click.command()
@click.argument('name')
def remove(name):
'''Deactivate Plugin, autostart or chromium'''
plugin_remove(name)
@click.command()
@click.argument('name')
def create(name):
def create():
'''Create New Plugin'''
plugin_create(name)
plugin_create()
@click.command()
@click.argument('name')
def autostart(name):
'''(on|off|status) Enable or disable autostart'''
if(name == "status"):
if os.path.exists(os.path.join("/etc/systemd/system","craftbeerpi.service")) is True:
print("CraftBeerPi Autostart is {}ON{}".format(Fore.LIGHTGREEN_EX,Style.RESET_ALL))
else:
print("CraftBeerPi Autostart is {}OFF{}".format(Fore.RED,Style.RESET_ALL))
elif(name == "on"):
print("Add craftbeerpi.service to systemd")
try:
if os.path.exists(os.path.join("/etc/systemd/system","craftbeerpi.service")) is False:
srcfile = os.path.join(".", "config", "craftbeerpi.service")
destfile = os.path.join("/etc/systemd/system")
shutil.copy(srcfile, destfile)
print("Copied craftbeerpi.service to /etc/systemd/system")
os.system('systemctl enable craftbeerpi.service')
print('Enabled craftbeerpi service')
os.system('systemctl start craftbeerpi.service')
print('Started craftbeerpi.service')
else:
print("craftbeerpi.service is already located in /etc/systemd/system")
except Exception as e:
print(e)
return
return
elif(name == "off"):
print("Remove craftbeerpi.service from systemd")
try:
status = os.popen('systemctl list-units --type=service --state=running | grep craftbeerpi.service').read()
if status.find("craftbeerpi.service") != -1:
os.system('systemctl stop craftbeerpi.service')
print('Stopped craftbeerpi service')
os.system('systemctl disable craftbeerpi.service')
print('Removed craftbeerpi.service as service')
else:
print('craftbeerpi.service service is not running')
if os.path.exists(os.path.join("/etc/systemd/system","craftbeerpi.service")) is True:
os.remove(os.path.join("/etc/systemd/system","craftbeerpi.service"))
print("Deleted craftbeerpi.service from /etc/systemd/system")
else:
print("craftbeerpi.service is not located in /etc/systemd/system")
except Exception as e:
print(e)
return
return
@click.command()
@click.argument('name')
def chromium(name):
'''(on|off|status) Enable or disable Kiosk mode'''
if(name == "status"):
if os.path.exists(os.path.join("/etc/xdg/autostart/","chromium.desktop")) is True:
print("CraftBeerPi Chromium Desktop is {}ON{}".format(Fore.LIGHTGREEN_EX,Style.RESET_ALL))
else:
print("CraftBeerPi Chromium Desktop is {}OFF{}".format(Fore.RED,Style.RESET_ALL))
elif(name == "on"):
print("Add chromium.desktop to /etc/xdg/autostart/")
try:
if os.path.exists(os.path.join("/etc/xdg/autostart/","chromium.desktop")) is False:
srcfile = os.path.join(".", "config", "chromium.desktop")
destfile = os.path.join("/etc/xdg/autostart/")
shutil.copy(srcfile, destfile)
print("Copied chromium.desktop to /etc/xdg/autostart/")
else:
print("chromium.desktop is already located in /etc/xdg/autostart/")
except Exception as e:
print(e)
return
return
elif(name == "off"):
print("Remove chromium.desktop from /etc/xdg/autostart/")
try:
if os.path.exists(os.path.join("/etc/xdg/autostart/","chromium.desktop")) is True:
os.remove(os.path.join("/etc/xdg/autostart/","chromium.desktop"))
print("Deleted chromium.desktop from /etc/xdg/autostart/")
else:
print("chromium.desktop is not located in /etc/xdg/autostart/")
except Exception as e:
print(e)
return
return
main.add_command(setup)
main.add_command(start)
main.add_command(autostart)
main.add_command(chromium)
main.add_command(plugins)
main.add_command(onewire)
main.add_command(add)
main.add_command(remove)
main.add_command(create)
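With the former add/remove plugin commands dropped and the new subcommands registered above, the service and kiosk toggles are reached as, for example, cbpi autostart status, cbpi autostart on, or cbpi chromium off, and plugin scaffolding as plain cbpi create, which now asks for the plugin name interactively via PyInquirer instead of taking it as an argument.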


@ -22,7 +22,7 @@ class BasicController:
self.logger = logging.getLogger(__name__)
self.data = []
self.autostart = True
self._loop = asyncio.get_event_loop()
#self._loop = asyncio.get_event_loop()
self.path = os.path.join(".", 'config', file)
self.cbpi.app.on_cleanup.append(self.shutdown)
@ -100,7 +100,8 @@ class BasicController:
await item.instance.start()
item.instance.running = True
item.instance.task = self._loop.create_task(item.instance._run())
item.instance.task = asyncio.get_event_loop().create_task(item.instance._run())
#item.instance.task = self._loop.create_task(item.instance._run())
logging.info("{} started {}".format(self.name, id))


@ -50,7 +50,7 @@ class DashboardController:
async def get_custom_widgets(self):
path = os.path.join(".", 'config', "dashboard", "widgets")
onlyfiles = [os.path.splitext(f)[0] for f in listdir(path) if isfile(join(path, f)) and f.endswith(".svg")]
onlyfiles = [os.path.splitext(f)[0] for f in sorted(listdir(path)) if isfile(join(path, f)) and f.endswith(".svg")]
return onlyfiles
async def get_dashboard_numbers(self):
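Wrapping listdir() in sorted() makes the widget list deterministic, since os.listdir returns entries in arbitrary order. A minimal standalone version of the same expression (custom_widget_names is an invented name; the default path mirrors the one used above):

import os
from os.path import isfile, join, splitext

def custom_widget_names(path="./config/dashboard/widgets"):
    # alphabetical, extension-stripped list of the .svg widgets in the folder
    return [splitext(f)[0] for f in sorted(os.listdir(path))
            if isfile(join(path, f)) and f.endswith(".svg")]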


@ -1,9 +1,12 @@
from abc import abstractmethod
import asyncio
import cbpi
import copy
import json
import yaml
import logging
import os.path
import pathlib
from os import listdir
from os.path import isfile, join
import shortuuid
@ -12,74 +15,7 @@ from cbpi.controller.basic_controller2 import BasicController
from tabulate import tabulate
import sys, os
from ..api.step import CBPiStep, StepMove, StepResult, StepState, CBPiFermentationStep
class FermentStep:
def __init__(self, cbpi, step, on_done) -> None:
self.cbpi = cbpi
self.logger = logging.getLogger(__name__)
self.step = step
self.props = step.props
self._done_callback = on_done
self.task = None
self.summary = ""
def _done(self, task):
if self._done_callback is not None:
try:
result = task.result()
self._done_callback(self, result)
except Exception as e:
self.logger.error(e)
async def run(self):
while True:
await asyncio.sleep(1)
async def _run(self):
try:
await self.on_start()
await self.run()
self.cancel_reason = StepResult.DONE
except asyncio.CancelledError as e:
pass
finally:
await self.on_stop()
return self.cancel_reason
async def start(self):
self.logger.info("Start {}".format(self.step.name))
self.running = True
self.task = asyncio.create_task(self._run())
self.task.add_done_callback(self._done)
async def next(self):
self.running = False
self.cancel_reason = StepResult.NEXT
self.task.cancel()
await self.task
async def stop(self):
try:
self.running = False
if self.task is not None and self.task.done() is False:
self.logger.info("Stopping Task")
self.cancel_reason = StepResult.STOP
self.task.cancel()
await self.task
except Exception as e:
self.logger.error(e)
async def on_start(self):
self.props.hello = "WOOHOo"
pass
async def on_stop(self):
pass
class FermentationController:
def __init__(self, cbpi):
@ -87,7 +23,6 @@ class FermentationController:
self.cbpi = cbpi
self.logger = logging.getLogger(__name__)
self.path = os.path.join(".", 'config', "fermenter_data.json")
self._loop = asyncio.get_event_loop()
self.data = []
self.types = {}
self.steptypes = {}
@ -108,30 +43,40 @@ class FermentationController:
}
destfile = os.path.join(".", 'config', "fermenter_data.json")
json.dump(data,open(destfile,'w'),indent=4, sort_keys=True)
pathlib.Path(os.path.join(".", 'config/fermenterrecipes')).mkdir(parents=True, exist_ok=True)
def push_update(self):
self.cbpi.ws.send(dict(topic=self.update_key, data=list(map(lambda item: item.to_dict(), self.data))))
#self.cbpi.push_update("cbpi/{}".format(self.update_key), list(map(lambda item: item.to_dict(), self.data)))
for item in self.data:
self.cbpi.push_update("cbpi/{}/{}".format(self.update_key,item.id), item.to_dict())
pass
async def shutdown(self, app=None):
async def shutdown(self, app=None, fermenterid=None):
self.save()
for fermenter in self.data:
if (fermenterid == None):
for fermenter in self.data:
self.logger.info("Shutdown {}".format(fermenter.name))
for step in fermenter.steps:
try:
self.logger.info("Stop {}".format(step.name))
try:
step.instance.shutdown = True
except:
pass
await step.instance.stop()
except Exception as e:
self.logger.error(e)
else:
fermenter = self._find_by_id(fermenterid)
self.logger.info("Shutdown {}".format(fermenter.name))
for step in fermenter.steps:
try:
self.logger.info("Stop {}".format(step.name))
try:
step.instance.shutdown = True
except:
pass
await step.instance.stop()
except Exception as e:
self.logger.error(e)
async def load(self):
# if os.path.exists(self.path) is False:
# with open(self.path, "w") as file:
# json.dump(dict(basic={}, steps=[]), file, indent=4, sort_keys=True)
with open(self.path) as json_file:
data = json.load(json_file)
@ -141,24 +86,34 @@ class FermentationController:
def _create_step(self, fermenter, item):
id = item.get("id")
name = item.get("name")
props = Props(item.get("props"))
try:
endtime = int(item.get("endtime", 0))
except:
endtime=0
status = StepState(item.get("status", "I"))
if status == StepState.ACTIVE:
status = StepState("S")
type = item.get("type")
type_cfg = self.types.get(type)
if type_cfg is not None:
inst = type_cfg.get("class")()
print(inst)
try:
type_cfg = self.steptypes.get(type)
clazz = type_cfg.get("class")
instance = clazz(self.cbpi, fermenter, item, props, self._done)
except Exception as e:
logging.warning("Failed to create step instance %s - %s" % (id, e))
instance = None
step = FermenterStep(id=id, name=name, type=type, status=status, instance=None, fermenter=fermenter)
step.instance = FermentStep( self.cbpi, step, self._done)
step = FermenterStep(id=id, name=name, fermenter=fermenter, props=props, type=type, status=status, endtime=endtime, instance=instance)
return step
def _done(self, step_instance, result):
step_instance.step.status = StepState.DONE
def _done(self, step_instance, result, fermenter):
logging.info(result)
step_instance.step["status"] = "D"
self.save()
if result == StepResult.NEXT:
asyncio.create_task(self.start(step_instance.step.fermenter.id))
asyncio.create_task(self.start(fermenter))
def _create(self, data):
try:
@ -170,8 +125,9 @@ class FermentationController:
logictype = data.get("type")
temp = data.get("target_temp")
brewname = data.get("brewname")
description = data.get("description")
props = Props(data.get("props", {}))
fermenter = Fermenter(id, name, sensor, heater, cooler, brewname, props, temp, logictype)
fermenter = Fermenter(id, name, sensor, heater, cooler, brewname, description, props, temp, logictype)
fermenter.steps = list(map(lambda item: self._create_step(fermenter, item), data.get("steps", [])))
self.push_update()
return fermenter
@ -180,7 +136,7 @@ class FermentationController:
def _find_by_id(self, id):
return next((item for item in self.data if item.id == id), None)
return next((item for item in self.data if item.id == id), None)
async def get_all(self):
return list(map(lambda x: x.to_dict(), self.data))
@ -199,11 +155,39 @@ class FermentationController:
def get_state(self):
if self.data == []:
#logging.info(self.data)
pass
return {"data": list(map(lambda x: x.to_dict(), self.data)), "types":self.get_types(), "steptypes":self.get_steptypes()}
def get_step_state(self, fermenterid=None):
if self.data == []:
pass
fermentersteps=[]
steplist=list(map(lambda x: x.to_dict(), self.data))
for fermenter in steplist:
if fermenterid == fermenter.get("id"):
fermentersteps={"id": fermenter.get("id"), "steps": fermenter.get("steps")}
return fermentersteps
def get_fermenter_steps(self):
if self.data == []:
pass
fermentersteps=[]
steplist=list(map(lambda x: x.to_dict(), self.data))
for fermenter in steplist:
fermenterstep={"id": fermenter.get("id"), "steps": fermenter.get("steps")}
fermentersteps.append(fermenterstep)
return fermentersteps
async def find_step_by_id(self, id):
actionstep = None
for item in self.data:
step = self._find_step_by_id(item.steps, id)
if step is not None:
actionstep=step
return actionstep
async def get(self, id: str ):
return self._find_by_id(id)
@ -216,8 +200,6 @@ class FermentationController:
async def update(self, item: Fermenter ):
logging.info(item)
def _update(old_item: Fermenter, item: Fermenter):
old_item.name = item.name
old_item.sensor = item.sensor
@ -225,6 +207,7 @@ class FermentationController:
old_item.cooler = item.cooler
old_item.type = item.type
old_item.brewname = item.brewname
old_item.description = item.description
old_item.props = item.props
old_item.target_temp = item.target_temp
return old_item
@ -252,51 +235,141 @@ class FermentationController:
self.push_update()
def save(self):
data = dict(data=list(map(lambda item: item.to_dict(), self.data)))
data = dict(data=list(map(lambda item: item.to_dict(), self.data)))
with open(self.path, "w") as file:
json.dump(data, file, indent=4, sort_keys=True)
async def create_step(self, id, step: Step):
def create_step(self, id, item):
try:
step.id = shortuuid.uuid()
item = self._find_by_id(id)
stepid = shortuuid.uuid()
item['id'] = stepid
status = StepState("I")
type = item.get("type")
name = item.get("name")
endtime = item.get("endtime", 0)
props = Props(item.get("props"))
fermenter = self._find_by_id(id)
try:
type_cfg = self.steptypes.get(type)
clazz = type_cfg.get("class")
instance = clazz(self.cbpi, fermenter, item, props, self._done)
except Exception as e:
logging.warning("Failed to create step instance %s - %s" % (id, e))
instance = None
step = FermenterStep(id=stepid, name=name, fermenter=fermenter, props=props, type=type, status=status, endtime=endtime, instance=instance)
step.instance = FermentStep( self.cbpi, step, self._done)
item.steps.append(step)
self.save()
return step
except Exception as e:
self.logger.error(e)
async def update_step(self, id, step):
item = self._find_by_id(id)
item = list(map(lambda old: item if old.id == step.id else old, item.steps))
async def update_step(self, id, item):
fermenter = self._find_by_id(id)
stepid = item.get("id")
props = item.get("props")
status = StepState("I")
type = item.get("type")
endtime = 0
name = item.get("name")
props = Props(item.get("props"))
logging.info("update step")
try:
type_cfg = self.steptypes.get(type)
logging.info(type_cfg)
clazz = type_cfg.get("class")
logging.info(clazz)
instance = clazz(self.cbpi, fermenter, item, props, self._done)
logging.info(instance)
except Exception as e:
logging.warning("Failed to create step instance %s - %s " % (item.id, e))
instance = None
step = FermenterStep(id=stepid, name=name, fermenter=fermenter, props=props, type=type, status=status, endtime=endtime, instance=instance)
try:
fermenter.steps = list(map(lambda old: step if old.id == step.id else old, fermenter.steps))
except Exception as e:
logging.info(e)
self.save()
self.push_update("fermenterstepupdate")
async def delete_step(self, id, stepid):
item = self._find_by_id(id)
# might require later check if step is active
item.steps = list(filter(lambda item: item.id != stepid, item.steps))
self.save()
self.push_update("fermenterstepupdate")
async def clearsteps(self, id):
item = self._find_by_id(id)
# might require later check if step is active
item.steps = []
self.save()
self.push_update("fermenterstepupdate")
async def update_endtime(self, id, stepid, endtime):
try:
item = self._find_by_id(id)
step = self._find_step_by_id(item.steps, stepid)
step.endtime = int(endtime)
self.save()
self.push_update("fermenterstepupdate")
except Exception as e:
self.logger.error(e)
def _find_by_status(self, data, status):
return next((item for item in data if item.status == status), None)
def _find_step_by_id(self, data, id):
return next((item for item in data if item.id == id), None)
async def start(self, id):
self.logger.info("Start")
async def update_endtime(self, id, stepid, endtime):
try:
item = self._find_by_id(id)
step = self._find_by_status(item.steps, StepState.INITIAL)
step = self._find_step_by_id(item.steps, stepid)
step.endtime = int(endtime)
self.save()
self.push_update("fermenterstepupdate")
except Exception as e:
self.logger.error(e)
async def start(self, id):
self.logger.info("Start {}".format(id))
try:
item = self._find_by_id(id)
step = self._find_by_status(item.steps, StepState.ACTIVE)
if step is not None:
logging.error("Steps already running")
return
step = self._find_by_status(item.steps, StepState.STOP)
if step is not None:
endtime = step.endtime
await step.instance.start()
logging.info("Restarting step {}".format(step.name))
if endtime != 0:
logging.info("Need to change timer")
step.status = StepState.ACTIVE
self.save()
self.push_update()
self.push_update("fermenterstepupdate")
return
step = self._find_by_status(item.steps, StepState.INITIAL)
logging.info(step)
if step is None:
self.logger.info("No futher step to start")
else:
step.instance.endtime = 0
await step.instance.start()
logging.info("Starting step {}".format(step.name))
step.status = StepState.ACTIVE
self.save()
self.push_update()
self.push_update("fermenterstepupdate")
except Exception as e:
self.logger.error(e)
@ -305,9 +378,19 @@ class FermentationController:
try:
item = self._find_by_id(id)
step = self._find_by_status(item.steps, StepState.ACTIVE)
await step.instance.stop()
step.status = StepState.STOP
self.save()
#logging.info(step)
#logging.info(step.status)
if step != None:
logging.info("CALLING STOP STEP")
try:
await step.instance.stop()
step.status = StepState.STOP
self.save()
except Exception as e:
logging.error("Failed to stop fermenterstep - Id: %s" % step.id)
self.push_update()
self.push_update("fermenterstepupdate")
except Exception as e:
self.logger.error(e)
@ -326,7 +409,7 @@ class FermentationController:
await item.instance.start()
item.instance.running = True
item.instance.task = self._loop.create_task(item.instance._run())
item.instance.task = asyncio.get_event_loop().create_task(item.instance._run())
logging.info("{} started {}".format(item.name, id))
@ -352,8 +435,27 @@ class FermentationController:
self.logger.info("Next {} ".format(id))
try:
item = self._find_by_id(id)
logging.info(item)
step = self._find_by_status(item.steps, StepState.ACTIVE)
await step.instance.next()
logging.info(step)
if step is not None:
if step.instance is not None:
step.status = StepState.DONE
await step.instance.next()
step = self._find_by_status(item.steps, StepState.STOP)
logging.info(step)
if step is not None:
if step.instance is not None:
logging.info(step)
step.status = StepState.DONE
logging.info(step)
self.save()
await self.start(id)
else:
logging.info("No Step is running")
self.push_update()
self.push_update("fermenterstepupdate")
except Exception as e:
self.logger.error(e)
@ -367,10 +469,15 @@ class FermentationController:
self.logger.info("Stopping Step {} {}".format(step.name, step.id))
try:
await step.instance.stop()
await step.instance.reset()
step.status = StepState.INITIAL
step.endtime = 0
except Exception as e:
self.logger.error(e)
self.save()
self.push_update()
self.push_update("fermenterstepupdate")
except Exception as e:
self.logger.error(e)
@ -387,7 +494,98 @@ class FermentationController:
fermenter.steps[index], fermenter.steps[index+direction] = fermenter.steps[index+direction], fermenter.steps[index]
self.save()
self.push_update()
self.push_update("fermenterstepupdate")
except Exception as e:
self.logger.error(e)
def push_update(self, key="fermenterupdate"):
if key == self.update_key:
self.cbpi.ws.send(dict(topic=key, data=list(map(lambda item: item.to_dict(), self.data))))
for item in self.data:
self.cbpi.push_update("cbpi/{}/{}".format(self.update_key,item.id), item.to_dict())
pass
else:
fermentersteps=self.get_fermenter_steps()
self.cbpi.ws.send(dict(topic=key, data=fermentersteps))
# send MQTT update for active fermenter steps
for fermenter in fermentersteps:
for step in fermenter['steps']:
if step['status'] == 'A':
self.cbpi.push_update("cbpi/{}/{}/{}".format(key,fermenter['id'],step['id']), step)
async def call_action(self, id, action, parameter) -> None:
logging.info("FermenterStep Controller - call Action {} {}".format(id, action))
try:
item = await self.find_step_by_id(id)
logging.info(item)
await item.instance.__getattribute__(action)(**parameter)
except Exception as e:
logging.error("FermenterStep Controller - Failed to call action on {} {} {}".format(id, action, e))
async def savetobook(self, fermenterid):
name = shortuuid.uuid()
path = os.path.join(".", 'config', "fermenterrecipes", "{}.yaml".format(name))
fermenter=self._find_by_id(fermenterid)
try:
brewname = fermenter.brewname
description = fermenter.description
except:
brewname = ""
description = ""
self.basic_data={"name": brewname, "description": description}
try:
fermentersteps = fermenter.steps
except:
fermentersteps = []
data = dict(basic=self.basic_data, steps=list(map(lambda item: item.to_dict(), fermentersteps)))
with open(path, "w") as file:
yaml.dump(data, file)
async def load_recipe(self, data, fermenterid, name):
try:
await self.shutdown(None, fermenterid)
except:
pass
fermenter = self._find_by_id(fermenterid)
def add_runtime_data(item):
item["status"] = "I"
item["endtime"] = 0
item["id"] = shortuuid.uuid()
item["props"]["Sensor"] = fermenter.sensor
list(map(lambda item: add_runtime_data(item), data.get("steps")))
try:
fermenter.description = data['basic'].get("desc")
except:
fermenter.description = "No Description"
if name is not None:
fermenter.brewname = name
else:
try:
fermenter.brewname = data['basic'].get("name")
except:
fermenter.brewname = "Fermentation"
await self.update(fermenter)
fermenter.steps=[]
for item in data.get("steps"):
fermenter.steps.append(self.create_step(fermenterid, item))
self.save()
self.push_update("fermenterstepupdate")
async def add_step(self, fermenterid, newstep):
fermenter = self._find_by_id(fermenterid)
step = self.create_step(fermenterid, newstep)
fermenter.steps.append(step)
self.save()
self.push_update("fermenterstepupdate")
return step


@ -10,7 +10,7 @@ from ..api.step import StepMove, StepResult, StepState
import re
class RecipeController:
class FermenterRecipeController:
def __init__(self, cbpi):
@ -29,57 +29,60 @@ class RecipeController:
async def create(self, name):
id = shortuuid.uuid()
path = os.path.join(".", 'config', "recipes", "{}.yaml".format(id))
data = dict(basic=dict(name=name, author=self.cbpi.config.get("AUTHOR", "John Doe")), steps=[])
path = os.path.join(".", 'config', "fermenterrecipes", "{}.yaml".format(id))
data = dict(basic=dict(name=name), steps=[])
with open(path, "w") as file:
yaml.dump(data, file)
return id
async def save(self, name, data):
path = os.path.join(".", 'config', "recipes", "{}.yaml".format(name))
path = os.path.join(".", 'config', "fermenterrecipes", "{}.yaml".format(name))
logging.info(data)
with open(path, "w") as file:
yaml.dump(data, file, indent=4, sort_keys=True)
self.cbpi.notify("{} saved".format(data["basic"].get("name")))
async def get_recipes(self):
path = os.path.join(".", 'config', "recipes")
path = os.path.join(".", 'config', "fermenterrecipes")
onlyfiles = [os.path.splitext(f)[0] for f in listdir(path) if isfile(join(path, f)) and f.endswith(".yaml")]
result = []
for filename in onlyfiles:
recipe_path = os.path.join(".", 'config', "recipes", "%s.yaml" % filename)
recipe_path = os.path.join(".", 'config', "fermenterrecipes", "%s.yaml" % filename)
with open(recipe_path) as file:
data = yaml.load(file, Loader=yaml.FullLoader)
dataset = data["basic"]
dataset["file"] = filename
result.append(dataset)
logging.info(result)
return result
async def get_by_name(self, name):
recipe_path = os.path.join(".", 'config', "recipes", "%s.yaml" % name)
recipe_path = os.path.join(".", 'config', "fermenterrecipes", "%s.yaml" % name)
with open(recipe_path) as file:
return yaml.load(file, Loader=yaml.FullLoader)
async def remove(self, name):
path = os.path.join(".", 'config', "recipes", "{}.yaml".format(name))
path = os.path.join(".", 'config', "fermenterrecipes", "{}.yaml".format(name))
os.remove(path)
self.cbpi.notify("{} delted".format(name))
async def brew(self, name):
async def brew(self, recipeid, fermenterid, name):
recipe_path = os.path.join(".", 'config', "recipes", "%s.yaml" % name)
recipe_path = os.path.join(".", 'config', "fermenterrecipes", "%s.yaml" % recipeid)
logging.info(recipe_path)
with open(recipe_path) as file:
data = yaml.load(file, Loader=yaml.FullLoader)
await self.cbpi.step.load_recipe(data)
await self.cbpi.fermenter.load_recipe(data, fermenterid, name)
async def clone(self, id, new_name):
recipe_path = os.path.join(".", 'config', "recipes", "%s.yaml" % id)
recipe_path = os.path.join(".", 'config', "fermenterrecipes", "%s.yaml" % id)
with open(recipe_path) as file:
data = yaml.load(file, Loader=yaml.FullLoader)
data["basic"]["name"] = new_name
new_id = shortuuid.uuid()
await self.save(new_id, data)
return new_id
return new_id


@ -1,19 +1,17 @@
import importlib
import logging
import os
import pkgutil
from importlib import import_module
from importlib_metadata import version, metadata
import datetime
import aiohttp
import asyncio
import yaml
import subprocess
import sys
from cbpi.api import *
from cbpi.utils.utils import load_config
from importlib_metadata import metadata, version
logger = logging.getLogger(__name__)
class PluginController():
modules = {}
types = {}
@ -23,7 +21,8 @@ class PluginController():
def load_plugins(self):
this_directory = os.sep.join(os.path.abspath(__file__).split(os.sep)[:-1])
this_directory = os.sep.join(
os.path.abspath(__file__).split(os.sep)[:-1])
for filename in os.listdir(os.path.join(this_directory, "../extension")):
if os.path.isdir(
os.path.join(this_directory, "../extension/") + filename) is False or filename == "__pycache__":
@ -31,7 +30,7 @@ class PluginController():
try:
logger.info("Trying to load plugin %s" % filename)
data = load_config(os.path.join(
this_directory, "../extension/%s/config.yaml" % filename))
this_directory, "../extension/%s/config.yaml" % filename))
if (data.get("active") is True and data.get("version") == 4):
self.modules[filename] = import_module(
"cbpi.extension.%s" % (filename))
@ -42,21 +41,27 @@ class PluginController():
"Plugin %s is not supporting version 4" % filename)
except Exception as e:
logger.error(e)
def load_plugins_from_evn(self):
for p in self.cbpi.static_config.get("plugins", []):
discovered_plugins = {
name: importlib.import_module(name)
for finder, name, ispkg
in pkgutil.iter_modules()
if name.startswith('cbpi') and len(name) > 4
}
for key, value in discovered_plugins.items():
from importlib.metadata import version
try:
logger.info("Try to load plugin: %s " % p)
self.modules[p] = import_module(p)
self.modules[p].setup(self.cbpi)
logger.info("Plugin %s loaded successfully" % p)
logger.info("Try to load plugin: {} == {} ".format(
key, version(key)))
value.setup(self.cbpi)
logger.info("Plugin {} loaded successfully".format(key))
except Exception as e:
logger.error("FAILED to load plugin %s " % p)
logger.error("FAILED to load plugin {} ".format(key))
logger.error(e)
def register(self, name, clazz) -> None:
@ -75,7 +80,8 @@ class PluginController():
self.cbpi.kettle.types[name] = self._parse_step_props(clazz, name)
if issubclass(clazz, CBPiFermenterLogic):
self.cbpi.fermenter.types[name] = self._parse_step_props(clazz, name)
self.cbpi.fermenter.types[name] = self._parse_step_props(
clazz, name)
if issubclass(clazz, CBPiSensor):
self.cbpi.sensor.types[name] = self._parse_step_props(clazz, name)
@ -84,7 +90,8 @@ class PluginController():
self.cbpi.step.types[name] = self._parse_step_props(clazz, name)
if issubclass(clazz, CBPiFermentationStep):
self.cbpi.fermenter.steptypes[name] = self._parse_step_props(clazz, name)
self.cbpi.fermenter.steptypes[name] = self._parse_step_props(
clazz, name)
if issubclass(clazz, CBPiExtension):
self.c = clazz(self.cbpi)
@ -123,7 +130,8 @@ class PluginController():
parameters = []
for p in method.__getattribute__("parameters"):
parameters.append(self._parse_property_object(p))
result["actions"].append({"method": method_name, "label": key, "parameters": parameters})
result["actions"].append(
{"method": method_name, "label": key, "parameters": parameters})
return result
@ -185,27 +193,25 @@ class PluginController():
async def load_plugin_list(self):
result = []
try:
with open(os.path.join(".", 'config', "config.yaml"), 'rt') as f:
data = yaml.load(f, Loader=yaml.FullLoader)
discovered_plugins = {
name: importlib.import_module(name)
for finder, name, ispkg
in pkgutil.iter_modules()
if name.startswith('cbpi') and len(name) > 4
}
for key, module in discovered_plugins.items():
from importlib.metadata import version
try:
from importlib.metadata import (distribution, metadata,
version)
meta = metadata(key)
result.append({row: meta[row]
for row in list(metadata(key))})
except Exception as e:
logger.error("FAILED to load plugin {} ".format(key))
logger.error(e)
for p in data["plugins"]:
try:
p_metadata= metadata(p)
p_name = p_metadata['name']
p_version = p_metadata['Version']
p_summary = p_metadata['Summary']
p_homepage= p_metadata['Home-page']
p_author = p_metadata['Author']
p_author_email = p_metadata['Author-email']
p_license = p_metadata['License']
p_description = p_metadata['Description']
plugin_data = {'Name': p_name,'Version': p_version,'Summary': p_summary,'Homepage':p_homepage,'Author':p_author,'Email': p_author_email,'License': p_license,'Description': p_description}
result.append(plugin_data)
except:
pass
# print("- ({})\t{}".format(p_version,p))
except Exception as e:
print(e)
logger.error(e)
return []
pass
return result


@ -20,7 +20,7 @@ class StepController:
self.cbpi = cbpi
self.logger = logging.getLogger(__name__)
self.path = os.path.join(".", 'config', "step_data.json")
self._loop = asyncio.get_event_loop()
#self._loop = asyncio.get_event_loop()
self.basic_data = {}
self.step = None
self.types = {}
@ -46,8 +46,8 @@ class StepController:
except Exception as e:
logging.warning("Failed to create step instance %s - %s" % (id, e))
instance = None
return Step(id, name, type=type, status=status, instance=instance, props=props )
step=Step(id, name, type=type, status=status, instance=instance, props=props )
return step
def load(self, startActive=False):
@ -68,8 +68,9 @@ class StepController:
self.profile = list(map(lambda item: self.create(item), self.profile))
if startActive is True:
active_step = self.find_by_status("A")
if active_step is not None:
self._loop.create_task(self.start_step(active_step))
if active_step is not None:
asyncio.get_event_loop().create_task(self.start_step(active_step))
#self._loop.create_task(self.start_step(active_step))
async def add(self, item: Step):
logging.debug("Add step")
@ -198,6 +199,7 @@ class StepController:
def get_types(self):
result = {}
for key, value in self.types.items():
#if "ferment" not in str(value.get("class")).lower():
result[key] = dict(name=value.get("name"), properties=value.get("properties"), actions=value.get("actions"))
return result


@ -178,6 +178,8 @@ class SystemController:
mempercent = 0
eth0IP = "N/A"
wlan0IP = "N/A"
eth0speed = "N/A"
wlan0speed = "N/A"
TEMP_UNIT=self.cbpi.config.get("TEMP_UNIT", "C")
FAHRENHEIT = False if TEMP_UNIT == "C" else True
@ -225,12 +227,31 @@ class SystemController:
if str(addr.family) == "AddressFamily.AF_INET":
if addr.address:
wlan0IP = addr.address
info = psutil.net_if_stats()
try:
for nic in info:
if nic == 'eth0':
if info[nic].isup == True:
if info[nic].speed:
eth0speed = info[nic].speed
else:
eth0speed = "down"
if nic == 'wlan0':
if info[nic].isup == True:
ratestring = os.popen('iwlist wlan0 rate | grep Rate').read()
start = ratestring.find("=") + 1
end = ratestring.find(" Mb/s")
wlan0speed = ratestring[start:end]
else:
wlan0speed = "down"
except Exception as e:
logging.info(e)
except:
pass
if system == "Windows":
try:
ethernet = psutil.net_if_addrs()
ethernet = psutil.net_if_addrs()
for nic, addrs in ethernet.items():
if nic == "Ethernet":
for addr in addrs:
@ -242,6 +263,23 @@ class SystemController:
if str(addr.family) == "AddressFamily.AF_INET":
if addr.address:
wlan0IP = addr.address
info = psutil.net_if_stats()
try:
for nic in info:
if nic == 'Ethernet':
if info[nic].isup == True:
if info[nic].speed:
eth0speed = info[nic].speed
else:
eth0speed = "down"
if nic == 'WLAN':
if info[nic].isup == True:
if info[nic].speed:
wlan0speed = info[nic].speed
else:
wlan0speed = "down"
except Exception as e:
logging.info(e)
except:
pass
@ -258,7 +296,9 @@ class SystemController:
'temp': temp,
'temp_unit': TEMP_UNIT,
'eth0': eth0IP,
'wlan0': wlan0IP}
'wlan0': wlan0IP,
'eth0speed': eth0speed,
'wlan0speed': wlan0speed}
return systeminfo
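The link-speed reporting added here leans on psutil.net_if_stats(); a minimal standalone sketch of that call (link_speeds is an invented name; interface names differ per system, hence the eth0/wlan0 and Ethernet/WLAN handling above). For wlan0 on the Pi the code shells out to iwlist instead, presumably because psutil does not report a Wi-Fi link rate there.

import psutil

def link_speeds():
    # map every NIC to its reported link speed in Mbit/s, or "down" when the
    # interface is not up (speed may be 0 if the driver does not report it)
    return {nic: (stats.speed if stats.isup else "down")
            for nic, stats in psutil.net_if_stats().items()}

print(link_speeds())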


@ -1,5 +1,10 @@
import asyncio
import sys
try:
from asyncio import set_event_loop_policy, WindowsSelectorEventLoopPolicy
except ImportError:
pass
import json
from voluptuous.schema_builder import message
from cbpi.api.dataclasses import NotificationType
@ -25,6 +30,7 @@ from cbpi.controller.plugin_controller import PluginController
from cbpi.controller.sensor_controller import SensorController
from cbpi.controller.step_controller import StepController
from cbpi.controller.recipe_controller import RecipeController
from cbpi.controller.fermenter_recipe_controller import FermenterRecipeController
from cbpi.controller.upload_controller import UploadController
from cbpi.controller.fermentation_controller import FermentationController
@ -45,6 +51,7 @@ from cbpi.http_endpoints.http_kettle import KettleHttpEndpoints
from cbpi.http_endpoints.http_sensor import SensorHttpEndpoints
from cbpi.http_endpoints.http_step import StepHttpEndpoints
from cbpi.http_endpoints.http_recipe import RecipeHttpEndpoints
from cbpi.http_endpoints.http_fermenterrecipe import FermenterRecipeHttpEndpoints
from cbpi.http_endpoints.http_plugin import PluginHttpEndpoints
from cbpi.http_endpoints.http_system import SystemHttpEndpoints
from cbpi.http_endpoints.http_log import LogHttpEndpoints
@ -80,13 +87,17 @@ async def error_middleware(request, handler):
class CraftBeerPi:
def __init__(self):
operationsystem= sys.platform
if operationsystem.startswith('win'):
set_event_loop_policy(WindowsSelectorEventLoopPolicy())
self.path = os.sep.join(os.path.abspath(__file__).split(os.sep)[:-1]) # The path to the package dir
self.version = __version__
self.static_config = load_config(os.path.join(".", 'config', "config.yaml"))
self.database_file = os.path.join(".", 'config', "craftbeerpi.db")
logger.info("Init CraftBeerPI")
policy = auth.SessionTktAuthentication(urandom(32), 60, include_ip=True)
@ -112,6 +123,7 @@ class CraftBeerPi:
self.fermenter : FermentationController = FermentationController(self)
self.step : StepController = StepController(self)
self.recipe : RecipeController = RecipeController(self)
self.fermenterrecipe : FermenterRecipeController = FermenterRecipeController(self)
self.upload : UploadController = UploadController(self)
self.notification : NotificationController = NotificationController(self)
self.satellite = None
@ -121,6 +133,7 @@ class CraftBeerPi:
self.http_step = StepHttpEndpoints(self)
self.http_recipe = RecipeHttpEndpoints(self)
self.http_fermenterrecipe = FermenterRecipeHttpEndpoints(self)
self.http_sensor = SensorHttpEndpoints(self)
self.http_config = ConfigHttpEndpoints(self)
self.http_actor = ActorHttpEndpoints(self)
@ -247,7 +260,7 @@ class CraftBeerPi:
f = Figlet(font='big')
logger.info("\n%s" % f.renderText("CraftBeerPi %s " % self.version))
logger.info("www.CraftBeerPi.com")
logger.info("(c) 2021 Manuel Fritsch")
logger.info("(c) 2021/2022 Manuel Fritsch / Alexander Vollkopf")
def _setup_http_index(self):
async def http_index(request):
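The guarded import at the top of this file plus the check in __init__ amount to the following pattern; a self-contained sketch (the comment gives the usual motivation, the commit itself does not state one):

import sys
import asyncio

# Python 3.8+ defaults to the Proactor event loop on Windows; some libraries still
# expect a selector-based loop, so the policy is switched before any loop is created.
if sys.platform.startswith("win"):
    asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())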


@ -24,14 +24,17 @@ import warnings
class FermenterNotificationStep(CBPiFermentationStep):
async def NextStep(self, **kwargs):
await self.next()
await self.next(self.fermenter.id)
return StepResult.DONE
async def on_timer_done(self,timer):
self.summary = self.props.get("Notification","")
if self.AutoNext == True:
self.cbpi.notify(self.name, self.props.get("Notification",""), NotificationType.INFO)
await self.next()
if self.shutdown != True:
await self.next(self.fermenter.id)
return StepResult.DONE
else:
self.cbpi.notify(self.name, self.props.get("Notification",""), NotificationType.INFO, action=[NotificationAction("Next Step", self.NextStep)])
await self.push_update()
@ -40,6 +43,7 @@ class FermenterNotificationStep(CBPiFermentationStep):
await self.push_update()
async def on_start(self):
self.shutdown = False
self.summary=""
self.AutoNext = False if self.props.get("AutoNext", "No") == "No" else True
if self.timer is None:
@ -62,34 +66,37 @@ class FermenterNotificationStep(CBPiFermentationStep):
@parameters([Property.Number(label="Temp", configurable=True),
Property.Sensor(label="Sensor"),
Property.Kettle(label="Kettle"),
Property.Text(label="Notification",configurable = True, description = "Text for notification when Temp is reached"),
Property.Select(label="AutoMode",options=["Yes","No"], description="Switch Kettlelogic automatically on and off -> Yes")])
Property.Select(label="AutoMode",options=["Yes","No"], description="Switch Fermenterlogic automatically on and off -> Yes")])
class FermenterTargetTempStep(CBPiFermentationStep):
async def NextStep(self, **kwargs):
await self.next()
if self.shutdown != True:
await self.next(self.fermenter.id)
return StepResult.DONE
async def on_timer_done(self,timer):
self.summary = ""
self.kettle.target_temp = 0
await self.push_update()
if self.AutoMode == True:
await self.setAutoMode(False)
self.cbpi.notify(self.name, self.props.get("Notification","Target Temp reached. Please add malt and klick next to move on."), action=[NotificationAction("Next Step", self.NextStep)])
self.cbpi.notify(self.name, self.props.get("Notification","Target Temp reached. Please add malt and klick next to move on."))
await self.next(self.fermenter.id)
return StepResult.DONE
async def on_timer_update(self,timer, seconds):
await self.push_update()
async def on_start(self):
self.shutdown = False
self.AutoMode = True if self.props.get("AutoMode","No") == "Yes" else False
self.kettle=self.get_kettle(self.props.get("Kettle", None))
if self.kettle is not None:
self.kettle.target_temp = int(self.props.get("Temp", 0))
if self.fermenter is not None:
self.fermenter.target_temp = float(self.props.get("Temp", 0))
if self.AutoMode == True:
await self.setAutoMode(True)
self.summary = "Waiting for Target Temp"
if self.cbpi.kettle is not None and self.timer is None:
if self.fermenter is not None and self.timer is None:
self.timer = Timer(1 ,on_update=self.on_timer_update, on_done=self.on_timer_done)
await self.push_update()
@ -101,36 +108,51 @@ class FermenterTargetTempStep(CBPiFermentationStep):
await self.push_update()
async def run(self):
while self.running == True:
await asyncio.sleep(1)
sensor_value = self.get_sensor_value(self.props.get("Sensor", None)).get("value")
if sensor_value >= int(self.props.get("Temp",0)) and self.timer.is_running is not True:
self.timer.start()
self.timer.is_running = True
while self.get_sensor_value(self.props.get("Sensor", None)).get("value") > 900:
await asyncio.sleep(1)
self.starttemp= self.get_sensor_value(self.props.get("Sensor", None)).get("value")
if self.fermenter.target_temp >= self.starttemp:
logging.info("warmup")
while self.running == True:
sensor_value = self.get_sensor_value(self.props.get("Sensor", None)).get("value")
if sensor_value >= self.fermenter.target_temp and self.timer.is_running is not True:
self.timer.start()
self.timer.is_running = True
await asyncio.sleep(1)
elif self.fermenter.target_temp <= self.starttemp:
logging.info("Cooldown")
while self.running == True:
sensor_value = self.get_sensor_value(self.props.get("Sensor", None)).get("value")
if sensor_value <= self.fermenter.target_temp and self.timer.is_running is not True:
self.timer.start()
self.timer.is_running = True
await asyncio.sleep(1)
await self.push_update()
return StepResult.DONE
async def reset(self):
self.timer = Timer(1 ,on_update=self.on_timer_update, on_done=self.on_timer_done)
self.timer.is_running == False
async def setAutoMode(self, auto_state):
try:
if (self.kettle.instance is None or self.kettle.instance.state == False) and (auto_state is True):
await self.cbpi.kettle.toggle(self.kettle.id)
elif (self.kettle.instance.state == True) and (auto_state is False):
await self.cbpi.kettle.stop(self.kettle.id)
if (self.fermenter.instance is None or self.fermenter.instance.state == False) and (auto_state is True):
await self.cbpi.fermenter.toggle(self.fermenter.id)
elif (self.fermenter.instance.state == True) and (auto_state is False):
await self.cbpi.fermenter.toggle(self.fermenter.id)
await self.push_update()
except Exception as e:
logging.error("Failed to switch on KettleLogic {} {}".format(self.kettle.id, e))
logging.error("Failed to switch on FermenterLogic {} {}".format(self.fermenter.id, e))
@parameters([Property.Number(label="Timer", description="Time in Minutes", configurable=True),
@parameters([Property.Number(label="TimerD", description="Timer Days", configurable=True),
Property.Number(label="TimerH", description="Timer Hours", configurable=True),
Property.Number(label="TimerM", description="Timer Minutes", configurable=True),
Property.Number(label="Temp", configurable=True),
Property.Sensor(label="Sensor"),
Property.Kettle(label="Kettle"),
Property.Select(label="AutoMode",options=["Yes","No"], description="Switch Kettlelogic automatically on and off -> Yes")])
class FermentationStep(CBPiFermentationStep):
Property.Select(label="AutoMode",options=["Yes","No"], description="Switch Fermenterlogic automatically on and off -> Yes")])
class FermenterStep(CBPiFermentationStep):
@action("Start Timer", [])
async def start_timer(self):
@ -138,49 +160,75 @@ class FermentationStep(CBPiFermentationStep):
self.cbpi.notify(self.name, 'Timer started', NotificationType.INFO)
self.timer.start()
self.timer.is_running = True
self.endtime = time.time() + self.fermentationtime
await self.update_endtime()
estimated_completion_time = datetime.fromtimestamp(time.time()+ self.fermentationtime)
self.cbpi.notify(self.name, 'Timer started. Estimated completion: {}'.format(estimated_completion_time.strftime("%d.%m, %H:%M")), NotificationType.INFO)
else:
self.cbpi.notify(self.name, 'Timer is already running', NotificationType.WARNING)
@action("Add 5 Minutes to Timer", [])
async def add_timer(self):
if self.timer.is_running == True:
self.cbpi.notify(self.name, '5 Minutes added', NotificationType.INFO)
await self.timer.add(300)
else:
self.cbpi.notify(self.name, 'Timer must be running to add time', NotificationType.WARNING)
# @action("Add 1 Day to Timer", [])
# async def add_timer(self):
# if self.timer.is_running == True:
# self.cbpi.notify(self.name, '1 Day added', NotificationType.INFO)
# await self.timer.add(86400)
# self.endtime = self.endtime +86400
# await self.update_endtime()
# else:
# self.cbpi.notify(self.name, 'Timer must be running to add time', NotificationType.WARNING)
async def on_timer_done(self,timer):
self.summary = ""
self.kettle.target_temp = 0
if self.AutoMode == True:
await self.setAutoMode(False)
self.cbpi.notify(self.name, 'Step finished', NotificationType.SUCCESS)
await self.next()
if self.shutdown != True:
await self.next(self.fermenter.id)
return StepResult.DONE
async def on_timer_update(self,timer, seconds):
self.summary = Timer.format_time(seconds)
await self.push_update()
async def on_start(self):
self.shutdown = False
if self.endtime == 0:
timeD=int(self.props.get("TimerD", 0))
timeH=int(self.props.get("TimerH", 0))
timeM=int(self.props.get("TimerM", 0))
self.fermentationtime=(timeM+(60*timeH)+(1440*timeD)) *60
else:
self.fermentationtime = self.endtime - time.time()
self.AutoMode = True if self.props.get("AutoMode", "No") == "Yes" else False
self.kettle=self.get_kettle(self.props.Kettle)
if self.kettle is not None:
self.kettle.target_temp = int(self.props.get("Temp", 0))
if self.fermenter is not None:
self.fermenter.target_temp = float(self.props.get("Temp", 0))
if self.AutoMode == True:
await self.setAutoMode(True)
await self.push_update()
if self.cbpi.kettle is not None and self.timer is None:
self.timer = Timer(int(self.props.get("Timer",0)) *60 ,on_update=self.on_timer_update, on_done=self.on_timer_done)
elif self.cbpi.kettle is not None:
if self.fermenter is not None and self.timer is None:
logging.info("Set Timer")
self.timer = Timer(self.fermentationtime ,on_update=self.on_timer_update, on_done=self.on_timer_done)
self.timer.is_running = False
elif self.fermenter is not None:
try:
if self.timer.is_running == True:
self.timer.start()
self.endtime = time.time() + self.fermentationtime
await self.update_endtime()
except:
pass
if self.endtime != 0 and self.timer is not None and self.timer.is_running == False:
self.timer.start()
self.timer.is_running = True
estimated_completion_time = datetime.fromtimestamp(time.time()+ self.fermentationtime)
self.cbpi.notify(self.name, 'Timer restarted. Estimated completion: {}'.format(estimated_completion_time.strftime("%d.%m, %H:%M")), NotificationType.INFO)
self.summary = "Waiting for Target Temp"
await self.push_update()
@ -192,29 +240,56 @@ class FermentationStep(CBPiFermentationStep):
await self.push_update()
async def reset(self):
self.timer = Timer(int(self.props.get("Timer",0)) *60 ,on_update=self.on_timer_update, on_done=self.on_timer_done)
timeD=int(self.props.get("TimerD", 0))
timeH=int(self.props.get("TimerH", 0))
timeM=int(self.props.get("TimerM", 0))
self.fermentationtime=(timeM+(60*timeH)+(1440*timeD)) *60
self.timer = Timer(self.fermentationtime ,on_update=self.on_timer_update, on_done=self.on_timer_done)
self.endtime = 0
self.timer.is_running = False
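
As a quick sanity check of the duration formula used in on_start() and reset() above (the values below are illustrative only):

# Worked example of the TimerD/TimerH/TimerM conversion above (illustrative values).
timeD, timeH, timeM = 1, 2, 30                       # 1 day, 2 hours, 30 minutes
fermentationtime = (timeM + (60 * timeH) + (1440 * timeD)) * 60
print(fermentationtime)                              # 95400 seconds == 26.5 hours
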
async def run(self):
while self.running == True:
while self.get_sensor_value(self.props.get("Sensor", None)).get("value") > 900:
await asyncio.sleep(1)
sensor_value = self.get_sensor_value(self.props.get("Sensor", None)).get("value")
if sensor_value >= int(self.props.get("Temp",0)) and self.timer.is_running is not True:
self.timer.start()
self.timer.is_running = True
estimated_completion_time = datetime.fromtimestamp(time.time()+ (int(self.props.get("Timer",0)))*60)
self.cbpi.notify(self.name, 'Timer started. Estimated completion: {}'.format(estimated_completion_time.strftime("%H:%M")), NotificationType.INFO)
self.starttemp= self.get_sensor_value(self.props.get("Sensor", None)).get("value")
if self.fermenter.target_temp >= self.starttemp:
logging.info("warmup")
while self.running == True:
await asyncio.sleep(1)
sensor_value = self.get_sensor_value(self.props.get("Sensor", None)).get("value")
if sensor_value >= self.fermenter.target_temp and self.timer.is_running is not True:
self.timer.start()
self.timer.is_running = True
self.endtime = time.time() + self.fermentationtime
await self.update_endtime()
estimated_completion_time = datetime.fromtimestamp(time.time()+ self.fermentationtime)
self.cbpi.notify(self.name, 'Timer started. Estimated completion: {}'.format(estimated_completion_time.strftime("%d.%m, %H:%M")), NotificationType.INFO)
elif self.fermenter.target_temp <= self.starttemp:
logging.info("cooldown")
while self.running == True:
await asyncio.sleep(1)
sensor_value = self.get_sensor_value(self.props.get("Sensor", None)).get("value")
if sensor_value <= self.fermenter.target_temp and self.timer.is_running is not True:
self.timer.start()
self.timer.is_running = True
self.endtime = time.time() + self.fermentationtime
await self.update_endtime()
estimated_completion_time = datetime.fromtimestamp(time.time()+ self.fermentationtime)
self.cbpi.notify(self.name, 'Timer started. Estimated completion: {}'.format(estimated_completion_time.strftime("%d.%m, %H:%M")), NotificationType.INFO)
return StepResult.DONE
async def setAutoMode(self, auto_state):
try:
if (self.kettle.instance is None or self.kettle.instance.state == False) and (auto_state is True):
await self.cbpi.kettle.toggle(self.kettle.id)
elif (self.kettle.instance.state == True) and (auto_state is False):
await self.cbpi.kettle.stop(self.kettle.id)
if (self.fermenter.instance is None or self.fermenter.instance.state == False) and (auto_state is True):
await self.cbpi.fermenter.toggle(self.fermenter.id)
elif (self.fermenter.instance.state == True) and (auto_state is False):
await self.cbpi.fermenter.toggle(self.fermenter.id)
await self.push_update()
except Exception as e:
logging.error("Failed to switch on KettleLogic {} {}".format(self.kettle.id, e))
logging.error("Failed to switch on FermenterLogic {} {}".format(self.fermenter.id, e))
def setup(cbpi):
@ -228,4 +303,4 @@ def setup(cbpi):
cbpi.plugin.register("FermenterNotificationStep", FermenterNotificationStep)
cbpi.plugin.register("FermenterTargetTempStep", FermenterTargetTempStep)
cbpi.plugin.register("FermentationStep", FermentationStep)
cbpi.plugin.register("FermenterStep", FermenterStep)

View file

@ -32,7 +32,7 @@ class FermenterAutostart(CBPiExtension):
self.fermenter=self.cbpi.fermenter._find_by_id(fermenter_id)
try:
if (self.fermenter.instance is None or self.fermenter.instance.state == False):
await self.cbpi.fermenter.toggle(self.fermenter.id)
await self.cbpi.fermenter.start(self.fermenter.id)
logging.info("Successfully switched on Ferenterlogic for Fermenter {}".format(self.fermenter.id))
except Exception as e:
logging.error("Failed to switch on FermenterLogic {} {}".format(self.fermenter.id, e))

View file

@ -10,7 +10,7 @@ logger = logging.getLogger(__name__)
try:
import RPi.GPIO as GPIO
except Exception:
logger.error("Failed to load RPi.GPIO. Using Mock")
logger.warning("Failed to load RPi.GPIO. Using Mock instead")
MockRPi = MagicMock()
modules = {
"RPi": MockRPi,

View file

@ -22,6 +22,8 @@ class MQTTActor(CBPiActor):
async def on_start(self):
self.topic = self.props.get("Topic", None)
self.power = 100
await self.off()
self.state = False
async def on(self, power=None):
if power is not None:
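
The change above forces the MQTT actor into a known off state when it starts, so the broker-side device matches what the server assumes after a restart. A hedged sketch of publishing such an off message with asyncio-mqtt (the client library pinned in requirements.txt); the broker address, topic, and payload format are illustrative assumptions, not necessarily what MQTTActor sends:

import asyncio
import json
from asyncio_mqtt import Client

async def announce_off(topic="cbpi/fermenterheater"):
    # Publish a known "off" state so the broker-side device matches the UI after a restart.
    async with Client("localhost") as client:         # assumes a broker on localhost
        await client.publish(topic, payload=json.dumps({"state": "off", "power": 100}))

if __name__ == "__main__":
    asyncio.run(announce_off())
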

View file

@ -18,7 +18,7 @@ class MQTTSensor(CBPiSensor):
if self.payload_text != None:
self.payload_text = self.payload_text.split('.')
self.mqtt_task = self.cbpi.satellite.subcribe(self.Topic, self.on_message)
self.value: int = 0
self.value: float = 999
async def on_message(self, message):
val = json.loads(message)
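
The sensor above now starts at a sentinel of 999 (the fermentation steps keep polling while the reading is above 900, i.e. until real data arrives) and can pull its value out of a nested JSON payload via a dot-separated path. A small standalone sketch of that payload handling, using only the standard library:

import json
from typing import Optional

SENTINEL = 999.0   # matches the sensor's initial value above

def extract_value(message: str, payload_path: Optional[str]) -> float:
    """Walk a dot-separated path (e.g. "temperature.value") into a JSON payload."""
    val = json.loads(message)
    if payload_path:
        for key in payload_path.split("."):
            val = val[key]
    try:
        return float(val)
    except (TypeError, ValueError):
        return SENTINEL

print(extract_value('{"temperature": {"value": 18.5}}', "temperature.value"))  # 18.5
print(extract_value('{"state": "unknown"}', "state"))                          # 999.0
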

View file

@ -1,5 +1,5 @@
from cbpi.controller.fermentation_controller import FermentationController
from cbpi.api.dataclasses import Fermenter, Step, Props
from cbpi.api.dataclasses import Fermenter, Step, Props, FermenterStep
from aiohttp import web
from cbpi.api import *
import logging
@ -77,7 +77,7 @@ class FermentationHttpEndpoints():
description: successful operation
"""
data = await request.json()
fermenter = Fermenter(id=id, name=data.get("name"), sensor=data.get("sensor"), heater=data.get("heater"), cooler=data.get("cooler"), brewname=data.get("brewname"), target_temp=data.get("target_temp"), props=Props(data.get("props", {})), type=data.get("type"))
fermenter = Fermenter(id=id, name=data.get("name"), sensor=data.get("sensor"), heater=data.get("heater"), cooler=data.get("cooler"), brewname=data.get("brewname"), description=data.get("description"), target_temp=data.get("target_temp"), props=Props(data.get("props", {})), type=data.get("type"))
response_data = await self.controller.create(fermenter)
return web.json_response(data=response_data.to_dict())
@ -115,7 +115,7 @@ class FermentationHttpEndpoints():
"""
id = request.match_info['id']
data = await request.json()
fermenter = Fermenter(id=id, name=data.get("name"), sensor=data.get("sensor"), heater=data.get("heater"), cooler=data.get("cooler"), brewname=data.get("brewname"), target_temp=data.get("target_temp"), props=Props(data.get("props", {})), type=data.get("type"))
fermenter = Fermenter(id=id, name=data.get("name"), sensor=data.get("sensor"), heater=data.get("heater"), cooler=data.get("cooler"), brewname=data.get("brewname"), description=data.get("description"), target_temp=data.get("target_temp"), props=Props(data.get("props", {})), type=data.get("type"))
return web.json_response(data=(await self.controller.update(fermenter)).to_dict())
@request_mapping(path="/{id}", method="DELETE", auth_required=False)
@ -283,3 +283,334 @@ class FermentationHttpEndpoints():
data = await request.json()
await self.controller.set_target_temp(id,data.get("temp"))
return web.Response(status=204)
@request_mapping(path="/{id}/addstep", method="POST", auth_required=False)
async def http_add_step(self, request):
"""
---
description: Add Fermenterstep
tags:
- Fermenter
parameters:
- name: "id"
in: "path"
description: "Fermenter ID"
required: true
type: "integer"
format: "int64"
- in: body
name: body
description: Create a fermenterstep
required: true
schema:
type: object
responses:
"200":
description: successful operation
"""
data = await request.json()
fermenterid= request.match_info['id']
newstep = {"name": data.get("name"), "props": data.get("props", {}), "endtime": 0, "type": data.get("type")}
response_data = await self.controller.add_step(fermenterid,newstep)
return web.json_response(data=response_data.to_dict())
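
A hedged example of calling the new addstep endpoint from a client. Only the relative route /{id}/addstep appears in this diff, so the base URL and the /fermenter prefix are assumptions; the fermenter id and step properties are illustrative:

import requests

BASE = "http://localhost:8000/fermenter"             # assumed host, port and prefix
fermenter_id = "WxyPzrkV8sN3nfLGqBVuVo"               # illustrative id

new_step = {
    "name": "Primary Fermentation",
    "type": "FermenterStep",
    "props": {"TimerD": 10, "TimerH": 0, "TimerM": 0, "Temp": 18, "AutoMode": "Yes"},
}
resp = requests.post(f"{BASE}/{fermenter_id}/addstep", json=new_step)
resp.raise_for_status()
print(resp.json())                                    # the created step, including its id
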
@request_mapping(path="/{fermenterid}/{stepid}", method="PUT", auth_required=False)
async def http_updatestep(self, request):
"""
---
description: Update FermenterStep
tags:
- Fermenter
parameters:
- name: "fermenterid"
in: "path"
description: "Fermenter ID"
required: true
type: "integer"
format: "int64"
- name: "stepid"
in: "path"
description: "Step ID"
required: true
type: "integer"
format: "int64"
- in: body
name: body
description: Update a Fermenterstep
required: false
schema:
type: object
responses:
"200":
description: successful operation
"""
data = await request.json()
stepid = request.match_info['stepid']
fermenterid = request.match_info['fermenterid']
updatedstep = {"id": stepid, "name": data.get("name"), "endtime": 0, "props": data.get("props", {}), "type": data.get("type")}
#step = FermenterStep(stepid, data.get("name"), None, Props(data.get("props", {})), data.get("type"))
await self.controller.update_step(fermenterid,updatedstep)
return web.Response(status=200)
@request_mapping(path="/{fermenterid}/{stepid}", method="DELETE", auth_required=False)
async def http_deletestep(self, request):
"""
---
description: Delete Fermenterstep
tags:
- Fermenter
parameters:
- name: "fermenterid"
in: "path"
description: "Fermenter ID"
required: true
type: "integer"
format: "int64"
- name: "stepid"
in: "path"
description: "Step ID"
required: true
type: "integer"
format: "int64"
responses:
"204":
description: successful operation
"""
stepid = request.match_info['stepid']
fermenterid = request.match_info['fermenterid']
await self.controller.delete_step(fermenterid,stepid)
return web.Response(status=204)
@request_mapping(path="/movestep", method="PUT", auth_required=False)
async def http_movestep(self, request):
"""
---
description: Move Fermenterstep
tags:
- Fermenter
parameters:
- in: body
name: body
description: Move a fermenterstep
required: false
schema:
type: object
properties:
fermenterid:
type: string
stepid:
type: string
direction:
type: "integer"
format: "int64"
responses:
"204":
description: successful operation
"""
data = await request.json()
await self.controller.move_step(data["fermenterid"],data["stepid"], data["direction"])
return web.Response(status=204)
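
A hedged example for the movestep endpoint above. The diff only shows that direction is an integer, so the "-1 means move up" convention used here is an assumption; base URL and ids are illustrative:

import requests

payload = {
    "fermenterid": "WxyPzrkV8sN3nfLGqBVuVo",          # illustrative ids
    "stepid": "Hkxte9BhUKXhWAV6JASQZF",
    "direction": -1,                                   # assumed: negative moves the step up
}
resp = requests.put("http://localhost:8000/fermenter/movestep", json=payload)
print(resp.status_code)                                # 204 on success
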
@request_mapping(path="/{id}/getsteps", method="GET", auth_required=False)
async def http_get_steps(self, request):
"""
---
description: Get Fermentersteps for Fermenter
tags:
- Fermenter
parameters:
- name: "id"
in: "path"
description: "Fermenter ID"
required: true
type: "integer"
format: "int64"
responses:
"200":
description: successful operation
"""
fermenterid= request.match_info['id']
response_data = self.controller.get_step_state(fermenterid)
return web.json_response(data=response_data)
@request_mapping(path="/{id}/clearsteps", method="POST", auth_required=False)
async def http_clear_steps(self, request):
"""
---
description: Clear all steps for Fermenter with fermenterid
tags:
- Fermenter
parameters:
- name: "id"
in: "path"
description: "Fermenter ID"
required: true
type: "integer"
format: "int64"
responses:
"200":
description: successful operation
"""
fermenterid= request.match_info['id']
await self.controller.clearsteps(fermenterid)
return web.Response(status=200)
@request_mapping(path="/{id}/startstep", method="POST", auth_required=False)
async def http_start_steps(self, request):
"""
---
description: Start steps for Fermenter with fermenterid
tags:
- Fermenter
parameters:
- name: "id"
in: "path"
description: "Fermenter ID"
required: true
type: "integer"
format: "int64"
responses:
"200":
description: successful operation
"""
fermenterid= request.match_info['id']
await self.controller.start(fermenterid)
return web.Response(status=200)
@request_mapping(path="/{id}/stopstep", method="POST", auth_required=False)
async def http_stop_steps(self, request):
"""
---
description: Stop steps for Fermenter with fermenterid
tags:
- Fermenter
parameters:
- name: "id"
in: "path"
description: "Fermenter ID"
required: true
type: "integer"
format: "int64"
responses:
"200":
description: successful operation
"""
fermenterid= request.match_info['id']
await self.controller.stop(fermenterid)
return web.Response(status=200)
@request_mapping(path="/{id}/nextstep", method="POST", auth_required=False)
async def http_next_step(self, request):
"""
---
description: Triggers next step for Fermenter with fermenterid
tags:
- Fermenter
parameters:
- name: "id"
in: "path"
description: "Fermenter ID"
required: true
type: "integer"
format: "int64"
responses:
"200":
description: successful operation
"""
fermenterid= request.match_info['id']
await self.controller.next(fermenterid)
return web.Response(status=200)
@request_mapping(path="/{id}/reset", method="POST", auth_required=False)
async def http_reset(self, request):
"""
---
description: Resets step status for Fermenter with fermenterid
tags:
- Fermenter
parameters:
- name: "id"
in: "path"
description: "Fermenter ID"
required: true
type: "integer"
format: "int64"
responses:
"200":
description: successful operation
"""
fermenterid= request.match_info['id']
await self.controller.reset(fermenterid)
return web.Response(status=200)
@request_mapping(path="/action/{id}", method="POST", auth_required=False)
async def http_call_action(self, request):
"""
---
description: Call action
tags:
- Fermenter
parameters:
- name: "id"
in: "path"
description: "FermenterStep id"
required: true
type: "integer"
format: "int64"
- in: body
name: body
description: Call action for Fermenterstep
required: false
schema:
type: object
properties:
action:
type: string
parameter:
type: "array"
items:
type: string
responses:
"204":
description: successful operation
"""
data = await request.json()
id = request.match_info['id']
await self.controller.call_action(id,data.get("action"), data.get("parameter",[]))
return web.Response(status=204)
@request_mapping(path="/savetobook/{id}", method="POST", auth_required=False)
async def http_savetobook(self, request):
"""
---
description: Save Active FermenterRecipe to Fermenter Recipe Book
tags:
- Fermenter
responses:
"204":
description: successful operation
"""
fermenterid = request.match_info['id']
await self.controller.savetobook(fermenterid)
return web.Response(status=204)

View file

@ -1,14 +1,15 @@
from cbpi.controller.recipe_controller import RecipeController
from cbpi.controller.fermenter_recipe_controller import FermenterRecipeController
from cbpi.api.dataclasses import Props, Step
from aiohttp import web
from cbpi.api import *
import logging
class RecipeHttpEndpoints():
class FermenterRecipeHttpEndpoints():
def __init__(self, cbpi):
self.cbpi = cbpi
self.controller : RecipeController = cbpi.recipe
self.cbpi.register(self, "/recipe")
self.controller : FermenterRecipeController = cbpi.fermenterrecipe
self.cbpi.register(self, "/fermenterrecipe")
@request_mapping(path="/", method="GET", auth_required=False)
async def http_get_all(self, request):
@ -16,7 +17,7 @@ class RecipeHttpEndpoints():
---
description: Get all recipes
tags:
- Recipe
- FermenterRecipe
responses:
"200":
description: successful operation
@ -29,7 +30,7 @@ class RecipeHttpEndpoints():
---
description: Get recipe by name
tags:
- Recipe
- FermenterRecipe
parameters:
- name: "name"
in: "path"
@ -50,7 +51,7 @@ class RecipeHttpEndpoints():
---
description: Add Recipe
tags:
- Recipe
- FermenterRecipe
responses:
"200":
@ -68,7 +69,7 @@ class RecipeHttpEndpoints():
---
description: Save Recipe
tags:
- Recipe
- FermenterRecipe
parameters:
- name: "id"
in: "path"
@ -89,6 +90,7 @@ class RecipeHttpEndpoints():
data = await request.json()
name = request.match_info['name']
await self.controller.save(name, data)
print(data)
return web.Response(status=204)
@request_mapping(path="/{name}", method="DELETE", auth_required=False)
@ -98,7 +100,7 @@ class RecipeHttpEndpoints():
---
description: Delete
tags:
- Recipe
- FermenterRecipe
parameters:
- name: "id"
in: "path"
@ -115,14 +117,14 @@ class RecipeHttpEndpoints():
await self.controller.remove(name)
return web.Response(status=204)
@request_mapping(path="/{name}/brew", method="POST", auth_required=False)
@request_mapping(path="/{recipeid}/{fermenterid}/{name}/brew", method="POST", auth_required=False)
async def http_brew(self, request):
"""
---
description: Brew
description: Send Recipe to Fermenter
tags:
- Recipe
- FermenterRecipe
parameters:
- name: "name"
in: "path"
@ -135,8 +137,10 @@ class RecipeHttpEndpoints():
"200":
description: successful operation
"""
recipeid = request.match_info['recipeid']
name = request.match_info['name']
await self.controller.brew(name)
fermenterid = request.match_info['fermenterid']
await self.controller.brew(recipeid,fermenterid,name)
return web.Response(status=204)
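
A hedged example of sending a stored recipe to a fermenter over the new three-parameter route. The /fermenterrecipe prefix comes from the registration above; host, port, ids, and the recipe name are illustrative:

import requests

BASE = "http://localhost:8000/fermenterrecipe"         # assumed host and port
recipe_id = "kzBK9BhUK2XhWAV6JASQZF"                    # stored recipe id (illustrative)
fermenter_id = "WxyPzrkV8sN3nfLGqBVuVo"                 # target fermenter id (illustrative)
name = "WestCoastIPA"                                   # display name for the fermentation run

resp = requests.post(f"{BASE}/{recipe_id}/{fermenter_id}/{name}/brew")
print(resp.status_code)                                 # 204 on success
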
@request_mapping(path="/{id}/clone", method="POST", auth_required=False)
@ -146,7 +150,7 @@ class RecipeHttpEndpoints():
---
description: Clone recipe
tags:
- Recipe
- FermenterRecipe
parameters:
- name: "id"
in: "path"
@ -164,7 +168,6 @@ class RecipeHttpEndpoints():
description: successful operation
"""
id = request.match_info['id']
data = await request.json()
data = await request.json()
return web.json_response(dict(id=await self.controller.clone(id, data.get("name"))))

View file

@ -1,3 +1,4 @@
import re
from aiohttp import web
from aiohttp import streamer
from cbpi.job.aiohttp import get_scheduler_from_app
@ -7,7 +8,7 @@ from cbpi.utils import json_dumps
from cbpi import __version__
import pathlib
import os
import json
from cbpi.controller.system_controller import SystemController
class SystemHttpEndpoints:
@ -33,6 +34,7 @@ class SystemHttpEndpoints:
sensor=self.cbpi.sensor.get_state(),
kettle=self.cbpi.kettle.get_state(),
step=self.cbpi.step.get_state(),
fermentersteps=self.cbpi.fermenter.get_fermenter_steps(),
config=self.cbpi.config.get_state(),
version=__version__)
, dumps=json_dumps)

View file

@ -1,22 +1,24 @@
aiohttp==3.7.4
aiohttp==3.8.1
aiohttp-auth==0.1.1
aiohttp-route-decorator==0.1.4
aiohttp-security==0.4.0
aiohttp-session==2.9.0
aiohttp-swagger==1.0.15
aiojobs==0.3.0
aiosqlite==0.16.0
cryptography==3.3.2
requests==2.25.1
voluptuous==0.12.1
aiohttp-session==2.11.0
aiohttp-swagger==1.0.16
aiojobs==1.0.0
aiosqlite==0.17.0
cryptography==36.0.1
requests==2.27.1
voluptuous==0.12.2
pyfiglet==0.8.post1
pandas==1.4.0
shortuuid==1.0.1
tabulate==0.8.7
numpy==1.22.0
pandas==1.4.1
shortuuid==1.0.8
tabulate==0.8.9
numpy==1.22.2
cbpi4ui
click==7.1.2
importlib_metadata==4.8.2
click==8.0.4
importlib_metadata==4.11.1
asyncio-mqtt
psutil==5.8.0
psutil==5.9.0
zipp>=0.5
PyInquirer==1.0.3
colorama==0.4.4
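
The pins above can be sanity-checked against the running environment with importlib.metadata (available since Python 3.8, and also pinned here as importlib_metadata). A small sketch; the subset of packages checked is arbitrary:

from importlib.metadata import version, PackageNotFoundError

pins = {"aiohttp": "3.8.1", "aiojobs": "1.0.0", "pandas": "1.4.1", "numpy": "1.22.2"}
for pkg, expected in pins.items():
    try:
        installed = version(pkg)
    except PackageNotFoundError:
        installed = "not installed"
    flag = "OK" if installed == expected else "MISMATCH"
    print(f"{pkg}: expected {expected}, found {installed} -> {flag}")
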

View file

@ -27,32 +27,33 @@ setup(name='cbpi',
'': ['*.txt', '*.rst', '*.yaml'],
'cbpi': ['*','*.txt', '*.rst', '*.yaml']},
python_requires='>=3',
python_requires='>=3.9',
install_requires=[
"aiohttp==3.7.4",
"aiohttp==3.8.1",
"aiohttp-auth==0.1.1",
"aiohttp-route-decorator==0.1.4",
"aiohttp-security==0.4.0",
"aiohttp-session==2.9.0",
"aiohttp-swagger==1.0.15",
"aiojobs==0.3.0",
"aiosqlite==0.16.0",
"cryptography==3.3.2",
"requests==2.25.1",
"voluptuous==0.12.1",
"aiohttp-session==2.11.0",
"aiohttp-swagger==1.0.16",
"aiojobs==1.0.0 ",
"aiosqlite==0.17.0",
"cryptography==36.0.1",
"requests==2.27.1",
"voluptuous==0.12.2",
"pyfiglet==0.8.post1",
'click==7.1.2',
'shortuuid==1.0.1',
'tabulate==0.8.7',
'click==8.0.4',
'shortuuid==1.0.8',
'tabulate==0.8.9',
'asyncio-mqtt',
'psutil==5.8.0',
'PyInquirer==1.0.3',
'colorama==0.4.4',
'psutil==5.9.0',
'cbpi4ui',
'importlib_metadata'] + (
['RPi.GPIO==0.7.1'] if raspberrypi else [] ) +
(['numpy==1.22.0'] if (int(platform.python_version_tuple()[1]) >= 9) and (int(platform.python_version_tuple()[0]) == 3) else ['numpy==1.20.3'] ) +
(['pandas==1.4.0'] if (int(platform.python_version_tuple()[1]) >= 9) and (int(platform.python_version_tuple()[0]) == 3) else ['pandas==1.1.5'] ),
'importlib_metadata',
'numpy==1.22.2',
'pandas==1.4.1'] + (
['RPi.GPIO==0.7.1'] if raspberrypi else [] ),
dependency_links=[
'https://testpypi.python.org/pypi',
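
The removed lines selected numpy and pandas pins based on the interpreter version; with python_requires now >=3.9 the conditional is no longer needed and both packages are pinned unconditionally. For reference, the removed check boiled down to:

# What the removed conditional evaluated (for reference only).
import platform

major, minor, _ = platform.python_version_tuple()
use_new_pins = int(major) == 3 and int(minor) >= 9
print("numpy==1.22.0" if use_new_pins else "numpy==1.20.3")
print("pandas==1.4.0" if use_new_pins else "pandas==1.1.5")
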

View file

@ -1,76 +0,0 @@
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly
deactivate () {
# reset old environment variables
if [ -n "${_OLD_VIRTUAL_PATH:-}" ] ; then
PATH="${_OLD_VIRTUAL_PATH:-}"
export PATH
unset _OLD_VIRTUAL_PATH
fi
if [ -n "${_OLD_VIRTUAL_PYTHONHOME:-}" ] ; then
PYTHONHOME="${_OLD_VIRTUAL_PYTHONHOME:-}"
export PYTHONHOME
unset _OLD_VIRTUAL_PYTHONHOME
fi
# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands. Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH:-}" -o -n "${ZSH_VERSION:-}" ] ; then
hash -r
fi
if [ -n "${_OLD_VIRTUAL_PS1:-}" ] ; then
PS1="${_OLD_VIRTUAL_PS1:-}"
export PS1
unset _OLD_VIRTUAL_PS1
fi
unset VIRTUAL_ENV
if [ ! "$1" = "nondestructive" ] ; then
# Self destruct!
unset -f deactivate
fi
}
# unset irrelevant variables
deactivate nondestructive
VIRTUAL_ENV="/Users/manuelfritsch/Documents/git/cbpi4/venv3"
export VIRTUAL_ENV
_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH
# unset PYTHONHOME if set
# this will fail if PYTHONHOME is set to the empty string (which is bad anyway)
# could use `if (set -u; : $PYTHONHOME) ;` in bash
if [ -n "${PYTHONHOME:-}" ] ; then
_OLD_VIRTUAL_PYTHONHOME="${PYTHONHOME:-}"
unset PYTHONHOME
fi
if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT:-}" ] ; then
_OLD_VIRTUAL_PS1="${PS1:-}"
if [ "x(venv3) " != x ] ; then
PS1="(venv3) ${PS1:-}"
else
if [ "`basename \"$VIRTUAL_ENV\"`" = "__" ] ; then
# special case for Aspen magic directories
# see http://www.zetadev.com/software/aspen/
PS1="[`basename \`dirname \"$VIRTUAL_ENV\"\``] $PS1"
else
PS1="(`basename \"$VIRTUAL_ENV\"`)$PS1"
fi
fi
export PS1
fi
# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands. Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH:-}" -o -n "${ZSH_VERSION:-}" ] ; then
hash -r
fi

View file

@ -1,37 +0,0 @@
# This file must be used with "source bin/activate.csh" *from csh*.
# You cannot run it directly.
# Created by Davide Di Blasi <davidedb@gmail.com>.
# Ported to Python 3.3 venv by Andrew Svetlov <andrew.svetlov@gmail.com>
alias deactivate 'test $?_OLD_VIRTUAL_PATH != 0 && setenv PATH "$_OLD_VIRTUAL_PATH" && unset _OLD_VIRTUAL_PATH; rehash; test $?_OLD_VIRTUAL_PROMPT != 0 && set prompt="$_OLD_VIRTUAL_PROMPT" && unset _OLD_VIRTUAL_PROMPT; unsetenv VIRTUAL_ENV; test "\!:*" != "nondestructive" && unalias deactivate'
# Unset irrelevant variables.
deactivate nondestructive
setenv VIRTUAL_ENV "/Users/manuelfritsch/Documents/git/cbpi4/venv3"
set _OLD_VIRTUAL_PATH="$PATH"
setenv PATH "$VIRTUAL_ENV/bin:$PATH"
set _OLD_VIRTUAL_PROMPT="$prompt"
if (! "$?VIRTUAL_ENV_DISABLE_PROMPT") then
if ("venv3" != "") then
set env_name = "venv3"
else
if (`basename "VIRTUAL_ENV"` == "__") then
# special case for Aspen magic directories
# see http://www.zetadev.com/software/aspen/
set env_name = `basename \`dirname "$VIRTUAL_ENV"\``
else
set env_name = `basename "$VIRTUAL_ENV"`
endif
endif
set prompt = "[$env_name] $prompt"
unset env_name
endif
alias pydoc python -m pydoc
rehash

View file

@ -1,75 +0,0 @@
# This file must be used with ". bin/activate.fish" *from fish* (http://fishshell.org)
# you cannot run it directly
function deactivate -d "Exit virtualenv and return to normal shell environment"
# reset old environment variables
if test -n "$_OLD_VIRTUAL_PATH"
set -gx PATH $_OLD_VIRTUAL_PATH
set -e _OLD_VIRTUAL_PATH
end
if test -n "$_OLD_VIRTUAL_PYTHONHOME"
set -gx PYTHONHOME $_OLD_VIRTUAL_PYTHONHOME
set -e _OLD_VIRTUAL_PYTHONHOME
end
if test -n "$_OLD_FISH_PROMPT_OVERRIDE"
functions -e fish_prompt
set -e _OLD_FISH_PROMPT_OVERRIDE
functions -c _old_fish_prompt fish_prompt
functions -e _old_fish_prompt
end
set -e VIRTUAL_ENV
if test "$argv[1]" != "nondestructive"
# Self destruct!
functions -e deactivate
end
end
# unset irrelevant variables
deactivate nondestructive
set -gx VIRTUAL_ENV "/Users/manuelfritsch/Documents/git/cbpi4/venv3"
set -gx _OLD_VIRTUAL_PATH $PATH
set -gx PATH "$VIRTUAL_ENV/bin" $PATH
# unset PYTHONHOME if set
if set -q PYTHONHOME
set -gx _OLD_VIRTUAL_PYTHONHOME $PYTHONHOME
set -e PYTHONHOME
end
if test -z "$VIRTUAL_ENV_DISABLE_PROMPT"
# fish uses a function instead of an env var to generate the prompt.
# save the current fish_prompt function as the function _old_fish_prompt
functions -c fish_prompt _old_fish_prompt
# with the original prompt function renamed, we can override with our own.
function fish_prompt
# Save the return status of the last command
set -l old_status $status
# Prompt override?
if test -n "(venv3) "
printf "%s%s" "(venv3) " (set_color normal)
else
# ...Otherwise, prepend env
set -l _checkbase (basename "$VIRTUAL_ENV")
if test $_checkbase = "__"
# special case for Aspen magic directories
# see http://www.zetadev.com/software/aspen/
printf "%s[%s]%s " (set_color -b blue white) (basename (dirname "$VIRTUAL_ENV")) (set_color normal)
else
printf "%s(%s)%s" (set_color -b blue white) (basename "$VIRTUAL_ENV") (set_color normal)
end
end
# Restore the return status of the previous command.
echo "exit $old_status" | .
_old_fish_prompt
end
set -gx _OLD_FISH_PROMPT_OVERRIDE "$VIRTUAL_ENV"
end

View file

@ -1,11 +0,0 @@
#!/Users/manuelfritsch/Documents/git/cbpi4/venv3/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from autopep8 import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(main())

View file

@ -1,12 +0,0 @@
#!/Users/manuelfritsch/Documents/git/cbpi4/venv3/bin/python3
# EASY-INSTALL-ENTRY-SCRIPT: 'cbpi==4.0.0.28','console_scripts','cbpi'
__requires__ = 'cbpi==4.0.0.28'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(
load_entry_point('cbpi==4.0.0.28', 'console_scripts', 'cbpi')()
)

View file

@ -1,11 +0,0 @@
#!/Users/manuelfritsch/Documents/git/cbpi4/venv3/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from chardet.cli.chardetect import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(main())

View file

@ -1,11 +0,0 @@
#!/Users/manuelfritsch/Documents/git/cbpi4/venv3/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from setuptools.command.easy_install import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(main())

View file

@ -1,11 +0,0 @@
#!/Users/manuelfritsch/Documents/git/cbpi4/venv3/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from setuptools.command.easy_install import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(main())

View file

@ -1,11 +0,0 @@
#!/Users/manuelfritsch/Documents/git/cbpi4/venv3/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from numpy.f2py.f2py2e import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(main())

View file

@ -1,11 +0,0 @@
#!/Users/manuelfritsch/Documents/git/cbpi4/venv3/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from numpy.f2py.f2py2e import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(main())

View file

@ -1,11 +0,0 @@
#!/Users/manuelfritsch/Documents/git/cbpi4/venv3/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from numpy.f2py.f2py2e import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(main())

View file

@ -1,11 +0,0 @@
#!/Users/manuelfritsch/Documents/git/cbpi4/venv3/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from pip._internal import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(main())

View file

@ -1,11 +0,0 @@
#!/Users/manuelfritsch/Documents/git/cbpi4/venv3/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from pip._internal import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(main())

View file

@ -1,11 +0,0 @@
#!/Users/manuelfritsch/Documents/git/cbpi4/venv3/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from pip._internal import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(main())

View file

@ -1,11 +0,0 @@
#!/Users/manuelfritsch/Documents/git/cbpi4/venv3/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from pycodestyle import _main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(_main())

View file

@ -1,11 +0,0 @@
#!/Users/manuelfritsch/Documents/git/cbpi4/venv3/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from pyfiglet import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(main())

View file

@ -1 +0,0 @@
python3

View file

@ -1 +0,0 @@
/Library/Frameworks/Python.framework/Versions/3.7/bin/python3

View file

@ -1,11 +0,0 @@
#!/Users/manuelfritsch/Documents/git/cbpi4/venv3/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from tabulate import _main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(_main())

View file

@ -1,28 +0,0 @@
Copyright 2007 Pallets
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
3. Neither the name of the copyright holder nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

View file

@ -1,106 +0,0 @@
Metadata-Version: 2.1
Name: Jinja2
Version: 2.11.3
Summary: A very fast and expressive template engine.
Home-page: https://palletsprojects.com/p/jinja/
Author: Armin Ronacher
Author-email: armin.ronacher@active-4.com
Maintainer: Pallets
Maintainer-email: contact@palletsprojects.com
License: BSD-3-Clause
Project-URL: Documentation, https://jinja.palletsprojects.com/
Project-URL: Code, https://github.com/pallets/jinja
Project-URL: Issue tracker, https://github.com/pallets/jinja/issues
Platform: UNKNOWN
Classifier: Development Status :: 5 - Production/Stable
Classifier: Environment :: Web Environment
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: BSD License
Classifier: Operating System :: OS Independent
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 2
Classifier: Programming Language :: Python :: 2.7
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.5
Classifier: Programming Language :: Python :: 3.6
Classifier: Programming Language :: Python :: 3.7
Classifier: Programming Language :: Python :: 3.8
Classifier: Programming Language :: Python :: Implementation :: CPython
Classifier: Programming Language :: Python :: Implementation :: PyPy
Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content
Classifier: Topic :: Software Development :: Libraries :: Python Modules
Classifier: Topic :: Text Processing :: Markup :: HTML
Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*
Description-Content-Type: text/x-rst
Requires-Dist: MarkupSafe (>=0.23)
Provides-Extra: i18n
Requires-Dist: Babel (>=0.8) ; extra == 'i18n'
Jinja
=====
Jinja is a fast, expressive, extensible templating engine. Special
placeholders in the template allow writing code similar to Python
syntax. Then the template is passed data to render the final document.
It includes:
- Template inheritance and inclusion.
- Define and import macros within templates.
- HTML templates can use autoescaping to prevent XSS from untrusted
user input.
- A sandboxed environment can safely render untrusted templates.
- AsyncIO support for generating templates and calling async
functions.
- I18N support with Babel.
- Templates are compiled to optimized Python code just-in-time and
cached, or can be compiled ahead-of-time.
- Exceptions point to the correct line in templates to make debugging
easier.
- Extensible filters, tests, functions, and even syntax.
Jinja's philosophy is that while application logic belongs in Python if
possible, it shouldn't make the template designer's job difficult by
restricting functionality too much.
Installing
----------
Install and update using `pip`_:
.. code-block:: text
$ pip install -U Jinja2
.. _pip: https://pip.pypa.io/en/stable/quickstart/
In A Nutshell
-------------
.. code-block:: jinja
{% extends "base.html" %}
{% block title %}Members{% endblock %}
{% block content %}
<ul>
{% for user in users %}
<li><a href="{{ user.url }}">{{ user.username }}</a></li>
{% endfor %}
</ul>
{% endblock %}
Links
-----
- Website: https://palletsprojects.com/p/jinja/
- Documentation: https://jinja.palletsprojects.com/
- Releases: https://pypi.org/project/Jinja2/
- Code: https://github.com/pallets/jinja
- Issue tracker: https://github.com/pallets/jinja/issues
- Test status: https://dev.azure.com/pallets/jinja/_build
- Official chat: https://discord.gg/t6rrQZH

View file

@ -1,61 +0,0 @@
jinja2/__init__.py,sha256=LZUXmxJc2GIchfSAeMWsxCWiQYO-w1-736f2Q3I8ms8,1549
jinja2/_compat.py,sha256=B6Se8HjnXVpzz9-vfHejn-DV2NjaVK-Iewupc5kKlu8,3191
jinja2/_identifier.py,sha256=EdgGJKi7O1yvr4yFlvqPNEqV6M1qHyQr8Gt8GmVTKVM,1775
jinja2/asyncfilters.py,sha256=XJtYXTxFvcJ5xwk6SaDL4S0oNnT0wPYvXBCSzc482fI,4250
jinja2/asyncsupport.py,sha256=ZBFsDLuq3Gtji3Ia87lcyuDbqaHZJRdtShZcqwpFnSQ,7209
jinja2/bccache.py,sha256=3Pmp4jo65M9FQuIxdxoDBbEDFwe4acDMQf77nEJfrHA,12139
jinja2/compiler.py,sha256=Ta9W1Lit542wItAHXlDcg0sEOsFDMirCdlFPHAurg4o,66284
jinja2/constants.py,sha256=RR1sTzNzUmKco6aZicw4JpQpJGCuPuqm1h1YmCNUEFY,1458
jinja2/debug.py,sha256=neR7GIGGjZH3_ILJGVUYy3eLQCCaWJMXOb7o0kGInWc,8529
jinja2/defaults.py,sha256=85B6YUUCyWPSdrSeVhcqFVuu_bHUAQXeey--FIwSeVQ,1126
jinja2/environment.py,sha256=XDSLKc4SqNLMOwTSq3TbWEyA5WyXfuLuVD0wAVjEFwM,50629
jinja2/exceptions.py,sha256=VjNLawcmf2ODffqVMCQK1cRmvFaUfQWF4u8ouP3QPcE,5425
jinja2/ext.py,sha256=AtwL5O5enT_L3HR9-oBvhGyUTdGoyaqG_ICtnR_EVd4,26441
jinja2/filters.py,sha256=9ORilsZrUoydSI9upz8_qGy7gozDWLYoFmlIBFSVRnQ,41439
jinja2/idtracking.py,sha256=J3O4VHsrbf3wzwiBc7Cro26kHb6_5kbULeIOzocchIU,9211
jinja2/lexer.py,sha256=nUFLRKhhKmmEWkLI65nQePgcQs7qsRdjVYZETMt_v0g,30331
jinja2/loaders.py,sha256=C-fST_dmFjgWkp0ZuCkrgICAoOsoSIF28wfAFink0oU,17666
jinja2/meta.py,sha256=QjyYhfNRD3QCXjBJpiPl9KgkEkGXJbAkCUq4-Ur10EQ,4131
jinja2/nativetypes.py,sha256=Ul__gtVw4xH-0qvUvnCNHedQeNDwmEuyLJztzzSPeRg,2753
jinja2/nodes.py,sha256=Mk1oJPVgIjnQw9WOqILvcu3rLepcFZ0ahxQm2mbwDwc,31095
jinja2/optimizer.py,sha256=gQLlMYzvQhluhzmAIFA1tXS0cwgWYOjprN-gTRcHVsc,1457
jinja2/parser.py,sha256=fcfdqePNTNyvosIvczbytVA332qpsURvYnCGcjDHSkA,35660
jinja2/runtime.py,sha256=0y-BRyIEZ9ltByL2Id6GpHe1oDRQAwNeQvI0SKobNMw,30618
jinja2/sandbox.py,sha256=knayyUvXsZ-F0mk15mO2-ehK9gsw04UhB8td-iUOtLc,17127
jinja2/tests.py,sha256=iO_Y-9Vo60zrVe1lMpSl5sKHqAxe2leZHC08OoZ8K24,4799
jinja2/utils.py,sha256=Wy4yC3IByqUWwnKln6SdaixdzgK74P6F5nf-gQZrYnU,22436
jinja2/visitor.py,sha256=DUHupl0a4PGp7nxRtZFttUzAi1ccxzqc2hzetPYUz8U,3240
Jinja2-2.11.3.dist-info/LICENSE.rst,sha256=O0nc7kEF6ze6wQ-vG-JgQI_oXSUrjp3y4JefweCUQ3s,1475
Jinja2-2.11.3.dist-info/METADATA,sha256=PscpJ1C3RSp8xcjV3fAuTz13rKbGxmzJXnMQFH-WKhs,3535
Jinja2-2.11.3.dist-info/WHEEL,sha256=Z-nyYpwrcSqxfdux5Mbn_DQ525iP7J2DG3JgGvOYyTQ,110
Jinja2-2.11.3.dist-info/entry_points.txt,sha256=Qy_DkVo6Xj_zzOtmErrATe8lHZhOqdjpt3e4JJAGyi8,61
Jinja2-2.11.3.dist-info/top_level.txt,sha256=PkeVWtLb3-CqjWi1fO29OCbj55EhX_chhKrCdrVe_zs,7
Jinja2-2.11.3.dist-info/RECORD,,
Jinja2-2.11.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
jinja2/__pycache__/lexer.cpython-37.pyc,,
jinja2/__pycache__/parser.cpython-37.pyc,,
jinja2/__pycache__/constants.cpython-37.pyc,,
jinja2/__pycache__/idtracking.cpython-37.pyc,,
jinja2/__pycache__/debug.cpython-37.pyc,,
jinja2/__pycache__/sandbox.cpython-37.pyc,,
jinja2/__pycache__/exceptions.cpython-37.pyc,,
jinja2/__pycache__/ext.cpython-37.pyc,,
jinja2/__pycache__/loaders.cpython-37.pyc,,
jinja2/__pycache__/environment.cpython-37.pyc,,
jinja2/__pycache__/meta.cpython-37.pyc,,
jinja2/__pycache__/asyncfilters.cpython-37.pyc,,
jinja2/__pycache__/asyncsupport.cpython-37.pyc,,
jinja2/__pycache__/tests.cpython-37.pyc,,
jinja2/__pycache__/optimizer.cpython-37.pyc,,
jinja2/__pycache__/_identifier.cpython-37.pyc,,
jinja2/__pycache__/bccache.cpython-37.pyc,,
jinja2/__pycache__/compiler.cpython-37.pyc,,
jinja2/__pycache__/_compat.cpython-37.pyc,,
jinja2/__pycache__/defaults.cpython-37.pyc,,
jinja2/__pycache__/nodes.cpython-37.pyc,,
jinja2/__pycache__/utils.cpython-37.pyc,,
jinja2/__pycache__/filters.cpython-37.pyc,,
jinja2/__pycache__/runtime.cpython-37.pyc,,
jinja2/__pycache__/__init__.cpython-37.pyc,,
jinja2/__pycache__/visitor.cpython-37.pyc,,
jinja2/__pycache__/nativetypes.cpython-37.pyc,,

View file

@ -1,6 +0,0 @@
Wheel-Version: 1.0
Generator: bdist_wheel (0.36.2)
Root-Is-Purelib: true
Tag: py2-none-any
Tag: py3-none-any

View file

@ -1,3 +0,0 @@
[babel.extractors]
jinja2 = jinja2.ext:babel_extract [i18n]

View file

@ -1,28 +0,0 @@
Copyright 2010 Pallets
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
3. Neither the name of the copyright holder nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

View file

@ -1,94 +0,0 @@
Metadata-Version: 2.1
Name: MarkupSafe
Version: 1.1.1
Summary: Safely add untrusted strings to HTML/XML markup.
Home-page: https://palletsprojects.com/p/markupsafe/
Author: Armin Ronacher
Author-email: armin.ronacher@active-4.com
Maintainer: The Pallets Team
Maintainer-email: contact@palletsprojects.com
License: BSD-3-Clause
Project-URL: Documentation, https://markupsafe.palletsprojects.com/
Project-URL: Code, https://github.com/pallets/markupsafe
Project-URL: Issue tracker, https://github.com/pallets/markupsafe/issues
Platform: UNKNOWN
Classifier: Development Status :: 5 - Production/Stable
Classifier: Environment :: Web Environment
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: BSD License
Classifier: Operating System :: OS Independent
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 2
Classifier: Programming Language :: Python :: 3
Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content
Classifier: Topic :: Software Development :: Libraries :: Python Modules
Classifier: Topic :: Text Processing :: Markup :: HTML
Requires-Python: >=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*
Description-Content-Type: text/x-rst
MarkupSafe
==========
MarkupSafe implements a text object that escapes characters so it is
safe to use in HTML and XML. Characters that have special meanings are
replaced so that they display as the actual characters. This mitigates
injection attacks, meaning untrusted user input can safely be displayed
on a page.
Installing
----------
Install and update using `pip`_:
.. code-block:: text
pip install -U MarkupSafe
.. _pip: https://pip.pypa.io/en/stable/quickstart/
Examples
--------
.. code-block:: pycon
>>> from markupsafe import Markup, escape
>>> # escape replaces special characters and wraps in Markup
>>> escape('<script>alert(document.cookie);</script>')
Markup(u'&lt;script&gt;alert(document.cookie);&lt;/script&gt;')
>>> # wrap in Markup to mark text "safe" and prevent escaping
>>> Markup('<strong>Hello</strong>')
Markup('<strong>hello</strong>')
>>> escape(Markup('<strong>Hello</strong>'))
Markup('<strong>hello</strong>')
>>> # Markup is a text subclass (str on Python 3, unicode on Python 2)
>>> # methods and operators escape their arguments
>>> template = Markup("Hello <em>%s</em>")
>>> template % '"World"'
Markup('Hello <em>&#34;World&#34;</em>')
Donate
------
The Pallets organization develops and supports MarkupSafe and other
libraries that use it. In order to grow the community of contributors
and users, and allow the maintainers to devote more time to the
projects, `please donate today`_.
.. _please donate today: https://palletsprojects.com/donate
Links
-----
* Website: https://palletsprojects.com/p/markupsafe/
* Documentation: https://markupsafe.palletsprojects.com/
* Releases: https://pypi.org/project/MarkupSafe/
* Code: https://github.com/pallets/markupsafe
* Issue tracker: https://github.com/pallets/markupsafe/issues
* Test status: https://dev.azure.com/pallets/markupsafe/_build
* Official chat: https://discord.gg/t6rrQZH

View file

@ -1,16 +0,0 @@
markupsafe/__init__.py,sha256=oTblO5f9KFM-pvnq9bB0HgElnqkJyqHnFN1Nx2NIvnY,10126
markupsafe/_compat.py,sha256=uEW1ybxEjfxIiuTbRRaJpHsPFf4yQUMMKaPgYEC5XbU,558
markupsafe/_constants.py,sha256=zo2ajfScG-l1Sb_52EP3MlDCqO7Y1BVHUXXKRsVDRNk,4690
markupsafe/_native.py,sha256=d-8S_zzYt2y512xYcuSxq0NeG2DUUvG80wVdTn-4KI8,1873
markupsafe/_speedups.c,sha256=k0fzEIK3CP6MmMqeY0ob43TP90mVN0DTyn7BAl3RqSg,9884
markupsafe/_speedups.cpython-37m-darwin.so,sha256=Rg1LaaphK67NRgvgR4b05sAu3ZC3dlJ1q1_GXe60rHQ,35072
MarkupSafe-1.1.1.dist-info/LICENSE.rst,sha256=SJqOEQhQntmKN7uYPhHg9-HTHwvY-Zp5yESOf_N9B-o,1475
MarkupSafe-1.1.1.dist-info/METADATA,sha256=-XXnVvCxQP2QbHutIQq_7Pk9OATy-x0NC7gN_3_SCRE,3167
MarkupSafe-1.1.1.dist-info/WHEEL,sha256=WVG6A_oY5zUoNXUyvfLMOSXC-hqC5VZacUL1MeSOz70,110
MarkupSafe-1.1.1.dist-info/top_level.txt,sha256=qy0Plje5IJuvsCBjejJyhDCjEAdcDLK_2agVcex8Z6U,11
MarkupSafe-1.1.1.dist-info/RECORD,,
MarkupSafe-1.1.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
markupsafe/__pycache__/_native.cpython-37.pyc,,
markupsafe/__pycache__/_constants.cpython-37.pyc,,
markupsafe/__pycache__/_compat.cpython-37.pyc,,
markupsafe/__pycache__/__init__.cpython-37.pyc,,

View file

@ -1,5 +0,0 @@
Wheel-Version: 1.0
Generator: bdist_wheel (0.36.2)
Root-Is-Purelib: false
Tag: cp37-cp37m-macosx_10_9_x86_64

View file

@ -1,20 +0,0 @@
Copyright (c) 2017-2021 Ingy döt Net
Copyright (c) 2006-2016 Kirill Simonov
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
of the Software, and to permit persons to whom the Software is furnished to do
so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View file

@ -1,46 +0,0 @@
Metadata-Version: 2.1
Name: PyYAML
Version: 5.4.1
Summary: YAML parser and emitter for Python
Home-page: https://pyyaml.org/
Author: Kirill Simonov
Author-email: xi@resolvent.net
License: MIT
Download-URL: https://pypi.org/project/PyYAML/
Project-URL: Bug Tracker, https://github.com/yaml/pyyaml/issues
Project-URL: CI, https://github.com/yaml/pyyaml/actions
Project-URL: Documentation, https://pyyaml.org/wiki/PyYAMLDocumentation
Project-URL: Mailing lists, http://lists.sourceforge.net/lists/listinfo/yaml-core
Project-URL: Source Code, https://github.com/yaml/pyyaml
Platform: Any
Classifier: Development Status :: 5 - Production/Stable
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: MIT License
Classifier: Operating System :: OS Independent
Classifier: Programming Language :: Cython
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 2
Classifier: Programming Language :: Python :: 2.7
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.6
Classifier: Programming Language :: Python :: 3.7
Classifier: Programming Language :: Python :: 3.8
Classifier: Programming Language :: Python :: 3.9
Classifier: Programming Language :: Python :: Implementation :: CPython
Classifier: Programming Language :: Python :: Implementation :: PyPy
Classifier: Topic :: Software Development :: Libraries :: Python Modules
Classifier: Topic :: Text Processing :: Markup
Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*
YAML is a data serialization format designed for human readability
and interaction with scripting languages. PyYAML is a YAML parser
and emitter for Python.
PyYAML features a complete YAML 1.1 parser, Unicode support, pickle
support, capable extension API, and sensible error messages. PyYAML
supports standard YAML tags and provides Python-specific tags that
allow to represent an arbitrary Python object.
PyYAML is applicable for a broad range of tasks from complex
configuration files to object serialization and persistence.

View file

@ -1,43 +0,0 @@
_yaml/__init__.py,sha256=04Ae_5osxahpJHa3XBZUAf4wi6XX32gR8D6X6p64GEA,1402
yaml/__init__.py,sha256=gfp2CbRVhzknghkiiJD2l6Z0pI-mv_iZHPSJ4aj0-nY,13170
yaml/_yaml.cpython-37m-darwin.so,sha256=prnbDrV8DfzEtBNgZK5Mtbkmh7G4UKDUA-Xx7F8ZfZs,448624
yaml/composer.py,sha256=_Ko30Wr6eDWUeUpauUGT3Lcg9QPBnOPVlTnIMRGJ9FM,4883
yaml/constructor.py,sha256=kNgkfaeLUkwQYY_Q6Ff1Tz2XVw_pG1xVE9Ak7z-viLA,28639
yaml/cyaml.py,sha256=6ZrAG9fAYvdVe2FK_w0hmXoG7ZYsoYUwapG8CiC72H0,3851
yaml/dumper.py,sha256=PLctZlYwZLp7XmeUdwRuv4nYOZ2UBnDIUy8-lKfLF-o,2837
yaml/emitter.py,sha256=jghtaU7eFwg31bG0B7RZea_29Adi9CKmXq_QjgQpCkQ,43006
yaml/error.py,sha256=Ah9z-toHJUbE9j-M8YpxgSRM5CgLCcwVzJgLLRF2Fxo,2533
yaml/events.py,sha256=50_TksgQiE4up-lKo_V-nBy-tAIxkIPQxY5qDhKCeHw,2445
yaml/loader.py,sha256=UVa-zIqmkFSCIYq_PgSGm4NSJttHY2Rf_zQ4_b1fHN0,2061
yaml/nodes.py,sha256=gPKNj8pKCdh2d4gr3gIYINnPOaOxGhJAUiYhGRnPE84,1440
yaml/parser.py,sha256=ilWp5vvgoHFGzvOZDItFoGjD6D42nhlZrZyjAwa0oJo,25495
yaml/reader.py,sha256=0dmzirOiDG4Xo41RnuQS7K9rkY3xjHiVasfDMNTqCNw,6794
yaml/representer.py,sha256=82UM3ZxUQKqsKAF4ltWOxCS6jGPIFtXpGs7mvqyv4Xs,14184
yaml/resolver.py,sha256=Z1W8AOMA6Proy4gIO2OhUO4IPS_bFNAl0Ca3rwChpPg,8999
yaml/scanner.py,sha256=KeQIKGNlSyPE8QDwionHxy9CgbqE5teJEz05FR9-nAg,51277
yaml/serializer.py,sha256=ChuFgmhU01hj4xgI8GaKv6vfM2Bujwa9i7d2FAHj7cA,4165
yaml/tokens.py,sha256=lTQIzSVw8Mg9wv459-TjiOQe6wVziqaRlqX2_89rp54,2573
yaml/__pycache__/__init__.cpython-37.pyc,sha256=BT2G6vuLqtl24Yazl_BTiWNVL_eWa463vBRw6EzkYnE,11736
yaml/__pycache__/composer.cpython-37.pyc,sha256=pUIfvPVwm6nQgfSqMoPDwWCHq0Q8tkvLqnV02x0LWK8,3497
yaml/__pycache__/constructor.cpython-37.pyc,sha256=juRoNZURP03EFLfHtLc7ksBZFaBX6rrFOh1h5ekOlzY,21223
yaml/__pycache__/cyaml.cpython-37.pyc,sha256=-Aa-eWnuxZ1JHVAPkQMVRzM0V3bJLDAT63kE8vLGcWM,3667
yaml/__pycache__/dumper.cpython-37.pyc,sha256=2sX7qVZ5lWxv6Upb87sBTeyRIsKyKgKKposCA32VOpU,2037
yaml/__pycache__/emitter.cpython-37.pyc,sha256=d6xBEGmZM9dppMH71xFASgCGTBkwNOSk7hFu0c77td4,25287
yaml/__pycache__/error.cpython-37.pyc,sha256=ixzwceH8Hj5SQP-K7EpSdATWbZnYqSZ-9ggbZql3lTc,2248
yaml/__pycache__/events.cpython-37.pyc,sha256=BsC1S1A7MIeiZTXniRm64kBKf7eNEb72wgSyZa9wdWs,4014
yaml/__pycache__/loader.cpython-37.pyc,sha256=z0Z_0wFNq4wC93xRz3xXVgKXSyeJo_9NlSP_KM-Q258,2246
yaml/__pycache__/nodes.cpython-37.pyc,sha256=PhNPeBWTYDzIKngO556mJhx_BIoDnAIbVaQH9j5qoTg,1692
yaml/__pycache__/parser.cpython-37.pyc,sha256=f8nrupPuPBu640bHCxw1upOI1gleGD0OnZO61Ke4w44,11842
yaml/__pycache__/reader.cpython-37.pyc,sha256=Evtv1Je-xmXe2cn6llqwDthhhEGo7xSqtt-X3H4yh6c,4455
yaml/__pycache__/representer.cpython-37.pyc,sha256=7Toj31yd3JI883VhAJNo906zwYS_1OPCtcRS0o7VXEU,10078
yaml/__pycache__/resolver.cpython-37.pyc,sha256=ToRLdIZnr2UCCnlzoUJeYjF_5LjZfdZQJALM5dao_RI,5448
yaml/__pycache__/scanner.cpython-37.pyc,sha256=1BS76EIvYaP8-Dv5O2GbI25Y3mTb60yfvn4hyH2G4RM,25788
yaml/__pycache__/serializer.cpython-37.pyc,sha256=rckMp9nvrgGLLpSSEkenUmPxSe9C4s_DWkmbh8Ayvgs,3327
yaml/__pycache__/tokens.cpython-37.pyc,sha256=mDKFX-_apCXzBM7oHVvqfbvGSsqcjcMbqzuU3TlVd0g,5191
PyYAML-5.4.1.dist-info/LICENSE,sha256=jTko-dxEkP1jVwfLiOsmvXZBAqcoKVQwfT5RZ6V36KQ,1101
PyYAML-5.4.1.dist-info/METADATA,sha256=XnrM5LY-uS85ica26gKUK0dGG-xmPjmGfDTSLpIHQFk,2087
PyYAML-5.4.1.dist-info/WHEEL,sha256=WVG6A_oY5zUoNXUyvfLMOSXC-hqC5VZacUL1MeSOz70,110
PyYAML-5.4.1.dist-info/top_level.txt,sha256=rpj0IVMTisAjh_1vG3Ccf9v5jpCQwAz6cD1IVU5ZdhQ,11
PyYAML-5.4.1.dist-info/RECORD,,
PyYAML-5.4.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
_yaml/__pycache__/__init__.cpython-37.pyc,,

View file

@ -1,5 +0,0 @@
Wheel-Version: 1.0
Generator: bdist_wheel (0.36.2)
Root-Is-Purelib: false
Tag: cp37-cp37m-macosx_10_9_x86_64

View file

@ -1,33 +0,0 @@
# This is a stub package designed to roughly emulate the _yaml
# extension module, which previously existed as a standalone module
# and has been moved into the `yaml` package namespace.
# It does not perfectly mimic its old counterpart, but should get
# close enough for anyone who's relying on it even when they shouldn't.
import yaml
# in some circumstances, the yaml module we imported may be from a different version, so we need
# to tread carefully when poking at it here (it may not have the attributes we expect)
if not getattr(yaml, '__with_libyaml__', False):
    from sys import version_info

    exc = ModuleNotFoundError if version_info >= (3, 6) else ImportError
    raise exc("No module named '_yaml'")
else:
    from yaml._yaml import *
    import warnings
    warnings.warn(
        'The _yaml extension module is now located at yaml._yaml'
        ' and its location is subject to change. To use the'
        ' LibYAML-based parser and emitter, import from `yaml`:'
        ' `from yaml import CLoader as Loader, CDumper as Dumper`.',
        DeprecationWarning
    )
    del warnings
    # Don't `del yaml` here because yaml is actually an existing
    # namespace member of _yaml.

__name__ = '_yaml'
# If the module is top-level (i.e. not a part of any specific package)
# then the attribute should be set to ''.
# https://docs.python.org/3.8/library/types.html
__package__ = ''
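
# A minimal usage sketch of the import pattern the deprecation warning above
# recommends, assuming PyYAML 5.x where the C-accelerated classes are exposed
# as yaml.CLoader / yaml.CDumper whenever the LibYAML bindings were built:
import yaml

try:
    from yaml import CLoader as Loader, CDumper as Dumper  # LibYAML-backed, faster
except ImportError:
    from yaml import Loader, Dumper  # pure-Python fallback

data = yaml.load("answer: 42", Loader=Loader)  # -> {'answer': 42}
text = yaml.dump(data, Dumper=Dumper)          # -> 'answer: 42\n'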

View file

@@ -1,201 +0,0 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "{}"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright 2013-2020 aiohttp maintainers
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

View file

@@ -1,964 +0,0 @@
Metadata-Version: 2.1
Name: aiohttp
Version: 3.7.4
Summary: Async http client/server framework (asyncio)
Home-page: https://github.com/aio-libs/aiohttp
Author: Nikolay Kim
Author-email: fafhrd91@gmail.com
Maintainer: Nikolay Kim <fafhrd91@gmail.com>, Andrew Svetlov <andrew.svetlov@gmail.com>
Maintainer-email: aio-libs@googlegroups.com
License: Apache 2
Project-URL: Chat: Gitter, https://gitter.im/aio-libs/Lobby
Project-URL: CI: Azure Pipelines, https://dev.azure.com/aio-libs/aiohttp/_build
Project-URL: Coverage: codecov, https://codecov.io/github/aio-libs/aiohttp
Project-URL: Docs: RTD, https://docs.aiohttp.org
Project-URL: GitHub: issues, https://github.com/aio-libs/aiohttp/issues
Project-URL: GitHub: repo, https://github.com/aio-libs/aiohttp
Platform: UNKNOWN
Classifier: License :: OSI Approved :: Apache Software License
Classifier: Intended Audience :: Developers
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.6
Classifier: Programming Language :: Python :: 3.7
Classifier: Programming Language :: Python :: 3.8
Classifier: Programming Language :: Python :: 3.9
Classifier: Development Status :: 5 - Production/Stable
Classifier: Operating System :: POSIX
Classifier: Operating System :: MacOS :: MacOS X
Classifier: Operating System :: Microsoft :: Windows
Classifier: Topic :: Internet :: WWW/HTTP
Classifier: Framework :: AsyncIO
Requires-Python: >=3.6
Requires-Dist: attrs (>=17.3.0)
Requires-Dist: chardet (<4.0,>=2.0)
Requires-Dist: multidict (<7.0,>=4.5)
Requires-Dist: async-timeout (<4.0,>=3.0)
Requires-Dist: yarl (<2.0,>=1.0)
Requires-Dist: typing-extensions (>=3.6.5)
Requires-Dist: idna-ssl (>=1.0) ; python_version < "3.7"
Provides-Extra: speedups
Requires-Dist: aiodns ; extra == 'speedups'
Requires-Dist: brotlipy ; extra == 'speedups'
Requires-Dist: cchardet ; extra == 'speedups'
==================================
Async http client/server framework
==================================
.. image:: https://raw.githubusercontent.com/aio-libs/aiohttp/master/docs/_static/aiohttp-icon-128x128.png
   :height: 64px
   :width: 64px
   :alt: aiohttp logo

|

.. image:: https://github.com/aio-libs/aiohttp/workflows/CI/badge.svg
   :target: https://github.com/aio-libs/aiohttp/actions?query=workflow%3ACI
   :alt: GitHub Actions status for master branch

.. image:: https://codecov.io/gh/aio-libs/aiohttp/branch/master/graph/badge.svg
   :target: https://codecov.io/gh/aio-libs/aiohttp
   :alt: codecov.io status for master branch

.. image:: https://badge.fury.io/py/aiohttp.svg
   :target: https://pypi.org/project/aiohttp
   :alt: Latest PyPI package version

.. image:: https://readthedocs.org/projects/aiohttp/badge/?version=latest
   :target: https://docs.aiohttp.org/
   :alt: Latest Read The Docs

.. image:: https://img.shields.io/discourse/status?server=https%3A%2F%2Faio-libs.discourse.group
   :target: https://aio-libs.discourse.group
   :alt: Discourse status

.. image:: https://badges.gitter.im/Join%20Chat.svg
   :target: https://gitter.im/aio-libs/Lobby
   :alt: Chat on Gitter
Key Features
============
- Supports both client and server side of HTTP protocol.
- Supports both client and server Web-Sockets out-of-the-box and avoids
Callback Hell.
- Provides Web-server with middlewares and pluggable routing.
Getting started
===============
Client
------
To get something from the web:
.. code-block:: python

    import aiohttp
    import asyncio

    async def main():
        async with aiohttp.ClientSession() as session:
            async with session.get('http://python.org') as response:
                print("Status:", response.status)
                print("Content-type:", response.headers['content-type'])

                html = await response.text()
                print("Body:", html[:15], "...")

    loop = asyncio.get_event_loop()
    loop.run_until_complete(main())

This prints:

.. code-block::

    Status: 200
    Content-type: text/html; charset=utf-8
    Body: <!doctype html> ...
Coming from `requests <https://requests.readthedocs.io/>`_ ? Read `why we need so many lines <https://aiohttp.readthedocs.io/en/latest/http_request_lifecycle.html>`_.
Server
------
An example using a simple server:
.. code-block:: python

    # examples/server_simple.py
    from aiohttp import web

    async def handle(request):
        name = request.match_info.get('name', "Anonymous")
        text = "Hello, " + name
        return web.Response(text=text)

    async def wshandle(request):
        ws = web.WebSocketResponse()
        await ws.prepare(request)

        async for msg in ws:
            if msg.type == web.WSMsgType.text:
                await ws.send_str("Hello, {}".format(msg.data))
            elif msg.type == web.WSMsgType.binary:
                await ws.send_bytes(msg.data)
            elif msg.type == web.WSMsgType.close:
                break

        return ws

    app = web.Application()
    app.add_routes([web.get('/', handle),
                    web.get('/echo', wshandle),
                    web.get('/{name}', handle)])

    if __name__ == '__main__':
        web.run_app(app)
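
A minimal client sketch for the ``/echo`` handler above, assuming the example
server is running locally on ``web.run_app``'s default port 8080:

.. code-block:: python

    import asyncio
    import aiohttp

    async def main():
        async with aiohttp.ClientSession() as session:
            # connect to the websocket route registered as /echo above
            async with session.ws_connect('http://localhost:8080/echo') as ws:
                await ws.send_str('world')
                print(await ws.receive_str())  # -> "Hello, world"

    loop = asyncio.get_event_loop()
    loop.run_until_complete(main())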
Documentation
=============
https://aiohttp.readthedocs.io/
Demos
=====
https://github.com/aio-libs/aiohttp-demos
External links
==============
* `Third party libraries
<http://aiohttp.readthedocs.io/en/latest/third_party.html>`_
* `Built with aiohttp
<http://aiohttp.readthedocs.io/en/latest/built_with.html>`_
* `Powered by aiohttp
<http://aiohttp.readthedocs.io/en/latest/powered_by.html>`_
Feel free to make a Pull Request for adding your link to these pages!
Communication channels
======================
*aio-libs discourse group*: https://aio-libs.discourse.group
*gitter chat* https://gitter.im/aio-libs/Lobby
We support `Stack Overflow
<https://stackoverflow.com/questions/tagged/aiohttp>`_.
Please add *aiohttp* tag to your question there.
Requirements
============
- Python >= 3.6
- async-timeout_
- attrs_
- chardet_
- multidict_
- yarl_
Optionally you may install the cChardet_ and aiodns_ libraries (highly
recommended for sake of speed).
.. _chardet: https://pypi.python.org/pypi/chardet
.. _aiodns: https://pypi.python.org/pypi/aiodns
.. _attrs: https://github.com/python-attrs/attrs
.. _multidict: https://pypi.python.org/pypi/multidict
.. _yarl: https://pypi.python.org/pypi/yarl
.. _async-timeout: https://pypi.python.org/pypi/async_timeout
.. _cChardet: https://pypi.python.org/pypi/cchardet
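
A minimal sketch of wiring in the optional aiodns-based resolver on the client
side, assuming aiodns is installed and using aiohttp's ``AsyncResolver``
together with the ``resolver`` argument of ``TCPConnector`` (cChardet is picked
up automatically once installed):

.. code-block:: python

    import asyncio
    import aiohttp

    async def main():
        # AsyncResolver delegates DNS lookups to aiodns; handing it to the
        # connector makes the session resolve hosts asynchronously.
        resolver = aiohttp.AsyncResolver()
        connector = aiohttp.TCPConnector(resolver=resolver)
        async with aiohttp.ClientSession(connector=connector) as session:
            async with session.get('http://python.org') as response:
                print("Status:", response.status)

    loop = asyncio.get_event_loop()
    loop.run_until_complete(main())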
License
=======
``aiohttp`` is offered under the Apache 2 license.
Keepsafe
========
The aiohttp community would like to thank Keepsafe
(https://www.getkeepsafe.com) for its support in the early days of
the project.
Source code
===========
The latest developer version is available in a GitHub repository:
https://github.com/aio-libs/aiohttp
Benchmarks
==========
If you are interested in efficiency, the AsyncIO community maintains a
list of benchmarks on the official wiki:
https://github.com/python/asyncio/wiki/Benchmarks
=========
Changelog
=========
..
    You should *NOT* be adding new change log entries to this file, this
    file is managed by towncrier. You *may* edit previous change logs to
    fix problems like typo corrections or such.

    To add a new change log entry, please see
    https://pip.pypa.io/en/latest/development/#adding-a-news-entry
    we named the news folder "changes".

    WARNING: Don't drop the next directive!

.. towncrier release notes start
3.7.4 (2021-02-25)
==================
Bugfixes
--------
- **(SECURITY BUG)** Started preventing open redirects in the
``aiohttp.web.normalize_path_middleware`` middleware. For
more details, see
https://github.com/aio-libs/aiohttp/security/advisories/GHSA-v6wp-4m6f-gcjg.
Thanks to `Beast Glatisant <https://github.com/g147>`__ for
finding the first instance of this issue and `Jelmer Vernooij
<https://jelmer.uk/>`__ for reporting and tracking it down
in aiohttp.
`#5497 <https://github.com/aio-libs/aiohttp/issues/5497>`_
- Fix a difference in how the pure-Python and the Cython-based
HTTP parsers construct a ``yarl.URL`` object for the HTTP request-target.
Before this fix, the Python parser would turn the URI's absolute-path
for ``//some-path`` into ``/`` while the Cython code preserved it as
``//some-path``. Now, both do the latter.
`#5498 <https://github.com/aio-libs/aiohttp/issues/5498>`_
----
3.7.3 (2020-11-18)
==================
Features
--------
- Use Brotli instead of brotlipy
`#3803 <https://github.com/aio-libs/aiohttp/issues/3803>`_
- Made exceptions pickleable. Also changed the repr of some exceptions.
`#4077 <https://github.com/aio-libs/aiohttp/issues/4077>`_
Bugfixes
--------
- Raise a ClientResponseError instead of an AssertionError for a blank
HTTP Reason Phrase.
`#3532 <https://github.com/aio-libs/aiohttp/issues/3532>`_
- Fix ``web_middlewares.normalize_path_middleware`` behavior for a path without a trailing slash.
`#3669 <https://github.com/aio-libs/aiohttp/issues/3669>`_
- Fix overshadowing of overlapped sub-applications prefixes.
`#3701 <https://github.com/aio-libs/aiohttp/issues/3701>`_
- Make `BaseConnector.close()` a coroutine and wait until the client closes all connections. Drop deprecated "with Connector():" syntax.
`#3736 <https://github.com/aio-libs/aiohttp/issues/3736>`_
- Reset the ``sock_read`` timeout each time data is received for a ``aiohttp.client`` response.
`#3808 <https://github.com/aio-libs/aiohttp/issues/3808>`_
- Fixed type annotation for add_view method of UrlDispatcher to accept any subclass of View
`#3880 <https://github.com/aio-libs/aiohttp/issues/3880>`_
- Fixed querying the address families from DNS that the current host supports.
`#5156 <https://github.com/aio-libs/aiohttp/issues/5156>`_
- Change return type of MultipartReader.__aiter__() and BodyPartReader.__aiter__() to AsyncIterator.
`#5163 <https://github.com/aio-libs/aiohttp/issues/5163>`_
- Provide x86 Windows wheels.
`#5230 <https://github.com/aio-libs/aiohttp/issues/5230>`_
Improved Documentation
----------------------
- Add documentation for ``aiohttp.web.FileResponse``.
`#3958 <https://github.com/aio-libs/aiohttp/issues/3958>`_
- Removed deprecation warning in tracing example docs
`#3964 <https://github.com/aio-libs/aiohttp/issues/3964>`_
- Fixed wrong "Usage" docstring of ``aiohttp.client.request``.
`#4603 <https://github.com/aio-libs/aiohttp/issues/4603>`_
- Add aiohttp-pydantic to third party libraries
`#5228 <https://github.com/aio-libs/aiohttp/issues/5228>`_
Misc
----
- `#4102 <https://github.com/aio-libs/aiohttp/issues/4102>`_
----
3.7.2 (2020-10-27)
==================
Bugfixes
--------
- Fixed static files handling for loops without ``.sendfile()`` support
`#5149 <https://github.com/aio-libs/aiohttp/issues/5149>`_
----
3.7.1 (2020-10-25)
==================
Bugfixes
--------
- Fixed a type error caused by the conditional import of `Protocol`.
`#5111 <https://github.com/aio-libs/aiohttp/issues/5111>`_
- Server doesn't send Content-Length for 1xx or 204
`#4901 <https://github.com/aio-libs/aiohttp/issues/4901>`_
- Fix run_app typing
`#4957 <https://github.com/aio-libs/aiohttp/issues/4957>`_
- Always require ``typing_extensions`` library.
`#5107 <https://github.com/aio-libs/aiohttp/issues/5107>`_
- Fix a variable-shadowing bug causing `ThreadedResolver.resolve` to
return the resolved IP as the ``hostname`` in each record, which prevented
validation of HTTPS connections.
`#5110 <https://github.com/aio-libs/aiohttp/issues/5110>`_
- Added annotations to all public attributes.
`#5115 <https://github.com/aio-libs/aiohttp/issues/5115>`_
- Fix flaky test_when_timeout_smaller_second
`#5116 <https://github.com/aio-libs/aiohttp/issues/5116>`_
- Ensure sending a zero byte file does not throw an exception
`#5124 <https://github.com/aio-libs/aiohttp/issues/5124>`_
- Fix a bug in ``web.run_app()`` about Python version checking on Windows
`#5127 <https://github.com/aio-libs/aiohttp/issues/5127>`_
----
3.7.0 (2020-10-24)
==================
Features
--------
- Response headers are now prepared prior to running ``on_response_prepare`` hooks, directly before headers are sent to the client.
`#1958 <https://github.com/aio-libs/aiohttp/issues/1958>`_
- Add a ``quote_cookie`` option to ``CookieJar``, a way to skip quotation wrapping of cookies containing special characters.
`#2571 <https://github.com/aio-libs/aiohttp/issues/2571>`_
- Call ``AccessLogger.log`` with the current exception available from ``sys.exc_info()``.
`#3557 <https://github.com/aio-libs/aiohttp/issues/3557>`_
- `web.UrlDispatcher.add_routes` and `web.Application.add_routes` return a list
of registered `AbstractRoute` instances. `AbstractRouteDef.register` (and all
subclasses) return a list of the registered resources.
`#3866 <https://github.com/aio-libs/aiohttp/issues/3866>`_
- Added properties of default ClientSession params to ClientSession class so it is available for introspection
`#3882 <https://github.com/aio-libs/aiohttp/issues/3882>`_
- Don't cancel web handler on peer disconnection, raise `OSError` on reading/writing instead.
`#4080 <https://github.com/aio-libs/aiohttp/issues/4080>`_
- Implement BaseRequest.get_extra_info() to access a protocol transports' extra info.
`#4189 <https://github.com/aio-libs/aiohttp/issues/4189>`_
- Added `ClientSession.timeout` property.
`#4191 <https://github.com/aio-libs/aiohttp/issues/4191>`_
- allow use of SameSite in cookies.
`#4224 <https://github.com/aio-libs/aiohttp/issues/4224>`_
- Use ``loop.sendfile()`` instead of custom implementation if available.
`#4269 <https://github.com/aio-libs/aiohttp/issues/4269>`_
- Apply SO_REUSEADDR to test server's socket.
`#4393 <https://github.com/aio-libs/aiohttp/issues/4393>`_
- Use .raw_host instead of slower .host in client API
`#4402 <https://github.com/aio-libs/aiohttp/issues/4402>`_
- Allow configuring the buffer size of input stream by passing ``read_bufsize`` argument.
`#4453 <https://github.com/aio-libs/aiohttp/issues/4453>`_
- Pass tests on Python 3.8 for Windows.
`#4513 <https://github.com/aio-libs/aiohttp/issues/4513>`_
- Add `method` and `url` attributes to `TraceRequestChunkSentParams` and `TraceResponseChunkReceivedParams`.
`#4674 <https://github.com/aio-libs/aiohttp/issues/4674>`_
- Add ClientResponse.ok property for checking status code under 400.
`#4711 <https://github.com/aio-libs/aiohttp/issues/4711>`_
- Don't ceil timeouts that are smaller than 5 seconds.
`#4850 <https://github.com/aio-libs/aiohttp/issues/4850>`_
- TCPSite now listens by default on all interfaces instead of just IPv4 when `None` is passed in as the host.
`#4894 <https://github.com/aio-libs/aiohttp/issues/4894>`_
- Bump ``http_parser`` to 2.9.4
`#5070 <https://github.com/aio-libs/aiohttp/issues/5070>`_
Bugfixes
--------
- Fix keepalive connections not being closed in time
`#3296 <https://github.com/aio-libs/aiohttp/issues/3296>`_
- Fix failed websocket handshake leaving connection hanging.
`#3380 <https://github.com/aio-libs/aiohttp/issues/3380>`_
- Fix tasks cancellation order on exit. The run_app task needs to be cancelled first for cleanup hooks to run with all tasks intact.
`#3805 <https://github.com/aio-libs/aiohttp/issues/3805>`_
- Don't start heartbeat until _writer is set
`#4062 <https://github.com/aio-libs/aiohttp/issues/4062>`_
- Fix handling of multipart file uploads without a content type.
`#4089 <https://github.com/aio-libs/aiohttp/issues/4089>`_
- Preserve view handler function attributes across middlewares
`#4174 <https://github.com/aio-libs/aiohttp/issues/4174>`_
- Fix the string representation of ``ServerDisconnectedError``.
`#4175 <https://github.com/aio-libs/aiohttp/issues/4175>`_
- Raising RuntimeError when trying to get encoding from not read body
`#4214 <https://github.com/aio-libs/aiohttp/issues/4214>`_
- Remove warning messages from noop.
`#4282 <https://github.com/aio-libs/aiohttp/issues/4282>`_
- Raise ClientPayloadError if FormData re-processed.
`#4345 <https://github.com/aio-libs/aiohttp/issues/4345>`_
- Fix a warning about unfinished task in ``web_protocol.py``
`#4408 <https://github.com/aio-libs/aiohttp/issues/4408>`_
- Fixed 'deflate' compression. According to RFC 2616 now.
`#4506 <https://github.com/aio-libs/aiohttp/issues/4506>`_
- Fixed OverflowError on platforms with 32-bit time_t
`#4515 <https://github.com/aio-libs/aiohttp/issues/4515>`_
- Fixed request.body_exists returns wrong value for methods without body.
`#4528 <https://github.com/aio-libs/aiohttp/issues/4528>`_
- Fix connecting to link-local IPv6 addresses.
`#4554 <https://github.com/aio-libs/aiohttp/issues/4554>`_
- Fix a problem with connection waiters that are never awaited.
`#4562 <https://github.com/aio-libs/aiohttp/issues/4562>`_
- Always make sure the transport is not closing before reusing a connection.
Reusing a protocol based on the keepalive header is unreliable.
For example, uWSGI will not support keepalive even when it serves an
HTTP/1.1 request, unless uWSGI is explicitly configured with the
``--http-keepalive`` option.
Servers designed like uWSGI could cause aiohttp to intermittently
raise a connection reset error when the protocol pool runs
out and a protocol is reused.
`#4587 <https://github.com/aio-libs/aiohttp/issues/4587>`_
- Handle the last CRLF correctly even if it is received via separate TCP segment.
`#4630 <https://github.com/aio-libs/aiohttp/issues/4630>`_
- Fix the register_resource function to validate route name before splitting it so that route name can include python keywords.
`#4691 <https://github.com/aio-libs/aiohttp/issues/4691>`_
- Improve typing annotations for ``web.Request``, ``aiohttp.ClientResponse`` and
``multipart`` module.
`#4736 <https://github.com/aio-libs/aiohttp/issues/4736>`_
- Fix resolver task is not awaited when connector is cancelled
`#4795 <https://github.com/aio-libs/aiohttp/issues/4795>`_
- Fix a bug "Aiohttp doesn't return any error on invalid request methods"
`#4798 <https://github.com/aio-libs/aiohttp/issues/4798>`_
- Fix HEAD requests for static content.
`#4809 <https://github.com/aio-libs/aiohttp/issues/4809>`_
- Fix incorrect size calculation for memoryview
`#4890 <https://github.com/aio-libs/aiohttp/issues/4890>`_
- Add HTTPMove to _all__.
`#4897 <https://github.com/aio-libs/aiohttp/issues/4897>`_
- Fixed the type annotations in the ``tracing`` module.
`#4912 <https://github.com/aio-libs/aiohttp/issues/4912>`_
- Fix typing for multipart ``__aiter__``.
`#4931 <https://github.com/aio-libs/aiohttp/issues/4931>`_
- Fix for race condition on connections in BaseConnector that leads to exceeding the connection limit.
`#4936 <https://github.com/aio-libs/aiohttp/issues/4936>`_
- Add forced UTF-8 encoding for ``application/rdap+json`` responses.
`#4938 <https://github.com/aio-libs/aiohttp/issues/4938>`_
- Fix inconsistency between Python and C http request parsers in parsing pct-encoded URL.
`#4972 <https://github.com/aio-libs/aiohttp/issues/4972>`_
- Fix connection closing issue in HEAD request.
`#5012 <https://github.com/aio-libs/aiohttp/issues/5012>`_
- Fix type hint on BaseRunner.addresses (from ``List[str]`` to ``List[Any]``)
`#5086 <https://github.com/aio-libs/aiohttp/issues/5086>`_
- Make `web.run_app()` more responsive to Ctrl+C on Windows for Python < 3.8. It slightly
increases CPU load as a side effect.
`#5098 <https://github.com/aio-libs/aiohttp/issues/5098>`_
Improved Documentation
----------------------
- Fix example code in client quick-start
`#3376 <https://github.com/aio-libs/aiohttp/issues/3376>`_
- Updated the docs so there is no contradiction in ``ttl_dns_cache`` default value
`#3512 <https://github.com/aio-libs/aiohttp/issues/3512>`_
- Add 'Deploy with SSL' to docs.
`#4201 <https://github.com/aio-libs/aiohttp/issues/4201>`_
- Change typing of the secure argument on StreamResponse.set_cookie from ``Optional[str]`` to ``Optional[bool]``
`#4204 <https://github.com/aio-libs/aiohttp/issues/4204>`_
- Changes ``ttl_dns_cache`` type from int to Optional[int].
`#4270 <https://github.com/aio-libs/aiohttp/issues/4270>`_
- Simplify README hello world example and add a documentation page for people coming from requests.
`#4272 <https://github.com/aio-libs/aiohttp/issues/4272>`_
- Improve some code examples in the documentation involving websockets and starting a simple HTTP site with an AppRunner.
`#4285 <https://github.com/aio-libs/aiohttp/issues/4285>`_
- Fix typo in code example in Multipart docs
`#4312 <https://github.com/aio-libs/aiohttp/issues/4312>`_
- Fix code example in Multipart section.
`#4314 <https://github.com/aio-libs/aiohttp/issues/4314>`_
- Update contributing guide so new contributors read the most recent version of that guide. Update command used to create test coverage reporting.
`#4810 <https://github.com/aio-libs/aiohttp/issues/4810>`_
- Spelling: Change "canonize" to "canonicalize".
`#4986 <https://github.com/aio-libs/aiohttp/issues/4986>`_
- Add ``aiohttp-sse-client`` library to third party usage list.
`#5084 <https://github.com/aio-libs/aiohttp/issues/5084>`_
Misc
----
- `#2856 <https://github.com/aio-libs/aiohttp/issues/2856>`_, `#4218 <https://github.com/aio-libs/aiohttp/issues/4218>`_, `#4250 <https://github.com/aio-libs/aiohttp/issues/4250>`_
----
3.6.3 (2020-10-12)
==================
Bugfixes
--------
- Pin yarl to ``<1.6.0`` to avoid buggy behavior that will be fixed by the next aiohttp
release.
3.6.2 (2019-10-09)
==================
Features
--------
- Made exceptions pickleable. Also changed the repr of some exceptions.
`#4077 <https://github.com/aio-libs/aiohttp/issues/4077>`_
- Use ``Iterable`` type hint instead of ``Sequence`` for ``Application`` *middleware*
parameter. `#4125 <https://github.com/aio-libs/aiohttp/issues/4125>`_
Bugfixes
--------
- Reset the ``sock_read`` timeout each time data is received for a
``aiohttp.ClientResponse``. `#3808
<https://github.com/aio-libs/aiohttp/issues/3808>`_
- Fix handling of expired cookies so they are not stored in CookieJar.
`#4063 <https://github.com/aio-libs/aiohttp/issues/4063>`_
- Fix misleading message in the string representation of ``ClientConnectorError``;
``self.ssl == None`` means default SSL context, not SSL disabled `#4097
<https://github.com/aio-libs/aiohttp/issues/4097>`_
- Don't clobber HTTP status when using FileResponse.
`#4106 <https://github.com/aio-libs/aiohttp/issues/4106>`_
Improved Documentation
----------------------
- Added minimal required logging configuration to logging documentation.
`#2469 <https://github.com/aio-libs/aiohttp/issues/2469>`_
- Update docs to reflect proxy support.
`#4100 <https://github.com/aio-libs/aiohttp/issues/4100>`_
- Fix typo in code example in testing docs.
`#4108 <https://github.com/aio-libs/aiohttp/issues/4108>`_
Misc
----
- `#4102 <https://github.com/aio-libs/aiohttp/issues/4102>`_
----
3.6.1 (2019-09-19)
==================
Features
--------
- Compatibility with Python 3.8.
`#4056 <https://github.com/aio-libs/aiohttp/issues/4056>`_
Bugfixes
--------
- correct some exception string format
`#4068 <https://github.com/aio-libs/aiohttp/issues/4068>`_
- Emit a warning when ``ssl.OP_NO_COMPRESSION`` is
unavailable because the runtime is built against
an outdated OpenSSL.
`#4052 <https://github.com/aio-libs/aiohttp/issues/4052>`_
- Update multidict requirement to >= 4.5
`#4057 <https://github.com/aio-libs/aiohttp/issues/4057>`_
Improved Documentation
----------------------
- Provide pytest-aiohttp namespace for pytest fixtures in docs.
`#3723 <https://github.com/aio-libs/aiohttp/issues/3723>`_
----
3.6.0 (2019-09-06)
==================
Features
--------
- Add support for Named Pipes (Site and Connector) under Windows. This feature requires
Proactor event loop to work. `#3629
<https://github.com/aio-libs/aiohttp/issues/3629>`_
- Removed ``Transfer-Encoding: chunked`` header from websocket responses to be
compatible with more http proxy servers. `#3798
<https://github.com/aio-libs/aiohttp/issues/3798>`_
- Accept non-GET request for starting websocket handshake on server side.
`#3980 <https://github.com/aio-libs/aiohttp/issues/3980>`_
Bugfixes
--------
- Raise a ClientResponseError instead of an AssertionError for a blank
HTTP Reason Phrase.
`#3532 <https://github.com/aio-libs/aiohttp/issues/3532>`_
- Fix an issue where cookies would sometimes not be set during a redirect.
`#3576 <https://github.com/aio-libs/aiohttp/issues/3576>`_
- Change normalize_path_middleware to use 308 redirect instead of 301.
This behavior should prevent clients from being unable to use PUT/POST
methods on endpoints that are redirected because of a trailing slash.
`#3579 <https://github.com/aio-libs/aiohttp/issues/3579>`_
- Drop the processed task from ``all_tasks()`` list early. It prevents logging about a
task with unhandled exception when the server is used in conjunction with
``asyncio.run()``. `#3587 <https://github.com/aio-libs/aiohttp/issues/3587>`_
- ``Signal`` type annotation changed from ``Signal[Callable[['TraceConfig'],
Awaitable[None]]]`` to ``Signal[Callable[ClientSession, SimpleNamespace, ...]``.
`#3595 <https://github.com/aio-libs/aiohttp/issues/3595>`_
- Use sanitized URL as Location header in redirects
`#3614 <https://github.com/aio-libs/aiohttp/issues/3614>`_
- Improve typing annotations for multipart.py along with changes required
by mypy in files that references multipart.py.
`#3621 <https://github.com/aio-libs/aiohttp/issues/3621>`_
- Close session created inside ``aiohttp.request`` when unhandled exception occurs
`#3628 <https://github.com/aio-libs/aiohttp/issues/3628>`_
- Cleanup per-chunk data in generic data read. Memory leak fixed.
`#3631 <https://github.com/aio-libs/aiohttp/issues/3631>`_
- Use correct type for add_view and family
`#3633 <https://github.com/aio-libs/aiohttp/issues/3633>`_
- Fix _keepalive field in __slots__ of ``RequestHandler``.
`#3644 <https://github.com/aio-libs/aiohttp/issues/3644>`_
- Properly handle ConnectionResetError, to silence the "Cannot write to closing
transport" exception when clients disconnect uncleanly.
`#3648 <https://github.com/aio-libs/aiohttp/issues/3648>`_
- Suppress pytest warnings due to ``test_utils`` classes
`#3660 <https://github.com/aio-libs/aiohttp/issues/3660>`_
- Fix overshadowing of overlapped sub-application prefixes.
`#3701 <https://github.com/aio-libs/aiohttp/issues/3701>`_
- Fixed return type annotation for WSMessage.json()
`#3720 <https://github.com/aio-libs/aiohttp/issues/3720>`_
- Properly expose TooManyRedirects publicly as documented.
`#3818 <https://github.com/aio-libs/aiohttp/issues/3818>`_
- Fix missing brackets for IPv6 in proxy CONNECT request
`#3841 <https://github.com/aio-libs/aiohttp/issues/3841>`_
- Make the signature of ``aiohttp.test_utils.TestClient.request`` match
``asyncio.ClientSession.request`` according to the docs `#3852
<https://github.com/aio-libs/aiohttp/issues/3852>`_
- Use correct style for re-exported imports, makes mypy ``--strict`` mode happy.
`#3868 <https://github.com/aio-libs/aiohttp/issues/3868>`_
- Fixed type annotation for add_view method of UrlDispatcher to accept any subclass of
View `#3880 <https://github.com/aio-libs/aiohttp/issues/3880>`_
- Made cython HTTP parser set Reason-Phrase of the response to an empty string if it is
missing. `#3906 <https://github.com/aio-libs/aiohttp/issues/3906>`_
- Add URL to the string representation of ClientResponseError.
`#3959 <https://github.com/aio-libs/aiohttp/issues/3959>`_
- Accept ``istr`` keys in ``LooseHeaders`` type hints.
`#3976 <https://github.com/aio-libs/aiohttp/issues/3976>`_
- Fixed race conditions in _resolve_host caching and throttling when tracing is enabled.
`#4013 <https://github.com/aio-libs/aiohttp/issues/4013>`_
- For URLs like "unix://localhost/..." set Host HTTP header to "localhost" instead of
"localhost:None". `#4039 <https://github.com/aio-libs/aiohttp/issues/4039>`_
Improved Documentation
----------------------
- Modify documentation for Background Tasks to remove deprecated usage of event loop.
`#3526 <https://github.com/aio-libs/aiohttp/issues/3526>`_
- use ``if __name__ == '__main__':`` in server examples.
`#3775 <https://github.com/aio-libs/aiohttp/issues/3775>`_
- Update documentation reference to the default access logger.
`#3783 <https://github.com/aio-libs/aiohttp/issues/3783>`_
- Improve documentation for ``web.BaseRequest.path`` and ``web.BaseRequest.raw_path``.
`#3791 <https://github.com/aio-libs/aiohttp/issues/3791>`_
- Removed deprecation warning in tracing example docs
`#3964 <https://github.com/aio-libs/aiohttp/issues/3964>`_
----
3.5.4 (2019-01-12)
==================
Bugfixes
--------
- Fix stream ``.read()`` / ``.readany()`` / ``.iter_any()`` which used to return a
partial content only in case of compressed content
`#3525 <https://github.com/aio-libs/aiohttp/issues/3525>`_
3.5.3 (2019-01-10)
==================
Bugfixes
--------
- Fix type stubs for ``aiohttp.web.run_app(access_log=True)`` and fix edge case of
``access_log=True`` and the event loop being in debug mode. `#3504
<https://github.com/aio-libs/aiohttp/issues/3504>`_
- Fix ``aiohttp.ClientTimeout`` type annotations to accept ``None`` for fields
`#3511 <https://github.com/aio-libs/aiohttp/issues/3511>`_
- Send custom per-request cookies even if session jar is empty
`#3515 <https://github.com/aio-libs/aiohttp/issues/3515>`_
- Restore Linux binary wheels publishing on PyPI
----
3.5.2 (2019-01-08)
==================
Features
--------
- ``FileResponse`` from ``web_fileresponse.py`` uses a ``ThreadPoolExecutor`` to work
with files asynchronously. I/O based payloads from ``payload.py`` uses a
``ThreadPoolExecutor`` to work with I/O objects asynchronously. `#3313
<https://github.com/aio-libs/aiohttp/issues/3313>`_
- Internal Server Errors in plain text if the browser does not support HTML.
`#3483 <https://github.com/aio-libs/aiohttp/issues/3483>`_
Bugfixes
--------
- Preserve MultipartWriter parts headers on write. Refactor the way how
``Payload.headers`` are handled. Payload instances now always have headers and
Content-Type defined. Fix Payload Content-Disposition header reset after initial
creation. `#3035 <https://github.com/aio-libs/aiohttp/issues/3035>`_
- Log suppressed exceptions in ``GunicornWebWorker``.
`#3464 <https://github.com/aio-libs/aiohttp/issues/3464>`_
- Remove wildcard imports.
`#3468 <https://github.com/aio-libs/aiohttp/issues/3468>`_
- Use the same task for app initialization and web server handling in gunicorn workers.
It allows to use Python3.7 context vars smoothly.
`#3471 <https://github.com/aio-libs/aiohttp/issues/3471>`_
- Fix handling of chunked+gzipped response when first chunk does not give uncompressed
data `#3477 <https://github.com/aio-libs/aiohttp/issues/3477>`_
- Replace ``collections.MutableMapping`` with ``collections.abc.MutableMapping`` to
avoid a deprecation warning. `#3480
<https://github.com/aio-libs/aiohttp/issues/3480>`_
- ``Payload.size`` type annotation changed from ``Optional[float]`` to
``Optional[int]``. `#3484 <https://github.com/aio-libs/aiohttp/issues/3484>`_
- Ignore done tasks when cancels pending activities on ``web.run_app`` finalization.
`#3497 <https://github.com/aio-libs/aiohttp/issues/3497>`_
Improved Documentation
----------------------
- Add documentation for ``aiohttp.web.HTTPException``.
`#3490 <https://github.com/aio-libs/aiohttp/issues/3490>`_
Misc
----
- `#3487 <https://github.com/aio-libs/aiohttp/issues/3487>`_
----
3.5.1 (2018-12-24)
====================
- Fix a regression about ``ClientSession._requote_redirect_url`` modification in debug
mode.
3.5.0 (2018-12-22)
====================
Features
--------
- The library type annotations are checked in strict mode now.
- Add support for setting cookies for individual request (`#2387
<https://github.com/aio-libs/aiohttp/pull/2387>`_)
- Application.add_domain implementation (`#2809
<https://github.com/aio-libs/aiohttp/pull/2809>`_)
- The default ``app`` in the request returned by ``test_utils.make_mocked_request`` can
now have objects assigned to it and retrieved using the ``[]`` operator. (`#3174
<https://github.com/aio-libs/aiohttp/pull/3174>`_)
- Make ``request.url`` accessible when transport is closed. (`#3177
<https://github.com/aio-libs/aiohttp/pull/3177>`_)
- Add ``zlib_executor_size`` argument to ``Response`` constructor to allow compression
to run in a background executor to avoid blocking the main thread and potentially
triggering health check failures. (`#3205
<https://github.com/aio-libs/aiohttp/pull/3205>`_)
- Enable users to set ``ClientTimeout`` in ``aiohttp.request`` (`#3213
<https://github.com/aio-libs/aiohttp/pull/3213>`_)
- Don't raise a warning if ``NETRC`` environment variable is not set and ``~/.netrc``
file doesn't exist. (`#3267 <https://github.com/aio-libs/aiohttp/pull/3267>`_)
- Add default logging handler to web.run_app. If the ``Application.debug`` flag is set
and the default logger ``aiohttp.access`` is used, access logs will now be output
using a *stderr* ``StreamHandler`` if no handlers are attached. Furthermore, if the
default logger has no log level set, the log level will be set to ``DEBUG``. (`#3324
<https://github.com/aio-libs/aiohttp/pull/3324>`_)
- Add method argument to ``session.ws_connect()``. Sometimes server API requires a
different HTTP method for WebSocket connection establishment. For example, ``Docker
exec`` needs POST. (`#3378 <https://github.com/aio-libs/aiohttp/pull/3378>`_)
- Create a task per request handling. (`#3406
<https://github.com/aio-libs/aiohttp/pull/3406>`_)
Bugfixes
--------
- Enable passing ``access_log_class`` via ``handler_args`` (`#3158
<https://github.com/aio-libs/aiohttp/pull/3158>`_)
- Return empty bytes with end-of-chunk marker in empty stream reader. (`#3186
<https://github.com/aio-libs/aiohttp/pull/3186>`_)
- Accept ``CIMultiDictProxy`` instances for ``headers`` argument in ``web.Response``
constructor. (`#3207 <https://github.com/aio-libs/aiohttp/pull/3207>`_)
- Don't uppercase HTTP method in parser (`#3233
<https://github.com/aio-libs/aiohttp/pull/3233>`_)
- Make method match regexp RFC-7230 compliant (`#3235
<https://github.com/aio-libs/aiohttp/pull/3235>`_)
- Add ``app.pre_frozen`` state to properly handle startup signals in
sub-applications. (`#3237 <https://github.com/aio-libs/aiohttp/pull/3237>`_)
- Enhanced parsing and validation of helpers.BasicAuth.decode. (`#3239
<https://github.com/aio-libs/aiohttp/pull/3239>`_)
- Change imports from collections module in preparation for 3.8. (`#3258
<https://github.com/aio-libs/aiohttp/pull/3258>`_)
- Ensure Host header is added first to ClientRequest to better replicate browser (`#3265
<https://github.com/aio-libs/aiohttp/pull/3265>`_)
- Fix forward compatibility with Python 3.8: importing ABCs directly from the
collections module will not be supported anymore. (`#3273
<https://github.com/aio-libs/aiohttp/pull/3273>`_)
- Keep the query string by ``normalize_path_middleware``. (`#3278
<https://github.com/aio-libs/aiohttp/pull/3278>`_)
- Fix missing parameter ``raise_for_status`` for aiohttp.request() (`#3290
<https://github.com/aio-libs/aiohttp/pull/3290>`_)
- Bracket IPv6 addresses in the HOST header (`#3304
<https://github.com/aio-libs/aiohttp/pull/3304>`_)
- Fix default message for server ping and pong frames. (`#3308
<https://github.com/aio-libs/aiohttp/pull/3308>`_)
- Fix tests/test_connector.py typo and tests/autobahn/server.py duplicate loop
def. (`#3337 <https://github.com/aio-libs/aiohttp/pull/3337>`_)
- Fix false-negative indicator end_of_HTTP_chunk in StreamReader.readchunk function
(`#3361 <https://github.com/aio-libs/aiohttp/pull/3361>`_)
- Release HTTP response before raising status exception (`#3364
<https://github.com/aio-libs/aiohttp/pull/3364>`_)
- Fix task cancellation when ``sendfile()`` syscall is used by static file
handling. (`#3383 <https://github.com/aio-libs/aiohttp/pull/3383>`_)
- Fix stack trace for ``asyncio.TimeoutError`` which was not logged, when it is caught
in the handler. (`#3414 <https://github.com/aio-libs/aiohttp/pull/3414>`_)
Improved Documentation
----------------------
- Improve documentation of ``Application.make_handler`` parameters. (`#3152
<https://github.com/aio-libs/aiohttp/pull/3152>`_)
- Fix BaseRequest.raw_headers doc. (`#3215
<https://github.com/aio-libs/aiohttp/pull/3215>`_)
- Fix typo in TypeError exception reason in ``web.Application._handle`` (`#3229
<https://github.com/aio-libs/aiohttp/pull/3229>`_)
- Make server access log format placeholder %b documentation reflect
behavior and docstring. (`#3307 <https://github.com/aio-libs/aiohttp/pull/3307>`_)
Deprecations and Removals
-------------------------
- Deprecate modification of ``session.requote_redirect_url`` (`#2278
<https://github.com/aio-libs/aiohttp/pull/2278>`_)
- Deprecate ``stream.unread_data()`` (`#3260
<https://github.com/aio-libs/aiohttp/pull/3260>`_)
- Deprecated use of boolean in ``resp.enable_compression()`` (`#3318
<https://github.com/aio-libs/aiohttp/pull/3318>`_)
- Encourage creation of aiohttp public objects inside a coroutine (`#3331
<https://github.com/aio-libs/aiohttp/pull/3331>`_)
- Drop dead ``Connection.detach()`` and ``Connection.writer``. Both methods were broken
for more than 2 years. (`#3358 <https://github.com/aio-libs/aiohttp/pull/3358>`_)
- Deprecate ``app.loop``, ``request.loop``, ``client.loop`` and ``connector.loop``
properties. (`#3374 <https://github.com/aio-libs/aiohttp/pull/3374>`_)
- Deprecate explicit debug argument. Use asyncio debug mode instead. (`#3381
<https://github.com/aio-libs/aiohttp/pull/3381>`_)
- Deprecate body parameter in HTTPException (and derived classes) constructor. (`#3385
<https://github.com/aio-libs/aiohttp/pull/3385>`_)
- Deprecate bare connector close, use ``async with connector:`` and ``await
connector.close()`` instead. (`#3417
<https://github.com/aio-libs/aiohttp/pull/3417>`_)
- Deprecate obsolete ``read_timeout`` and ``conn_timeout`` in ``ClientSession``
constructor. (`#3438 <https://github.com/aio-libs/aiohttp/pull/3438>`_)
Misc
----
- #3341, #3351

View file

@@ -1,135 +0,0 @@
aiohttp/__init__.py,sha256=kz1O7HepmZpNXAVHUS8h8tDqHip6G7ApF45_04MurZ0,6934
aiohttp/_cparser.pxd,sha256=tgw30SL6kQSczzGMlMhx2Cuhf_O8P8ZPimVCb85xILc,3959
aiohttp/_find_header.c,sha256=s8Urnxv8CDd2NuKCEzCsLWy4NbVFXfLMNlg-s2bCg3A,187570
aiohttp/_find_header.h,sha256=5oOgQ85nF6V7rpU8NhyE5vyGkTo1Cgf1GIYrtxSTzQI,170
aiohttp/_find_header.pxd,sha256=0GfwFCPN2zxEKTO1_MA5sYq2UfzsG8kcV3aTqvwlz3g,68
aiohttp/_frozenlist.c,sha256=4m7L0I0wWumfGoXIMaYYeKxf9zG3ssoIxB-meEQDgh4,294193
aiohttp/_frozenlist.cpython-37m-darwin.so,sha256=R4a4ORdHkSdM-c61yAaUMUplWjGVtJDFX4JIRAsf7iw,92472
aiohttp/_frozenlist.pyx,sha256=BD8LcERExsWdo4qzuuQ84f-L_pHVzkUQO0lEAOe3Fog,2605
aiohttp/_headers.pxi,sha256=n701k28dVPjwRnx5j6LpJhLTfj7dqu2vJt7f0O60Oyg,2007
aiohttp/_helpers.c,sha256=meXX95gKMrawWPUl-q3Sdyf4VxSdsH-8lbCON6-ATOo,211989
aiohttp/_helpers.cpython-37m-darwin.so,sha256=XyeNlPexhFMQrbjG30DWOffQ6FyYBa3QblpW0kwopX0,68128
aiohttp/_helpers.pyi,sha256=ZoKiJSS51PxELhI2cmIr5737YjjZcJt7FbIRO3ym1Ss,202
aiohttp/_helpers.pyx,sha256=XeLbNft5X_4ifi8QB8i6TyrRuayijMSO3IDHeSA89uM,1049
aiohttp/_http_parser.c,sha256=O51TiHCn0eXUeiBWCyl7E7y-JjZLgeLRj10D18HudSU,1011524
aiohttp/_http_parser.cpython-37m-darwin.so,sha256=fxc3YCdZ9YM_1M_ke621_TlAh0d4FPkhgzofgAsX3Ds,497200
aiohttp/_http_parser.pyx,sha256=8GiPsugeqSvwoXgiJg2VkaMJeRAdoSpLhzET_EWftfo,29022
aiohttp/_http_writer.c,sha256=QxJ09pt3guD8aVsQ7LaAxX4vxBUhJ7RhxeVMVWu8BUk,213020
aiohttp/_http_writer.cpython-37m-darwin.so,sha256=KW_V9a5eTCniI-P4eMxmqGI14cUseIR9ld_YRYAbpOE,62872
aiohttp/_http_writer.pyx,sha256=Tnt_e6pcZZVOhaW3yNt3hqDsNJgIGwqUIPeSqAMIYoE,4200
aiohttp/_websocket.c,sha256=F5V1i__yewPzFcgYKmcTWk_CjptCZUYlhQesX14u4ec,137427
aiohttp/_websocket.cpython-37m-darwin.so,sha256=90xJg0yXVPna02tLlSXA4cwzMCs2Hts6WvEcrYKFtWU,43648
aiohttp/_websocket.pyx,sha256=1XuOSNDCbyDrzF5uMA2isqausSs8l2jWTLDlNDLM9Io,1561
aiohttp/abc.py,sha256=tbhrXunYB8-Jmq3-e6zbYtXaCR7Vfb8eRo4Ifk-juLQ,5247
aiohttp/base_protocol.py,sha256=eNgmzKP-Lbh5vxSmShDBKTJ0JSAVpcrAZa-EKqmCTKA,2701
aiohttp/client.py,sha256=9firS0XBuNEGshPE81CsmLxnkfkZoJsRUV6amoGZ1FA,43916
aiohttp/client_exceptions.py,sha256=fW2rrDEcWitwpXhQIFtViuC2JEHDx1oIXXQsj6YGd5I,8529
aiohttp/client_proto.py,sha256=Iek_ZuTr_2xFoKt30zveOBh1xYt6iXE7XZVwUPiCXTs,8163
aiohttp/client_reqrep.py,sha256=okBWp3TC_leAiD0-kEtS8H0v8fZPTSVzyDXDwusSZrs,36439
aiohttp/client_ws.py,sha256=sxkvG5Vw37Mr6V_8w9WZ0ulo_jKL_tRtSK1l9bRIPlY,10287
aiohttp/connector.py,sha256=9A8wGKGoOXZCzRUBnw2dwT457aWXAxFSn4fPaEK5Lh8,42968
aiohttp/cookiejar.py,sha256=SbX_n44bl69PrGIPOmj2imHwynzBlUbxrZUe16T3K-4,12163
aiohttp/formdata.py,sha256=UJ4DgqX4Jk-LZpnKOeOCUewL5264eO4DyHPKOk5vgos,6080
aiohttp/frozenlist.py,sha256=nFqoDVmyxfStqiY_FxcrME8GpvhRDXf3mh-BGCVxhnQ,1718
aiohttp/frozenlist.pyi,sha256=bRNKoI2j1roPdtgfx_nseDanvBqZsZUNHDqmXtfjlRo,1434
aiohttp/hdrs.py,sha256=WsjDJYADYEyJk7-oNXNhA2M3MwtyLkhJAklyzLtclfU,3446
aiohttp/helpers.py,sha256=iAQNQGGoqN8ovgdMfHC2rggYSbOaSTToiUJqg1LJrzw,22918
aiohttp/http.py,sha256=CHdBKhli7yITl_VfpBDdjgF6nT_cCWPRXov8UzBeo2k,1824
aiohttp/http_exceptions.py,sha256=eACQt7azwNbUi8aqXB5J2tIcc_CB4_4y4mLM9SZd4tM,2586
aiohttp/http_parser.py,sha256=wNwFkglxrBFRUHWN4AUbbUuFckMgwy5k_cX6zR4CEiE,30781
aiohttp/http_websocket.py,sha256=k_mvpvQUsliv7F7ZlXmz01hzBnoysSHAr4KyZDC-O78,25098
aiohttp/http_writer.py,sha256=ofDAEH8IJry013XsbrrfNiV82Fr7Gs3d7syduMx1vYU,5341
aiohttp/locks.py,sha256=j5VO5HkQY_N5DRJAezRJXavE9y2N9Y9Sq3oWu8ftAOA,1220
aiohttp/log.py,sha256=BbNKx9e3VMIm0xYjZI0IcBBoS7wjdeIeSaiJE7-qK2g,325
aiohttp/multipart.py,sha256=ZrChyWL811TpsxcL0rcmx3VIJjzj-cINkrjgk9GhBt4,32251
aiohttp/payload.py,sha256=jvvuncmzh4RuHUR05srhzSMXCUCxclUPgFnEDm1vl40,13333
aiohttp/payload_streamer.py,sha256=qW8jvfRJYYnpRro74vQbrTXFd_5JZsHyVGi4-bulOwo,2102
aiohttp/py.typed,sha256=sow9soTwP9T_gEAQSVh7Gb8855h04Nwmhs2We-JRgZM,7
aiohttp/pytest_plugin.py,sha256=0z5FACR9mUEZp72jakz_jfDuy-dsmv2HISIY0EB01K4,11009
aiohttp/resolver.py,sha256=VmUmtY_mjuRQCy4OrByS74bgooPzUeHrO_pEUPaMXc4,4608
aiohttp/signals.py,sha256=YSWNpNheFirmccC7vWMr8J1TDWBN_P0m3Ch_FiZ3HVA,852
aiohttp/signals.pyi,sha256=SLTfUPdx1-g4VSTqCnBXyhSCl0-KQ-Z0mCsEsIvBfV4,319
aiohttp/streams.py,sha256=zmCpGESe27u2b-JmyOem_FThgOn9Iq_TpwIHJMjJfmY,20530
aiohttp/tcp_helpers.py,sha256=l3XD9wWiuMawXCMdqiNSRHIz0iVtMKi7lwqXnHvyK3k,962
aiohttp/test_utils.py,sha256=NG7YcgAE_E7LOlcaNQ8JhgOadhg5bg174-tukUQLDMA,20253
aiohttp/tracing.py,sha256=-G2cfF6LNFqY8nCbnHJ8NkLdnJV1WSjRtKOnJ1gz_lw,14363
aiohttp/typedefs.py,sha256=PUsJ7Y-f8bRqyDOdqd681VvaNi_LETRJ5L7rCPfPRN4,1374
aiohttp/web.py,sha256=WNOEIih5I3g_0BSIJkrKvWuTQ8qZVKoGkjg-v6wcSms,17881
aiohttp/web_app.py,sha256=lDfoKwy1y8_pnMkrqcfHiMnhWwH_4FCkCuSMLT4huA0,17053
aiohttp/web_exceptions.py,sha256=-ZqN_zv7EiaLDS0Na5IpgXmiiNv9QZm8sebYoW3zeYc,10106
aiohttp/web_fileresponse.py,sha256=hpzBp-L-VuVrlsSjoTvGRWpgWZrLODkaoQlGCNQrpag,9025
aiohttp/web_log.py,sha256=hDwnH_USBiNqC1-XggEorcuVQUuKhEezCfjPY1wkac4,7498
aiohttp/web_middlewares.py,sha256=QYD2PiB_AMkIQWpBEiQRP0UM_vBmZ7sAM9Qes3SoXH0,4193
aiohttp/web_protocol.py,sha256=2nHNxxg7Mf8ZP88SFPxnBNGL8zmPhQpjtZ68VT0PeIo,23251
aiohttp/web_request.py,sha256=fY-GmGuV3oayr5r0Szd1VsHraN8T-uW2wCij6iFfZOg,26454
aiohttp/web_response.py,sha256=Xb9PfJhFHzu9Od4qHRmN9O6z5ojBRuL0ETqmR6S_8Ek,26202
aiohttp/web_routedef.py,sha256=PIHf5KvaOnG30pg1J8IhZewoHLOEMd5emWBZBBWS9yE,6109
aiohttp/web_runner.py,sha256=4AM4y6lgmCxBU1AHt1Sdd7IpB4pm9dWqSdapNfgzqG8,11194
aiohttp/web_server.py,sha256=Zp-StXkTSDZ8Lyg2joDdRD05QRu9oh8LET3AKfx9D2I,2058
aiohttp/web_urldispatcher.py,sha256=M930z-C2umyGyU4KmGVsabJNL5ZpT6Aj5tpSf6mSSPc,39532
aiohttp/web_ws.py,sha256=f912oImpu973icUK0QUjeXIxa57R837gdkUUn1ZnSpQ,16783
aiohttp/worker.py,sha256=T1CvPYNP4rqsdpyz3dNRuqNF5bzrH3WaEdDMLDsfLOc,8022
aiohttp/.hash/_cparser.pxd.hash,sha256=g9ydcacNvFizu1GVl--RrXpWlSawIZ-fN050i7iUo5c,64
aiohttp/.hash/_find_header.pxd.hash,sha256=BtbSPeZ2eOtt807jecTkoEeBQ4C5AiBxjzbvJj9eV6E,64
aiohttp/.hash/_frozenlist.pyx.hash,sha256=hmgKZnSvKFUnUuT9uCV7orAxYP6Js40SN70NrDWjDmI,64
aiohttp/.hash/_helpers.pyi.hash,sha256=pvlsQIMFVemm4rvS1BkQkfS2PGgJn5ZO2RmMSFxwCiE,64
aiohttp/.hash/_helpers.pyx.hash,sha256=JWBqptE_5nbmOzG3dYXdmEuAy_Y2snJfcR124T4TZpo,64
aiohttp/.hash/_http_parser.pyx.hash,sha256=qP5I4Mu5pXc_3aDx2RSPfN2j1akQ-szA5D269nw_Sxw,64
aiohttp/.hash/_http_writer.pyx.hash,sha256=CFBO14jclGTimiln6e8yXnfWwk-4s_ylld3lE6JrnY8,64
aiohttp/.hash/_websocket.pyx.hash,sha256=_6cqIcIQjICfniPJShOdspzQaB95Lu6cinOZJ6yESz8,64
aiohttp/.hash/frozenlist.pyi.hash,sha256=021hwHbk-0EfQ2c5mefxSctfrrvJ7CnQ0mPXYTBGJ4o,64
aiohttp/.hash/hdrs.py.hash,sha256=T8i1rURdD81PR6t5qOA05Lg3yV6C9Mm4LTjJucc36yA,64
aiohttp/.hash/signals.pyi.hash,sha256=GQA2kFtF7PaBpEJDZ6OpBpFaMhi_2ieNF7HaxYDmKQQ,64
aiohttp-3.7.4.dist-info/LICENSE.txt,sha256=lmJ77QrQjpsu-p9OBOgIN80FUOdpSg_sM7HaslUCgqs,11321
aiohttp-3.7.4.dist-info/METADATA,sha256=0Dssbrhzakprcv1OgK090WQ7AsehillLeEr7pAK0wOg,38601
aiohttp-3.7.4.dist-info/WHEEL,sha256=SPFsdN-g7Osut7El6pRHojcroYU7REc1DdD4VXzi4B4,111
aiohttp-3.7.4.dist-info/top_level.txt,sha256=iv-JIaacmTl-hSho3QmphcKnbRRYx1st47yjz_178Ro,8
aiohttp-3.7.4.dist-info/RECORD,,
aiohttp-3.7.4.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
aiohttp/__pycache__/web_urldispatcher.cpython-37.pyc,,
aiohttp/__pycache__/payload.cpython-37.pyc,,
aiohttp/__pycache__/typedefs.cpython-37.pyc,,
aiohttp/__pycache__/web_ws.cpython-37.pyc,,
aiohttp/__pycache__/web_routedef.cpython-37.pyc,,
aiohttp/__pycache__/client_exceptions.cpython-37.pyc,,
aiohttp/__pycache__/client.cpython-37.pyc,,
aiohttp/__pycache__/web_exceptions.cpython-37.pyc,,
aiohttp/__pycache__/web.cpython-37.pyc,,
aiohttp/__pycache__/test_utils.cpython-37.pyc,,
aiohttp/__pycache__/streams.cpython-37.pyc,,
aiohttp/__pycache__/client_proto.cpython-37.pyc,,
aiohttp/__pycache__/web_app.cpython-37.pyc,,
aiohttp/__pycache__/signals.cpython-37.pyc,,
aiohttp/__pycache__/web_fileresponse.cpython-37.pyc,,
aiohttp/__pycache__/web_protocol.cpython-37.pyc,,
aiohttp/__pycache__/multipart.cpython-37.pyc,,
aiohttp/__pycache__/payload_streamer.cpython-37.pyc,,
aiohttp/__pycache__/connector.cpython-37.pyc,,
aiohttp/__pycache__/web_runner.cpython-37.pyc,,
aiohttp/__pycache__/log.cpython-37.pyc,,
aiohttp/__pycache__/hdrs.cpython-37.pyc,,
aiohttp/__pycache__/client_ws.cpython-37.pyc,,
aiohttp/__pycache__/pytest_plugin.cpython-37.pyc,,
aiohttp/__pycache__/web_request.cpython-37.pyc,,
aiohttp/__pycache__/web_server.cpython-37.pyc,,
aiohttp/__pycache__/http_parser.cpython-37.pyc,,
aiohttp/__pycache__/base_protocol.cpython-37.pyc,,
aiohttp/__pycache__/abc.cpython-37.pyc,,
aiohttp/__pycache__/web_log.cpython-37.pyc,,
aiohttp/__pycache__/formdata.cpython-37.pyc,,
aiohttp/__pycache__/cookiejar.cpython-37.pyc,,
aiohttp/__pycache__/http.cpython-37.pyc,,
aiohttp/__pycache__/tcp_helpers.cpython-37.pyc,,
aiohttp/__pycache__/web_response.cpython-37.pyc,,
aiohttp/__pycache__/http_writer.cpython-37.pyc,,
aiohttp/__pycache__/helpers.cpython-37.pyc,,
aiohttp/__pycache__/resolver.cpython-37.pyc,,
aiohttp/__pycache__/http_exceptions.cpython-37.pyc,,
aiohttp/__pycache__/frozenlist.cpython-37.pyc,,
aiohttp/__pycache__/tracing.cpython-37.pyc,,
aiohttp/__pycache__/client_reqrep.cpython-37.pyc,,
aiohttp/__pycache__/worker.cpython-37.pyc,,
aiohttp/__pycache__/http_websocket.cpython-37.pyc,,
aiohttp/__pycache__/web_middlewares.cpython-37.pyc,,
aiohttp/__pycache__/__init__.cpython-37.pyc,,
aiohttp/__pycache__/locks.cpython-37.pyc,,

View file

@@ -1,5 +0,0 @@
Wheel-Version: 1.0
Generator: bdist_wheel (0.36.2)
Root-Is-Purelib: false
Tag: cp37-cp37m-macosx_10_14_x86_64

View file

@@ -1 +0,0 @@
b60c37d122fa91049ccf318c94c871d82ba17ff3bc3fc64f8a65426fce7120b7

View file

@@ -1 +0,0 @@
d067f01423cddb3c442933b5fcc039b18ab651fcec1bc91c577693aafc25cf78

View file

@@ -1 +0,0 @@
043f0b704444c6c59da38ab3bae43ce1ff8bfe91d5ce45103b494400e7b71688

View file

@@ -1 +0,0 @@
6682a22524b9d4fc442e123672622be7bdfb6238d9709b7b15b2113b7ca6d52b

View file

@@ -1 +0,0 @@
5de2db35fb795ffe227e2f1007c8ba4f2ad1b9aca28cc48edc80c779203cf6e3

View file

@@ -1 +0,0 @@
f0688fb2e81ea92bf0a17822260d9591a30979101da12a4b873113fc459fb5fa

View file

@@ -1 +0,0 @@
4e7b7f7baa5c65954e85a5b7c8db7786a0ec3498081b0a9420f792a803086281

View file

@@ -1 +0,0 @@
d57b8e48d0c26f20ebcc5e6e300da2b2a6aeb12b3c9768d64cb0e53432ccf48a

View file

@@ -1 +0,0 @@
6d134aa08da3d6ba0f76d81fc7f9ec7836a7bc1a99b1950d1c3aa65ed7e3951a

View file

@@ -1 +0,0 @@
5ac8c3258003604c8993bfa8357361036337330b722e4849024972ccbb5c95f5

View file

@@ -1 +0,0 @@
48b4df50f771d7e8385524ea0a7057ca1482974f8a43e674982b04b08bc17d5e

View file

@@ -1,217 +0,0 @@
__version__ = "3.7.4"
from typing import Tuple
from . import hdrs as hdrs
from .client import (
BaseConnector as BaseConnector,
ClientConnectionError as ClientConnectionError,
ClientConnectorCertificateError as ClientConnectorCertificateError,
ClientConnectorError as ClientConnectorError,
ClientConnectorSSLError as ClientConnectorSSLError,
ClientError as ClientError,
ClientHttpProxyError as ClientHttpProxyError,
ClientOSError as ClientOSError,
ClientPayloadError as ClientPayloadError,
ClientProxyConnectionError as ClientProxyConnectionError,
ClientRequest as ClientRequest,
ClientResponse as ClientResponse,
ClientResponseError as ClientResponseError,
ClientSession as ClientSession,
ClientSSLError as ClientSSLError,
ClientTimeout as ClientTimeout,
ClientWebSocketResponse as ClientWebSocketResponse,
ContentTypeError as ContentTypeError,
Fingerprint as Fingerprint,
InvalidURL as InvalidURL,
NamedPipeConnector as NamedPipeConnector,
RequestInfo as RequestInfo,
ServerConnectionError as ServerConnectionError,
ServerDisconnectedError as ServerDisconnectedError,
ServerFingerprintMismatch as ServerFingerprintMismatch,
ServerTimeoutError as ServerTimeoutError,
TCPConnector as TCPConnector,
TooManyRedirects as TooManyRedirects,
UnixConnector as UnixConnector,
WSServerHandshakeError as WSServerHandshakeError,
request as request,
)
from .cookiejar import CookieJar as CookieJar, DummyCookieJar as DummyCookieJar
from .formdata import FormData as FormData
from .helpers import BasicAuth as BasicAuth, ChainMapProxy as ChainMapProxy
from .http import (
HttpVersion as HttpVersion,
HttpVersion10 as HttpVersion10,
HttpVersion11 as HttpVersion11,
WebSocketError as WebSocketError,
WSCloseCode as WSCloseCode,
WSMessage as WSMessage,
WSMsgType as WSMsgType,
)
from .multipart import (
BadContentDispositionHeader as BadContentDispositionHeader,
BadContentDispositionParam as BadContentDispositionParam,
BodyPartReader as BodyPartReader,
MultipartReader as MultipartReader,
MultipartWriter as MultipartWriter,
content_disposition_filename as content_disposition_filename,
parse_content_disposition as parse_content_disposition,
)
from .payload import (
PAYLOAD_REGISTRY as PAYLOAD_REGISTRY,
AsyncIterablePayload as AsyncIterablePayload,
BufferedReaderPayload as BufferedReaderPayload,
BytesIOPayload as BytesIOPayload,
BytesPayload as BytesPayload,
IOBasePayload as IOBasePayload,
JsonPayload as JsonPayload,
Payload as Payload,
StringIOPayload as StringIOPayload,
StringPayload as StringPayload,
TextIOPayload as TextIOPayload,
get_payload as get_payload,
payload_type as payload_type,
)
from .payload_streamer import streamer as streamer
from .resolver import (
AsyncResolver as AsyncResolver,
DefaultResolver as DefaultResolver,
ThreadedResolver as ThreadedResolver,
)
from .signals import Signal as Signal
from .streams import (
EMPTY_PAYLOAD as EMPTY_PAYLOAD,
DataQueue as DataQueue,
EofStream as EofStream,
FlowControlDataQueue as FlowControlDataQueue,
StreamReader as StreamReader,
)
from .tracing import (
TraceConfig as TraceConfig,
TraceConnectionCreateEndParams as TraceConnectionCreateEndParams,
TraceConnectionCreateStartParams as TraceConnectionCreateStartParams,
TraceConnectionQueuedEndParams as TraceConnectionQueuedEndParams,
TraceConnectionQueuedStartParams as TraceConnectionQueuedStartParams,
TraceConnectionReuseconnParams as TraceConnectionReuseconnParams,
TraceDnsCacheHitParams as TraceDnsCacheHitParams,
TraceDnsCacheMissParams as TraceDnsCacheMissParams,
TraceDnsResolveHostEndParams as TraceDnsResolveHostEndParams,
TraceDnsResolveHostStartParams as TraceDnsResolveHostStartParams,
TraceRequestChunkSentParams as TraceRequestChunkSentParams,
TraceRequestEndParams as TraceRequestEndParams,
TraceRequestExceptionParams as TraceRequestExceptionParams,
TraceRequestRedirectParams as TraceRequestRedirectParams,
TraceRequestStartParams as TraceRequestStartParams,
TraceResponseChunkReceivedParams as TraceResponseChunkReceivedParams,
)
__all__: Tuple[str, ...] = (
"hdrs",
# client
"BaseConnector",
"ClientConnectionError",
"ClientConnectorCertificateError",
"ClientConnectorError",
"ClientConnectorSSLError",
"ClientError",
"ClientHttpProxyError",
"ClientOSError",
"ClientPayloadError",
"ClientProxyConnectionError",
"ClientResponse",
"ClientRequest",
"ClientResponseError",
"ClientSSLError",
"ClientSession",
"ClientTimeout",
"ClientWebSocketResponse",
"ContentTypeError",
"Fingerprint",
"InvalidURL",
"RequestInfo",
"ServerConnectionError",
"ServerDisconnectedError",
"ServerFingerprintMismatch",
"ServerTimeoutError",
"TCPConnector",
"TooManyRedirects",
"UnixConnector",
"NamedPipeConnector",
"WSServerHandshakeError",
"request",
# cookiejar
"CookieJar",
"DummyCookieJar",
# formdata
"FormData",
# helpers
"BasicAuth",
"ChainMapProxy",
# http
"HttpVersion",
"HttpVersion10",
"HttpVersion11",
"WSMsgType",
"WSCloseCode",
"WSMessage",
"WebSocketError",
# multipart
"BadContentDispositionHeader",
"BadContentDispositionParam",
"BodyPartReader",
"MultipartReader",
"MultipartWriter",
"content_disposition_filename",
"parse_content_disposition",
# payload
"AsyncIterablePayload",
"BufferedReaderPayload",
"BytesIOPayload",
"BytesPayload",
"IOBasePayload",
"JsonPayload",
"PAYLOAD_REGISTRY",
"Payload",
"StringIOPayload",
"StringPayload",
"TextIOPayload",
"get_payload",
"payload_type",
# payload_streamer
"streamer",
# resolver
"AsyncResolver",
"DefaultResolver",
"ThreadedResolver",
# signals
"Signal",
"DataQueue",
"EMPTY_PAYLOAD",
"EofStream",
"FlowControlDataQueue",
"StreamReader",
# tracing
"TraceConfig",
"TraceConnectionCreateEndParams",
"TraceConnectionCreateStartParams",
"TraceConnectionQueuedEndParams",
"TraceConnectionQueuedStartParams",
"TraceConnectionReuseconnParams",
"TraceDnsCacheHitParams",
"TraceDnsCacheMissParams",
"TraceDnsResolveHostEndParams",
"TraceDnsResolveHostStartParams",
"TraceRequestChunkSentParams",
"TraceRequestEndParams",
"TraceRequestExceptionParams",
"TraceRequestRedirectParams",
"TraceRequestStartParams",
"TraceResponseChunkReceivedParams",
)
try:
from .worker import GunicornUVLoopWebWorker, GunicornWebWorker
__all__ += ("GunicornWebWorker", "GunicornUVLoopWebWorker")
except ImportError: # pragma: no cover
pass

View file

@@ -1,140 +0,0 @@
from libc.stdint cimport uint16_t, uint32_t, uint64_t
cdef extern from "../vendor/http-parser/http_parser.h":
ctypedef int (*http_data_cb) (http_parser*,
const char *at,
size_t length) except -1
ctypedef int (*http_cb) (http_parser*) except -1
struct http_parser:
unsigned int type
unsigned int flags
unsigned int state
unsigned int header_state
unsigned int index
uint32_t nread
uint64_t content_length
unsigned short http_major
unsigned short http_minor
unsigned int status_code
unsigned int method
unsigned int http_errno
unsigned int upgrade
void *data
struct http_parser_settings:
http_cb on_message_begin
http_data_cb on_url
http_data_cb on_status
http_data_cb on_header_field
http_data_cb on_header_value
http_cb on_headers_complete
http_data_cb on_body
http_cb on_message_complete
http_cb on_chunk_header
http_cb on_chunk_complete
enum http_parser_type:
HTTP_REQUEST,
HTTP_RESPONSE,
HTTP_BOTH
enum http_errno:
HPE_OK,
HPE_CB_message_begin,
HPE_CB_url,
HPE_CB_header_field,
HPE_CB_header_value,
HPE_CB_headers_complete,
HPE_CB_body,
HPE_CB_message_complete,
HPE_CB_status,
HPE_CB_chunk_header,
HPE_CB_chunk_complete,
HPE_INVALID_EOF_STATE,
HPE_HEADER_OVERFLOW,
HPE_CLOSED_CONNECTION,
HPE_INVALID_VERSION,
HPE_INVALID_STATUS,
HPE_INVALID_METHOD,
HPE_INVALID_URL,
HPE_INVALID_HOST,
HPE_INVALID_PORT,
HPE_INVALID_PATH,
HPE_INVALID_QUERY_STRING,
HPE_INVALID_FRAGMENT,
HPE_LF_EXPECTED,
HPE_INVALID_HEADER_TOKEN,
HPE_INVALID_CONTENT_LENGTH,
HPE_INVALID_CHUNK_SIZE,
HPE_INVALID_CONSTANT,
HPE_INVALID_INTERNAL_STATE,
HPE_STRICT,
HPE_PAUSED,
HPE_UNKNOWN
enum flags:
F_CHUNKED,
F_CONNECTION_KEEP_ALIVE,
F_CONNECTION_CLOSE,
F_CONNECTION_UPGRADE,
F_TRAILING,
F_UPGRADE,
F_SKIPBODY,
F_CONTENTLENGTH
enum http_method:
DELETE, GET, HEAD, POST, PUT, CONNECT, OPTIONS, TRACE, COPY,
LOCK, MKCOL, MOVE, PROPFIND, PROPPATCH, SEARCH, UNLOCK, BIND,
REBIND, UNBIND, ACL, REPORT, MKACTIVITY, CHECKOUT, MERGE,
MSEARCH, NOTIFY, SUBSCRIBE, UNSUBSCRIBE, PATCH, PURGE, MKCALENDAR,
LINK, UNLINK
void http_parser_init(http_parser *parser, http_parser_type type)
size_t http_parser_execute(http_parser *parser,
const http_parser_settings *settings,
const char *data,
size_t len)
int http_should_keep_alive(const http_parser *parser)
void http_parser_settings_init(http_parser_settings *settings)
const char *http_errno_name(http_errno err)
const char *http_errno_description(http_errno err)
const char *http_method_str(http_method m)
# URL Parser
enum http_parser_url_fields:
UF_SCHEMA = 0,
UF_HOST = 1,
UF_PORT = 2,
UF_PATH = 3,
UF_QUERY = 4,
UF_FRAGMENT = 5,
UF_USERINFO = 6,
UF_MAX = 7
struct http_parser_url_field_data:
uint16_t off
uint16_t len
struct http_parser_url:
uint16_t field_set
uint16_t port
http_parser_url_field_data[<int>UF_MAX] field_data
void http_parser_url_init(http_parser_url *u)
int http_parser_parse_url(const char *buf,
size_t buflen,
int is_connect,
http_parser_url *u)

File diff suppressed because it is too large

View file

@@ -1,14 +0,0 @@
#ifndef _FIND_HEADERS_H
#define _FIND_HEADERS_H
#ifdef __cplusplus
extern "C" {
#endif
int find_header(const char *str, int size);
#ifdef __cplusplus
}
#endif
#endif

View file

@@ -1,2 +0,0 @@
cdef extern from "_find_header.h":
int find_header(char *, int)

File diff suppressed because it is too large

View file

@@ -1,108 +0,0 @@
from collections.abc import MutableSequence
cdef class FrozenList:
cdef readonly bint frozen
cdef list _items
def __init__(self, items=None):
self.frozen = False
if items is not None:
items = list(items)
else:
items = []
self._items = items
cdef object _check_frozen(self):
if self.frozen:
raise RuntimeError("Cannot modify frozen list.")
cdef inline object _fast_len(self):
return len(self._items)
def freeze(self):
self.frozen = True
def __getitem__(self, index):
return self._items[index]
def __setitem__(self, index, value):
self._check_frozen()
self._items[index] = value
def __delitem__(self, index):
self._check_frozen()
del self._items[index]
def __len__(self):
return self._fast_len()
def __iter__(self):
return self._items.__iter__()
def __reversed__(self):
return self._items.__reversed__()
def __richcmp__(self, other, op):
if op == 0: # <
return list(self) < other
if op == 1: # <=
return list(self) <= other
if op == 2: # ==
return list(self) == other
if op == 3: # !=
return list(self) != other
if op == 4: # >
return list(self) > other
        if op == 5: # >=
return list(self) >= other
def insert(self, pos, item):
self._check_frozen()
self._items.insert(pos, item)
def __contains__(self, item):
return item in self._items
def __iadd__(self, items):
self._check_frozen()
self._items += list(items)
return self
def index(self, item):
return self._items.index(item)
def remove(self, item):
self._check_frozen()
self._items.remove(item)
def clear(self):
self._check_frozen()
self._items.clear()
def extend(self, items):
self._check_frozen()
self._items += list(items)
def reverse(self):
self._check_frozen()
self._items.reverse()
def pop(self, index=-1):
self._check_frozen()
return self._items.pop(index)
def append(self, item):
self._check_frozen()
return self._items.append(item)
def count(self, item):
return self._items.count(item)
def __repr__(self):
return '<FrozenList(frozen={}, {!r})>'.format(self.frozen,
self._items)
MutableSequence.register(FrozenList)

View file

@@ -1,83 +0,0 @@
# The file is autogenerated from aiohttp/hdrs.py
# Run ./tools/gen.py to update it after the origin changes.
from . import hdrs
cdef tuple headers = (
hdrs.ACCEPT,
hdrs.ACCEPT_CHARSET,
hdrs.ACCEPT_ENCODING,
hdrs.ACCEPT_LANGUAGE,
hdrs.ACCEPT_RANGES,
hdrs.ACCESS_CONTROL_ALLOW_CREDENTIALS,
hdrs.ACCESS_CONTROL_ALLOW_HEADERS,
hdrs.ACCESS_CONTROL_ALLOW_METHODS,
hdrs.ACCESS_CONTROL_ALLOW_ORIGIN,
hdrs.ACCESS_CONTROL_EXPOSE_HEADERS,
hdrs.ACCESS_CONTROL_MAX_AGE,
hdrs.ACCESS_CONTROL_REQUEST_HEADERS,
hdrs.ACCESS_CONTROL_REQUEST_METHOD,
hdrs.AGE,
hdrs.ALLOW,
hdrs.AUTHORIZATION,
hdrs.CACHE_CONTROL,
hdrs.CONNECTION,
hdrs.CONTENT_DISPOSITION,
hdrs.CONTENT_ENCODING,
hdrs.CONTENT_LANGUAGE,
hdrs.CONTENT_LENGTH,
hdrs.CONTENT_LOCATION,
hdrs.CONTENT_MD5,
hdrs.CONTENT_RANGE,
hdrs.CONTENT_TRANSFER_ENCODING,
hdrs.CONTENT_TYPE,
hdrs.COOKIE,
hdrs.DATE,
hdrs.DESTINATION,
hdrs.DIGEST,
hdrs.ETAG,
hdrs.EXPECT,
hdrs.EXPIRES,
hdrs.FORWARDED,
hdrs.FROM,
hdrs.HOST,
hdrs.IF_MATCH,
hdrs.IF_MODIFIED_SINCE,
hdrs.IF_NONE_MATCH,
hdrs.IF_RANGE,
hdrs.IF_UNMODIFIED_SINCE,
hdrs.KEEP_ALIVE,
hdrs.LAST_EVENT_ID,
hdrs.LAST_MODIFIED,
hdrs.LINK,
hdrs.LOCATION,
hdrs.MAX_FORWARDS,
hdrs.ORIGIN,
hdrs.PRAGMA,
hdrs.PROXY_AUTHENTICATE,
hdrs.PROXY_AUTHORIZATION,
hdrs.RANGE,
hdrs.REFERER,
hdrs.RETRY_AFTER,
hdrs.SEC_WEBSOCKET_ACCEPT,
hdrs.SEC_WEBSOCKET_EXTENSIONS,
hdrs.SEC_WEBSOCKET_KEY,
hdrs.SEC_WEBSOCKET_KEY1,
hdrs.SEC_WEBSOCKET_PROTOCOL,
hdrs.SEC_WEBSOCKET_VERSION,
hdrs.SERVER,
hdrs.SET_COOKIE,
hdrs.TE,
hdrs.TRAILER,
hdrs.TRANSFER_ENCODING,
hdrs.URI,
hdrs.UPGRADE,
hdrs.USER_AGENT,
hdrs.VARY,
hdrs.VIA,
hdrs.WWW_AUTHENTICATE,
hdrs.WANT_DIGEST,
hdrs.WARNING,
hdrs.X_FORWARDED_FOR,
hdrs.X_FORWARDED_HOST,
hdrs.X_FORWARDED_PROTO,
)

File diff suppressed because it is too large

View file

@@ -1,6 +0,0 @@
from typing import Any
class reify:
def __init__(self, wrapped: Any) -> None: ...
def __get__(self, inst: Any, owner: Any) -> Any: ...
def __set__(self, inst: Any, value: Any) -> None: ...

View file

@@ -1,35 +0,0 @@
cdef class reify:
"""Use as a class method decorator. It operates almost exactly like
the Python `@property` decorator, but it puts the result of the
method it decorates into the instance dict after the first call,
effectively replacing the function it decorates with an instance
variable. It is, in Python parlance, a data descriptor.
"""
cdef object wrapped
cdef object name
def __init__(self, wrapped):
self.wrapped = wrapped
self.name = wrapped.__name__
@property
def __doc__(self):
return self.wrapped.__doc__
def __get__(self, inst, owner):
try:
try:
return inst._cache[self.name]
except KeyError:
val = self.wrapped(inst)
inst._cache[self.name] = val
return val
except AttributeError:
if inst is None:
return self
raise
def __set__(self, inst, value):
raise AttributeError("reified property is read-only")

File diff suppressed because it is too large

View file

@@ -1,875 +0,0 @@
#cython: language_level=3
#
# Based on https://github.com/MagicStack/httptools
#
from __future__ import absolute_import, print_function
from cpython cimport (
Py_buffer,
PyBUF_SIMPLE,
PyBuffer_Release,
PyBytes_AsString,
PyBytes_AsStringAndSize,
PyObject_GetBuffer,
)
from cpython.mem cimport PyMem_Free, PyMem_Malloc
from libc.limits cimport ULLONG_MAX
from libc.string cimport memcpy
from multidict import CIMultiDict as _CIMultiDict, CIMultiDictProxy as _CIMultiDictProxy
from yarl import URL as _URL
from aiohttp import hdrs
from .http_exceptions import (
BadHttpMessage,
BadStatusLine,
ContentLengthError,
InvalidHeader,
InvalidURLError,
LineTooLong,
PayloadEncodingError,
TransferEncodingError,
)
from .http_parser import DeflateBuffer as _DeflateBuffer
from .http_writer import (
HttpVersion as _HttpVersion,
HttpVersion10 as _HttpVersion10,
HttpVersion11 as _HttpVersion11,
)
from .streams import EMPTY_PAYLOAD as _EMPTY_PAYLOAD, StreamReader as _StreamReader
cimport cython
from aiohttp cimport _cparser as cparser
include "_headers.pxi"
from aiohttp cimport _find_header
DEF DEFAULT_FREELIST_SIZE = 250
cdef extern from "Python.h":
int PyByteArray_Resize(object, Py_ssize_t) except -1
Py_ssize_t PyByteArray_Size(object) except -1
char* PyByteArray_AsString(object)
__all__ = ('HttpRequestParser', 'HttpResponseParser',
'RawRequestMessage', 'RawResponseMessage')
cdef object URL = _URL
cdef object URL_build = URL.build
cdef object CIMultiDict = _CIMultiDict
cdef object CIMultiDictProxy = _CIMultiDictProxy
cdef object HttpVersion = _HttpVersion
cdef object HttpVersion10 = _HttpVersion10
cdef object HttpVersion11 = _HttpVersion11
cdef object SEC_WEBSOCKET_KEY1 = hdrs.SEC_WEBSOCKET_KEY1
cdef object CONTENT_ENCODING = hdrs.CONTENT_ENCODING
cdef object EMPTY_PAYLOAD = _EMPTY_PAYLOAD
cdef object StreamReader = _StreamReader
cdef object DeflateBuffer = _DeflateBuffer
cdef inline object extend(object buf, const char* at, size_t length):
cdef Py_ssize_t s
cdef char* ptr
s = PyByteArray_Size(buf)
PyByteArray_Resize(buf, s + length)
ptr = PyByteArray_AsString(buf)
memcpy(ptr + s, at, length)
DEF METHODS_COUNT = 34;
cdef list _http_method = []
for i in range(METHODS_COUNT):
_http_method.append(
cparser.http_method_str(<cparser.http_method> i).decode('ascii'))
cdef inline str http_method_str(int i):
if i < METHODS_COUNT:
return <str>_http_method[i]
else:
return "<unknown>"
cdef inline object find_header(bytes raw_header):
cdef Py_ssize_t size
cdef char *buf
cdef int idx
PyBytes_AsStringAndSize(raw_header, &buf, &size)
idx = _find_header.find_header(buf, size)
if idx == -1:
return raw_header.decode('utf-8', 'surrogateescape')
return headers[idx]
@cython.freelist(DEFAULT_FREELIST_SIZE)
cdef class RawRequestMessage:
cdef readonly str method
cdef readonly str path
cdef readonly object version # HttpVersion
cdef readonly object headers # CIMultiDict
cdef readonly object raw_headers # tuple
cdef readonly object should_close
cdef readonly object compression
cdef readonly object upgrade
cdef readonly object chunked
cdef readonly object url # yarl.URL
def __init__(self, method, path, version, headers, raw_headers,
should_close, compression, upgrade, chunked, url):
self.method = method
self.path = path
self.version = version
self.headers = headers
self.raw_headers = raw_headers
self.should_close = should_close
self.compression = compression
self.upgrade = upgrade
self.chunked = chunked
self.url = url
def __repr__(self):
info = []
info.append(("method", self.method))
info.append(("path", self.path))
info.append(("version", self.version))
info.append(("headers", self.headers))
info.append(("raw_headers", self.raw_headers))
info.append(("should_close", self.should_close))
info.append(("compression", self.compression))
info.append(("upgrade", self.upgrade))
info.append(("chunked", self.chunked))
info.append(("url", self.url))
sinfo = ', '.join(name + '=' + repr(val) for name, val in info)
return '<RawRequestMessage(' + sinfo + ')>'
def _replace(self, **dct):
cdef RawRequestMessage ret
ret = _new_request_message(self.method,
self.path,
self.version,
self.headers,
self.raw_headers,
self.should_close,
self.compression,
self.upgrade,
self.chunked,
self.url)
if "method" in dct:
ret.method = dct["method"]
if "path" in dct:
ret.path = dct["path"]
if "version" in dct:
ret.version = dct["version"]
if "headers" in dct:
ret.headers = dct["headers"]
if "raw_headers" in dct:
ret.raw_headers = dct["raw_headers"]
if "should_close" in dct:
ret.should_close = dct["should_close"]
if "compression" in dct:
ret.compression = dct["compression"]
if "upgrade" in dct:
ret.upgrade = dct["upgrade"]
if "chunked" in dct:
ret.chunked = dct["chunked"]
if "url" in dct:
ret.url = dct["url"]
return ret
cdef _new_request_message(str method,
str path,
object version,
object headers,
object raw_headers,
bint should_close,
object compression,
bint upgrade,
bint chunked,
object url):
cdef RawRequestMessage ret
ret = RawRequestMessage.__new__(RawRequestMessage)
ret.method = method
ret.path = path
ret.version = version
ret.headers = headers
ret.raw_headers = raw_headers
ret.should_close = should_close
ret.compression = compression
ret.upgrade = upgrade
ret.chunked = chunked
ret.url = url
return ret
@cython.freelist(DEFAULT_FREELIST_SIZE)
cdef class RawResponseMessage:
cdef readonly object version # HttpVersion
cdef readonly int code
cdef readonly str reason
cdef readonly object headers # CIMultiDict
cdef readonly object raw_headers # tuple
cdef readonly object should_close
cdef readonly object compression
cdef readonly object upgrade
cdef readonly object chunked
def __init__(self, version, code, reason, headers, raw_headers,
should_close, compression, upgrade, chunked):
self.version = version
self.code = code
self.reason = reason
self.headers = headers
self.raw_headers = raw_headers
self.should_close = should_close
self.compression = compression
self.upgrade = upgrade
self.chunked = chunked
def __repr__(self):
info = []
info.append(("version", self.version))
info.append(("code", self.code))
info.append(("reason", self.reason))
info.append(("headers", self.headers))
info.append(("raw_headers", self.raw_headers))
info.append(("should_close", self.should_close))
info.append(("compression", self.compression))
info.append(("upgrade", self.upgrade))
info.append(("chunked", self.chunked))
sinfo = ', '.join(name + '=' + repr(val) for name, val in info)
return '<RawResponseMessage(' + sinfo + ')>'
cdef _new_response_message(object version,
int code,
str reason,
object headers,
object raw_headers,
bint should_close,
object compression,
bint upgrade,
bint chunked):
cdef RawResponseMessage ret
ret = RawResponseMessage.__new__(RawResponseMessage)
ret.version = version
ret.code = code
ret.reason = reason
ret.headers = headers
ret.raw_headers = raw_headers
ret.should_close = should_close
ret.compression = compression
ret.upgrade = upgrade
ret.chunked = chunked
return ret
@cython.internal
cdef class HttpParser:
cdef:
cparser.http_parser* _cparser
cparser.http_parser_settings* _csettings
bytearray _raw_name
bytearray _raw_value
bint _has_value
object _protocol
object _loop
object _timer
size_t _max_line_size
size_t _max_field_size
size_t _max_headers
bint _response_with_body
bint _read_until_eof
bint _started
object _url
bytearray _buf
str _path
str _reason
object _headers
list _raw_headers
bint _upgraded
list _messages
object _payload
bint _payload_error
object _payload_exception
object _last_error
bint _auto_decompress
int _limit
str _content_encoding
Py_buffer py_buf
def __cinit__(self):
self._cparser = <cparser.http_parser*> \
PyMem_Malloc(sizeof(cparser.http_parser))
if self._cparser is NULL:
raise MemoryError()
self._csettings = <cparser.http_parser_settings*> \
PyMem_Malloc(sizeof(cparser.http_parser_settings))
if self._csettings is NULL:
raise MemoryError()
def __dealloc__(self):
PyMem_Free(self._cparser)
PyMem_Free(self._csettings)
cdef _init(self, cparser.http_parser_type mode,
object protocol, object loop, int limit,
object timer=None,
size_t max_line_size=8190, size_t max_headers=32768,
size_t max_field_size=8190, payload_exception=None,
bint response_with_body=True, bint read_until_eof=False,
bint auto_decompress=True):
cparser.http_parser_init(self._cparser, mode)
self._cparser.data = <void*>self
self._cparser.content_length = 0
cparser.http_parser_settings_init(self._csettings)
self._protocol = protocol
self._loop = loop
self._timer = timer
self._buf = bytearray()
self._payload = None
self._payload_error = 0
self._payload_exception = payload_exception
self._messages = []
self._raw_name = bytearray()
self._raw_value = bytearray()
self._has_value = False
self._max_line_size = max_line_size
self._max_headers = max_headers
self._max_field_size = max_field_size
self._response_with_body = response_with_body
self._read_until_eof = read_until_eof
self._upgraded = False
self._auto_decompress = auto_decompress
self._content_encoding = None
self._csettings.on_url = cb_on_url
self._csettings.on_status = cb_on_status
self._csettings.on_header_field = cb_on_header_field
self._csettings.on_header_value = cb_on_header_value
self._csettings.on_headers_complete = cb_on_headers_complete
self._csettings.on_body = cb_on_body
self._csettings.on_message_begin = cb_on_message_begin
self._csettings.on_message_complete = cb_on_message_complete
self._csettings.on_chunk_header = cb_on_chunk_header
self._csettings.on_chunk_complete = cb_on_chunk_complete
self._last_error = None
self._limit = limit
cdef _process_header(self):
if self._raw_name:
raw_name = bytes(self._raw_name)
raw_value = bytes(self._raw_value)
name = find_header(raw_name)
value = raw_value.decode('utf-8', 'surrogateescape')
self._headers.add(name, value)
if name is CONTENT_ENCODING:
self._content_encoding = value
PyByteArray_Resize(self._raw_name, 0)
PyByteArray_Resize(self._raw_value, 0)
self._has_value = False
self._raw_headers.append((raw_name, raw_value))
cdef _on_header_field(self, char* at, size_t length):
cdef Py_ssize_t size
cdef char *buf
if self._has_value:
self._process_header()
size = PyByteArray_Size(self._raw_name)
PyByteArray_Resize(self._raw_name, size + length)
buf = PyByteArray_AsString(self._raw_name)
memcpy(buf + size, at, length)
cdef _on_header_value(self, char* at, size_t length):
cdef Py_ssize_t size
cdef char *buf
size = PyByteArray_Size(self._raw_value)
PyByteArray_Resize(self._raw_value, size + length)
buf = PyByteArray_AsString(self._raw_value)
memcpy(buf + size, at, length)
self._has_value = True
cdef _on_headers_complete(self):
self._process_header()
method = http_method_str(self._cparser.method)
should_close = not cparser.http_should_keep_alive(self._cparser)
upgrade = self._cparser.upgrade
chunked = self._cparser.flags & cparser.F_CHUNKED
raw_headers = tuple(self._raw_headers)
headers = CIMultiDictProxy(self._headers)
if upgrade or self._cparser.method == 5: # cparser.CONNECT:
self._upgraded = True
# do not support old websocket spec
if SEC_WEBSOCKET_KEY1 in headers:
raise InvalidHeader(SEC_WEBSOCKET_KEY1)
encoding = None
enc = self._content_encoding
if enc is not None:
self._content_encoding = None
enc = enc.lower()
if enc in ('gzip', 'deflate', 'br'):
encoding = enc
if self._cparser.type == cparser.HTTP_REQUEST:
msg = _new_request_message(
method, self._path,
self.http_version(), headers, raw_headers,
should_close, encoding, upgrade, chunked, self._url)
else:
msg = _new_response_message(
self.http_version(), self._cparser.status_code, self._reason,
headers, raw_headers, should_close, encoding,
upgrade, chunked)
if (ULLONG_MAX > self._cparser.content_length > 0 or chunked or
self._cparser.method == 5 or # CONNECT: 5
(self._cparser.status_code >= 199 and
self._cparser.content_length == ULLONG_MAX and
self._read_until_eof)
):
payload = StreamReader(
self._protocol, timer=self._timer, loop=self._loop,
limit=self._limit)
else:
payload = EMPTY_PAYLOAD
self._payload = payload
if encoding is not None and self._auto_decompress:
self._payload = DeflateBuffer(payload, encoding)
if not self._response_with_body:
payload = EMPTY_PAYLOAD
self._messages.append((msg, payload))
cdef _on_message_complete(self):
self._payload.feed_eof()
self._payload = None
cdef _on_chunk_header(self):
self._payload.begin_http_chunk_receiving()
cdef _on_chunk_complete(self):
self._payload.end_http_chunk_receiving()
cdef object _on_status_complete(self):
pass
cdef inline http_version(self):
cdef cparser.http_parser* parser = self._cparser
if parser.http_major == 1:
if parser.http_minor == 0:
return HttpVersion10
elif parser.http_minor == 1:
return HttpVersion11
return HttpVersion(parser.http_major, parser.http_minor)
### Public API ###
def feed_eof(self):
cdef bytes desc
if self._payload is not None:
if self._cparser.flags & cparser.F_CHUNKED:
raise TransferEncodingError(
"Not enough data for satisfy transfer length header.")
elif self._cparser.flags & cparser.F_CONTENTLENGTH:
raise ContentLengthError(
"Not enough data for satisfy content length header.")
elif self._cparser.http_errno != cparser.HPE_OK:
desc = cparser.http_errno_description(
<cparser.http_errno> self._cparser.http_errno)
raise PayloadEncodingError(desc.decode('latin-1'))
else:
self._payload.feed_eof()
elif self._started:
self._on_headers_complete()
if self._messages:
return self._messages[-1][0]
def feed_data(self, data):
cdef:
size_t data_len
size_t nb
PyObject_GetBuffer(data, &self.py_buf, PyBUF_SIMPLE)
data_len = <size_t>self.py_buf.len
nb = cparser.http_parser_execute(
self._cparser,
self._csettings,
<char*>self.py_buf.buf,
data_len)
PyBuffer_Release(&self.py_buf)
if (self._cparser.http_errno != cparser.HPE_OK):
if self._payload_error == 0:
if self._last_error is not None:
ex = self._last_error
self._last_error = None
else:
ex = parser_error_from_errno(
<cparser.http_errno> self._cparser.http_errno)
self._payload = None
raise ex
if self._messages:
messages = self._messages
self._messages = []
else:
messages = ()
if self._upgraded:
return messages, True, data[nb:]
else:
return messages, False, b''
def set_upgraded(self, val):
self._upgraded = val
cdef class HttpRequestParser(HttpParser):
def __init__(self, protocol, loop, int limit, timer=None,
size_t max_line_size=8190, size_t max_headers=32768,
size_t max_field_size=8190, payload_exception=None,
bint response_with_body=True, bint read_until_eof=False,
):
self._init(cparser.HTTP_REQUEST, protocol, loop, limit, timer,
max_line_size, max_headers, max_field_size,
payload_exception, response_with_body, read_until_eof)
cdef object _on_status_complete(self):
cdef Py_buffer py_buf
if not self._buf:
return
self._path = self._buf.decode('utf-8', 'surrogateescape')
if self._cparser.method == 5: # CONNECT
self._url = URL(self._path)
else:
PyObject_GetBuffer(self._buf, &py_buf, PyBUF_SIMPLE)
try:
self._url = _parse_url(<char*>py_buf.buf,
py_buf.len)
finally:
PyBuffer_Release(&py_buf)
PyByteArray_Resize(self._buf, 0)
cdef class HttpResponseParser(HttpParser):
def __init__(self, protocol, loop, int limit, timer=None,
size_t max_line_size=8190, size_t max_headers=32768,
size_t max_field_size=8190, payload_exception=None,
bint response_with_body=True, bint read_until_eof=False,
bint auto_decompress=True
):
self._init(cparser.HTTP_RESPONSE, protocol, loop, limit, timer,
max_line_size, max_headers, max_field_size,
payload_exception, response_with_body, read_until_eof,
auto_decompress)
cdef object _on_status_complete(self):
if self._buf:
self._reason = self._buf.decode('utf-8', 'surrogateescape')
PyByteArray_Resize(self._buf, 0)
else:
self._reason = self._reason or ''
cdef int cb_on_message_begin(cparser.http_parser* parser) except -1:
cdef HttpParser pyparser = <HttpParser>parser.data
pyparser._started = True
pyparser._headers = CIMultiDict()
pyparser._raw_headers = []
PyByteArray_Resize(pyparser._buf, 0)
pyparser._path = None
pyparser._reason = None
return 0
cdef int cb_on_url(cparser.http_parser* parser,
const char *at, size_t length) except -1:
cdef HttpParser pyparser = <HttpParser>parser.data
try:
if length > pyparser._max_line_size:
raise LineTooLong(
'Status line is too long', pyparser._max_line_size, length)
extend(pyparser._buf, at, length)
except BaseException as ex:
pyparser._last_error = ex
return -1
else:
return 0
cdef int cb_on_status(cparser.http_parser* parser,
const char *at, size_t length) except -1:
cdef HttpParser pyparser = <HttpParser>parser.data
cdef str reason
try:
if length > pyparser._max_line_size:
raise LineTooLong(
'Status line is too long', pyparser._max_line_size, length)
extend(pyparser._buf, at, length)
except BaseException as ex:
pyparser._last_error = ex
return -1
else:
return 0
cdef int cb_on_header_field(cparser.http_parser* parser,
const char *at, size_t length) except -1:
cdef HttpParser pyparser = <HttpParser>parser.data
cdef Py_ssize_t size
try:
pyparser._on_status_complete()
size = len(pyparser._raw_name) + length
if size > pyparser._max_field_size:
raise LineTooLong(
'Header name is too long', pyparser._max_field_size, size)
pyparser._on_header_field(at, length)
except BaseException as ex:
pyparser._last_error = ex
return -1
else:
return 0
cdef int cb_on_header_value(cparser.http_parser* parser,
const char *at, size_t length) except -1:
cdef HttpParser pyparser = <HttpParser>parser.data
cdef Py_ssize_t size
try:
size = len(pyparser._raw_value) + length
if size > pyparser._max_field_size:
raise LineTooLong(
'Header value is too long', pyparser._max_field_size, size)
pyparser._on_header_value(at, length)
except BaseException as ex:
pyparser._last_error = ex
return -1
else:
return 0
cdef int cb_on_headers_complete(cparser.http_parser* parser) except -1:
cdef HttpParser pyparser = <HttpParser>parser.data
try:
pyparser._on_status_complete()
pyparser._on_headers_complete()
except BaseException as exc:
pyparser._last_error = exc
return -1
else:
if pyparser._cparser.upgrade or pyparser._cparser.method == 5: # CONNECT
return 2
else:
return 0
cdef int cb_on_body(cparser.http_parser* parser,
const char *at, size_t length) except -1:
cdef HttpParser pyparser = <HttpParser>parser.data
cdef bytes body = at[:length]
try:
pyparser._payload.feed_data(body, length)
except BaseException as exc:
if pyparser._payload_exception is not None:
pyparser._payload.set_exception(pyparser._payload_exception(str(exc)))
else:
pyparser._payload.set_exception(exc)
pyparser._payload_error = 1
return -1
else:
return 0
cdef int cb_on_message_complete(cparser.http_parser* parser) except -1:
cdef HttpParser pyparser = <HttpParser>parser.data
try:
pyparser._started = False
pyparser._on_message_complete()
except BaseException as exc:
pyparser._last_error = exc
return -1
else:
return 0
cdef int cb_on_chunk_header(cparser.http_parser* parser) except -1:
cdef HttpParser pyparser = <HttpParser>parser.data
try:
pyparser._on_chunk_header()
except BaseException as exc:
pyparser._last_error = exc
return -1
else:
return 0
cdef int cb_on_chunk_complete(cparser.http_parser* parser) except -1:
cdef HttpParser pyparser = <HttpParser>parser.data
try:
pyparser._on_chunk_complete()
except BaseException as exc:
pyparser._last_error = exc
return -1
else:
return 0
cdef parser_error_from_errno(cparser.http_errno errno):
cdef bytes desc = cparser.http_errno_description(errno)
if errno in (cparser.HPE_CB_message_begin,
cparser.HPE_CB_url,
cparser.HPE_CB_header_field,
cparser.HPE_CB_header_value,
cparser.HPE_CB_headers_complete,
cparser.HPE_CB_body,
cparser.HPE_CB_message_complete,
cparser.HPE_CB_status,
cparser.HPE_CB_chunk_header,
cparser.HPE_CB_chunk_complete):
cls = BadHttpMessage
elif errno == cparser.HPE_INVALID_STATUS:
cls = BadStatusLine
elif errno == cparser.HPE_INVALID_METHOD:
cls = BadStatusLine
elif errno == cparser.HPE_INVALID_URL:
cls = InvalidURLError
else:
cls = BadHttpMessage
return cls(desc.decode('latin-1'))
def parse_url(url):
cdef:
Py_buffer py_buf
char* buf_data
PyObject_GetBuffer(url, &py_buf, PyBUF_SIMPLE)
try:
buf_data = <char*>py_buf.buf
return _parse_url(buf_data, py_buf.len)
finally:
PyBuffer_Release(&py_buf)
cdef _parse_url(char* buf_data, size_t length):
cdef:
cparser.http_parser_url* parsed
int res
str schema = None
str host = None
object port = None
str path = None
str query = None
str fragment = None
str user = None
str password = None
str userinfo = None
object result = None
int off
int ln
parsed = <cparser.http_parser_url*> \
PyMem_Malloc(sizeof(cparser.http_parser_url))
if parsed is NULL:
raise MemoryError()
cparser.http_parser_url_init(parsed)
try:
res = cparser.http_parser_parse_url(buf_data, length, 0, parsed)
if res == 0:
if parsed.field_set & (1 << cparser.UF_SCHEMA):
off = parsed.field_data[<int>cparser.UF_SCHEMA].off
ln = parsed.field_data[<int>cparser.UF_SCHEMA].len
schema = buf_data[off:off+ln].decode('utf-8', 'surrogateescape')
else:
schema = ''
if parsed.field_set & (1 << cparser.UF_HOST):
off = parsed.field_data[<int>cparser.UF_HOST].off
ln = parsed.field_data[<int>cparser.UF_HOST].len
host = buf_data[off:off+ln].decode('utf-8', 'surrogateescape')
else:
host = ''
if parsed.field_set & (1 << cparser.UF_PORT):
port = parsed.port
if parsed.field_set & (1 << cparser.UF_PATH):
off = parsed.field_data[<int>cparser.UF_PATH].off
ln = parsed.field_data[<int>cparser.UF_PATH].len
path = buf_data[off:off+ln].decode('utf-8', 'surrogateescape')
else:
path = ''
if parsed.field_set & (1 << cparser.UF_QUERY):
off = parsed.field_data[<int>cparser.UF_QUERY].off
ln = parsed.field_data[<int>cparser.UF_QUERY].len
query = buf_data[off:off+ln].decode('utf-8', 'surrogateescape')
else:
query = ''
if parsed.field_set & (1 << cparser.UF_FRAGMENT):
off = parsed.field_data[<int>cparser.UF_FRAGMENT].off
ln = parsed.field_data[<int>cparser.UF_FRAGMENT].len
fragment = buf_data[off:off+ln].decode('utf-8', 'surrogateescape')
else:
fragment = ''
if parsed.field_set & (1 << cparser.UF_USERINFO):
off = parsed.field_data[<int>cparser.UF_USERINFO].off
ln = parsed.field_data[<int>cparser.UF_USERINFO].len
userinfo = buf_data[off:off+ln].decode('utf-8', 'surrogateescape')
user, sep, password = userinfo.partition(':')
return URL_build(scheme=schema,
user=user, password=password, host=host, port=port,
path=path, query_string=query, fragment=fragment, encoded=True)
else:
raise InvalidURLError("invalid url {!r}".format(buf_data))
finally:
PyMem_Free(parsed)

Some files were not shown because too many files have changed in this diff