kopia lustrzana https://github.com/gd3kr/BlenderGPT
first commit
commit
416affdb7c
|
@ -0,0 +1,175 @@
|
|||
import sys
|
||||
import os
|
||||
import bpy
|
||||
|
||||
# Add the bundled "lib" folder to the Python path so the vendored
# dependencies (e.g. openai) are importable from Blender's interpreter.
libs_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), "lib")
if libs_path not in sys.path:
    sys.path.append(libs_path)

import openai

# Read the key from the environment; if unset this is None and API calls
# will fail with an authentication error.
openai.api_key = os.getenv("OPENAI_API_KEY")
|
||||
|
||||
# Add-on metadata read by Blender's add-on manager.
bl_info = {
    "name": "GPT-4 Blender Assistant",
    "blender": (2, 82, 0),
    "category": "Object",
    "author": "Aarya (@gd3kr)",
    "version": (1, 0, 0),
    "location": "3D View > UI > GPT-4 Blender Assistant",
    "description": "Generate Blender Python code using OpenAI's GPT-4 to perform various tasks.",
    "warning": "",
    "wiki_url": "",
    "tracker_url": "",
}
|
||||
|
||||
|
||||
def init_props():
    """Attach the add-on's per-scene properties to bpy.types.Scene."""
    command_property = bpy.props.StringProperty(
        name="Command",
        description="Enter the natural language command",
        default="",
    )
    bpy.types.Scene.gpt4_natural_language_input = command_property
    # Tracks whether a request is in flight, so the UI can show a busy label.
    bpy.types.Scene.gpt4_button_pressed = bpy.props.BoolProperty(default=False)
|
||||
|
||||
|
||||
|
||||
def clear_props():
    """Detach the per-scene properties registered by init_props."""
    for prop_name in ("gpt4_natural_language_input", "gpt4_button_pressed"):
        delattr(bpy.types.Scene, prop_name)
|
||||
|
||||
|
||||
def generate_blender_code(prompt):
    """Ask the OpenAI chat API for Blender Python code implementing *prompt*.

    Tries GPT-4 first and falls back to GPT-3.5-turbo if that request fails.
    The response is streamed; partial output is echoed to the console as it
    arrives. Returns the accumulated code string, or None on failure.
    """
    # Build the payload once instead of duplicating it per model attempt.
    messages = [
        {
            "role": "system",
            "content": "You are a assistant made for the purposes of helping the user with Blender, the 3D software. Reply only with the code, without markdown, preferably import entire modules instead of bits. do not perform destructive operations on the meshes. Do not use cap_ends. Do not do more than what is asked (setting up render settings, adding cameras, etc)"
        },
        {"role": "user", "content": "Hey, can you please create Blender code for me that accomplishes the following task: " + prompt + "? \n" + "code:\n"},
    ]

    try:
        response = openai.ChatCompletion.create(
            model="gpt-4",
            messages=messages,
            stream=True,
            max_tokens=1000,
        )
    except Exception:  # Use GPT-3.5 if GPT-4 is not available
        try:
            response = openai.ChatCompletion.create(
                model="gpt-3.5-turbo",
                messages=messages,
                stream=True,
                max_tokens=1000,
            )
        except Exception:
            # Both models failed; nothing to stream.
            return None

    # BUG FIX: the original returned None right after a successful GPT-3.5
    # fallback request, discarding its response. Both models now fall through
    # to the shared streaming loop below.
    try:
        completion_text = ''
        # Iterate through the stream of events, appending content deltas.
        for event in response:
            delta = event['choices'][0]['delta']
            # The first chunk carries only the role; empty deltas end the turn.
            if 'role' in delta or len(delta) == 0:
                continue
            completion_text += delta['content']
            # Overwrite the console line with the text accumulated so far.
            print(completion_text, flush=True, end='\r')
        return completion_text
    except IndexError:
        return None
|
||||
|
||||
|
||||
class GPT4_PT_Panel(bpy.types.Panel):
    """Sidebar panel in the 3D View for entering natural-language commands."""

    bl_label = "GPT-4 Blender Assistant"
    bl_idname = "GPT4_PT_Panel"
    bl_space_type = 'VIEW_3D'
    bl_region_type = 'UI'
    bl_category = 'GPT-4 Assistant'

    def draw(self, context):
        scene = context.scene
        col = self.layout.column(align=True)

        col.label(text="Enter a natural language command:")

        # Input field bound to the scene-level command property.
        col.prop(scene, "gpt4_natural_language_input", text="")

        # Swap the button label while a request is in flight.
        if scene.gpt4_button_pressed:
            button_label = "Please wait...(this might take some time)"
        else:
            button_label = "Execute"
        op = col.operator("gpt4.execute", text=button_label)
        op.natural_language_input = scene.gpt4_natural_language_input

        col.separator()
|
||||
|
||||
|
||||
class GPT4_OT_Execute(bpy.types.Operator):
    """Generate Blender Python code from the natural-language command and run it."""

    bl_idname = "gpt4.execute"
    bl_label = "GPT-4 Execute"
    bl_options = {'REGISTER', 'UNDO'}

    natural_language_input: bpy.props.StringProperty(
        name="Command",
        description="Enter the natural language command",
        default="",
    )

    def execute(self, context):
        # Flip the busy flag and force a redraw so the UI shows the
        # "Please wait..." label before the blocking API call starts.
        context.scene.gpt4_button_pressed = True
        bpy.ops.wm.redraw_timer(type='DRAW_WIN_SWAP', iterations=1)

        # try/finally guarantees the busy flag is reset on every exit path;
        # the original duplicated the reset on each return and could leave
        # the button stuck if an unexpected exception escaped.
        try:
            blender_code = generate_blender_code(self.natural_language_input)

            if not blender_code:
                self.report({'ERROR'}, "Failed to generate Blender Python code")
                return {'CANCELLED'}

            # Echo the generated code so the user can inspect what will run.
            print("Generated code:", blender_code)
            try:
                # SECURITY NOTE: exec() runs arbitrary model-generated code
                # with full interpreter privileges. This is the add-on's core
                # feature, but review the printed code when in doubt.
                exec(blender_code)
            except Exception as e:
                self.report({'ERROR'}, f"Error executing generated code: {e}")
                return {'CANCELLED'}

            return {'FINISHED'}
        finally:
            context.scene.gpt4_button_pressed = False
|
||||
|
||||
|
||||
def menu_func(self, context):
    """Menu entry that invokes the GPT-4 execute operator."""
    self.layout.operator(GPT4_OT_Execute.bl_idname)
|
||||
|
||||
|
||||
def register():
    """Register the add-on's classes, menu entry, and scene properties."""
    for cls in (GPT4_OT_Execute, GPT4_PT_Panel):
        bpy.utils.register_class(cls)
    bpy.types.VIEW3D_MT_mesh_add.append(menu_func)
    init_props()
|
||||
|
||||
|
||||
def unregister():
    """Undo everything register() set up, in the same order."""
    for cls in (GPT4_OT_Execute, GPT4_PT_Panel):
        bpy.utils.unregister_class(cls)
    bpy.types.VIEW3D_MT_mesh_add.remove(menu_func)
    clear_props()
|
||||
|
||||
|
||||
# Allow running the script directly from Blender's text editor.
if __name__ == "__main__":
    register()
|
|
@ -0,0 +1 @@
|
|||
pip
|
|
@ -0,0 +1,13 @@
|
|||
Copyright aio-libs contributors.
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
|
@ -0,0 +1,253 @@
|
|||
Metadata-Version: 2.1
|
||||
Name: aiohttp
|
||||
Version: 3.8.4
|
||||
Summary: Async http client/server framework (asyncio)
|
||||
Home-page: https://github.com/aio-libs/aiohttp
|
||||
Maintainer: aiohttp team <team@aiohttp.org>
|
||||
Maintainer-email: team@aiohttp.org
|
||||
License: Apache 2
|
||||
Project-URL: Chat: Gitter, https://gitter.im/aio-libs/Lobby
|
||||
Project-URL: CI: GitHub Actions, https://github.com/aio-libs/aiohttp/actions?query=workflow%3ACI
|
||||
Project-URL: Coverage: codecov, https://codecov.io/github/aio-libs/aiohttp
|
||||
Project-URL: Docs: Changelog, https://docs.aiohttp.org/en/stable/changes.html
|
||||
Project-URL: Docs: RTD, https://docs.aiohttp.org
|
||||
Project-URL: GitHub: issues, https://github.com/aio-libs/aiohttp/issues
|
||||
Project-URL: GitHub: repo, https://github.com/aio-libs/aiohttp
|
||||
Classifier: Development Status :: 5 - Production/Stable
|
||||
Classifier: Framework :: AsyncIO
|
||||
Classifier: Intended Audience :: Developers
|
||||
Classifier: License :: OSI Approved :: Apache Software License
|
||||
Classifier: Operating System :: POSIX
|
||||
Classifier: Operating System :: MacOS :: MacOS X
|
||||
Classifier: Operating System :: Microsoft :: Windows
|
||||
Classifier: Programming Language :: Python
|
||||
Classifier: Programming Language :: Python :: 3
|
||||
Classifier: Programming Language :: Python :: 3.6
|
||||
Classifier: Programming Language :: Python :: 3.7
|
||||
Classifier: Programming Language :: Python :: 3.8
|
||||
Classifier: Programming Language :: Python :: 3.9
|
||||
Classifier: Programming Language :: Python :: 3.10
|
||||
Classifier: Topic :: Internet :: WWW/HTTP
|
||||
Requires-Python: >=3.6
|
||||
Description-Content-Type: text/x-rst
|
||||
License-File: LICENSE.txt
|
||||
Requires-Dist: attrs (>=17.3.0)
|
||||
Requires-Dist: charset-normalizer (<4.0,>=2.0)
|
||||
Requires-Dist: multidict (<7.0,>=4.5)
|
||||
Requires-Dist: async-timeout (<5.0,>=4.0.0a3)
|
||||
Requires-Dist: yarl (<2.0,>=1.0)
|
||||
Requires-Dist: frozenlist (>=1.1.1)
|
||||
Requires-Dist: aiosignal (>=1.1.2)
|
||||
Requires-Dist: idna-ssl (>=1.0) ; python_version < "3.7"
|
||||
Requires-Dist: asynctest (==0.13.0) ; python_version < "3.8"
|
||||
Requires-Dist: typing-extensions (>=3.7.4) ; python_version < "3.8"
|
||||
Provides-Extra: speedups
|
||||
Requires-Dist: aiodns ; extra == 'speedups'
|
||||
Requires-Dist: Brotli ; extra == 'speedups'
|
||||
Requires-Dist: cchardet ; (python_version < "3.10") and extra == 'speedups'
|
||||
|
||||
==================================
|
||||
Async http client/server framework
|
||||
==================================
|
||||
|
||||
.. image:: https://raw.githubusercontent.com/aio-libs/aiohttp/master/docs/aiohttp-plain.svg
|
||||
:height: 64px
|
||||
:width: 64px
|
||||
:alt: aiohttp logo
|
||||
|
||||
|
|
||||
|
||||
.. image:: https://github.com/aio-libs/aiohttp/workflows/CI/badge.svg
|
||||
:target: https://github.com/aio-libs/aiohttp/actions?query=workflow%3ACI
|
||||
:alt: GitHub Actions status for master branch
|
||||
|
||||
.. image:: https://codecov.io/gh/aio-libs/aiohttp/branch/master/graph/badge.svg
|
||||
:target: https://codecov.io/gh/aio-libs/aiohttp
|
||||
:alt: codecov.io status for master branch
|
||||
|
||||
.. image:: https://badge.fury.io/py/aiohttp.svg
|
||||
:target: https://pypi.org/project/aiohttp
|
||||
:alt: Latest PyPI package version
|
||||
|
||||
.. image:: https://readthedocs.org/projects/aiohttp/badge/?version=latest
|
||||
:target: https://docs.aiohttp.org/
|
||||
:alt: Latest Read The Docs
|
||||
|
||||
.. image:: https://img.shields.io/discourse/status?server=https%3A%2F%2Faio-libs.discourse.group
|
||||
:target: https://aio-libs.discourse.group
|
||||
:alt: Discourse status
|
||||
|
||||
.. image:: https://badges.gitter.im/Join%20Chat.svg
|
||||
:target: https://gitter.im/aio-libs/Lobby
|
||||
:alt: Chat on Gitter
|
||||
|
||||
|
||||
Key Features
|
||||
============
|
||||
|
||||
- Supports both client and server side of HTTP protocol.
|
||||
- Supports both client and server Web-Sockets out-of-the-box and avoids
|
||||
Callback Hell.
|
||||
- Provides Web-server with middlewares and pluggable routing.
|
||||
|
||||
|
||||
Getting started
|
||||
===============
|
||||
|
||||
Client
|
||||
------
|
||||
|
||||
To get something from the web:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
import aiohttp
|
||||
import asyncio
|
||||
|
||||
async def main():
|
||||
|
||||
async with aiohttp.ClientSession() as session:
|
||||
async with session.get('http://python.org') as response:
|
||||
|
||||
print("Status:", response.status)
|
||||
print("Content-type:", response.headers['content-type'])
|
||||
|
||||
html = await response.text()
|
||||
print("Body:", html[:15], "...")
|
||||
|
||||
asyncio.run(main())
|
||||
|
||||
This prints:
|
||||
|
||||
.. code-block::
|
||||
|
||||
Status: 200
|
||||
Content-type: text/html; charset=utf-8
|
||||
Body: <!doctype html> ...
|
||||
|
||||
Coming from `requests <https://requests.readthedocs.io/>`_ ? Read `why we need so many lines <https://aiohttp.readthedocs.io/en/latest/http_request_lifecycle.html>`_.
|
||||
|
||||
Server
|
||||
------
|
||||
|
||||
An example using a simple server:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
# examples/server_simple.py
|
||||
from aiohttp import web
|
||||
|
||||
async def handle(request):
|
||||
name = request.match_info.get('name', "Anonymous")
|
||||
text = "Hello, " + name
|
||||
return web.Response(text=text)
|
||||
|
||||
async def wshandle(request):
|
||||
ws = web.WebSocketResponse()
|
||||
await ws.prepare(request)
|
||||
|
||||
async for msg in ws:
|
||||
if msg.type == web.WSMsgType.text:
|
||||
await ws.send_str("Hello, {}".format(msg.data))
|
||||
elif msg.type == web.WSMsgType.binary:
|
||||
await ws.send_bytes(msg.data)
|
||||
elif msg.type == web.WSMsgType.close:
|
||||
break
|
||||
|
||||
return ws
|
||||
|
||||
|
||||
app = web.Application()
|
||||
app.add_routes([web.get('/', handle),
|
||||
web.get('/echo', wshandle),
|
||||
web.get('/{name}', handle)])
|
||||
|
||||
if __name__ == '__main__':
|
||||
web.run_app(app)
|
||||
|
||||
|
||||
Documentation
|
||||
=============
|
||||
|
||||
https://aiohttp.readthedocs.io/
|
||||
|
||||
|
||||
Demos
|
||||
=====
|
||||
|
||||
https://github.com/aio-libs/aiohttp-demos
|
||||
|
||||
|
||||
External links
|
||||
==============
|
||||
|
||||
* `Third party libraries
|
||||
<http://aiohttp.readthedocs.io/en/latest/third_party.html>`_
|
||||
* `Built with aiohttp
|
||||
<http://aiohttp.readthedocs.io/en/latest/built_with.html>`_
|
||||
* `Powered by aiohttp
|
||||
<http://aiohttp.readthedocs.io/en/latest/powered_by.html>`_
|
||||
|
||||
Feel free to make a Pull Request for adding your link to these pages!
|
||||
|
||||
|
||||
Communication channels
|
||||
======================
|
||||
|
||||
*aio-libs discourse group*: https://aio-libs.discourse.group
|
||||
|
||||
*gitter chat* https://gitter.im/aio-libs/Lobby
|
||||
|
||||
We support `Stack Overflow
|
||||
<https://stackoverflow.com/questions/tagged/aiohttp>`_.
|
||||
Please add *aiohttp* tag to your question there.
|
||||
|
||||
Requirements
|
||||
============
|
||||
|
||||
- Python >= 3.6
|
||||
- async-timeout_
|
||||
- attrs_
|
||||
- charset-normalizer_
|
||||
- multidict_
|
||||
- yarl_
|
||||
- frozenlist_
|
||||
|
||||
Optionally you may install the cChardet_ and aiodns_ libraries (highly
|
||||
recommended for sake of speed).
|
||||
|
||||
.. _charset-normalizer: https://pypi.org/project/charset-normalizer
|
||||
.. _aiodns: https://pypi.python.org/pypi/aiodns
|
||||
.. _attrs: https://github.com/python-attrs/attrs
|
||||
.. _multidict: https://pypi.python.org/pypi/multidict
|
||||
.. _frozenlist: https://pypi.org/project/frozenlist/
|
||||
.. _yarl: https://pypi.python.org/pypi/yarl
|
||||
.. _async-timeout: https://pypi.python.org/pypi/async_timeout
|
||||
.. _cChardet: https://pypi.python.org/pypi/cchardet
|
||||
|
||||
License
|
||||
=======
|
||||
|
||||
``aiohttp`` is offered under the Apache 2 license.
|
||||
|
||||
|
||||
Keepsafe
|
||||
========
|
||||
|
||||
The aiohttp community would like to thank Keepsafe
|
||||
(https://www.getkeepsafe.com) for its support in the early days of
|
||||
the project.
|
||||
|
||||
|
||||
Source code
|
||||
===========
|
||||
|
||||
The latest developer version is available in a GitHub repository:
|
||||
https://github.com/aio-libs/aiohttp
|
||||
|
||||
Benchmarks
|
||||
==========
|
||||
|
||||
If you are interested in efficiency, the AsyncIO community maintains a
|
||||
list of benchmarks on the official wiki:
|
||||
https://github.com/python/asyncio/wiki/Benchmarks
|
|
@ -0,0 +1,117 @@
|
|||
aiohttp-3.8.4.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
||||
aiohttp-3.8.4.dist-info/LICENSE.txt,sha256=wUk-nxDVnR-6n53ygAjhVX4zz5-6yM4SY6ozk5goA94,601
|
||||
aiohttp-3.8.4.dist-info/METADATA,sha256=ThZlzmCRsP4HrQ6TkzZfjJH97oYCJ2IFfxsKUrMZt7Q,7608
|
||||
aiohttp-3.8.4.dist-info/RECORD,,
|
||||
aiohttp-3.8.4.dist-info/WHEEL,sha256=J_4V_gB-O6Y7Pn6lk91K27JaIhI-q07YM5J8Ufzqla4,100
|
||||
aiohttp-3.8.4.dist-info/top_level.txt,sha256=iv-JIaacmTl-hSho3QmphcKnbRRYx1st47yjz_178Ro,8
|
||||
aiohttp/.hash/_cparser.pxd.hash,sha256=1tLMUc3IzMppOVKW99LKG1TD6szTXGSaCfHgiPOL9aA,108
|
||||
aiohttp/.hash/_find_header.pxd.hash,sha256=TxG5w4etbVd6sfm5JWbdf5PW6LnuXRQnlMoFBVGKN2E,112
|
||||
aiohttp/.hash/_helpers.pyi.hash,sha256=D1pTrCkUaJ3by1XeGH_nE-amt7XdjfRHcm9oRtoGhHQ,108
|
||||
aiohttp/.hash/_helpers.pyx.hash,sha256=MA4zlNd5xukP4VDAbnoId0Azv8HxCpwLWie2gSMPLsw,108
|
||||
aiohttp/.hash/_http_parser.pyx.hash,sha256=H1bEygSJvt4Oc3GhF_yMpoO4ZYENKF9uqxK8Qtuz_w4,112
|
||||
aiohttp/.hash/_http_writer.pyx.hash,sha256=oqW0CdYfKNwSX0PKREN8Qz_Mi4aaioGCIPbEvsaEgaM,112
|
||||
aiohttp/.hash/_websocket.pyx.hash,sha256=8AcsJ5Tb8lZ9_QVXor_1Xbtl5igK1iP5rtEZZ0iA2AE,110
|
||||
aiohttp/.hash/hdrs.py.hash,sha256=Vfr7WRlz3YbkgRFBfXksCI8mA7PXUMhs37jnv0kiqWQ,103
|
||||
aiohttp/__init__.py,sha256=YlwcFRv0Q3O903_FqwC0d5s3FICP7uc6XhaK3EmLOz8,7086
|
||||
aiohttp/__pycache__/__init__.cpython-39.pyc,,
|
||||
aiohttp/__pycache__/abc.cpython-39.pyc,,
|
||||
aiohttp/__pycache__/base_protocol.cpython-39.pyc,,
|
||||
aiohttp/__pycache__/client.cpython-39.pyc,,
|
||||
aiohttp/__pycache__/client_exceptions.cpython-39.pyc,,
|
||||
aiohttp/__pycache__/client_proto.cpython-39.pyc,,
|
||||
aiohttp/__pycache__/client_reqrep.cpython-39.pyc,,
|
||||
aiohttp/__pycache__/client_ws.cpython-39.pyc,,
|
||||
aiohttp/__pycache__/connector.cpython-39.pyc,,
|
||||
aiohttp/__pycache__/cookiejar.cpython-39.pyc,,
|
||||
aiohttp/__pycache__/formdata.cpython-39.pyc,,
|
||||
aiohttp/__pycache__/hdrs.cpython-39.pyc,,
|
||||
aiohttp/__pycache__/helpers.cpython-39.pyc,,
|
||||
aiohttp/__pycache__/http.cpython-39.pyc,,
|
||||
aiohttp/__pycache__/http_exceptions.cpython-39.pyc,,
|
||||
aiohttp/__pycache__/http_parser.cpython-39.pyc,,
|
||||
aiohttp/__pycache__/http_websocket.cpython-39.pyc,,
|
||||
aiohttp/__pycache__/http_writer.cpython-39.pyc,,
|
||||
aiohttp/__pycache__/locks.cpython-39.pyc,,
|
||||
aiohttp/__pycache__/log.cpython-39.pyc,,
|
||||
aiohttp/__pycache__/multipart.cpython-39.pyc,,
|
||||
aiohttp/__pycache__/payload.cpython-39.pyc,,
|
||||
aiohttp/__pycache__/payload_streamer.cpython-39.pyc,,
|
||||
aiohttp/__pycache__/pytest_plugin.cpython-39.pyc,,
|
||||
aiohttp/__pycache__/resolver.cpython-39.pyc,,
|
||||
aiohttp/__pycache__/streams.cpython-39.pyc,,
|
||||
aiohttp/__pycache__/tcp_helpers.cpython-39.pyc,,
|
||||
aiohttp/__pycache__/test_utils.cpython-39.pyc,,
|
||||
aiohttp/__pycache__/tracing.cpython-39.pyc,,
|
||||
aiohttp/__pycache__/typedefs.cpython-39.pyc,,
|
||||
aiohttp/__pycache__/web.cpython-39.pyc,,
|
||||
aiohttp/__pycache__/web_app.cpython-39.pyc,,
|
||||
aiohttp/__pycache__/web_exceptions.cpython-39.pyc,,
|
||||
aiohttp/__pycache__/web_fileresponse.cpython-39.pyc,,
|
||||
aiohttp/__pycache__/web_log.cpython-39.pyc,,
|
||||
aiohttp/__pycache__/web_middlewares.cpython-39.pyc,,
|
||||
aiohttp/__pycache__/web_protocol.cpython-39.pyc,,
|
||||
aiohttp/__pycache__/web_request.cpython-39.pyc,,
|
||||
aiohttp/__pycache__/web_response.cpython-39.pyc,,
|
||||
aiohttp/__pycache__/web_routedef.cpython-39.pyc,,
|
||||
aiohttp/__pycache__/web_runner.cpython-39.pyc,,
|
||||
aiohttp/__pycache__/web_server.cpython-39.pyc,,
|
||||
aiohttp/__pycache__/web_urldispatcher.cpython-39.pyc,,
|
||||
aiohttp/__pycache__/web_ws.cpython-39.pyc,,
|
||||
aiohttp/__pycache__/worker.cpython-39.pyc,,
|
||||
aiohttp/_cparser.pxd,sha256=rEtEshtn54wSf_ZCJ0EBjRyVBkIOv_oh17BcaNOd6V8,5188
|
||||
aiohttp/_find_header.pxd,sha256=BFUSmxhemBtblqxzjzH3x03FfxaWlTyuAIOz8YZ5_nM,70
|
||||
aiohttp/_headers.pxi,sha256=1MhCe6Un_KI1tpO85HnDfzVO94BhcirLanAOys5FIHA,2090
|
||||
aiohttp/_helpers.cp39-win_amd64.pyd,sha256=saSU6G6FDzwzX8L8wnLM3ztPffh6I6LCWFXRffajt50,39936
|
||||
aiohttp/_helpers.pyi,sha256=2Hd5IC0Zf4YTEJ412suyyhsh1kVyVDv5g4stgyo2Ksc,208
|
||||
aiohttp/_helpers.pyx,sha256=tgl7fZh0QMT6cjf4jSJ8iaO6DdQD3GON2-SH4N5_ETg,1084
|
||||
aiohttp/_http_parser.cp39-win_amd64.pyd,sha256=jedetBkAYpdEjskn7uupJGVOt1XKGGA3FGhMgfqtrbk,215552
|
||||
aiohttp/_http_parser.pyx,sha256=1AHorYdvPR_sEjQSnCWT9kwGZKWFapVRZT910TUx7OM,28168
|
||||
aiohttp/_http_writer.cp39-win_amd64.pyd,sha256=zGoLudoYg_87WtYocd1NAA3KvGemARaQmkhDYBHbNpo,35840
|
||||
aiohttp/_http_writer.pyx,sha256=8CBLytO2rx1kdpWe9HYSznhLXdeZWyE-3xI7jaGasag,4738
|
||||
aiohttp/_websocket.cp39-win_amd64.pyd,sha256=c_0vaDGix66LSY8sncK3DaPBNQkZDa5aDk3226SyAtY,24064
|
||||
aiohttp/_websocket.pyx,sha256=o9J7yi9c2-jTBjE3dUkXxhDWKvRWJz5GZfyLsgJQa38,1617
|
||||
aiohttp/abc.py,sha256=h_4OTYoks0wu64YIpvNPMFkFaVqh1cJ7HdygN1fAKp8,5712
|
||||
aiohttp/base_protocol.py,sha256=u4ITEnXHJ88gNDngHxiU01ZPQhMy_m2eQTJx0cqwvXA,2831
|
||||
aiohttp/client.py,sha256=vP-rZMty2GJbMeWqnwITHZQjdqtFTTGEJGGW9Ndv1gg,46342
|
||||
aiohttp/client_exceptions.py,sha256=k-ys_A4QQW-IH4-fQIAYgBj6Zjjf2METxILp2cElMuk,9612
|
||||
aiohttp/client_proto.py,sha256=3Cx_gH0ZZNNkVCzhnysBe09zbLhnN9jZX3gvqsp_SbE,8421
|
||||
aiohttp/client_reqrep.py,sha256=DKI4Wq2DyHgr6k1IVnKbRoOwvHQXxo3emKYJEy8oIa0,38107
|
||||
aiohttp/client_ws.py,sha256=70XQ5cWdvbwgA6tko-1GfhbSJ70bAHjm1OE4mWtWDw4,10816
|
||||
aiohttp/connector.py,sha256=h5hT-7KZ02NUBQ2OT3wCvyO0h7TZfl7ENFNBgRJUKEM,52630
|
||||
aiohttp/cookiejar.py,sha256=dih2tt5EPa_Z-Xp4oq3xmYYKBQKAIdM4JiZEKHPrvGU,14070
|
||||
aiohttp/formdata.py,sha256=iySnD63XJwo4l1TT_KZiJGNzzmn0RtvvF2_FnJM32Uo,6278
|
||||
aiohttp/hdrs.py,sha256=ygDnAqYCot8NMQyjck2r9CzpTspAOXpHknNl-yxIybY,4838
|
||||
aiohttp/helpers.py,sha256=evbtkLDIbZ3oW8V3A5ODXQ8YPHTfk5t19AzZEKWhfnI,27276
|
||||
aiohttp/http.py,sha256=YPu2yu8TQAiQfIiu9JpE4ICmE0D4ljADgLXtNFlEJnc,1870
|
||||
aiohttp/http_exceptions.py,sha256=rLwhCbFrOpQ_ntr3GnxaxD3oRnTTNM1utmDDBUbdVTU,2691
|
||||
aiohttp/http_parser.py,sha256=gokMNEZeTCHfA52a7LwiGxEShLHAOfSxbCq-8QWh9QU,34061
|
||||
aiohttp/http_websocket.py,sha256=a7yQM4cQcM-DxhRITp8dT_0jfWxqbU-yTWTgqH8Zfpc,26000
|
||||
aiohttp/http_writer.py,sha256=w0Q5-jEx3V40vuf4VAaKxSitEaiKIUyHRI_qUM0ado4,6131
|
||||
aiohttp/locks.py,sha256=vp1Z4zx0SvooSffw88dkZ-7qpk2CqRf5vWh2dpKagTA,1177
|
||||
aiohttp/log.py,sha256=zYUTvXsMQ9Sz1yNN8kXwd5Qxu49a1FzjZ_wQqriEc8M,333
|
||||
aiohttp/multipart.py,sha256=DqB2JIhnmhpmC3x5KuYaDGPfS6a1yMixnyVRsVSOcDI,33274
|
||||
aiohttp/payload.py,sha256=5RuKdJJj1WYwIo_vp-4Jdcn5ZoTzrAYna90DG0xNpXQ,14099
|
||||
aiohttp/payload_streamer.py,sha256=6bbqsfgysHzfUTJIUlqad1wRklVuLmNHVtScv_mWhGY,2187
|
||||
aiohttp/py.typed,sha256=3VVwXUAWVEVX7sDwyYDnW5ZdBC9_Z9AJAFfLCleUW0k,8
|
||||
aiohttp/pytest_plugin.py,sha256=26CllzLUw-JGrwfXYIK-XGbDMCDwbIaomV7Sk4WZVG4,12163
|
||||
aiohttp/resolver.py,sha256=UWM-6HJkOLwkGveoesBnGv-pJePtpE7js6ujYi7XHoc,5252
|
||||
aiohttp/streams.py,sha256=yWdQ0wIGIQCfI9OGdhzImtQKcUBRNfQ9Dw2pzXm-dCM,21418
|
||||
aiohttp/tcp_helpers.py,sha256=K-hhGh3jd6qCEnHJo8LvFyfJwBjh99UKI7A0aSRVhj4,998
|
||||
aiohttp/test_utils.py,sha256=gs74TWcysSMuhfAvae0Dy-jKkWVKBWKp2nLt_qfi3x0,22140
|
||||
aiohttp/tracing.py,sha256=qOwPt4vyGRVlebUUSuOJ5Vc9cc4ZGOOPr5TuweOSoVE,15649
|
||||
aiohttp/typedefs.py,sha256=E-iS7tNE3CrcMAjz5uiwr_Zlp4CKpCU99hzAKw08g14,1830
|
||||
aiohttp/web.py,sha256=lzB4xbNvgULFAN6uxb1Yg63qAyIFgapIHYGk3z7uZeU,18669
|
||||
aiohttp/web_app.py,sha256=QEkoioPMr8vLTFGgzGEdOWkX5xM9b44neR-cYyfjs6c,17727
|
||||
aiohttp/web_exceptions.py,sha256=ZhwkxigtA1hmmxkVQTH9cTRgU_cz5wb-0Enm-dlj_v4,10539
|
||||
aiohttp/web_fileresponse.py,sha256=tRRZyBNPL579Ac8Q2mSF7qjHIkV42z9VyVEkvQLOJlQ,11072
|
||||
aiohttp/web_log.py,sha256=IxdzjN1glBIssfpWBv6cS9TtyyAaJF0hJHCqt7IxEtE,7765
|
||||
aiohttp/web_middlewares.py,sha256=qmHNhOdhTtJZjChDiBlF9M-SGxF3vz2koDnxMPrlNCA,4256
|
||||
aiohttp/web_protocol.py,sha256=VVlY1FBTEg5JxtDhYO7bvfamylBEpKffdJFSN9_6cC8,23078
|
||||
aiohttp/web_request.py,sha256=nOKHoJdzco_41mfGHUclYrdoZKazD8dCiAbd0UOwQzU,29069
|
||||
aiohttp/web_response.py,sha256=Ont7UsoFinG6fpLwuVTOLHHZDWZ5VM0uGFCOiUII-pk,28296
|
||||
aiohttp/web_routedef.py,sha256=KrQtG7OzlE1ylPZf54yDJjBLl36nGvYl8Z9mA6SWayk,6368
|
||||
aiohttp/web_runner.py,sha256=OGDcOLBeCZuOPNeTa7wi8bLll_7kzviMAx7vRgKs4UE,11538
|
||||
aiohttp/web_server.py,sha256=peGS1H8OvZEd4LuB_hqTA_6HQhc4stFeYQIh4XCmZ2A,2112
|
||||
aiohttp/web_urldispatcher.py,sha256=r6lC1U7puKU45VHcvQW5mJ6ienTBCi01ouz077kiJHQ,40703
|
||||
aiohttp/web_ws.py,sha256=MqL1XuwXeh_xf3X9oAwvNGvVA3zvxcykR_O26EKkL0A,17631
|
||||
aiohttp/worker.py,sha256=WlC9ItbJ3NPCHUFIJcbEGOQkC9kWcumURyxfcKYtqws,9032
|
|
@ -0,0 +1,5 @@
|
|||
Wheel-Version: 1.0
|
||||
Generator: bdist_wheel (0.38.4)
|
||||
Root-Is-Purelib: false
|
||||
Tag: cp39-cp39-win_amd64
|
||||
|
|
@ -0,0 +1 @@
|
|||
aiohttp
|
|
@ -0,0 +1 @@
|
|||
ac4b44b21b67e78c127ff6422741018d1c9506420ebffa21d7b05c68d39de95f *D:/a/aiohttp/aiohttp/aiohttp/_cparser.pxd
|
|
@ -0,0 +1 @@
|
|||
0455129b185e981b5b96ac738f31f7c74dc57f1696953cae0083b3f18679fe73 *D:/a/aiohttp/aiohttp/aiohttp/_find_header.pxd
|
|
@ -0,0 +1 @@
|
|||
d87779202d197f8613109e35dacbb2ca1b21d64572543bf9838b2d832a362ac7 *D:/a/aiohttp/aiohttp/aiohttp/_helpers.pyi
|
|
@ -0,0 +1 @@
|
|||
b6097b7d987440c4fa7237f88d227c89a3ba0dd403dc638ddbe487e0de7f1138 *D:/a/aiohttp/aiohttp/aiohttp/_helpers.pyx
|
|
@ -0,0 +1 @@
|
|||
d401e8ad876f3d1fec1234129c2593f64c0664a5856a9551653f75d13531ece3 *D:/a/aiohttp/aiohttp/aiohttp/_http_parser.pyx
|
|
@ -0,0 +1 @@
|
|||
f0204bcad3b6af1d6476959ef47612ce784b5dd7995b213edf123b8da19ab1a8 *D:/a/aiohttp/aiohttp/aiohttp/_http_writer.pyx
|
|
@ -0,0 +1 @@
|
|||
a3d27bca2f5cdbe8d3063137754917c610d62af456273e4665fc8bb202506b7f *D:/a/aiohttp/aiohttp/aiohttp/_websocket.pyx
|
|
@ -0,0 +1 @@
|
|||
ca00e702a602a2df0d310ca3724dabf42ce94eca40397a47927365fb2c48c9b6 *D:/a/aiohttp/aiohttp/aiohttp/hdrs.py
|
|
@ -0,0 +1,216 @@
|
|||
__version__ = "3.8.4"
|
||||
|
||||
from typing import Tuple
|
||||
|
||||
from . import hdrs as hdrs
|
||||
from .client import (
|
||||
BaseConnector as BaseConnector,
|
||||
ClientConnectionError as ClientConnectionError,
|
||||
ClientConnectorCertificateError as ClientConnectorCertificateError,
|
||||
ClientConnectorError as ClientConnectorError,
|
||||
ClientConnectorSSLError as ClientConnectorSSLError,
|
||||
ClientError as ClientError,
|
||||
ClientHttpProxyError as ClientHttpProxyError,
|
||||
ClientOSError as ClientOSError,
|
||||
ClientPayloadError as ClientPayloadError,
|
||||
ClientProxyConnectionError as ClientProxyConnectionError,
|
||||
ClientRequest as ClientRequest,
|
||||
ClientResponse as ClientResponse,
|
||||
ClientResponseError as ClientResponseError,
|
||||
ClientSession as ClientSession,
|
||||
ClientSSLError as ClientSSLError,
|
||||
ClientTimeout as ClientTimeout,
|
||||
ClientWebSocketResponse as ClientWebSocketResponse,
|
||||
ContentTypeError as ContentTypeError,
|
||||
Fingerprint as Fingerprint,
|
||||
InvalidURL as InvalidURL,
|
||||
NamedPipeConnector as NamedPipeConnector,
|
||||
RequestInfo as RequestInfo,
|
||||
ServerConnectionError as ServerConnectionError,
|
||||
ServerDisconnectedError as ServerDisconnectedError,
|
||||
ServerFingerprintMismatch as ServerFingerprintMismatch,
|
||||
ServerTimeoutError as ServerTimeoutError,
|
||||
TCPConnector as TCPConnector,
|
||||
TooManyRedirects as TooManyRedirects,
|
||||
UnixConnector as UnixConnector,
|
||||
WSServerHandshakeError as WSServerHandshakeError,
|
||||
request as request,
|
||||
)
|
||||
from .cookiejar import CookieJar as CookieJar, DummyCookieJar as DummyCookieJar
|
||||
from .formdata import FormData as FormData
|
||||
from .helpers import BasicAuth, ChainMapProxy, ETag
|
||||
from .http import (
|
||||
HttpVersion as HttpVersion,
|
||||
HttpVersion10 as HttpVersion10,
|
||||
HttpVersion11 as HttpVersion11,
|
||||
WebSocketError as WebSocketError,
|
||||
WSCloseCode as WSCloseCode,
|
||||
WSMessage as WSMessage,
|
||||
WSMsgType as WSMsgType,
|
||||
)
|
||||
from .multipart import (
|
||||
BadContentDispositionHeader as BadContentDispositionHeader,
|
||||
BadContentDispositionParam as BadContentDispositionParam,
|
||||
BodyPartReader as BodyPartReader,
|
||||
MultipartReader as MultipartReader,
|
||||
MultipartWriter as MultipartWriter,
|
||||
content_disposition_filename as content_disposition_filename,
|
||||
parse_content_disposition as parse_content_disposition,
|
||||
)
|
||||
from .payload import (
|
||||
PAYLOAD_REGISTRY as PAYLOAD_REGISTRY,
|
||||
AsyncIterablePayload as AsyncIterablePayload,
|
||||
BufferedReaderPayload as BufferedReaderPayload,
|
||||
BytesIOPayload as BytesIOPayload,
|
||||
BytesPayload as BytesPayload,
|
||||
IOBasePayload as IOBasePayload,
|
||||
JsonPayload as JsonPayload,
|
||||
Payload as Payload,
|
||||
StringIOPayload as StringIOPayload,
|
||||
StringPayload as StringPayload,
|
||||
TextIOPayload as TextIOPayload,
|
||||
get_payload as get_payload,
|
||||
payload_type as payload_type,
|
||||
)
|
||||
from .payload_streamer import streamer as streamer
|
||||
from .resolver import (
|
||||
AsyncResolver as AsyncResolver,
|
||||
DefaultResolver as DefaultResolver,
|
||||
ThreadedResolver as ThreadedResolver,
|
||||
)
|
||||
from .streams import (
|
||||
EMPTY_PAYLOAD as EMPTY_PAYLOAD,
|
||||
DataQueue as DataQueue,
|
||||
EofStream as EofStream,
|
||||
FlowControlDataQueue as FlowControlDataQueue,
|
||||
StreamReader as StreamReader,
|
||||
)
|
||||
from .tracing import (
|
||||
TraceConfig as TraceConfig,
|
||||
TraceConnectionCreateEndParams as TraceConnectionCreateEndParams,
|
||||
TraceConnectionCreateStartParams as TraceConnectionCreateStartParams,
|
||||
TraceConnectionQueuedEndParams as TraceConnectionQueuedEndParams,
|
||||
TraceConnectionQueuedStartParams as TraceConnectionQueuedStartParams,
|
||||
TraceConnectionReuseconnParams as TraceConnectionReuseconnParams,
|
||||
TraceDnsCacheHitParams as TraceDnsCacheHitParams,
|
||||
TraceDnsCacheMissParams as TraceDnsCacheMissParams,
|
||||
TraceDnsResolveHostEndParams as TraceDnsResolveHostEndParams,
|
||||
TraceDnsResolveHostStartParams as TraceDnsResolveHostStartParams,
|
||||
TraceRequestChunkSentParams as TraceRequestChunkSentParams,
|
||||
TraceRequestEndParams as TraceRequestEndParams,
|
||||
TraceRequestExceptionParams as TraceRequestExceptionParams,
|
||||
TraceRequestRedirectParams as TraceRequestRedirectParams,
|
||||
TraceRequestStartParams as TraceRequestStartParams,
|
||||
TraceResponseChunkReceivedParams as TraceResponseChunkReceivedParams,
|
||||
)
|
||||
|
||||
__all__: Tuple[str, ...] = (
|
||||
"hdrs",
|
||||
# client
|
||||
"BaseConnector",
|
||||
"ClientConnectionError",
|
||||
"ClientConnectorCertificateError",
|
||||
"ClientConnectorError",
|
||||
"ClientConnectorSSLError",
|
||||
"ClientError",
|
||||
"ClientHttpProxyError",
|
||||
"ClientOSError",
|
||||
"ClientPayloadError",
|
||||
"ClientProxyConnectionError",
|
||||
"ClientResponse",
|
||||
"ClientRequest",
|
||||
"ClientResponseError",
|
||||
"ClientSSLError",
|
||||
"ClientSession",
|
||||
"ClientTimeout",
|
||||
"ClientWebSocketResponse",
|
||||
"ContentTypeError",
|
||||
"Fingerprint",
|
||||
"InvalidURL",
|
||||
"RequestInfo",
|
||||
"ServerConnectionError",
|
||||
"ServerDisconnectedError",
|
||||
"ServerFingerprintMismatch",
|
||||
"ServerTimeoutError",
|
||||
"TCPConnector",
|
||||
"TooManyRedirects",
|
||||
"UnixConnector",
|
||||
"NamedPipeConnector",
|
||||
"WSServerHandshakeError",
|
||||
"request",
|
||||
# cookiejar
|
||||
"CookieJar",
|
||||
"DummyCookieJar",
|
||||
# formdata
|
||||
"FormData",
|
||||
# helpers
|
||||
"BasicAuth",
|
||||
"ChainMapProxy",
|
||||
"ETag",
|
||||
# http
|
||||
"HttpVersion",
|
||||
"HttpVersion10",
|
||||
"HttpVersion11",
|
||||
"WSMsgType",
|
||||
"WSCloseCode",
|
||||
"WSMessage",
|
||||
"WebSocketError",
|
||||
# multipart
|
||||
"BadContentDispositionHeader",
|
||||
"BadContentDispositionParam",
|
||||
"BodyPartReader",
|
||||
"MultipartReader",
|
||||
"MultipartWriter",
|
||||
"content_disposition_filename",
|
||||
"parse_content_disposition",
|
||||
# payload
|
||||
"AsyncIterablePayload",
|
||||
"BufferedReaderPayload",
|
||||
"BytesIOPayload",
|
||||
"BytesPayload",
|
||||
"IOBasePayload",
|
||||
"JsonPayload",
|
||||
"PAYLOAD_REGISTRY",
|
||||
"Payload",
|
||||
"StringIOPayload",
|
||||
"StringPayload",
|
||||
"TextIOPayload",
|
||||
"get_payload",
|
||||
"payload_type",
|
||||
# payload_streamer
|
||||
"streamer",
|
||||
# resolver
|
||||
"AsyncResolver",
|
||||
"DefaultResolver",
|
||||
"ThreadedResolver",
|
||||
# streams
|
||||
"DataQueue",
|
||||
"EMPTY_PAYLOAD",
|
||||
"EofStream",
|
||||
"FlowControlDataQueue",
|
||||
"StreamReader",
|
||||
# tracing
|
||||
"TraceConfig",
|
||||
"TraceConnectionCreateEndParams",
|
||||
"TraceConnectionCreateStartParams",
|
||||
"TraceConnectionQueuedEndParams",
|
||||
"TraceConnectionQueuedStartParams",
|
||||
"TraceConnectionReuseconnParams",
|
||||
"TraceDnsCacheHitParams",
|
||||
"TraceDnsCacheMissParams",
|
||||
"TraceDnsResolveHostEndParams",
|
||||
"TraceDnsResolveHostStartParams",
|
||||
"TraceRequestChunkSentParams",
|
||||
"TraceRequestEndParams",
|
||||
"TraceRequestExceptionParams",
|
||||
"TraceRequestRedirectParams",
|
||||
"TraceRequestStartParams",
|
||||
"TraceResponseChunkReceivedParams",
|
||||
)
|
||||
|
||||
try:
|
||||
from .worker import GunicornUVLoopWebWorker, GunicornWebWorker
|
||||
|
||||
__all__ += ("GunicornWebWorker", "GunicornUVLoopWebWorker")
|
||||
except ImportError: # pragma: no cover
|
||||
pass
|
Plik binarny nie jest wyświetlany.
Plik binarny nie jest wyświetlany.
Plik binarny nie jest wyświetlany.
Plik binarny nie jest wyświetlany.
Plik binarny nie jest wyświetlany.
Plik binarny nie jest wyświetlany.
Plik binarny nie jest wyświetlany.
Plik binarny nie jest wyświetlany.
Plik binarny nie jest wyświetlany.
Plik binarny nie jest wyświetlany.
Plik binarny nie jest wyświetlany.
Plik binarny nie jest wyświetlany.
Plik binarny nie jest wyświetlany.
Plik binarny nie jest wyświetlany.
Plik binarny nie jest wyświetlany.
Plik binarny nie jest wyświetlany.
Plik binarny nie jest wyświetlany.
Plik binarny nie jest wyświetlany.
Plik binarny nie jest wyświetlany.
Plik binarny nie jest wyświetlany.
Plik binarny nie jest wyświetlany.
Plik binarny nie jest wyświetlany.
Plik binarny nie jest wyświetlany.
Plik binarny nie jest wyświetlany.
Plik binarny nie jest wyświetlany.
Plik binarny nie jest wyświetlany.
Plik binarny nie jest wyświetlany.
Plik binarny nie jest wyświetlany.
Plik binarny nie jest wyświetlany.
Plik binarny nie jest wyświetlany.
Plik binarny nie jest wyświetlany.
Plik binarny nie jest wyświetlany.
Plik binarny nie jest wyświetlany.
Plik binarny nie jest wyświetlany.
Plik binarny nie jest wyświetlany.
Plik binarny nie jest wyświetlany.
Plik binarny nie jest wyświetlany.
Plik binarny nie jest wyświetlany.
Plik binarny nie jest wyświetlany.
Plik binarny nie jest wyświetlany.
Plik binarny nie jest wyświetlany.
Plik binarny nie jest wyświetlany.
Plik binarny nie jest wyświetlany.
Plik binarny nie jest wyświetlany.
Plik binarny nie jest wyświetlany.
|
@ -0,0 +1,190 @@
|
|||
from libc.stdint cimport (
|
||||
int8_t,
|
||||
int16_t,
|
||||
int32_t,
|
||||
int64_t,
|
||||
uint8_t,
|
||||
uint16_t,
|
||||
uint32_t,
|
||||
uint64_t,
|
||||
)
|
||||
|
||||
|
||||
cdef extern from "../vendor/llhttp/build/llhttp.h":
|
||||
|
||||
struct llhttp__internal_s:
|
||||
int32_t _index
|
||||
void* _span_pos0
|
||||
void* _span_cb0
|
||||
int32_t error
|
||||
const char* reason
|
||||
const char* error_pos
|
||||
void* data
|
||||
void* _current
|
||||
uint64_t content_length
|
||||
uint8_t type
|
||||
uint8_t method
|
||||
uint8_t http_major
|
||||
uint8_t http_minor
|
||||
uint8_t header_state
|
||||
uint8_t lenient_flags
|
||||
uint8_t upgrade
|
||||
uint8_t finish
|
||||
uint16_t flags
|
||||
uint16_t status_code
|
||||
void* settings
|
||||
|
||||
ctypedef llhttp__internal_s llhttp__internal_t
|
||||
ctypedef llhttp__internal_t llhttp_t
|
||||
|
||||
ctypedef int (*llhttp_data_cb)(llhttp_t*, const char *at, size_t length) except -1
|
||||
ctypedef int (*llhttp_cb)(llhttp_t*) except -1
|
||||
|
||||
struct llhttp_settings_s:
|
||||
llhttp_cb on_message_begin
|
||||
llhttp_data_cb on_url
|
||||
llhttp_data_cb on_status
|
||||
llhttp_data_cb on_header_field
|
||||
llhttp_data_cb on_header_value
|
||||
llhttp_cb on_headers_complete
|
||||
llhttp_data_cb on_body
|
||||
llhttp_cb on_message_complete
|
||||
llhttp_cb on_chunk_header
|
||||
llhttp_cb on_chunk_complete
|
||||
|
||||
llhttp_cb on_url_complete
|
||||
llhttp_cb on_status_complete
|
||||
llhttp_cb on_header_field_complete
|
||||
llhttp_cb on_header_value_complete
|
||||
|
||||
ctypedef llhttp_settings_s llhttp_settings_t
|
||||
|
||||
enum llhttp_errno:
|
||||
HPE_OK,
|
||||
HPE_INTERNAL,
|
||||
HPE_STRICT,
|
||||
HPE_LF_EXPECTED,
|
||||
HPE_UNEXPECTED_CONTENT_LENGTH,
|
||||
HPE_CLOSED_CONNECTION,
|
||||
HPE_INVALID_METHOD,
|
||||
HPE_INVALID_URL,
|
||||
HPE_INVALID_CONSTANT,
|
||||
HPE_INVALID_VERSION,
|
||||
HPE_INVALID_HEADER_TOKEN,
|
||||
HPE_INVALID_CONTENT_LENGTH,
|
||||
HPE_INVALID_CHUNK_SIZE,
|
||||
HPE_INVALID_STATUS,
|
||||
HPE_INVALID_EOF_STATE,
|
||||
HPE_INVALID_TRANSFER_ENCODING,
|
||||
HPE_CB_MESSAGE_BEGIN,
|
||||
HPE_CB_HEADERS_COMPLETE,
|
||||
HPE_CB_MESSAGE_COMPLETE,
|
||||
HPE_CB_CHUNK_HEADER,
|
||||
HPE_CB_CHUNK_COMPLETE,
|
||||
HPE_PAUSED,
|
||||
HPE_PAUSED_UPGRADE,
|
||||
HPE_USER
|
||||
|
||||
ctypedef llhttp_errno llhttp_errno_t
|
||||
|
||||
enum llhttp_flags:
|
||||
F_CONNECTION_KEEP_ALIVE,
|
||||
F_CONNECTION_CLOSE,
|
||||
F_CONNECTION_UPGRADE,
|
||||
F_CHUNKED,
|
||||
F_UPGRADE,
|
||||
F_CONTENT_LENGTH,
|
||||
F_SKIPBODY,
|
||||
F_TRAILING,
|
||||
F_TRANSFER_ENCODING
|
||||
|
||||
enum llhttp_lenient_flags:
|
||||
LENIENT_HEADERS,
|
||||
LENIENT_CHUNKED_LENGTH
|
||||
|
||||
enum llhttp_type:
|
||||
HTTP_REQUEST,
|
||||
HTTP_RESPONSE,
|
||||
HTTP_BOTH
|
||||
|
||||
enum llhttp_finish_t:
|
||||
HTTP_FINISH_SAFE,
|
||||
HTTP_FINISH_SAFE_WITH_CB,
|
||||
HTTP_FINISH_UNSAFE
|
||||
|
||||
enum llhttp_method:
|
||||
HTTP_DELETE,
|
||||
HTTP_GET,
|
||||
HTTP_HEAD,
|
||||
HTTP_POST,
|
||||
HTTP_PUT,
|
||||
HTTP_CONNECT,
|
||||
HTTP_OPTIONS,
|
||||
HTTP_TRACE,
|
||||
HTTP_COPY,
|
||||
HTTP_LOCK,
|
||||
HTTP_MKCOL,
|
||||
HTTP_MOVE,
|
||||
HTTP_PROPFIND,
|
||||
HTTP_PROPPATCH,
|
||||
HTTP_SEARCH,
|
||||
HTTP_UNLOCK,
|
||||
HTTP_BIND,
|
||||
HTTP_REBIND,
|
||||
HTTP_UNBIND,
|
||||
HTTP_ACL,
|
||||
HTTP_REPORT,
|
||||
HTTP_MKACTIVITY,
|
||||
HTTP_CHECKOUT,
|
||||
HTTP_MERGE,
|
||||
HTTP_MSEARCH,
|
||||
HTTP_NOTIFY,
|
||||
HTTP_SUBSCRIBE,
|
||||
HTTP_UNSUBSCRIBE,
|
||||
HTTP_PATCH,
|
||||
HTTP_PURGE,
|
||||
HTTP_MKCALENDAR,
|
||||
HTTP_LINK,
|
||||
HTTP_UNLINK,
|
||||
HTTP_SOURCE,
|
||||
HTTP_PRI,
|
||||
HTTP_DESCRIBE,
|
||||
HTTP_ANNOUNCE,
|
||||
HTTP_SETUP,
|
||||
HTTP_PLAY,
|
||||
HTTP_PAUSE,
|
||||
HTTP_TEARDOWN,
|
||||
HTTP_GET_PARAMETER,
|
||||
HTTP_SET_PARAMETER,
|
||||
HTTP_REDIRECT,
|
||||
HTTP_RECORD,
|
||||
HTTP_FLUSH
|
||||
|
||||
ctypedef llhttp_method llhttp_method_t;
|
||||
|
||||
void llhttp_settings_init(llhttp_settings_t* settings)
|
||||
void llhttp_init(llhttp_t* parser, llhttp_type type,
|
||||
const llhttp_settings_t* settings)
|
||||
|
||||
llhttp_errno_t llhttp_execute(llhttp_t* parser, const char* data, size_t len)
|
||||
llhttp_errno_t llhttp_finish(llhttp_t* parser)
|
||||
|
||||
int llhttp_message_needs_eof(const llhttp_t* parser)
|
||||
|
||||
int llhttp_should_keep_alive(const llhttp_t* parser)
|
||||
|
||||
void llhttp_pause(llhttp_t* parser)
|
||||
void llhttp_resume(llhttp_t* parser)
|
||||
|
||||
void llhttp_resume_after_upgrade(llhttp_t* parser)
|
||||
|
||||
llhttp_errno_t llhttp_get_errno(const llhttp_t* parser)
|
||||
const char* llhttp_get_error_reason(const llhttp_t* parser)
|
||||
void llhttp_set_error_reason(llhttp_t* parser, const char* reason)
|
||||
const char* llhttp_get_error_pos(const llhttp_t* parser)
|
||||
const char* llhttp_errno_name(llhttp_errno_t err)
|
||||
|
||||
const char* llhttp_method_name(llhttp_method_t method)
|
||||
|
||||
void llhttp_set_lenient_headers(llhttp_t* parser, int enabled)
|
||||
void llhttp_set_lenient_chunked_length(llhttp_t* parser, int enabled)
|
|
@ -0,0 +1,2 @@
|
|||
cdef extern from "_find_header.h":
|
||||
int find_header(char *, int)
|
|
@ -0,0 +1,83 @@
|
|||
# The file is autogenerated from aiohttp/hdrs.py
|
||||
# Run ./tools/gen.py to update it after the origin changing.
|
||||
|
||||
from . import hdrs
|
||||
cdef tuple headers = (
|
||||
hdrs.ACCEPT,
|
||||
hdrs.ACCEPT_CHARSET,
|
||||
hdrs.ACCEPT_ENCODING,
|
||||
hdrs.ACCEPT_LANGUAGE,
|
||||
hdrs.ACCEPT_RANGES,
|
||||
hdrs.ACCESS_CONTROL_ALLOW_CREDENTIALS,
|
||||
hdrs.ACCESS_CONTROL_ALLOW_HEADERS,
|
||||
hdrs.ACCESS_CONTROL_ALLOW_METHODS,
|
||||
hdrs.ACCESS_CONTROL_ALLOW_ORIGIN,
|
||||
hdrs.ACCESS_CONTROL_EXPOSE_HEADERS,
|
||||
hdrs.ACCESS_CONTROL_MAX_AGE,
|
||||
hdrs.ACCESS_CONTROL_REQUEST_HEADERS,
|
||||
hdrs.ACCESS_CONTROL_REQUEST_METHOD,
|
||||
hdrs.AGE,
|
||||
hdrs.ALLOW,
|
||||
hdrs.AUTHORIZATION,
|
||||
hdrs.CACHE_CONTROL,
|
||||
hdrs.CONNECTION,
|
||||
hdrs.CONTENT_DISPOSITION,
|
||||
hdrs.CONTENT_ENCODING,
|
||||
hdrs.CONTENT_LANGUAGE,
|
||||
hdrs.CONTENT_LENGTH,
|
||||
hdrs.CONTENT_LOCATION,
|
||||
hdrs.CONTENT_MD5,
|
||||
hdrs.CONTENT_RANGE,
|
||||
hdrs.CONTENT_TRANSFER_ENCODING,
|
||||
hdrs.CONTENT_TYPE,
|
||||
hdrs.COOKIE,
|
||||
hdrs.DATE,
|
||||
hdrs.DESTINATION,
|
||||
hdrs.DIGEST,
|
||||
hdrs.ETAG,
|
||||
hdrs.EXPECT,
|
||||
hdrs.EXPIRES,
|
||||
hdrs.FORWARDED,
|
||||
hdrs.FROM,
|
||||
hdrs.HOST,
|
||||
hdrs.IF_MATCH,
|
||||
hdrs.IF_MODIFIED_SINCE,
|
||||
hdrs.IF_NONE_MATCH,
|
||||
hdrs.IF_RANGE,
|
||||
hdrs.IF_UNMODIFIED_SINCE,
|
||||
hdrs.KEEP_ALIVE,
|
||||
hdrs.LAST_EVENT_ID,
|
||||
hdrs.LAST_MODIFIED,
|
||||
hdrs.LINK,
|
||||
hdrs.LOCATION,
|
||||
hdrs.MAX_FORWARDS,
|
||||
hdrs.ORIGIN,
|
||||
hdrs.PRAGMA,
|
||||
hdrs.PROXY_AUTHENTICATE,
|
||||
hdrs.PROXY_AUTHORIZATION,
|
||||
hdrs.RANGE,
|
||||
hdrs.REFERER,
|
||||
hdrs.RETRY_AFTER,
|
||||
hdrs.SEC_WEBSOCKET_ACCEPT,
|
||||
hdrs.SEC_WEBSOCKET_EXTENSIONS,
|
||||
hdrs.SEC_WEBSOCKET_KEY,
|
||||
hdrs.SEC_WEBSOCKET_KEY1,
|
||||
hdrs.SEC_WEBSOCKET_PROTOCOL,
|
||||
hdrs.SEC_WEBSOCKET_VERSION,
|
||||
hdrs.SERVER,
|
||||
hdrs.SET_COOKIE,
|
||||
hdrs.TE,
|
||||
hdrs.TRAILER,
|
||||
hdrs.TRANSFER_ENCODING,
|
||||
hdrs.URI,
|
||||
hdrs.UPGRADE,
|
||||
hdrs.USER_AGENT,
|
||||
hdrs.VARY,
|
||||
hdrs.VIA,
|
||||
hdrs.WWW_AUTHENTICATE,
|
||||
hdrs.WANT_DIGEST,
|
||||
hdrs.WARNING,
|
||||
hdrs.X_FORWARDED_FOR,
|
||||
hdrs.X_FORWARDED_HOST,
|
||||
hdrs.X_FORWARDED_PROTO,
|
||||
)
|
Plik binarny nie jest wyświetlany.
|
@ -0,0 +1,6 @@
|
|||
from typing import Any
|
||||
|
||||
class reify:
|
||||
def __init__(self, wrapped: Any) -> None: ...
|
||||
def __get__(self, inst: Any, owner: Any) -> Any: ...
|
||||
def __set__(self, inst: Any, value: Any) -> None: ...
|
|
@ -0,0 +1,35 @@
|
|||
cdef class reify:
|
||||
"""Use as a class method decorator. It operates almost exactly like
|
||||
the Python `@property` decorator, but it puts the result of the
|
||||
method it decorates into the instance dict after the first call,
|
||||
effectively replacing the function it decorates with an instance
|
||||
variable. It is, in Python parlance, a data descriptor.
|
||||
|
||||
"""
|
||||
|
||||
cdef object wrapped
|
||||
cdef object name
|
||||
|
||||
def __init__(self, wrapped):
|
||||
self.wrapped = wrapped
|
||||
self.name = wrapped.__name__
|
||||
|
||||
@property
|
||||
def __doc__(self):
|
||||
return self.wrapped.__doc__
|
||||
|
||||
def __get__(self, inst, owner):
|
||||
try:
|
||||
try:
|
||||
return inst._cache[self.name]
|
||||
except KeyError:
|
||||
val = self.wrapped(inst)
|
||||
inst._cache[self.name] = val
|
||||
return val
|
||||
except AttributeError:
|
||||
if inst is None:
|
||||
return self
|
||||
raise
|
||||
|
||||
def __set__(self, inst, value):
|
||||
raise AttributeError("reified property is read-only")
|
Plik binarny nie jest wyświetlany.
|
@ -0,0 +1,832 @@
|
|||
#cython: language_level=3
|
||||
#
|
||||
# Based on https://github.com/MagicStack/httptools
|
||||
#
|
||||
from __future__ import absolute_import, print_function
|
||||
|
||||
from cpython cimport (
|
||||
Py_buffer,
|
||||
PyBUF_SIMPLE,
|
||||
PyBuffer_Release,
|
||||
PyBytes_AsString,
|
||||
PyBytes_AsStringAndSize,
|
||||
PyObject_GetBuffer,
|
||||
)
|
||||
from cpython.mem cimport PyMem_Free, PyMem_Malloc
|
||||
from libc.limits cimport ULLONG_MAX
|
||||
from libc.string cimport memcpy
|
||||
|
||||
from multidict import CIMultiDict as _CIMultiDict, CIMultiDictProxy as _CIMultiDictProxy
|
||||
from yarl import URL as _URL
|
||||
|
||||
from aiohttp import hdrs
|
||||
|
||||
from .http_exceptions import (
|
||||
BadHttpMessage,
|
||||
BadStatusLine,
|
||||
ContentLengthError,
|
||||
InvalidHeader,
|
||||
InvalidURLError,
|
||||
LineTooLong,
|
||||
PayloadEncodingError,
|
||||
TransferEncodingError,
|
||||
)
|
||||
from .http_parser import DeflateBuffer as _DeflateBuffer
|
||||
from .http_writer import (
|
||||
HttpVersion as _HttpVersion,
|
||||
HttpVersion10 as _HttpVersion10,
|
||||
HttpVersion11 as _HttpVersion11,
|
||||
)
|
||||
from .streams import EMPTY_PAYLOAD as _EMPTY_PAYLOAD, StreamReader as _StreamReader
|
||||
|
||||
cimport cython
|
||||
|
||||
from aiohttp cimport _cparser as cparser
|
||||
|
||||
include "_headers.pxi"
|
||||
|
||||
from aiohttp cimport _find_header
|
||||
|
||||
DEF DEFAULT_FREELIST_SIZE = 250
|
||||
|
||||
cdef extern from "Python.h":
|
||||
int PyByteArray_Resize(object, Py_ssize_t) except -1
|
||||
Py_ssize_t PyByteArray_Size(object) except -1
|
||||
char* PyByteArray_AsString(object)
|
||||
|
||||
__all__ = ('HttpRequestParser', 'HttpResponseParser',
|
||||
'RawRequestMessage', 'RawResponseMessage')
|
||||
|
||||
cdef object URL = _URL
|
||||
cdef object URL_build = URL.build
|
||||
cdef object CIMultiDict = _CIMultiDict
|
||||
cdef object CIMultiDictProxy = _CIMultiDictProxy
|
||||
cdef object HttpVersion = _HttpVersion
|
||||
cdef object HttpVersion10 = _HttpVersion10
|
||||
cdef object HttpVersion11 = _HttpVersion11
|
||||
cdef object SEC_WEBSOCKET_KEY1 = hdrs.SEC_WEBSOCKET_KEY1
|
||||
cdef object CONTENT_ENCODING = hdrs.CONTENT_ENCODING
|
||||
cdef object EMPTY_PAYLOAD = _EMPTY_PAYLOAD
|
||||
cdef object StreamReader = _StreamReader
|
||||
cdef object DeflateBuffer = _DeflateBuffer
|
||||
|
||||
|
||||
cdef inline object extend(object buf, const char* at, size_t length):
|
||||
cdef Py_ssize_t s
|
||||
cdef char* ptr
|
||||
s = PyByteArray_Size(buf)
|
||||
PyByteArray_Resize(buf, s + length)
|
||||
ptr = PyByteArray_AsString(buf)
|
||||
memcpy(ptr + s, at, length)
|
||||
|
||||
|
||||
DEF METHODS_COUNT = 46;
|
||||
|
||||
cdef list _http_method = []
|
||||
|
||||
for i in range(METHODS_COUNT):
|
||||
_http_method.append(
|
||||
cparser.llhttp_method_name(<cparser.llhttp_method_t> i).decode('ascii'))
|
||||
|
||||
|
||||
cdef inline str http_method_str(int i):
|
||||
if i < METHODS_COUNT:
|
||||
return <str>_http_method[i]
|
||||
else:
|
||||
return "<unknown>"
|
||||
|
||||
cdef inline object find_header(bytes raw_header):
|
||||
cdef Py_ssize_t size
|
||||
cdef char *buf
|
||||
cdef int idx
|
||||
PyBytes_AsStringAndSize(raw_header, &buf, &size)
|
||||
idx = _find_header.find_header(buf, size)
|
||||
if idx == -1:
|
||||
return raw_header.decode('utf-8', 'surrogateescape')
|
||||
return headers[idx]
|
||||
|
||||
|
||||
@cython.freelist(DEFAULT_FREELIST_SIZE)
|
||||
cdef class RawRequestMessage:
|
||||
cdef readonly str method
|
||||
cdef readonly str path
|
||||
cdef readonly object version # HttpVersion
|
||||
cdef readonly object headers # CIMultiDict
|
||||
cdef readonly object raw_headers # tuple
|
||||
cdef readonly object should_close
|
||||
cdef readonly object compression
|
||||
cdef readonly object upgrade
|
||||
cdef readonly object chunked
|
||||
cdef readonly object url # yarl.URL
|
||||
|
||||
def __init__(self, method, path, version, headers, raw_headers,
|
||||
should_close, compression, upgrade, chunked, url):
|
||||
self.method = method
|
||||
self.path = path
|
||||
self.version = version
|
||||
self.headers = headers
|
||||
self.raw_headers = raw_headers
|
||||
self.should_close = should_close
|
||||
self.compression = compression
|
||||
self.upgrade = upgrade
|
||||
self.chunked = chunked
|
||||
self.url = url
|
||||
|
||||
def __repr__(self):
|
||||
info = []
|
||||
info.append(("method", self.method))
|
||||
info.append(("path", self.path))
|
||||
info.append(("version", self.version))
|
||||
info.append(("headers", self.headers))
|
||||
info.append(("raw_headers", self.raw_headers))
|
||||
info.append(("should_close", self.should_close))
|
||||
info.append(("compression", self.compression))
|
||||
info.append(("upgrade", self.upgrade))
|
||||
info.append(("chunked", self.chunked))
|
||||
info.append(("url", self.url))
|
||||
sinfo = ', '.join(name + '=' + repr(val) for name, val in info)
|
||||
return '<RawRequestMessage(' + sinfo + ')>'
|
||||
|
||||
def _replace(self, **dct):
|
||||
cdef RawRequestMessage ret
|
||||
ret = _new_request_message(self.method,
|
||||
self.path,
|
||||
self.version,
|
||||
self.headers,
|
||||
self.raw_headers,
|
||||
self.should_close,
|
||||
self.compression,
|
||||
self.upgrade,
|
||||
self.chunked,
|
||||
self.url)
|
||||
if "method" in dct:
|
||||
ret.method = dct["method"]
|
||||
if "path" in dct:
|
||||
ret.path = dct["path"]
|
||||
if "version" in dct:
|
||||
ret.version = dct["version"]
|
||||
if "headers" in dct:
|
||||
ret.headers = dct["headers"]
|
||||
if "raw_headers" in dct:
|
||||
ret.raw_headers = dct["raw_headers"]
|
||||
if "should_close" in dct:
|
||||
ret.should_close = dct["should_close"]
|
||||
if "compression" in dct:
|
||||
ret.compression = dct["compression"]
|
||||
if "upgrade" in dct:
|
||||
ret.upgrade = dct["upgrade"]
|
||||
if "chunked" in dct:
|
||||
ret.chunked = dct["chunked"]
|
||||
if "url" in dct:
|
||||
ret.url = dct["url"]
|
||||
return ret
|
||||
|
||||
cdef _new_request_message(str method,
|
||||
str path,
|
||||
object version,
|
||||
object headers,
|
||||
object raw_headers,
|
||||
bint should_close,
|
||||
object compression,
|
||||
bint upgrade,
|
||||
bint chunked,
|
||||
object url):
|
||||
cdef RawRequestMessage ret
|
||||
ret = RawRequestMessage.__new__(RawRequestMessage)
|
||||
ret.method = method
|
||||
ret.path = path
|
||||
ret.version = version
|
||||
ret.headers = headers
|
||||
ret.raw_headers = raw_headers
|
||||
ret.should_close = should_close
|
||||
ret.compression = compression
|
||||
ret.upgrade = upgrade
|
||||
ret.chunked = chunked
|
||||
ret.url = url
|
||||
return ret
|
||||
|
||||
|
||||
@cython.freelist(DEFAULT_FREELIST_SIZE)
|
||||
cdef class RawResponseMessage:
|
||||
cdef readonly object version # HttpVersion
|
||||
cdef readonly int code
|
||||
cdef readonly str reason
|
||||
cdef readonly object headers # CIMultiDict
|
||||
cdef readonly object raw_headers # tuple
|
||||
cdef readonly object should_close
|
||||
cdef readonly object compression
|
||||
cdef readonly object upgrade
|
||||
cdef readonly object chunked
|
||||
|
||||
def __init__(self, version, code, reason, headers, raw_headers,
|
||||
should_close, compression, upgrade, chunked):
|
||||
self.version = version
|
||||
self.code = code
|
||||
self.reason = reason
|
||||
self.headers = headers
|
||||
self.raw_headers = raw_headers
|
||||
self.should_close = should_close
|
||||
self.compression = compression
|
||||
self.upgrade = upgrade
|
||||
self.chunked = chunked
|
||||
|
||||
def __repr__(self):
|
||||
info = []
|
||||
info.append(("version", self.version))
|
||||
info.append(("code", self.code))
|
||||
info.append(("reason", self.reason))
|
||||
info.append(("headers", self.headers))
|
||||
info.append(("raw_headers", self.raw_headers))
|
||||
info.append(("should_close", self.should_close))
|
||||
info.append(("compression", self.compression))
|
||||
info.append(("upgrade", self.upgrade))
|
||||
info.append(("chunked", self.chunked))
|
||||
sinfo = ', '.join(name + '=' + repr(val) for name, val in info)
|
||||
return '<RawResponseMessage(' + sinfo + ')>'
|
||||
|
||||
|
||||
cdef _new_response_message(object version,
|
||||
int code,
|
||||
str reason,
|
||||
object headers,
|
||||
object raw_headers,
|
||||
bint should_close,
|
||||
object compression,
|
||||
bint upgrade,
|
||||
bint chunked):
|
||||
cdef RawResponseMessage ret
|
||||
ret = RawResponseMessage.__new__(RawResponseMessage)
|
||||
ret.version = version
|
||||
ret.code = code
|
||||
ret.reason = reason
|
||||
ret.headers = headers
|
||||
ret.raw_headers = raw_headers
|
||||
ret.should_close = should_close
|
||||
ret.compression = compression
|
||||
ret.upgrade = upgrade
|
||||
ret.chunked = chunked
|
||||
return ret
|
||||
|
||||
|
||||
@cython.internal
|
||||
cdef class HttpParser:
|
||||
|
||||
cdef:
|
||||
cparser.llhttp_t* _cparser
|
||||
cparser.llhttp_settings_t* _csettings
|
||||
|
||||
bytearray _raw_name
|
||||
bytearray _raw_value
|
||||
bint _has_value
|
||||
|
||||
object _protocol
|
||||
object _loop
|
||||
object _timer
|
||||
|
||||
size_t _max_line_size
|
||||
size_t _max_field_size
|
||||
size_t _max_headers
|
||||
bint _response_with_body
|
||||
bint _read_until_eof
|
||||
|
||||
bint _started
|
||||
object _url
|
||||
bytearray _buf
|
||||
str _path
|
||||
str _reason
|
||||
object _headers
|
||||
list _raw_headers
|
||||
bint _upgraded
|
||||
list _messages
|
||||
object _payload
|
||||
bint _payload_error
|
||||
object _payload_exception
|
||||
object _last_error
|
||||
bint _auto_decompress
|
||||
int _limit
|
||||
|
||||
str _content_encoding
|
||||
|
||||
Py_buffer py_buf
|
||||
|
||||
def __cinit__(self):
|
||||
self._cparser = <cparser.llhttp_t*> \
|
||||
PyMem_Malloc(sizeof(cparser.llhttp_t))
|
||||
if self._cparser is NULL:
|
||||
raise MemoryError()
|
||||
|
||||
self._csettings = <cparser.llhttp_settings_t*> \
|
||||
PyMem_Malloc(sizeof(cparser.llhttp_settings_t))
|
||||
if self._csettings is NULL:
|
||||
raise MemoryError()
|
||||
|
||||
def __dealloc__(self):
|
||||
PyMem_Free(self._cparser)
|
||||
PyMem_Free(self._csettings)
|
||||
|
||||
cdef _init(
|
||||
self, cparser.llhttp_type mode,
|
||||
object protocol, object loop, int limit,
|
||||
object timer=None,
|
||||
size_t max_line_size=8190, size_t max_headers=32768,
|
||||
size_t max_field_size=8190, payload_exception=None,
|
||||
bint response_with_body=True, bint read_until_eof=False,
|
||||
bint auto_decompress=True,
|
||||
):
|
||||
cparser.llhttp_settings_init(self._csettings)
|
||||
cparser.llhttp_init(self._cparser, mode, self._csettings)
|
||||
self._cparser.data = <void*>self
|
||||
self._cparser.content_length = 0
|
||||
|
||||
self._protocol = protocol
|
||||
self._loop = loop
|
||||
self._timer = timer
|
||||
|
||||
self._buf = bytearray()
|
||||
self._payload = None
|
||||
self._payload_error = 0
|
||||
self._payload_exception = payload_exception
|
||||
self._messages = []
|
||||
|
||||
self._raw_name = bytearray()
|
||||
self._raw_value = bytearray()
|
||||
self._has_value = False
|
||||
|
||||
self._max_line_size = max_line_size
|
||||
self._max_headers = max_headers
|
||||
self._max_field_size = max_field_size
|
||||
self._response_with_body = response_with_body
|
||||
self._read_until_eof = read_until_eof
|
||||
self._upgraded = False
|
||||
self._auto_decompress = auto_decompress
|
||||
self._content_encoding = None
|
||||
|
||||
self._csettings.on_url = cb_on_url
|
||||
self._csettings.on_status = cb_on_status
|
||||
self._csettings.on_header_field = cb_on_header_field
|
||||
self._csettings.on_header_value = cb_on_header_value
|
||||
self._csettings.on_headers_complete = cb_on_headers_complete
|
||||
self._csettings.on_body = cb_on_body
|
||||
self._csettings.on_message_begin = cb_on_message_begin
|
||||
self._csettings.on_message_complete = cb_on_message_complete
|
||||
self._csettings.on_chunk_header = cb_on_chunk_header
|
||||
self._csettings.on_chunk_complete = cb_on_chunk_complete
|
||||
|
||||
self._last_error = None
|
||||
self._limit = limit
|
||||
|
||||
cdef _process_header(self):
|
||||
if self._raw_name:
|
||||
raw_name = bytes(self._raw_name)
|
||||
raw_value = bytes(self._raw_value)
|
||||
|
||||
name = find_header(raw_name)
|
||||
value = raw_value.decode('utf-8', 'surrogateescape')
|
||||
|
||||
self._headers.add(name, value)
|
||||
|
||||
if name is CONTENT_ENCODING:
|
||||
self._content_encoding = value
|
||||
|
||||
PyByteArray_Resize(self._raw_name, 0)
|
||||
PyByteArray_Resize(self._raw_value, 0)
|
||||
self._has_value = False
|
||||
self._raw_headers.append((raw_name, raw_value))
|
||||
|
||||
cdef _on_header_field(self, char* at, size_t length):
|
||||
cdef Py_ssize_t size
|
||||
cdef char *buf
|
||||
if self._has_value:
|
||||
self._process_header()
|
||||
|
||||
size = PyByteArray_Size(self._raw_name)
|
||||
PyByteArray_Resize(self._raw_name, size + length)
|
||||
buf = PyByteArray_AsString(self._raw_name)
|
||||
memcpy(buf + size, at, length)
|
||||
|
||||
cdef _on_header_value(self, char* at, size_t length):
|
||||
cdef Py_ssize_t size
|
||||
cdef char *buf
|
||||
|
||||
size = PyByteArray_Size(self._raw_value)
|
||||
PyByteArray_Resize(self._raw_value, size + length)
|
||||
buf = PyByteArray_AsString(self._raw_value)
|
||||
memcpy(buf + size, at, length)
|
||||
self._has_value = True
|
||||
|
||||
cdef _on_headers_complete(self):
|
||||
self._process_header()
|
||||
|
||||
method = http_method_str(self._cparser.method)
|
||||
should_close = not cparser.llhttp_should_keep_alive(self._cparser)
|
||||
upgrade = self._cparser.upgrade
|
||||
chunked = self._cparser.flags & cparser.F_CHUNKED
|
||||
|
||||
raw_headers = tuple(self._raw_headers)
|
||||
headers = CIMultiDictProxy(self._headers)
|
||||
|
||||
if upgrade or self._cparser.method == cparser.HTTP_CONNECT:
|
||||
self._upgraded = True
|
||||
|
||||
# do not support old websocket spec
|
||||
if SEC_WEBSOCKET_KEY1 in headers:
|
||||
raise InvalidHeader(SEC_WEBSOCKET_KEY1)
|
||||
|
||||
encoding = None
|
||||
enc = self._content_encoding
|
||||
if enc is not None:
|
||||
self._content_encoding = None
|
||||
enc = enc.lower()
|
||||
if enc in ('gzip', 'deflate', 'br'):
|
||||
encoding = enc
|
||||
|
||||
if self._cparser.type == cparser.HTTP_REQUEST:
|
||||
msg = _new_request_message(
|
||||
method, self._path,
|
||||
self.http_version(), headers, raw_headers,
|
||||
should_close, encoding, upgrade, chunked, self._url)
|
||||
else:
|
||||
msg = _new_response_message(
|
||||
self.http_version(), self._cparser.status_code, self._reason,
|
||||
headers, raw_headers, should_close, encoding,
|
||||
upgrade, chunked)
|
||||
|
||||
if (
|
||||
ULLONG_MAX > self._cparser.content_length > 0 or chunked or
|
||||
self._cparser.method == cparser.HTTP_CONNECT or
|
||||
(self._cparser.status_code >= 199 and
|
||||
self._cparser.content_length == 0 and
|
||||
self._read_until_eof)
|
||||
):
|
||||
payload = StreamReader(
|
||||
self._protocol, timer=self._timer, loop=self._loop,
|
||||
limit=self._limit)
|
||||
else:
|
||||
payload = EMPTY_PAYLOAD
|
||||
|
||||
self._payload = payload
|
||||
if encoding is not None and self._auto_decompress:
|
||||
self._payload = DeflateBuffer(payload, encoding)
|
||||
|
||||
if not self._response_with_body:
|
||||
payload = EMPTY_PAYLOAD
|
||||
|
||||
self._messages.append((msg, payload))
|
||||
|
||||
cdef _on_message_complete(self):
|
||||
self._payload.feed_eof()
|
||||
self._payload = None
|
||||
|
||||
cdef _on_chunk_header(self):
|
||||
self._payload.begin_http_chunk_receiving()
|
||||
|
||||
cdef _on_chunk_complete(self):
|
||||
self._payload.end_http_chunk_receiving()
|
||||
|
||||
cdef object _on_status_complete(self):
|
||||
pass
|
||||
|
||||
cdef inline http_version(self):
|
||||
cdef cparser.llhttp_t* parser = self._cparser
|
||||
|
||||
if parser.http_major == 1:
|
||||
if parser.http_minor == 0:
|
||||
return HttpVersion10
|
||||
elif parser.http_minor == 1:
|
||||
return HttpVersion11
|
||||
|
||||
return HttpVersion(parser.http_major, parser.http_minor)
|
||||
|
||||
### Public API ###
|
||||
|
||||
def feed_eof(self):
|
||||
cdef bytes desc
|
||||
|
||||
if self._payload is not None:
|
||||
if self._cparser.flags & cparser.F_CHUNKED:
|
||||
raise TransferEncodingError(
|
||||
"Not enough data for satisfy transfer length header.")
|
||||
elif self._cparser.flags & cparser.F_CONTENT_LENGTH:
|
||||
raise ContentLengthError(
|
||||
"Not enough data for satisfy content length header.")
|
||||
elif cparser.llhttp_get_errno(self._cparser) != cparser.HPE_OK:
|
||||
desc = cparser.llhttp_get_error_reason(self._cparser)
|
||||
raise PayloadEncodingError(desc.decode('latin-1'))
|
||||
else:
|
||||
self._payload.feed_eof()
|
||||
elif self._started:
|
||||
self._on_headers_complete()
|
||||
if self._messages:
|
||||
return self._messages[-1][0]
|
||||
|
||||
def feed_data(self, data):
|
||||
cdef:
|
||||
size_t data_len
|
||||
size_t nb
|
||||
cdef cparser.llhttp_errno_t errno
|
||||
|
||||
PyObject_GetBuffer(data, &self.py_buf, PyBUF_SIMPLE)
|
||||
data_len = <size_t>self.py_buf.len
|
||||
|
||||
errno = cparser.llhttp_execute(
|
||||
self._cparser,
|
||||
<char*>self.py_buf.buf,
|
||||
data_len)
|
||||
|
||||
if errno is cparser.HPE_PAUSED_UPGRADE:
|
||||
cparser.llhttp_resume_after_upgrade(self._cparser)
|
||||
|
||||
nb = cparser.llhttp_get_error_pos(self._cparser) - <char*>self.py_buf.buf
|
||||
|
||||
PyBuffer_Release(&self.py_buf)
|
||||
|
||||
if errno not in (cparser.HPE_OK, cparser.HPE_PAUSED_UPGRADE):
|
||||
if self._payload_error == 0:
|
||||
if self._last_error is not None:
|
||||
ex = self._last_error
|
||||
self._last_error = None
|
||||
else:
|
||||
ex = parser_error_from_errno(self._cparser)
|
||||
self._payload = None
|
||||
raise ex
|
||||
|
||||
if self._messages:
|
||||
messages = self._messages
|
||||
self._messages = []
|
||||
else:
|
||||
messages = ()
|
||||
|
||||
if self._upgraded:
|
||||
return messages, True, data[nb:]
|
||||
else:
|
||||
return messages, False, b''
|
||||
|
||||
def set_upgraded(self, val):
|
||||
self._upgraded = val
|
||||
|
||||
|
||||
cdef class HttpRequestParser(HttpParser):
|
||||
|
||||
def __init__(
|
||||
self, protocol, loop, int limit, timer=None,
|
||||
size_t max_line_size=8190, size_t max_headers=32768,
|
||||
size_t max_field_size=8190, payload_exception=None,
|
||||
bint response_with_body=True, bint read_until_eof=False,
|
||||
bint auto_decompress=True,
|
||||
):
|
||||
self._init(cparser.HTTP_REQUEST, protocol, loop, limit, timer,
|
||||
max_line_size, max_headers, max_field_size,
|
||||
payload_exception, response_with_body, read_until_eof,
|
||||
auto_decompress)
|
||||
|
||||
cdef object _on_status_complete(self):
|
||||
cdef int idx1, idx2
|
||||
if not self._buf:
|
||||
return
|
||||
self._path = self._buf.decode('utf-8', 'surrogateescape')
|
||||
try:
|
||||
idx3 = len(self._path)
|
||||
if self._cparser.method == cparser.HTTP_CONNECT:
|
||||
# authority-form,
|
||||
# https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.3
|
||||
self._url = URL.build(authority=self._path, encoded=True)
|
||||
elif idx3 > 1 and self._path[0] == '/':
|
||||
# origin-form,
|
||||
# https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.1
|
||||
idx1 = self._path.find("?")
|
||||
if idx1 == -1:
|
||||
query = ""
|
||||
idx2 = self._path.find("#")
|
||||
if idx2 == -1:
|
||||
path = self._path
|
||||
fragment = ""
|
||||
else:
|
||||
path = self._path[0: idx2]
|
||||
fragment = self._path[idx2+1:]
|
||||
|
||||
else:
|
||||
path = self._path[0:idx1]
|
||||
idx1 += 1
|
||||
idx2 = self._path.find("#", idx1+1)
|
||||
if idx2 == -1:
|
||||
query = self._path[idx1:]
|
||||
fragment = ""
|
||||
else:
|
||||
query = self._path[idx1: idx2]
|
||||
fragment = self._path[idx2+1:]
|
||||
|
||||
self._url = URL.build(
|
||||
path=path,
|
||||
query_string=query,
|
||||
fragment=fragment,
|
||||
encoded=True,
|
||||
)
|
||||
else:
|
||||
# absolute-form for proxy maybe,
|
||||
# https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.2
|
||||
self._url = URL(self._path, encoded=True)
|
||||
finally:
|
||||
PyByteArray_Resize(self._buf, 0)
|
||||
|
||||
|
||||
cdef class HttpResponseParser(HttpParser):
|
||||
|
||||
def __init__(
|
||||
self, protocol, loop, int limit, timer=None,
|
||||
size_t max_line_size=8190, size_t max_headers=32768,
|
||||
size_t max_field_size=8190, payload_exception=None,
|
||||
bint response_with_body=True, bint read_until_eof=False,
|
||||
bint auto_decompress=True
|
||||
):
|
||||
self._init(cparser.HTTP_RESPONSE, protocol, loop, limit, timer,
|
||||
max_line_size, max_headers, max_field_size,
|
||||
payload_exception, response_with_body, read_until_eof,
|
||||
auto_decompress)
|
||||
|
||||
cdef object _on_status_complete(self):
|
||||
if self._buf:
|
||||
self._reason = self._buf.decode('utf-8', 'surrogateescape')
|
||||
PyByteArray_Resize(self._buf, 0)
|
||||
else:
|
||||
self._reason = self._reason or ''
|
||||
|
||||
cdef int cb_on_message_begin(cparser.llhttp_t* parser) except -1:
    # llhttp callback: reset per-message state at the start of each new
    # HTTP message on this (possibly keep-alive) connection.
    cdef HttpParser pyparser = <HttpParser>parser.data

    pyparser._started = True
    pyparser._headers = CIMultiDict()
    pyparser._raw_headers = []
    PyByteArray_Resize(pyparser._buf, 0)
    pyparser._path = None
    pyparser._reason = None
    return 0


cdef int cb_on_url(cparser.llhttp_t* parser,
                   const char *at, size_t length) except -1:
    # llhttp callback: accumulate request-target bytes into _buf.  May be
    # invoked several times for one URL when input arrives fragmented.
    cdef HttpParser pyparser = <HttpParser>parser.data
    try:
        if length > pyparser._max_line_size:
            raise LineTooLong(
                'Status line is too long', pyparser._max_line_size, length)
        extend(pyparser._buf, at, length)
    except BaseException as ex:
        # Python exceptions cannot cross the C callback boundary:
        # stash the error and signal failure to llhttp with -1.
        pyparser._last_error = ex
        return -1
    else:
        return 0
|
||||
|
||||
|
||||
cdef int cb_on_status(cparser.llhttp_t* parser,
                      const char *at, size_t length) except -1:
    """llhttp callback: buffer response reason-phrase bytes.

    The bytes are appended to ``pyparser._buf`` and decoded later by
    ``HttpResponseParser._on_status_complete``.  Returns 0 on success;
    on error the exception is stashed in ``pyparser._last_error`` and
    -1 is returned so llhttp aborts parsing.
    """
    # Fix: dropped the unused local declaration ``cdef str reason`` —
    # nothing in this callback ever assigned or read it.
    cdef HttpParser pyparser = <HttpParser>parser.data
    try:
        if length > pyparser._max_line_size:
            raise LineTooLong(
                'Status line is too long', pyparser._max_line_size, length)
        extend(pyparser._buf, at, length)
    except BaseException as ex:
        pyparser._last_error = ex
        return -1
    else:
        return 0
|
||||
|
||||
|
||||
cdef int cb_on_header_field(cparser.llhttp_t* parser,
                            const char *at, size_t length) except -1:
    # llhttp callback: receive (a fragment of) a header name.
    cdef HttpParser pyparser = <HttpParser>parser.data
    cdef Py_ssize_t size
    try:
        # The first header terminates the status/request line, so flush it.
        pyparser._on_status_complete()
        # Enforce the limit on the total accumulated name length, not just
        # this fragment.
        size = len(pyparser._raw_name) + length
        if size > pyparser._max_field_size:
            raise LineTooLong(
                'Header name is too long', pyparser._max_field_size, size)
        pyparser._on_header_field(at, length)
    except BaseException as ex:
        # Exceptions must not cross into C; stash and signal -1 to llhttp.
        pyparser._last_error = ex
        return -1
    else:
        return 0


cdef int cb_on_header_value(cparser.llhttp_t* parser,
                            const char *at, size_t length) except -1:
    # llhttp callback: receive (a fragment of) a header value.
    cdef HttpParser pyparser = <HttpParser>parser.data
    cdef Py_ssize_t size
    try:
        # Limit applies to the accumulated value plus this fragment.
        size = len(pyparser._raw_value) + length
        if size > pyparser._max_field_size:
            raise LineTooLong(
                'Header value is too long', pyparser._max_field_size, size)
        pyparser._on_header_value(at, length)
    except BaseException as ex:
        pyparser._last_error = ex
        return -1
    else:
        return 0
|
||||
|
||||
|
||||
cdef int cb_on_headers_complete(cparser.llhttp_t* parser) except -1:
    # llhttp callback: the full header block has been parsed.
    cdef HttpParser pyparser = <HttpParser>parser.data
    try:
        pyparser._on_status_complete()
        pyparser._on_headers_complete()
    except BaseException as exc:
        pyparser._last_error = exc
        return -1
    else:
        # Returning 2 tells llhttp to skip body parsing: on protocol
        # upgrade or CONNECT the remaining bytes are not an HTTP body.
        if (
            pyparser._cparser.upgrade or
            pyparser._cparser.method == cparser.HTTP_CONNECT
        ):
            return 2
        else:
            return 0


cdef int cb_on_body(cparser.llhttp_t* parser,
                    const char *at, size_t length) except -1:
    # llhttp callback: feed a body chunk into the payload stream.
    cdef HttpParser pyparser = <HttpParser>parser.data
    cdef bytes body = at[:length]
    try:
        pyparser._payload.feed_data(body, length)
    except BaseException as exc:
        # Surface the failure on the payload stream, optionally wrapped in
        # the user-provided payload exception type.
        if pyparser._payload_exception is not None:
            pyparser._payload.set_exception(pyparser._payload_exception(str(exc)))
        else:
            pyparser._payload.set_exception(exc)
        pyparser._payload_error = 1
        return -1
    else:
        return 0
|
||||
|
||||
|
||||
cdef int cb_on_message_complete(cparser.llhttp_t* parser) except -1:
    # llhttp callback: the whole message (headers + body) is done.
    cdef HttpParser pyparser = <HttpParser>parser.data
    try:
        pyparser._started = False
        pyparser._on_message_complete()
    except BaseException as exc:
        pyparser._last_error = exc
        return -1
    else:
        return 0


cdef int cb_on_chunk_header(cparser.llhttp_t* parser) except -1:
    # llhttp callback: a chunked-encoding chunk header was parsed.
    cdef HttpParser pyparser = <HttpParser>parser.data
    try:
        pyparser._on_chunk_header()
    except BaseException as exc:
        pyparser._last_error = exc
        return -1
    else:
        return 0


cdef int cb_on_chunk_complete(cparser.llhttp_t* parser) except -1:
    # llhttp callback: the current chunk's data has been fully consumed.
    cdef HttpParser pyparser = <HttpParser>parser.data
    try:
        pyparser._on_chunk_complete()
    except BaseException as exc:
        pyparser._last_error = exc
        return -1
    else:
        return 0
|
||||
|
||||
|
||||
cdef parser_error_from_errno(cparser.llhttp_t* parser):
    """Translate the llhttp error state into an aiohttp exception instance.

    Reads the current errno and human-readable reason from llhttp and
    picks the narrowest matching exception class.
    """
    cdef cparser.llhttp_errno_t errno = cparser.llhttp_get_errno(parser)
    cdef bytes desc = cparser.llhttp_get_error_reason(parser)

    if errno in (cparser.HPE_CB_MESSAGE_BEGIN,
                 cparser.HPE_CB_HEADERS_COMPLETE,
                 cparser.HPE_CB_MESSAGE_COMPLETE,
                 cparser.HPE_CB_CHUNK_HEADER,
                 cparser.HPE_CB_CHUNK_COMPLETE,
                 cparser.HPE_INVALID_CONSTANT,
                 cparser.HPE_INVALID_HEADER_TOKEN,
                 cparser.HPE_INVALID_CONTENT_LENGTH,
                 cparser.HPE_INVALID_CHUNK_SIZE,
                 cparser.HPE_INVALID_EOF_STATE,
                 cparser.HPE_INVALID_TRANSFER_ENCODING):
        cls = BadHttpMessage
    elif errno in (cparser.HPE_INVALID_STATUS,
                   cparser.HPE_INVALID_METHOD,
                   cparser.HPE_INVALID_VERSION):
        # All malformed status/request-line variants map to the same error.
        cls = BadStatusLine
    elif errno == cparser.HPE_INVALID_URL:
        cls = InvalidURLError
    else:
        # Anything unrecognized degrades to the generic message error.
        cls = BadHttpMessage

    return cls(desc.decode('latin-1'))
|
Plik binarny nie jest wyświetlany.
|
@ -0,0 +1,163 @@
|
|||
from cpython.bytes cimport PyBytes_FromStringAndSize
|
||||
from cpython.exc cimport PyErr_NoMemory
|
||||
from cpython.mem cimport PyMem_Free, PyMem_Malloc, PyMem_Realloc
|
||||
from cpython.object cimport PyObject_Str
|
||||
from libc.stdint cimport uint8_t, uint64_t
|
||||
from libc.string cimport memcpy
|
||||
|
||||
from multidict import istr
|
||||
|
||||
DEF BUF_SIZE = 16 * 1024 # 16KiB
|
||||
cdef char BUFFER[BUF_SIZE]
|
||||
|
||||
cdef object _istr = istr
|
||||
|
||||
|
||||
# ----------------- writer ---------------------------
|
||||
|
||||
cdef struct Writer:
    # Growable output buffer.  Starts out pointing at the static
    # module-level BUFFER and migrates to heap memory (via PyMem_Malloc
    # in _write_byte) once the output outgrows BUF_SIZE.
    # NOTE(review): BUFFER is module-global shared state — presumably
    # serialized by the GIL; confirm before any nogil use.
    char *buf
    Py_ssize_t size
    Py_ssize_t pos


cdef inline void _init_writer(Writer* writer):
    # Begin writing into the shared static buffer; no allocation yet.
    writer.buf = &BUFFER[0]
    writer.size = BUF_SIZE
    writer.pos = 0


cdef inline void _release_writer(Writer* writer):
    # Free heap memory only if _write_byte reallocated away from the
    # static BUFFER; the static buffer itself must never be freed.
    if writer.buf != BUFFER:
        PyMem_Free(writer.buf)
|
||||
|
||||
|
||||
cdef inline int _write_byte(Writer* writer, uint8_t ch):
    # Append one byte, growing the buffer by BUF_SIZE increments as
    # needed.  Returns 0 on success, -1 with MemoryError set on failure.
    cdef char * buf
    cdef Py_ssize_t size

    if writer.pos == writer.size:
        # reallocate: grow by one BUF_SIZE step
        size = writer.size + BUF_SIZE
        if writer.buf == BUFFER:
            # First overflow of the static buffer: move to the heap and
            # copy the bytes written so far.
            buf = <char*>PyMem_Malloc(size)
            if buf == NULL:
                PyErr_NoMemory()
                return -1
            memcpy(buf, writer.buf, writer.size)
        else:
            # Already heap-backed: realloc in place where possible.
            buf = <char*>PyMem_Realloc(writer.buf, size)
            if buf == NULL:
                PyErr_NoMemory()
                return -1
        writer.buf = buf
        writer.size = size
    writer.buf[writer.pos] = <char>ch
    writer.pos += 1
    return 0
|
||||
|
||||
|
||||
cdef inline int _write_utf8(Writer* writer, Py_UCS4 symbol):
    # Hand-rolled UTF-8 encoder for a single code point.
    # Returns 0 on success, -1 on allocation failure from _write_byte.
    cdef uint64_t utf = <uint64_t> symbol

    if utf < 0x80:
        # 1-byte sequence (ASCII).
        return _write_byte(writer, <uint8_t>utf)
    elif utf < 0x800:
        # 2-byte sequence.
        if _write_byte(writer, <uint8_t>(0xc0 | (utf >> 6))) < 0:
            return -1
        return _write_byte(writer, <uint8_t>(0x80 | (utf & 0x3f)))
    elif 0xD800 <= utf <= 0xDFFF:
        # surrogate code point: not encodable in UTF-8, silently skipped
        return 0
    elif utf < 0x10000:
        # 3-byte sequence.
        if _write_byte(writer, <uint8_t>(0xe0 | (utf >> 12))) < 0:
            return -1
        if _write_byte(writer, <uint8_t>(0x80 | ((utf >> 6) & 0x3f))) < 0:
            return -1
        return _write_byte(writer, <uint8_t>(0x80 | (utf & 0x3f)))
    elif utf > 0x10FFFF:
        # symbol is beyond the Unicode range, silently skipped
        return 0
    else:
        # 4-byte sequence.
        if _write_byte(writer, <uint8_t>(0xf0 | (utf >> 18))) < 0:
            return -1
        if _write_byte(writer,
                       <uint8_t>(0x80 | ((utf >> 12) & 0x3f))) < 0:
            return -1
        if _write_byte(writer,
                       <uint8_t>(0x80 | ((utf >> 6) & 0x3f))) < 0:
            return -1
        return _write_byte(writer, <uint8_t>(0x80 | (utf & 0x3f)))
|
||||
|
||||
|
||||
cdef inline int _write_str(Writer* writer, str s):
    """UTF-8 encode *s* into the writer buffer.

    Returns 0 on success and -1 on allocation failure, propagated from
    _write_byte via _write_utf8.
    """
    cdef Py_UCS4 ch
    for ch in s:
        if _write_utf8(writer, ch) < 0:
            return -1
    # Fix: explicit success return.  The original fell off the end of a
    # ``cdef int`` function, leaving the C return value unspecified.
    return 0
|
||||
|
||||
|
||||
# --------------- _serialize_headers ----------------------
|
||||
|
||||
cdef str to_str(object s):
    # Normalize a header key/value to a plain str.  istr (multidict's
    # case-insensitive string) is converted via PyObject_Str; other str
    # subclasses are downcast with str(); non-strings are rejected.
    typ = type(s)
    if typ is str:
        return <str>s
    elif typ is _istr:
        return PyObject_Str(s)
    elif not isinstance(s, str):
        raise TypeError("Cannot serialize non-str key {!r}".format(s))
    else:
        # str subclass: force a plain str instance.
        return str(s)


cdef void _safe_header(str string) except *:
    # Reject CR/LF to prevent header-injection (response-splitting):
    # a key or value containing a newline could smuggle extra headers.
    if "\r" in string or "\n" in string:
        raise ValueError(
            "Newline or carriage return character detected in HTTP status message or "
            "header. This is a potential security issue."
        )
|
||||
|
||||
|
||||
def _serialize_headers(str status_line, headers):
    # Serialize a status line plus a headers mapping into the on-wire
    # bytes: "<status_line>\r\n<key>: <value>\r\n...\r\n".
    cdef Writer writer
    cdef object key
    cdef object val
    cdef bytes ret

    _init_writer(&writer)

    # Validate every key and value up front so no partial output is
    # produced for a malicious/malformed header set.
    for key, val in headers.items():
        _safe_header(to_str(key))
        _safe_header(to_str(val))

    try:
        # The bare ``raise`` statements below re-raise the error already
        # set by _write_byte (PyErr_NoMemory) when a write reports < 0.
        if _write_str(&writer, status_line) < 0:
            raise
        if _write_byte(&writer, b'\r') < 0:
            raise
        if _write_byte(&writer, b'\n') < 0:
            raise

        for key, val in headers.items():
            if _write_str(&writer, to_str(key)) < 0:
                raise
            if _write_byte(&writer, b':') < 0:
                raise
            if _write_byte(&writer, b' ') < 0:
                raise
            if _write_str(&writer, to_str(val)) < 0:
                raise
            if _write_byte(&writer, b'\r') < 0:
                raise
            if _write_byte(&writer, b'\n') < 0:
                raise

        # Blank line terminating the header block.
        if _write_byte(&writer, b'\r') < 0:
            raise
        if _write_byte(&writer, b'\n') < 0:
            raise

        return PyBytes_FromStringAndSize(writer.buf, writer.pos)
    finally:
        # Always release any heap buffer, even on error.
        _release_writer(&writer)
|
Plik binarny nie jest wyświetlany.
|
@ -0,0 +1,56 @@
|
|||
from cpython cimport PyBytes_AsString
|
||||
|
||||
|
||||
#from cpython cimport PyByteArray_AsString # cython still not exports that
|
||||
cdef extern from "Python.h":
|
||||
char* PyByteArray_AsString(bytearray ba) except NULL
|
||||
|
||||
from libc.stdint cimport uint32_t, uint64_t, uintmax_t
|
||||
|
||||
|
||||
def _websocket_mask_cython(object mask, object data):
    """XOR *data* with the 4-byte websocket *mask* in place.

    Note, this function mutates its `data` argument when it is a
    bytearray; other input types are copied into a new bytearray first.
    """
    cdef:
        Py_ssize_t data_len, i
        # bit operations on signed integers are implementation-specific
        unsigned char * in_buf
        const unsigned char * mask_buf
        uint32_t uint32_msk
        uint64_t uint64_msk

    # NOTE(review): assert is stripped under -O; callers are presumably
    # trusted to pass a 4-byte mask — confirm.
    assert len(mask) == 4

    if not isinstance(mask, bytes):
        mask = bytes(mask)

    if isinstance(data, bytearray):
        data = <bytearray>data
    else:
        data = bytearray(data)

    data_len = len(data)
    in_buf = <unsigned char*>PyByteArray_AsString(data)
    mask_buf = <const unsigned char*>PyBytes_AsString(mask)
    # Load the 4-byte mask as a single word for vectorized XOR below.
    uint32_msk = (<uint32_t*>mask_buf)[0]

    # TODO: align in_data ptr to achieve even faster speeds
    # does it need in python ?! malloc() always aligns to sizeof(long) bytes

    if sizeof(size_t) >= 8:
        # 64-bit platforms: replicate the mask and XOR 8 bytes at a time.
        uint64_msk = uint32_msk
        uint64_msk = (uint64_msk << 32) | uint32_msk

        while data_len >= 8:
            (<uint64_t*>in_buf)[0] ^= uint64_msk
            in_buf += 8
            data_len -= 8


    # XOR remaining data 4 bytes at a time.
    while data_len >= 4:
        (<uint32_t*>in_buf)[0] ^= uint32_msk
        in_buf += 4
        data_len -= 4

    # Tail of fewer than 4 bytes, one byte at a time.
    for i in range(0, data_len):
        in_buf[i] ^= mask_buf[i]
|
|
@ -0,0 +1,207 @@
|
|||
import asyncio
|
||||
import logging
|
||||
from abc import ABC, abstractmethod
|
||||
from collections.abc import Sized
|
||||
from http.cookies import BaseCookie, Morsel
|
||||
from typing import (
|
||||
TYPE_CHECKING,
|
||||
Any,
|
||||
Awaitable,
|
||||
Callable,
|
||||
Dict,
|
||||
Generator,
|
||||
Iterable,
|
||||
List,
|
||||
Optional,
|
||||
Tuple,
|
||||
)
|
||||
|
||||
from multidict import CIMultiDict
|
||||
from yarl import URL
|
||||
|
||||
from .helpers import get_running_loop
|
||||
from .typedefs import LooseCookies
|
||||
|
||||
if TYPE_CHECKING: # pragma: no cover
|
||||
from .web_app import Application
|
||||
from .web_exceptions import HTTPException
|
||||
from .web_request import BaseRequest, Request
|
||||
from .web_response import StreamResponse
|
||||
else:
|
||||
BaseRequest = Request = Application = StreamResponse = None
|
||||
HTTPException = None
|
||||
|
||||
|
||||
class AbstractRouter(ABC):
    """Abstract URL router: resolves a request to an AbstractMatchInfo."""

    def __init__(self) -> None:
        # Routers are mutable until freeze() is called.
        self._frozen = False

    def post_init(self, app: Application) -> None:
        """Post init stage.

        Not an abstract method for sake of backward compatibility,
        but if the router wants to be aware of the application
        it can override this.
        """

    @property
    def frozen(self) -> bool:
        # True once freeze() has been called.
        return self._frozen

    def freeze(self) -> None:
        """Freeze router."""
        self._frozen = True

    @abstractmethod
    async def resolve(self, request: Request) -> "AbstractMatchInfo":
        """Return MATCH_INFO for given request"""
|
||||
|
||||
|
||||
class AbstractMatchInfo(ABC):
    """Abstract result of route resolution for a single request."""

    @property  # pragma: no branch
    @abstractmethod
    def handler(self) -> Callable[[Request], Awaitable[StreamResponse]]:
        """Execute matched request handler"""

    @property
    @abstractmethod
    def expect_handler(self) -> Callable[[Request], Awaitable[None]]:
        """Expect handler for 100-continue processing"""

    @property  # pragma: no branch
    @abstractmethod
    def http_exception(self) -> Optional[HTTPException]:
        """HTTPException instance raised on router's resolving, or None"""

    @abstractmethod  # pragma: no branch
    def get_info(self) -> Dict[str, Any]:
        """Return a dict with additional info useful for introspection"""

    @property  # pragma: no branch
    @abstractmethod
    def apps(self) -> Tuple[Application, ...]:
        """Stack of nested applications.

        Top level application is left-most element.

        """

    @abstractmethod
    def add_app(self, app: Application) -> None:
        """Add application to the nested apps stack."""

    @abstractmethod
    def freeze(self) -> None:
        """Freeze the match info.

        The method is called after route resolution.

        After the call .add_app() is forbidden.

        """
||||
|
||||
class AbstractView(ABC):
    """Abstract class based view."""

    def __init__(self, request: Request) -> None:
        # The request this view instance is bound to.
        self._request = request

    @property
    def request(self) -> Request:
        """Request instance."""
        return self._request

    @abstractmethod
    def __await__(self) -> Generator[Any, None, StreamResponse]:
        """Execute the view handler."""


class AbstractResolver(ABC):
    """Abstract DNS resolver."""

    @abstractmethod
    async def resolve(self, host: str, port: int, family: int) -> List[Dict[str, Any]]:
        """Return IP address for given hostname"""

    @abstractmethod
    async def close(self) -> None:
        """Release resolver"""
|
||||
|
||||
|
||||
# For type checkers the cookie jar is an Iterable of Morsel objects;
# at runtime the unsubscripted Iterable is used so no generic machinery
# is paid for.
if TYPE_CHECKING:  # pragma: no cover
    IterableBase = Iterable[Morsel[str]]
else:
    IterableBase = Iterable


# Predicate used by AbstractCookieJar.clear() to select cookies to drop.
ClearCookiePredicate = Callable[["Morsel[str]"], bool]
|
||||
|
||||
|
||||
class AbstractCookieJar(Sized, IterableBase):
    """Abstract Cookie Jar."""

    def __init__(self, *, loop: Optional[asyncio.AbstractEventLoop] = None) -> None:
        # Resolve the event loop now; passing loop explicitly is the
        # legacy path, otherwise the running loop is used.
        self._loop = get_running_loop(loop)

    @abstractmethod
    def clear(self, predicate: Optional[ClearCookiePredicate] = None) -> None:
        """Clear all cookies if no predicate is passed."""

    @abstractmethod
    def clear_domain(self, domain: str) -> None:
        """Clear all cookies for domain and all subdomains."""

    @abstractmethod
    def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> None:
        """Update cookies."""

    @abstractmethod
    def filter_cookies(self, request_url: URL) -> "BaseCookie[str]":
        """Return the jar's cookies filtered by their attributes."""
|
||||
|
||||
|
||||
class AbstractStreamWriter(ABC):
    """Abstract stream writer."""

    # NOTE(review): these class-level counters appear to track buffered
    # and total written byte counts; confirm semantics in concrete
    # implementations before relying on them.
    buffer_size = 0
    output_size = 0
    length: Optional[int] = 0

    @abstractmethod
    async def write(self, chunk: bytes) -> None:
        """Write chunk into stream."""

    @abstractmethod
    async def write_eof(self, chunk: bytes = b"") -> None:
        """Write last chunk."""

    @abstractmethod
    async def drain(self) -> None:
        """Flush the write buffer."""

    @abstractmethod
    def enable_compression(self, encoding: str = "deflate") -> None:
        """Enable HTTP body compression"""

    @abstractmethod
    def enable_chunking(self) -> None:
        """Enable HTTP chunked mode"""

    @abstractmethod
    async def write_headers(
        self, status_line: str, headers: "CIMultiDict[str]"
    ) -> None:
        """Write HTTP headers"""
|
||||
|
||||
|
||||
class AbstractAccessLogger(ABC):
    """Abstract writer to access log."""

    def __init__(self, logger: logging.Logger, log_format: str) -> None:
        # Destination logger and the access-log format string used by
        # concrete implementations of log().
        self.logger = logger
        self.log_format = log_format

    @abstractmethod
    def log(self, request: BaseRequest, response: StreamResponse, time: float) -> None:
        """Emit log to logger."""
|
|
@ -0,0 +1,90 @@
|
|||
import asyncio
|
||||
from typing import Optional, cast
|
||||
|
||||
from .tcp_helpers import tcp_nodelay
|
||||
|
||||
|
||||
class BaseProtocol(asyncio.Protocol):
    """Shared asyncio.Protocol base with write flow-control support.

    Tracks the transport, write-side pausing driven by the transport's
    high/low water marks, read-side pausing, and exposes
    ``_drain_helper()`` so writers can await until writing may resume.
    """

    __slots__ = (
        "_loop",
        "_paused",
        "_drain_waiter",
        "_connection_lost",
        "_reading_paused",
        "transport",
    )

    def __init__(self, loop: asyncio.AbstractEventLoop) -> None:
        self._loop: asyncio.AbstractEventLoop = loop
        self._paused = False
        # Future resolved when writing is allowed again (or conn is lost).
        self._drain_waiter: Optional[asyncio.Future[None]] = None
        self._reading_paused = False
        self.transport: Optional[asyncio.Transport] = None

    @property
    def connected(self) -> bool:
        """Return True if the connection is open."""
        return self.transport is not None

    def pause_writing(self) -> None:
        # Transport buffer crossed the high-water mark.
        assert not self._paused
        self._paused = True

    def resume_writing(self) -> None:
        # Transport buffer fell below the low-water mark: wake the waiter.
        assert self._paused
        self._paused = False
        waiter, self._drain_waiter = self._drain_waiter, None
        if waiter is not None and not waiter.done():
            waiter.set_result(None)

    def pause_reading(self) -> None:
        if self._reading_paused or self.transport is None:
            return
        try:
            self.transport.pause_reading()
        except (AttributeError, NotImplementedError, RuntimeError):
            # Some transports do not support read pausing; best effort.
            pass
        self._reading_paused = True

    def resume_reading(self) -> None:
        if not self._reading_paused or self.transport is None:
            return
        try:
            self.transport.resume_reading()
        except (AttributeError, NotImplementedError, RuntimeError):
            pass
        self._reading_paused = False

    def connection_made(self, transport: asyncio.BaseTransport) -> None:
        tr = cast(asyncio.Transport, transport)
        # Disable Nagle's algorithm for lower latency on small writes.
        tcp_nodelay(tr, True)
        self.transport = tr

    def connection_lost(self, exc: Optional[BaseException]) -> None:
        """Detach the transport and wake any writer blocked in drain."""
        self.transport = None
        if not self._paused:
            return
        waiter, self._drain_waiter = self._drain_waiter, None
        if waiter is None or waiter.done():
            return
        if exc is None:
            waiter.set_result(None)
        else:
            waiter.set_exception(exc)

    async def _drain_helper(self) -> None:
        # Raise immediately if the connection is already gone.
        if not self.connected:
            raise ConnectionResetError("Connection lost")
        if not self._paused:
            return
        waiter = self._drain_waiter
        if waiter is None:
            waiter = self._loop.create_future()
            self._drain_waiter = waiter
        # Shield so cancelling one drain() does not cancel the shared waiter.
        await asyncio.shield(waiter)
|
Plik diff jest za duży
Load Diff
|
@ -0,0 +1,342 @@
|
|||
"""HTTP related errors."""
|
||||
|
||||
import asyncio
|
||||
import warnings
|
||||
from typing import TYPE_CHECKING, Any, Optional, Tuple, Union
|
||||
|
||||
from .http_parser import RawResponseMessage
|
||||
from .typedefs import LooseHeaders
|
||||
|
||||
try:
|
||||
import ssl
|
||||
|
||||
SSLContext = ssl.SSLContext
|
||||
except ImportError: # pragma: no cover
|
||||
ssl = SSLContext = None # type: ignore[assignment]
|
||||
|
||||
|
||||
if TYPE_CHECKING: # pragma: no cover
|
||||
from .client_reqrep import ClientResponse, ConnectionKey, Fingerprint, RequestInfo
|
||||
else:
|
||||
RequestInfo = ClientResponse = ConnectionKey = None
|
||||
|
||||
# Public names re-exported via ``from .client_exceptions import *``.
# NOTE(review): TooManyRedirects and UnixClientConnectorError are defined
# below but not listed here — confirm whether that is intentional.
__all__ = (
    "ClientError",
    "ClientConnectionError",
    "ClientOSError",
    "ClientConnectorError",
    "ClientProxyConnectionError",
    "ClientSSLError",
    "ClientConnectorSSLError",
    "ClientConnectorCertificateError",
    "ServerConnectionError",
    "ServerTimeoutError",
    "ServerDisconnectedError",
    "ServerFingerprintMismatch",
    "ClientResponseError",
    "ClientHttpProxyError",
    "WSServerHandshakeError",
    "ContentTypeError",
    "ClientPayloadError",
    "InvalidURL",
)
|
||||
|
||||
|
||||
# Root of the aiohttp client-side exception hierarchy.
class ClientError(Exception):
    """Base class for client connection errors."""
|
||||
|
||||
|
||||
class ClientResponseError(ClientError):
    """Connection error during reading response.

    request_info: instance of RequestInfo
    """

    def __init__(
        self,
        request_info: RequestInfo,
        history: Tuple[ClientResponse, ...],
        *,
        code: Optional[int] = None,
        status: Optional[int] = None,
        message: str = "",
        headers: Optional[LooseHeaders] = None,
    ) -> None:
        self.request_info = request_info
        # ``code`` is the deprecated alias of ``status``: reject the
        # ambiguous combination, warn on lone use.
        if code is not None:
            if status is not None:
                raise ValueError(
                    "Both code and status arguments are provided; "
                    "code is deprecated, use status instead"
                )
            warnings.warn(
                "code argument is deprecated, use status instead",
                DeprecationWarning,
                stacklevel=2,
            )
        # ``status`` wins; fall back to the deprecated ``code``; 0 means
        # unknown.
        if status is not None:
            self.status = status
        else:
            self.status = code if code is not None else 0
        self.message = message
        self.headers = headers
        self.history = history
        self.args = (request_info, history)

    def __str__(self) -> str:
        return "{}, message={!r}, url={!r}".format(
            self.status,
            self.message,
            self.request_info.real_url,
        )

    def __repr__(self) -> str:
        # Only include fields that differ from their defaults.
        parts = [f"{self.request_info!r}", f"{self.history!r}"]
        if self.status != 0:
            parts.append(f"status={self.status!r}")
        if self.message != "":
            parts.append(f"message={self.message!r}")
        if self.headers is not None:
            parts.append(f"headers={self.headers!r}")
        return f"{type(self).__name__}({', '.join(parts)})"

    @property
    def code(self) -> int:
        """Deprecated alias for :attr:`status`."""
        warnings.warn(
            "code property is deprecated, use status instead",
            DeprecationWarning,
            stacklevel=2,
        )
        return self.status

    @code.setter
    def code(self, value: int) -> None:
        warnings.warn(
            "code property is deprecated, use status instead",
            DeprecationWarning,
            stacklevel=2,
        )
        self.status = value
|
||||
|
||||
|
||||
# Marker subclasses: these carry no extra state, only a more specific
# meaning than their ClientResponseError / ClientError bases.
class ContentTypeError(ClientResponseError):
    """ContentType found is not valid."""


class WSServerHandshakeError(ClientResponseError):
    """websocket server handshake error."""


class ClientHttpProxyError(ClientResponseError):
    """HTTP proxy error.

    Raised in :class:`aiohttp.connector.TCPConnector` if
    proxy responds with status other than ``200 OK``
    on ``CONNECT`` request.
    """


class TooManyRedirects(ClientResponseError):
    """Client was redirected too many times."""


class ClientConnectionError(ClientError):
    """Base class for client socket errors."""


class ClientOSError(ClientConnectionError, OSError):
    """OSError error."""
|
||||
|
||||
|
||||
class ClientConnectorError(ClientOSError):
    """Client connector error.

    Raised in :class:`aiohttp.connector.TCPConnector` if
    a connection can not be established.
    """

    def __init__(self, connection_key: ConnectionKey, os_error: OSError) -> None:
        # Keep both the connection key (host/port/ssl info) and the
        # underlying OSError for introspection via the properties below.
        self._conn_key = connection_key
        self._os_error = os_error
        super().__init__(os_error.errno, os_error.strerror)
        self.args = (connection_key, os_error)

    @property
    def os_error(self) -> OSError:
        # The original low-level error that caused the failure.
        return self._os_error

    @property
    def host(self) -> str:
        return self._conn_key.host

    @property
    def port(self) -> Optional[int]:
        return self._conn_key.port

    @property
    def ssl(self) -> Union[SSLContext, None, bool, "Fingerprint"]:
        return self._conn_key.ssl

    def __str__(self) -> str:
        return "Cannot connect to host {0.host}:{0.port} ssl:{1} [{2}]".format(
            self, self.ssl if self.ssl is not None else "default", self.strerror
        )

    # OSError.__reduce__ does too much black magic (it rebuilds the
    # exception from errno/strerror, losing our extra state), so restore
    # the plain BaseException pickling behavior.
    __reduce__ = BaseException.__reduce__
|
||||
|
||||
|
||||
class ClientProxyConnectionError(ClientConnectorError):
    """Proxy connection error.

    Raised in :class:`aiohttp.connector.TCPConnector` if
    connection to proxy can not be established.
    """


class UnixClientConnectorError(ClientConnectorError):
    """Unix connector error.

    Raised in :py:class:`aiohttp.connector.UnixConnector`
    if connection to unix socket can not be established.
    """

    def __init__(
        self, path: str, connection_key: ConnectionKey, os_error: OSError
    ) -> None:
        # Remember the socket path; everything else is handled by the base.
        self._path = path
        super().__init__(connection_key, os_error)

    @property
    def path(self) -> str:
        # Filesystem path of the unix socket that failed to connect.
        return self._path

    def __str__(self) -> str:
        return "Cannot connect to unix socket {0.path} ssl:{1} [{2}]".format(
            self, self.ssl if self.ssl is not None else "default", self.strerror
        )
|
||||
|
||||
|
||||
class ServerConnectionError(ClientConnectionError):
    """Server connection errors."""


class ServerDisconnectedError(ServerConnectionError):
    """Server disconnected."""

    def __init__(self, message: Union[RawResponseMessage, str, None] = None) -> None:
        if message is None:
            message = "Server disconnected"

        # args is set directly (instead of via super().__init__) so the
        # defaulted message is what shows up in str()/pickling.
        self.args = (message,)
        self.message = message


class ServerTimeoutError(ServerConnectionError, asyncio.TimeoutError):
    """Server timeout error."""
|
||||
|
||||
|
||||
class ServerFingerprintMismatch(ServerConnectionError):
    """SSL certificate does not match expected fingerprint."""

    def __init__(self, expected: bytes, got: bytes, host: str, port: int) -> None:
        # Store the expected/actual digests and the endpoint for reporting.
        self.expected = expected
        self.got = got
        self.host = host
        self.port = port
        self.args = (expected, got, host, port)

    def __repr__(self) -> str:
        return "<{} expected={!r} got={!r} host={!r} port={!r}>".format(
            self.__class__.__name__, self.expected, self.got, self.host, self.port
        )


class ClientPayloadError(ClientError):
    """Response payload error."""
|
||||
|
||||
|
||||
class InvalidURL(ClientError, ValueError):
    """Invalid URL.

    URL used for fetching is malformed, e.g. it doesn't contain a host
    part.
    """

    # Derive from ValueError for backward compatibility

    def __init__(self, url: Any) -> None:
        # The type of url is not yarl.URL because the exception can be raised
        # on URL(url) call
        super().__init__(url)

    @property
    def url(self) -> Any:
        # The offending URL, as originally passed in.
        return self.args[0]

    def __repr__(self) -> str:
        return f"<{self.__class__.__name__} {self.url}>"


class ClientSSLError(ClientConnectorError):
    """Base error for ssl.*Errors."""
|
||||
|
||||
|
||||
# Build the base-class tuples for the SSL exception classes below.  When
# the ssl module is unavailable (ssl is None, see the guarded import at
# the top of the file), fall back to stdlib bases so the classes still
# exist and are catchable.
if ssl is not None:
    cert_errors = (ssl.CertificateError,)
    cert_errors_bases = (
        ClientSSLError,
        ssl.CertificateError,
    )

    ssl_errors = (ssl.SSLError,)
    ssl_error_bases = (ClientSSLError, ssl.SSLError)
else:  # pragma: no cover
    cert_errors = tuple()
    cert_errors_bases = (
        ClientSSLError,
        ValueError,
    )

    ssl_errors = tuple()
    ssl_error_bases = (ClientSSLError,)
|
||||
|
||||
|
||||
# Bases are chosen dynamically above depending on ssl availability.
class ClientConnectorSSLError(*ssl_error_bases):  # type: ignore[misc]
    """Response ssl error."""


class ClientConnectorCertificateError(*cert_errors_bases):  # type: ignore[misc]
    """Response certificate error."""

    def __init__(
        self, connection_key: ConnectionKey, certificate_error: Exception
    ) -> None:
        # Keep the connection key and the wrapped certificate error for
        # the introspection properties below.
        self._conn_key = connection_key
        self._certificate_error = certificate_error
        self.args = (connection_key, certificate_error)

    @property
    def certificate_error(self) -> Exception:
        # The underlying ssl certificate validation error.
        return self._certificate_error

    @property
    def host(self) -> str:
        return self._conn_key.host

    @property
    def port(self) -> Optional[int]:
        return self._conn_key.port

    @property
    def ssl(self) -> bool:
        return self._conn_key.is_ssl

    def __str__(self) -> str:
        return (
            "Cannot connect to host {0.host}:{0.port} ssl:{0.ssl} "
            "[{0.certificate_error.__class__.__name__}: "
            "{0.certificate_error.args}]".format(self)
        )
|
|
@ -0,0 +1,251 @@
|
|||
import asyncio
|
||||
from contextlib import suppress
|
||||
from typing import Any, Optional, Tuple
|
||||
|
||||
from .base_protocol import BaseProtocol
|
||||
from .client_exceptions import (
|
||||
ClientOSError,
|
||||
ClientPayloadError,
|
||||
ServerDisconnectedError,
|
||||
ServerTimeoutError,
|
||||
)
|
||||
from .helpers import BaseTimerContext
|
||||
from .http import HttpResponseParser, RawResponseMessage
|
||||
from .streams import EMPTY_PAYLOAD, DataQueue, StreamReader
|
||||
|
||||
|
||||
class ResponseHandler(BaseProtocol, DataQueue[Tuple[RawResponseMessage, StreamReader]]):
|
||||
"""Helper class to adapt between Protocol and StreamReader."""
|
||||
|
||||
def __init__(self, loop: asyncio.AbstractEventLoop) -> None:
|
||||
BaseProtocol.__init__(self, loop=loop)
|
||||
DataQueue.__init__(self, loop)
|
||||
|
||||
self._should_close = False
|
||||
|
||||
self._payload: Optional[StreamReader] = None
|
||||
self._skip_payload = False
|
||||
self._payload_parser = None
|
||||
|
||||
self._timer = None
|
||||
|
||||
self._tail = b""
|
||||
self._upgraded = False
|
||||
self._parser: Optional[HttpResponseParser] = None
|
||||
|
||||
self._read_timeout: Optional[float] = None
|
||||
self._read_timeout_handle: Optional[asyncio.TimerHandle] = None
|
||||
|
||||
@property
|
||||
def upgraded(self) -> bool:
|
||||
return self._upgraded
|
||||
|
||||
@property
|
||||
def should_close(self) -> bool:
|
||||
if self._payload is not None and not self._payload.is_eof() or self._upgraded:
|
||||
return True
|
||||
|
||||
return (
|
||||
self._should_close
|
||||
or self._upgraded
|
||||
or self.exception() is not None
|
||||
or self._payload_parser is not None
|
||||
or len(self) > 0
|
||||
or bool(self._tail)
|
||||
)
|
||||
|
||||
def force_close(self) -> None:
|
||||
self._should_close = True
|
||||
|
||||
def close(self) -> None:
|
||||
transport = self.transport
|
||||
if transport is not None:
|
||||
transport.close()
|
||||
self.transport = None
|
||||
self._payload = None
|
||||
self._drop_timeout()
|
||||
|
||||
def is_connected(self) -> bool:
|
||||
return self.transport is not None and not self.transport.is_closing()
|
||||
|
||||
def connection_lost(self, exc: Optional[BaseException]) -> None:
|
||||
self._drop_timeout()
|
||||
|
||||
if self._payload_parser is not None:
|
||||
with suppress(Exception):
|
||||
self._payload_parser.feed_eof()
|
||||
|
||||
uncompleted = None
|
||||
if self._parser is not None:
|
||||
try:
|
||||
uncompleted = self._parser.feed_eof()
|
||||
except Exception:
|
||||
if self._payload is not None:
|
||||
self._payload.set_exception(
|
||||
ClientPayloadError("Response payload is not completed")
|
||||
)
|
||||
|
||||
if not self.is_eof():
|
||||
if isinstance(exc, OSError):
|
||||
exc = ClientOSError(*exc.args)
|
||||
if exc is None:
|
||||
exc = ServerDisconnectedError(uncompleted)
|
||||
# assigns self._should_close to True as side effect,
|
||||
# we do it anyway below
|
||||
self.set_exception(exc)
|
||||
|
||||
self._should_close = True
|
||||
self._parser = None
|
||||
self._payload = None
|
||||
self._payload_parser = None
|
||||
self._reading_paused = False
|
||||
|
||||
super().connection_lost(exc)
|
||||
|
||||
def eof_received(self) -> None:
|
||||
# should call parser.feed_eof() most likely
|
||||
self._drop_timeout()
|
||||
|
||||
def pause_reading(self) -> None:
|
||||
super().pause_reading()
|
||||
self._drop_timeout()
|
||||
|
||||
def resume_reading(self) -> None:
|
||||
super().resume_reading()
|
||||
self._reschedule_timeout()
|
||||
|
||||
def set_exception(self, exc: BaseException) -> None:
|
||||
self._should_close = True
|
||||
self._drop_timeout()
|
||||
super().set_exception(exc)
|
||||
|
||||
def set_parser(self, parser: Any, payload: Any) -> None:
|
||||
# TODO: actual types are:
|
||||
# parser: WebSocketReader
|
||||
# payload: FlowControlDataQueue
|
||||
# but they are not generi enough
|
||||
# Need an ABC for both types
|
||||
self._payload = payload
|
||||
self._payload_parser = parser
|
||||
|
||||
self._drop_timeout()
|
||||
|
||||
if self._tail:
|
||||
data, self._tail = self._tail, b""
|
||||
self.data_received(data)
|
||||
|
||||
def set_response_params(
|
||||
self,
|
||||
*,
|
||||
timer: Optional[BaseTimerContext] = None,
|
||||
skip_payload: bool = False,
|
||||
read_until_eof: bool = False,
|
||||
auto_decompress: bool = True,
|
||||
read_timeout: Optional[float] = None,
|
||||
read_bufsize: int = 2**16,
|
||||
) -> None:
|
||||
self._skip_payload = skip_payload
|
||||
|
||||
self._read_timeout = read_timeout
|
||||
self._reschedule_timeout()
|
||||
|
||||
self._parser = HttpResponseParser(
|
||||
self,
|
||||
self._loop,
|
||||
read_bufsize,
|
||||
timer=timer,
|
||||
payload_exception=ClientPayloadError,
|
||||
response_with_body=not skip_payload,
|
||||
read_until_eof=read_until_eof,
|
||||
auto_decompress=auto_decompress,
|
||||
)
|
||||
|
||||
if self._tail:
|
||||
data, self._tail = self._tail, b""
|
||||
self.data_received(data)
|
||||
|
||||
def _drop_timeout(self) -> None:
|
||||
if self._read_timeout_handle is not None:
|
||||
self._read_timeout_handle.cancel()
|
||||
self._read_timeout_handle = None
|
||||
|
||||
def _reschedule_timeout(self) -> None:
|
||||
timeout = self._read_timeout
|
||||
if self._read_timeout_handle is not None:
|
||||
self._read_timeout_handle.cancel()
|
||||
|
||||
if timeout:
|
||||
self._read_timeout_handle = self._loop.call_later(
|
||||
timeout, self._on_read_timeout
|
||||
)
|
||||
else:
|
||||
self._read_timeout_handle = None
|
||||
|
||||
def _on_read_timeout(self) -> None:
|
||||
exc = ServerTimeoutError("Timeout on reading data from socket")
|
||||
self.set_exception(exc)
|
||||
if self._payload is not None:
|
||||
self._payload.set_exception(exc)
|
||||
|
||||
def data_received(self, data: bytes) -> None:
|
||||
self._reschedule_timeout()
|
||||
|
||||
if not data:
|
||||
return
|
||||
|
||||
# custom payload parser
|
||||
if self._payload_parser is not None:
|
||||
eof, tail = self._payload_parser.feed_data(data)
|
||||
if eof:
|
||||
self._payload = None
|
||||
self._payload_parser = None
|
||||
|
||||
if tail:
|
||||
self.data_received(tail)
|
||||
return
|
||||
else:
|
||||
if self._upgraded or self._parser is None:
|
||||
# i.e. websocket connection, websocket parser is not set yet
|
||||
self._tail += data
|
||||
else:
|
||||
# parse http messages
|
||||
try:
|
||||
messages, upgraded, tail = self._parser.feed_data(data)
|
||||
except BaseException as exc:
|
||||
if self.transport is not None:
|
||||
# connection.release() could be called BEFORE
|
||||
# data_received(), the transport is already
|
||||
# closed in this case
|
||||
self.transport.close()
|
||||
# should_close is True after the call
|
||||
self.set_exception(exc)
|
||||
return
|
||||
|
||||
self._upgraded = upgraded
|
||||
|
||||
payload: Optional[StreamReader] = None
|
||||
for message, payload in messages:
|
||||
if message.should_close:
|
||||
self._should_close = True
|
||||
|
||||
self._payload = payload
|
||||
|
||||
if self._skip_payload or message.code in (204, 304):
|
||||
self.feed_data((message, EMPTY_PAYLOAD), 0)
|
||||
else:
|
||||
self.feed_data((message, payload), 0)
|
||||
if payload is not None:
|
||||
# new message(s) was processed
|
||||
# register timeout handler unsubscribing
|
||||
# either on end-of-stream or immediately for
|
||||
# EMPTY_PAYLOAD
|
||||
if payload is not EMPTY_PAYLOAD:
|
||||
payload.on_eof(self._drop_timeout)
|
||||
else:
|
||||
self._drop_timeout()
|
||||
|
||||
if tail:
|
||||
if upgraded:
|
||||
self.data_received(tail)
|
||||
else:
|
||||
self._tail = tail
|
Plik diff jest za duży
Load Diff
|
@ -0,0 +1,300 @@
|
|||
"""WebSocket client for asyncio."""
|
||||
|
||||
import asyncio
|
||||
from typing import Any, Optional, cast
|
||||
|
||||
import async_timeout
|
||||
|
||||
from .client_exceptions import ClientError
|
||||
from .client_reqrep import ClientResponse
|
||||
from .helpers import call_later, set_result
|
||||
from .http import (
|
||||
WS_CLOSED_MESSAGE,
|
||||
WS_CLOSING_MESSAGE,
|
||||
WebSocketError,
|
||||
WSCloseCode,
|
||||
WSMessage,
|
||||
WSMsgType,
|
||||
)
|
||||
from .http_websocket import WebSocketWriter # WSMessage
|
||||
from .streams import EofStream, FlowControlDataQueue
|
||||
from .typedefs import (
|
||||
DEFAULT_JSON_DECODER,
|
||||
DEFAULT_JSON_ENCODER,
|
||||
JSONDecoder,
|
||||
JSONEncoder,
|
||||
)
|
||||
|
||||
|
||||
class ClientWebSocketResponse:
|
||||
def __init__(
|
||||
self,
|
||||
reader: "FlowControlDataQueue[WSMessage]",
|
||||
writer: WebSocketWriter,
|
||||
protocol: Optional[str],
|
||||
response: ClientResponse,
|
||||
timeout: float,
|
||||
autoclose: bool,
|
||||
autoping: bool,
|
||||
loop: asyncio.AbstractEventLoop,
|
||||
*,
|
||||
receive_timeout: Optional[float] = None,
|
||||
heartbeat: Optional[float] = None,
|
||||
compress: int = 0,
|
||||
client_notakeover: bool = False,
|
||||
) -> None:
|
||||
self._response = response
|
||||
self._conn = response.connection
|
||||
|
||||
self._writer = writer
|
||||
self._reader = reader
|
||||
self._protocol = protocol
|
||||
self._closed = False
|
||||
self._closing = False
|
||||
self._close_code: Optional[int] = None
|
||||
self._timeout = timeout
|
||||
self._receive_timeout = receive_timeout
|
||||
self._autoclose = autoclose
|
||||
self._autoping = autoping
|
||||
self._heartbeat = heartbeat
|
||||
self._heartbeat_cb: Optional[asyncio.TimerHandle] = None
|
||||
if heartbeat is not None:
|
||||
self._pong_heartbeat = heartbeat / 2.0
|
||||
self._pong_response_cb: Optional[asyncio.TimerHandle] = None
|
||||
self._loop = loop
|
||||
self._waiting: Optional[asyncio.Future[bool]] = None
|
||||
self._exception: Optional[BaseException] = None
|
||||
self._compress = compress
|
||||
self._client_notakeover = client_notakeover
|
||||
|
||||
self._reset_heartbeat()
|
||||
|
||||
def _cancel_heartbeat(self) -> None:
|
||||
if self._pong_response_cb is not None:
|
||||
self._pong_response_cb.cancel()
|
||||
self._pong_response_cb = None
|
||||
|
||||
if self._heartbeat_cb is not None:
|
||||
self._heartbeat_cb.cancel()
|
||||
self._heartbeat_cb = None
|
||||
|
||||
def _reset_heartbeat(self) -> None:
|
||||
self._cancel_heartbeat()
|
||||
|
||||
if self._heartbeat is not None:
|
||||
self._heartbeat_cb = call_later(
|
||||
self._send_heartbeat, self._heartbeat, self._loop
|
||||
)
|
||||
|
||||
def _send_heartbeat(self) -> None:
|
||||
if self._heartbeat is not None and not self._closed:
|
||||
# fire-and-forget a task is not perfect but maybe ok for
|
||||
# sending ping. Otherwise we need a long-living heartbeat
|
||||
# task in the class.
|
||||
self._loop.create_task(self._writer.ping())
|
||||
|
||||
if self._pong_response_cb is not None:
|
||||
self._pong_response_cb.cancel()
|
||||
self._pong_response_cb = call_later(
|
||||
self._pong_not_received, self._pong_heartbeat, self._loop
|
||||
)
|
||||
|
||||
def _pong_not_received(self) -> None:
|
||||
if not self._closed:
|
||||
self._closed = True
|
||||
self._close_code = WSCloseCode.ABNORMAL_CLOSURE
|
||||
self._exception = asyncio.TimeoutError()
|
||||
self._response.close()
|
||||
|
||||
@property
|
||||
def closed(self) -> bool:
|
||||
return self._closed
|
||||
|
||||
@property
|
||||
def close_code(self) -> Optional[int]:
|
||||
return self._close_code
|
||||
|
||||
@property
|
||||
def protocol(self) -> Optional[str]:
|
||||
return self._protocol
|
||||
|
||||
@property
|
||||
def compress(self) -> int:
|
||||
return self._compress
|
||||
|
||||
@property
|
||||
def client_notakeover(self) -> bool:
|
||||
return self._client_notakeover
|
||||
|
||||
def get_extra_info(self, name: str, default: Any = None) -> Any:
|
||||
"""extra info from connection transport"""
|
||||
conn = self._response.connection
|
||||
if conn is None:
|
||||
return default
|
||||
transport = conn.transport
|
||||
if transport is None:
|
||||
return default
|
||||
return transport.get_extra_info(name, default)
|
||||
|
||||
def exception(self) -> Optional[BaseException]:
|
||||
return self._exception
|
||||
|
||||
async def ping(self, message: bytes = b"") -> None:
|
||||
await self._writer.ping(message)
|
||||
|
||||
async def pong(self, message: bytes = b"") -> None:
|
||||
await self._writer.pong(message)
|
||||
|
||||
async def send_str(self, data: str, compress: Optional[int] = None) -> None:
|
||||
if not isinstance(data, str):
|
||||
raise TypeError("data argument must be str (%r)" % type(data))
|
||||
await self._writer.send(data, binary=False, compress=compress)
|
||||
|
||||
async def send_bytes(self, data: bytes, compress: Optional[int] = None) -> None:
|
||||
if not isinstance(data, (bytes, bytearray, memoryview)):
|
||||
raise TypeError("data argument must be byte-ish (%r)" % type(data))
|
||||
await self._writer.send(data, binary=True, compress=compress)
|
||||
|
||||
async def send_json(
|
||||
self,
|
||||
data: Any,
|
||||
compress: Optional[int] = None,
|
||||
*,
|
||||
dumps: JSONEncoder = DEFAULT_JSON_ENCODER,
|
||||
) -> None:
|
||||
await self.send_str(dumps(data), compress=compress)
|
||||
|
||||
async def close(self, *, code: int = WSCloseCode.OK, message: bytes = b"") -> bool:
|
||||
# we need to break `receive()` cycle first,
|
||||
# `close()` may be called from different task
|
||||
if self._waiting is not None and not self._closed:
|
||||
self._reader.feed_data(WS_CLOSING_MESSAGE, 0)
|
||||
await self._waiting
|
||||
|
||||
if not self._closed:
|
||||
self._cancel_heartbeat()
|
||||
self._closed = True
|
||||
try:
|
||||
await self._writer.close(code, message)
|
||||
except asyncio.CancelledError:
|
||||
self._close_code = WSCloseCode.ABNORMAL_CLOSURE
|
||||
self._response.close()
|
||||
raise
|
||||
except Exception as exc:
|
||||
self._close_code = WSCloseCode.ABNORMAL_CLOSURE
|
||||
self._exception = exc
|
||||
self._response.close()
|
||||
return True
|
||||
|
||||
if self._closing:
|
||||
self._response.close()
|
||||
return True
|
||||
|
||||
while True:
|
||||
try:
|
||||
async with async_timeout.timeout(self._timeout):
|
||||
msg = await self._reader.read()
|
||||
except asyncio.CancelledError:
|
||||
self._close_code = WSCloseCode.ABNORMAL_CLOSURE
|
||||
self._response.close()
|
||||
raise
|
||||
except Exception as exc:
|
||||
self._close_code = WSCloseCode.ABNORMAL_CLOSURE
|
||||
self._exception = exc
|
||||
self._response.close()
|
||||
return True
|
||||
|
||||
if msg.type == WSMsgType.CLOSE:
|
||||
self._close_code = msg.data
|
||||
self._response.close()
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
|
||||
async def receive(self, timeout: Optional[float] = None) -> WSMessage:
|
||||
while True:
|
||||
if self._waiting is not None:
|
||||
raise RuntimeError("Concurrent call to receive() is not allowed")
|
||||
|
||||
if self._closed:
|
||||
return WS_CLOSED_MESSAGE
|
||||
elif self._closing:
|
||||
await self.close()
|
||||
return WS_CLOSED_MESSAGE
|
||||
|
||||
try:
|
||||
self._waiting = self._loop.create_future()
|
||||
try:
|
||||
async with async_timeout.timeout(timeout or self._receive_timeout):
|
||||
msg = await self._reader.read()
|
||||
self._reset_heartbeat()
|
||||
finally:
|
||||
waiter = self._waiting
|
||||
self._waiting = None
|
||||
set_result(waiter, True)
|
||||
except (asyncio.CancelledError, asyncio.TimeoutError):
|
||||
self._close_code = WSCloseCode.ABNORMAL_CLOSURE
|
||||
raise
|
||||
except EofStream:
|
||||
self._close_code = WSCloseCode.OK
|
||||
await self.close()
|
||||
return WSMessage(WSMsgType.CLOSED, None, None)
|
||||
except ClientError:
|
||||
self._closed = True
|
||||
self._close_code = WSCloseCode.ABNORMAL_CLOSURE
|
||||
return WS_CLOSED_MESSAGE
|
||||
except WebSocketError as exc:
|
||||
self._close_code = exc.code
|
||||
await self.close(code=exc.code)
|
||||
return WSMessage(WSMsgType.ERROR, exc, None)
|
||||
except Exception as exc:
|
||||
self._exception = exc
|
||||
self._closing = True
|
||||
self._close_code = WSCloseCode.ABNORMAL_CLOSURE
|
||||
await self.close()
|
||||
return WSMessage(WSMsgType.ERROR, exc, None)
|
||||
|
||||
if msg.type == WSMsgType.CLOSE:
|
||||
self._closing = True
|
||||
self._close_code = msg.data
|
||||
if not self._closed and self._autoclose:
|
||||
await self.close()
|
||||
elif msg.type == WSMsgType.CLOSING:
|
||||
self._closing = True
|
||||
elif msg.type == WSMsgType.PING and self._autoping:
|
||||
await self.pong(msg.data)
|
||||
continue
|
||||
elif msg.type == WSMsgType.PONG and self._autoping:
|
||||
continue
|
||||
|
||||
return msg
|
||||
|
||||
async def receive_str(self, *, timeout: Optional[float] = None) -> str:
|
||||
msg = await self.receive(timeout)
|
||||
if msg.type != WSMsgType.TEXT:
|
||||
raise TypeError(f"Received message {msg.type}:{msg.data!r} is not str")
|
||||
return cast(str, msg.data)
|
||||
|
||||
async def receive_bytes(self, *, timeout: Optional[float] = None) -> bytes:
|
||||
msg = await self.receive(timeout)
|
||||
if msg.type != WSMsgType.BINARY:
|
||||
raise TypeError(f"Received message {msg.type}:{msg.data!r} is not bytes")
|
||||
return cast(bytes, msg.data)
|
||||
|
||||
async def receive_json(
|
||||
self,
|
||||
*,
|
||||
loads: JSONDecoder = DEFAULT_JSON_DECODER,
|
||||
timeout: Optional[float] = None,
|
||||
) -> Any:
|
||||
data = await self.receive_str(timeout=timeout)
|
||||
return loads(data)
|
||||
|
||||
def __aiter__(self) -> "ClientWebSocketResponse":
|
||||
return self
|
||||
|
||||
async def __anext__(self) -> WSMessage:
|
||||
msg = await self.receive()
|
||||
if msg.type in (WSMsgType.CLOSE, WSMsgType.CLOSING, WSMsgType.CLOSED):
|
||||
raise StopAsyncIteration
|
||||
return msg
|
Plik diff jest za duży
Load Diff
|
@ -0,0 +1,415 @@
|
|||
import asyncio
|
||||
import contextlib
|
||||
import datetime
|
||||
import os # noqa
|
||||
import pathlib
|
||||
import pickle
|
||||
import re
|
||||
from collections import defaultdict
|
||||
from http.cookies import BaseCookie, Morsel, SimpleCookie
|
||||
from typing import ( # noqa
|
||||
DefaultDict,
|
||||
Dict,
|
||||
Iterable,
|
||||
Iterator,
|
||||
List,
|
||||
Mapping,
|
||||
Optional,
|
||||
Set,
|
||||
Tuple,
|
||||
Union,
|
||||
cast,
|
||||
)
|
||||
|
||||
from yarl import URL
|
||||
|
||||
from .abc import AbstractCookieJar, ClearCookiePredicate
|
||||
from .helpers import is_ip_address, next_whole_second
|
||||
from .typedefs import LooseCookies, PathLike, StrOrURL
|
||||
|
||||
__all__ = ("CookieJar", "DummyCookieJar")
|
||||
|
||||
|
||||
CookieItem = Union[str, "Morsel[str]"]
|
||||
|
||||
|
||||
class CookieJar(AbstractCookieJar):
|
||||
"""Implements cookie storage adhering to RFC 6265."""
|
||||
|
||||
DATE_TOKENS_RE = re.compile(
|
||||
r"[\x09\x20-\x2F\x3B-\x40\x5B-\x60\x7B-\x7E]*"
|
||||
r"(?P<token>[\x00-\x08\x0A-\x1F\d:a-zA-Z\x7F-\xFF]+)"
|
||||
)
|
||||
|
||||
DATE_HMS_TIME_RE = re.compile(r"(\d{1,2}):(\d{1,2}):(\d{1,2})")
|
||||
|
||||
DATE_DAY_OF_MONTH_RE = re.compile(r"(\d{1,2})")
|
||||
|
||||
DATE_MONTH_RE = re.compile(
|
||||
"(jan)|(feb)|(mar)|(apr)|(may)|(jun)|(jul)|" "(aug)|(sep)|(oct)|(nov)|(dec)",
|
||||
re.I,
|
||||
)
|
||||
|
||||
DATE_YEAR_RE = re.compile(r"(\d{2,4})")
|
||||
|
||||
MAX_TIME = datetime.datetime.max.replace(tzinfo=datetime.timezone.utc)
|
||||
|
||||
MAX_32BIT_TIME = datetime.datetime.utcfromtimestamp(2**31 - 1)
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
*,
|
||||
unsafe: bool = False,
|
||||
quote_cookie: bool = True,
|
||||
treat_as_secure_origin: Union[StrOrURL, List[StrOrURL], None] = None,
|
||||
loop: Optional[asyncio.AbstractEventLoop] = None,
|
||||
) -> None:
|
||||
super().__init__(loop=loop)
|
||||
self._cookies: DefaultDict[Tuple[str, str], SimpleCookie[str]] = defaultdict(
|
||||
SimpleCookie
|
||||
)
|
||||
self._host_only_cookies: Set[Tuple[str, str]] = set()
|
||||
self._unsafe = unsafe
|
||||
self._quote_cookie = quote_cookie
|
||||
if treat_as_secure_origin is None:
|
||||
treat_as_secure_origin = []
|
||||
elif isinstance(treat_as_secure_origin, URL):
|
||||
treat_as_secure_origin = [treat_as_secure_origin.origin()]
|
||||
elif isinstance(treat_as_secure_origin, str):
|
||||
treat_as_secure_origin = [URL(treat_as_secure_origin).origin()]
|
||||
else:
|
||||
treat_as_secure_origin = [
|
||||
URL(url).origin() if isinstance(url, str) else url.origin()
|
||||
for url in treat_as_secure_origin
|
||||
]
|
||||
self._treat_as_secure_origin = treat_as_secure_origin
|
||||
self._next_expiration = next_whole_second()
|
||||
self._expirations: Dict[Tuple[str, str, str], datetime.datetime] = {}
|
||||
# #4515: datetime.max may not be representable on 32-bit platforms
|
||||
self._max_time = self.MAX_TIME
|
||||
try:
|
||||
self._max_time.timestamp()
|
||||
except OverflowError:
|
||||
self._max_time = self.MAX_32BIT_TIME
|
||||
|
||||
def save(self, file_path: PathLike) -> None:
|
||||
file_path = pathlib.Path(file_path)
|
||||
with file_path.open(mode="wb") as f:
|
||||
pickle.dump(self._cookies, f, pickle.HIGHEST_PROTOCOL)
|
||||
|
||||
def load(self, file_path: PathLike) -> None:
|
||||
file_path = pathlib.Path(file_path)
|
||||
with file_path.open(mode="rb") as f:
|
||||
self._cookies = pickle.load(f)
|
||||
|
||||
def clear(self, predicate: Optional[ClearCookiePredicate] = None) -> None:
|
||||
if predicate is None:
|
||||
self._next_expiration = next_whole_second()
|
||||
self._cookies.clear()
|
||||
self._host_only_cookies.clear()
|
||||
self._expirations.clear()
|
||||
return
|
||||
|
||||
to_del = []
|
||||
now = datetime.datetime.now(datetime.timezone.utc)
|
||||
for (domain, path), cookie in self._cookies.items():
|
||||
for name, morsel in cookie.items():
|
||||
key = (domain, path, name)
|
||||
if (
|
||||
key in self._expirations and self._expirations[key] <= now
|
||||
) or predicate(morsel):
|
||||
to_del.append(key)
|
||||
|
||||
for domain, path, name in to_del:
|
||||
self._host_only_cookies.discard((domain, name))
|
||||
key = (domain, path, name)
|
||||
if key in self._expirations:
|
||||
del self._expirations[(domain, path, name)]
|
||||
self._cookies[(domain, path)].pop(name, None)
|
||||
|
||||
next_expiration = min(self._expirations.values(), default=self._max_time)
|
||||
try:
|
||||
self._next_expiration = next_expiration.replace(
|
||||
microsecond=0
|
||||
) + datetime.timedelta(seconds=1)
|
||||
except OverflowError:
|
||||
self._next_expiration = self._max_time
|
||||
|
||||
def clear_domain(self, domain: str) -> None:
|
||||
self.clear(lambda x: self._is_domain_match(domain, x["domain"]))
|
||||
|
||||
def __iter__(self) -> "Iterator[Morsel[str]]":
|
||||
self._do_expiration()
|
||||
for val in self._cookies.values():
|
||||
yield from val.values()
|
||||
|
||||
def __len__(self) -> int:
|
||||
return sum(1 for i in self)
|
||||
|
||||
def _do_expiration(self) -> None:
|
||||
self.clear(lambda x: False)
|
||||
|
||||
def _expire_cookie(
|
||||
self, when: datetime.datetime, domain: str, path: str, name: str
|
||||
) -> None:
|
||||
self._next_expiration = min(self._next_expiration, when)
|
||||
self._expirations[(domain, path, name)] = when
|
||||
|
||||
def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> None:
|
||||
"""Update cookies."""
|
||||
hostname = response_url.raw_host
|
||||
|
||||
if not self._unsafe and is_ip_address(hostname):
|
||||
# Don't accept cookies from IPs
|
||||
return
|
||||
|
||||
if isinstance(cookies, Mapping):
|
||||
cookies = cookies.items()
|
||||
|
||||
for name, cookie in cookies:
|
||||
if not isinstance(cookie, Morsel):
|
||||
tmp: SimpleCookie[str] = SimpleCookie()
|
||||
tmp[name] = cookie # type: ignore[assignment]
|
||||
cookie = tmp[name]
|
||||
|
||||
domain = cookie["domain"]
|
||||
|
||||
# ignore domains with trailing dots
|
||||
if domain.endswith("."):
|
||||
domain = ""
|
||||
del cookie["domain"]
|
||||
|
||||
if not domain and hostname is not None:
|
||||
# Set the cookie's domain to the response hostname
|
||||
# and set its host-only-flag
|
||||
self._host_only_cookies.add((hostname, name))
|
||||
domain = cookie["domain"] = hostname
|
||||
|
||||
if domain.startswith("."):
|
||||
# Remove leading dot
|
||||
domain = domain[1:]
|
||||
cookie["domain"] = domain
|
||||
|
||||
if hostname and not self._is_domain_match(domain, hostname):
|
||||
# Setting cookies for different domains is not allowed
|
||||
continue
|
||||
|
||||
path = cookie["path"]
|
||||
if not path or not path.startswith("/"):
|
||||
# Set the cookie's path to the response path
|
||||
path = response_url.path
|
||||
if not path.startswith("/"):
|
||||
path = "/"
|
||||
else:
|
||||
# Cut everything from the last slash to the end
|
||||
path = "/" + path[1 : path.rfind("/")]
|
||||
cookie["path"] = path
|
||||
|
||||
max_age = cookie["max-age"]
|
||||
if max_age:
|
||||
try:
|
||||
delta_seconds = int(max_age)
|
||||
try:
|
||||
max_age_expiration = datetime.datetime.now(
|
||||
datetime.timezone.utc
|
||||
) + datetime.timedelta(seconds=delta_seconds)
|
||||
except OverflowError:
|
||||
max_age_expiration = self._max_time
|
||||
self._expire_cookie(max_age_expiration, domain, path, name)
|
||||
except ValueError:
|
||||
cookie["max-age"] = ""
|
||||
|
||||
else:
|
||||
expires = cookie["expires"]
|
||||
if expires:
|
||||
expire_time = self._parse_date(expires)
|
||||
if expire_time:
|
||||
self._expire_cookie(expire_time, domain, path, name)
|
||||
else:
|
||||
cookie["expires"] = ""
|
||||
|
||||
self._cookies[(domain, path)][name] = cookie
|
||||
|
||||
self._do_expiration()
|
||||
|
||||
def filter_cookies(
|
||||
self, request_url: URL = URL()
|
||||
) -> Union["BaseCookie[str]", "SimpleCookie[str]"]:
|
||||
"""Returns this jar's cookies filtered by their attributes."""
|
||||
self._do_expiration()
|
||||
request_url = URL(request_url)
|
||||
filtered: Union["SimpleCookie[str]", "BaseCookie[str]"] = (
|
||||
SimpleCookie() if self._quote_cookie else BaseCookie()
|
||||
)
|
||||
hostname = request_url.raw_host or ""
|
||||
request_origin = URL()
|
||||
with contextlib.suppress(ValueError):
|
||||
request_origin = request_url.origin()
|
||||
|
||||
is_not_secure = (
|
||||
request_url.scheme not in ("https", "wss")
|
||||
and request_origin not in self._treat_as_secure_origin
|
||||
)
|
||||
|
||||
for cookie in self:
|
||||
name = cookie.key
|
||||
domain = cookie["domain"]
|
||||
|
||||
# Send shared cookies
|
||||
if not domain:
|
||||
filtered[name] = cookie.value
|
||||
continue
|
||||
|
||||
if not self._unsafe and is_ip_address(hostname):
|
||||
continue
|
||||
|
||||
if (domain, name) in self._host_only_cookies:
|
||||
if domain != hostname:
|
||||
continue
|
||||
elif not self._is_domain_match(domain, hostname):
|
||||
continue
|
||||
|
||||
if not self._is_path_match(request_url.path, cookie["path"]):
|
||||
continue
|
||||
|
||||
if is_not_secure and cookie["secure"]:
|
||||
continue
|
||||
|
||||
# It's critical we use the Morsel so the coded_value
|
||||
# (based on cookie version) is preserved
|
||||
mrsl_val = cast("Morsel[str]", cookie.get(cookie.key, Morsel()))
|
||||
mrsl_val.set(cookie.key, cookie.value, cookie.coded_value)
|
||||
filtered[name] = mrsl_val
|
||||
|
||||
return filtered
|
||||
|
||||
@staticmethod
|
||||
def _is_domain_match(domain: str, hostname: str) -> bool:
|
||||
"""Implements domain matching adhering to RFC 6265."""
|
||||
if hostname == domain:
|
||||
return True
|
||||
|
||||
if not hostname.endswith(domain):
|
||||
return False
|
||||
|
||||
non_matching = hostname[: -len(domain)]
|
||||
|
||||
if not non_matching.endswith("."):
|
||||
return False
|
||||
|
||||
return not is_ip_address(hostname)
|
||||
|
||||
@staticmethod
|
||||
def _is_path_match(req_path: str, cookie_path: str) -> bool:
|
||||
"""Implements path matching adhering to RFC 6265."""
|
||||
if not req_path.startswith("/"):
|
||||
req_path = "/"
|
||||
|
||||
if req_path == cookie_path:
|
||||
return True
|
||||
|
||||
if not req_path.startswith(cookie_path):
|
||||
return False
|
||||
|
||||
if cookie_path.endswith("/"):
|
||||
return True
|
||||
|
||||
non_matching = req_path[len(cookie_path) :]
|
||||
|
||||
return non_matching.startswith("/")
|
||||
|
||||
@classmethod
|
||||
def _parse_date(cls, date_str: str) -> Optional[datetime.datetime]:
|
||||
"""Implements date string parsing adhering to RFC 6265."""
|
||||
if not date_str:
|
||||
return None
|
||||
|
||||
found_time = False
|
||||
found_day = False
|
||||
found_month = False
|
||||
found_year = False
|
||||
|
||||
hour = minute = second = 0
|
||||
day = 0
|
||||
month = 0
|
||||
year = 0
|
||||
|
||||
for token_match in cls.DATE_TOKENS_RE.finditer(date_str):
|
||||
|
||||
token = token_match.group("token")
|
||||
|
||||
if not found_time:
|
||||
time_match = cls.DATE_HMS_TIME_RE.match(token)
|
||||
if time_match:
|
||||
found_time = True
|
||||
hour, minute, second = (int(s) for s in time_match.groups())
|
||||
continue
|
||||
|
||||
if not found_day:
|
||||
day_match = cls.DATE_DAY_OF_MONTH_RE.match(token)
|
||||
if day_match:
|
||||
found_day = True
|
||||
day = int(day_match.group())
|
||||
continue
|
||||
|
||||
if not found_month:
|
||||
month_match = cls.DATE_MONTH_RE.match(token)
|
||||
if month_match:
|
||||
found_month = True
|
||||
assert month_match.lastindex is not None
|
||||
month = month_match.lastindex
|
||||
continue
|
||||
|
||||
if not found_year:
|
||||
year_match = cls.DATE_YEAR_RE.match(token)
|
||||
if year_match:
|
||||
found_year = True
|
||||
year = int(year_match.group())
|
||||
|
||||
if 70 <= year <= 99:
|
||||
year += 1900
|
||||
elif 0 <= year <= 69:
|
||||
year += 2000
|
||||
|
||||
if False in (found_day, found_month, found_year, found_time):
|
||||
return None
|
||||
|
||||
if not 1 <= day <= 31:
|
||||
return None
|
||||
|
||||
if year < 1601 or hour > 23 or minute > 59 or second > 59:
|
||||
return None
|
||||
|
||||
return datetime.datetime(
|
||||
year, month, day, hour, minute, second, tzinfo=datetime.timezone.utc
|
||||
)
|
||||
|
||||
|
||||
class DummyCookieJar(AbstractCookieJar):
|
||||
"""Implements a dummy cookie storage.
|
||||
|
||||
It can be used with the ClientSession when no cookie processing is needed.
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self, *, loop: Optional[asyncio.AbstractEventLoop] = None) -> None:
|
||||
super().__init__(loop=loop)
|
||||
|
||||
def __iter__(self) -> "Iterator[Morsel[str]]":
|
||||
while False:
|
||||
yield None
|
||||
|
||||
def __len__(self) -> int:
|
||||
return 0
|
||||
|
||||
def clear(self, predicate: Optional[ClearCookiePredicate] = None) -> None:
|
||||
pass
|
||||
|
||||
def clear_domain(self, domain: str) -> None:
|
||||
pass
|
||||
|
||||
def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> None:
|
||||
pass
|
||||
|
||||
def filter_cookies(self, request_url: URL) -> "BaseCookie[str]":
|
||||
return SimpleCookie()
|
|
@ -0,0 +1,172 @@
|
|||
import io
|
||||
from typing import Any, Iterable, List, Optional
|
||||
from urllib.parse import urlencode
|
||||
|
||||
from multidict import MultiDict, MultiDictProxy
|
||||
|
||||
from . import hdrs, multipart, payload
|
||||
from .helpers import guess_filename
|
||||
from .payload import Payload
|
||||
|
||||
__all__ = ("FormData",)
|
||||
|
||||
|
||||
class FormData:
|
||||
"""Helper class for form body generation.
|
||||
|
||||
Supports multipart/form-data and application/x-www-form-urlencoded.
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
fields: Iterable[Any] = (),
|
||||
quote_fields: bool = True,
|
||||
charset: Optional[str] = None,
|
||||
) -> None:
|
||||
self._writer = multipart.MultipartWriter("form-data")
|
||||
self._fields: List[Any] = []
|
||||
self._is_multipart = False
|
||||
self._is_processed = False
|
||||
self._quote_fields = quote_fields
|
||||
self._charset = charset
|
||||
|
||||
if isinstance(fields, dict):
|
||||
fields = list(fields.items())
|
||||
elif not isinstance(fields, (list, tuple)):
|
||||
fields = (fields,)
|
||||
self.add_fields(*fields)
|
||||
|
||||
@property
|
||||
def is_multipart(self) -> bool:
|
||||
return self._is_multipart
|
||||
|
||||
def add_field(
|
||||
self,
|
||||
name: str,
|
||||
value: Any,
|
||||
*,
|
||||
content_type: Optional[str] = None,
|
||||
filename: Optional[str] = None,
|
||||
content_transfer_encoding: Optional[str] = None,
|
||||
) -> None:
|
||||
|
||||
if isinstance(value, io.IOBase):
|
||||
self._is_multipart = True
|
||||
elif isinstance(value, (bytes, bytearray, memoryview)):
|
||||
if filename is None and content_transfer_encoding is None:
|
||||
filename = name
|
||||
|
||||
type_options: MultiDict[str] = MultiDict({"name": name})
|
||||
if filename is not None and not isinstance(filename, str):
|
||||
raise TypeError(
|
||||
"filename must be an instance of str. " "Got: %s" % filename
|
||||
)
|
||||
if filename is None and isinstance(value, io.IOBase):
|
||||
filename = guess_filename(value, name)
|
||||
if filename is not None:
|
||||
type_options["filename"] = filename
|
||||
self._is_multipart = True
|
||||
|
||||
headers = {}
|
||||
if content_type is not None:
|
||||
if not isinstance(content_type, str):
|
||||
raise TypeError(
|
||||
"content_type must be an instance of str. " "Got: %s" % content_type
|
||||
)
|
||||
headers[hdrs.CONTENT_TYPE] = content_type
|
||||
self._is_multipart = True
|
||||
if content_transfer_encoding is not None:
|
||||
if not isinstance(content_transfer_encoding, str):
|
||||
raise TypeError(
|
||||
"content_transfer_encoding must be an instance"
|
||||
" of str. Got: %s" % content_transfer_encoding
|
||||
)
|
||||
headers[hdrs.CONTENT_TRANSFER_ENCODING] = content_transfer_encoding
|
||||
self._is_multipart = True
|
||||
|
||||
self._fields.append((type_options, headers, value))
|
||||
|
||||
def add_fields(self, *fields: Any) -> None:
    """Add several fields at once; see ``add_field`` for single fields."""
    pending = list(fields)

    while pending:
        entry = pending.pop(0)

        if isinstance(entry, io.IOBase):
            # A bare file object: derive the field name from the file.
            field_name = guess_filename(entry, "unknown")
            self.add_field(field_name, entry)  # type: ignore[arg-type]
        elif isinstance(entry, (MultiDictProxy, MultiDict)):
            # Flatten multidicts into (name, value) pairs for later passes.
            pending.extend(entry.items())
        elif isinstance(entry, (list, tuple)) and len(entry) == 2:
            field_name, field_value = entry
            self.add_field(field_name, field_value)  # type: ignore[arg-type]
        else:
            raise TypeError(
                "Only io.IOBase, multidict and (name, file) "
                "pairs allowed, use .add_field() for passing "
                "more complex parameters, got {!r}".format(entry)
            )
|
||||
|
||||
def _gen_form_urlencoded(self) -> payload.BytesPayload:
    """Serialize the fields as an application/x-www-form-urlencoded body."""
    pairs = [(type_options["name"], value) for type_options, _, value in self._fields]

    encoding = self._charset if self._charset is not None else "utf-8"
    if encoding == "utf-8":
        content_type = "application/x-www-form-urlencoded"
    else:
        content_type = "application/x-www-form-urlencoded; " "charset=%s" % encoding

    body = urlencode(pairs, doseq=True, encoding=encoding)
    return payload.BytesPayload(
        body.encode(),
        content_type=content_type,
    )
|
||||
|
||||
def _gen_form_data(self) -> multipart.MultipartWriter:
    """Encode a list of fields using the multipart/form-data MIME format"""
    if self._is_processed:
        # The underlying writer is single-use; refuse to serialize twice.
        raise RuntimeError("Form data has been processed already")
    for dispparams, headers, value in self._fields:
        try:
            if hdrs.CONTENT_TYPE in headers:
                part = payload.get_payload(
                    value,
                    content_type=headers[hdrs.CONTENT_TYPE],
                    headers=headers,
                    encoding=self._charset,
                )
            else:
                part = payload.get_payload(
                    value, headers=headers, encoding=self._charset
                )
        except Exception as exc:
            raise TypeError(
                "Can not serialize value type: %r\n "
                "headers: %r\n value: %r" % (type(value), headers, value)
            ) from exc

        if dispparams:
            part.set_content_disposition(
                "form-data", quote_fields=self._quote_fields, **dispparams
            )
            # FIXME cgi.FieldStorage doesn't likes body parts with
            # Content-Length which were sent via chunked transfer encoding
            assert part.headers is not None
            part.headers.popall(hdrs.CONTENT_LENGTH, None)

        self._writer.append_payload(part)

    self._is_processed = True
    return self._writer
|
||||
|
||||
def __call__(self) -> Payload:
    """Build the payload using the encoding selected by the added fields."""
    if not self._is_multipart:
        return self._gen_form_urlencoded()
    return self._gen_form_data()
|
|
@ -0,0 +1,114 @@
|
|||
"""HTTP Headers constants."""
|
||||
|
||||
# After changing the file content call ./tools/gen.py
|
||||
# to regenerate the headers parser
|
||||
import sys
|
||||
from typing import Set
|
||||
|
||||
from multidict import istr
|
||||
|
||||
if sys.version_info >= (3, 8):
|
||||
from typing import Final
|
||||
else:
|
||||
from typing_extensions import Final
|
||||
|
||||
# HTTP request method tokens understood by aiohttp.
METH_ANY: Final[str] = "*"
METH_CONNECT: Final[str] = "CONNECT"
METH_HEAD: Final[str] = "HEAD"
METH_GET: Final[str] = "GET"
METH_DELETE: Final[str] = "DELETE"
METH_OPTIONS: Final[str] = "OPTIONS"
METH_PATCH: Final[str] = "PATCH"
METH_POST: Final[str] = "POST"
METH_PUT: Final[str] = "PUT"
METH_TRACE: Final[str] = "TRACE"

# Every concrete method above; the "*" wildcard is intentionally excluded.
METH_ALL: Final[Set[str]] = {
    METH_CONNECT,
    METH_HEAD,
    METH_GET,
    METH_DELETE,
    METH_OPTIONS,
    METH_PATCH,
    METH_POST,
    METH_PUT,
    METH_TRACE,
}
|
||||
|
||||
# Well-known HTTP header names as case-insensitive strings (multidict.istr),
# so lookups against CIMultiDict headers avoid repeated case folding.

# Content negotiation.
ACCEPT: Final[istr] = istr("Accept")
ACCEPT_CHARSET: Final[istr] = istr("Accept-Charset")
ACCEPT_ENCODING: Final[istr] = istr("Accept-Encoding")
ACCEPT_LANGUAGE: Final[istr] = istr("Accept-Language")
ACCEPT_RANGES: Final[istr] = istr("Accept-Ranges")
# CORS (Cross-Origin Resource Sharing).
ACCESS_CONTROL_MAX_AGE: Final[istr] = istr("Access-Control-Max-Age")
ACCESS_CONTROL_ALLOW_CREDENTIALS: Final[istr] = istr("Access-Control-Allow-Credentials")
ACCESS_CONTROL_ALLOW_HEADERS: Final[istr] = istr("Access-Control-Allow-Headers")
ACCESS_CONTROL_ALLOW_METHODS: Final[istr] = istr("Access-Control-Allow-Methods")
ACCESS_CONTROL_ALLOW_ORIGIN: Final[istr] = istr("Access-Control-Allow-Origin")
ACCESS_CONTROL_EXPOSE_HEADERS: Final[istr] = istr("Access-Control-Expose-Headers")
ACCESS_CONTROL_REQUEST_HEADERS: Final[istr] = istr("Access-Control-Request-Headers")
ACCESS_CONTROL_REQUEST_METHOD: Final[istr] = istr("Access-Control-Request-Method")
AGE: Final[istr] = istr("Age")
ALLOW: Final[istr] = istr("Allow")
AUTHORIZATION: Final[istr] = istr("Authorization")
CACHE_CONTROL: Final[istr] = istr("Cache-Control")
CONNECTION: Final[istr] = istr("Connection")
# Entity/representation metadata.
CONTENT_DISPOSITION: Final[istr] = istr("Content-Disposition")
CONTENT_ENCODING: Final[istr] = istr("Content-Encoding")
CONTENT_LANGUAGE: Final[istr] = istr("Content-Language")
CONTENT_LENGTH: Final[istr] = istr("Content-Length")
CONTENT_LOCATION: Final[istr] = istr("Content-Location")
CONTENT_MD5: Final[istr] = istr("Content-MD5")
CONTENT_RANGE: Final[istr] = istr("Content-Range")
CONTENT_TRANSFER_ENCODING: Final[istr] = istr("Content-Transfer-Encoding")
CONTENT_TYPE: Final[istr] = istr("Content-Type")
COOKIE: Final[istr] = istr("Cookie")
DATE: Final[istr] = istr("Date")
DESTINATION: Final[istr] = istr("Destination")
DIGEST: Final[istr] = istr("Digest")
ETAG: Final[istr] = istr("Etag")
EXPECT: Final[istr] = istr("Expect")
EXPIRES: Final[istr] = istr("Expires")
FORWARDED: Final[istr] = istr("Forwarded")
FROM: Final[istr] = istr("From")
HOST: Final[istr] = istr("Host")
# Conditional requests.
IF_MATCH: Final[istr] = istr("If-Match")
IF_MODIFIED_SINCE: Final[istr] = istr("If-Modified-Since")
IF_NONE_MATCH: Final[istr] = istr("If-None-Match")
IF_RANGE: Final[istr] = istr("If-Range")
IF_UNMODIFIED_SINCE: Final[istr] = istr("If-Unmodified-Since")
KEEP_ALIVE: Final[istr] = istr("Keep-Alive")
LAST_EVENT_ID: Final[istr] = istr("Last-Event-ID")
LAST_MODIFIED: Final[istr] = istr("Last-Modified")
LINK: Final[istr] = istr("Link")
LOCATION: Final[istr] = istr("Location")
MAX_FORWARDS: Final[istr] = istr("Max-Forwards")
ORIGIN: Final[istr] = istr("Origin")
PRAGMA: Final[istr] = istr("Pragma")
PROXY_AUTHENTICATE: Final[istr] = istr("Proxy-Authenticate")
PROXY_AUTHORIZATION: Final[istr] = istr("Proxy-Authorization")
RANGE: Final[istr] = istr("Range")
REFERER: Final[istr] = istr("Referer")
RETRY_AFTER: Final[istr] = istr("Retry-After")
# WebSocket handshake headers (RFC 6455 and the legacy hixie draft KEY1).
SEC_WEBSOCKET_ACCEPT: Final[istr] = istr("Sec-WebSocket-Accept")
SEC_WEBSOCKET_VERSION: Final[istr] = istr("Sec-WebSocket-Version")
SEC_WEBSOCKET_PROTOCOL: Final[istr] = istr("Sec-WebSocket-Protocol")
SEC_WEBSOCKET_EXTENSIONS: Final[istr] = istr("Sec-WebSocket-Extensions")
SEC_WEBSOCKET_KEY: Final[istr] = istr("Sec-WebSocket-Key")
SEC_WEBSOCKET_KEY1: Final[istr] = istr("Sec-WebSocket-Key1")
SERVER: Final[istr] = istr("Server")
SET_COOKIE: Final[istr] = istr("Set-Cookie")
TE: Final[istr] = istr("TE")
TRAILER: Final[istr] = istr("Trailer")
TRANSFER_ENCODING: Final[istr] = istr("Transfer-Encoding")
UPGRADE: Final[istr] = istr("Upgrade")
URI: Final[istr] = istr("URI")
USER_AGENT: Final[istr] = istr("User-Agent")
VARY: Final[istr] = istr("Vary")
VIA: Final[istr] = istr("Via")
WANT_DIGEST: Final[istr] = istr("Want-Digest")
WARNING: Final[istr] = istr("Warning")
WWW_AUTHENTICATE: Final[istr] = istr("WWW-Authenticate")
# De-facto standard proxy forwarding headers.
X_FORWARDED_FOR: Final[istr] = istr("X-Forwarded-For")
X_FORWARDED_HOST: Final[istr] = istr("X-Forwarded-Host")
X_FORWARDED_PROTO: Final[istr] = istr("X-Forwarded-Proto")
|
|
@ -0,0 +1,878 @@
|
|||
"""Various helper functions"""
|
||||
|
||||
import asyncio
|
||||
import base64
|
||||
import binascii
|
||||
import datetime
|
||||
import functools
|
||||
import inspect
|
||||
import netrc
|
||||
import os
|
||||
import platform
|
||||
import re
|
||||
import sys
|
||||
import time
|
||||
import warnings
|
||||
import weakref
|
||||
from collections import namedtuple
|
||||
from contextlib import suppress
|
||||
from email.parser import HeaderParser
|
||||
from email.utils import parsedate
|
||||
from math import ceil
|
||||
from pathlib import Path
|
||||
from types import TracebackType
|
||||
from typing import (
|
||||
Any,
|
||||
Callable,
|
||||
ContextManager,
|
||||
Dict,
|
||||
Generator,
|
||||
Generic,
|
||||
Iterable,
|
||||
Iterator,
|
||||
List,
|
||||
Mapping,
|
||||
Optional,
|
||||
Pattern,
|
||||
Set,
|
||||
Tuple,
|
||||
Type,
|
||||
TypeVar,
|
||||
Union,
|
||||
cast,
|
||||
)
|
||||
from urllib.parse import quote
|
||||
from urllib.request import getproxies, proxy_bypass
|
||||
|
||||
import async_timeout
|
||||
import attr
|
||||
from multidict import MultiDict, MultiDictProxy
|
||||
from yarl import URL
|
||||
|
||||
from . import hdrs
|
||||
from .log import client_logger, internal_logger
|
||||
from .typedefs import PathLike, Protocol # noqa
|
||||
|
||||
# Names re-exported as the public API of this helpers module.
__all__ = ("BasicAuth", "ChainMapProxy", "ETag")

# Host platform flags.
IS_MACOS = platform.system() == "Darwin"
IS_WINDOWS = platform.system() == "Windows"

# Interpreter version flags used by conditional code paths below.
PY_36 = sys.version_info >= (3, 6)
PY_37 = sys.version_info >= (3, 7)
PY_38 = sys.version_info >= (3, 8)
PY_310 = sys.version_info >= (3, 10)
PY_311 = sys.version_info >= (3, 11)
|
||||
|
||||
if sys.version_info < (3, 7):
    # Python < 3.7 needs idna_ssl to get IDNA-aware hostname matching.
    import idna_ssl

    idna_ssl.patch_match_hostname()

    def all_tasks(
        loop: Optional[asyncio.AbstractEventLoop] = None,
    ) -> Set["asyncio.Task[Any]"]:
        # Backport of asyncio.all_tasks(): filter out finished tasks.
        tasks = list(asyncio.Task.all_tasks(loop))
        return {t for t in tasks if not t.done()}

else:
    all_tasks = asyncio.all_tasks
|
||||
|
||||
|
||||
_T = TypeVar("_T")
_S = TypeVar("_S")


# Unique marker object distinguishing "argument not given" from None.
sentinel: Any = object()
# Set to disable the C-accelerated extension modules.
NO_EXTENSIONS: bool = bool(os.environ.get("AIOHTTP_NO_EXTENSIONS"))

# N.B. sys.flags.dev_mode is available on Python 3.7+, use getattr
# for compatibility with older versions
DEBUG: bool = getattr(sys.flags, "dev_mode", False) or (
    not sys.flags.ignore_environment and bool(os.environ.get("PYTHONASYNCIODEBUG"))
)


# Character classes from RFC 2616 section 2.2 used to validate tokens.
CHAR = {chr(i) for i in range(0, 128)}
CTL = {chr(i) for i in range(0, 32)} | {
    chr(127),
}
SEPARATORS = {
    "(",
    ")",
    "<",
    ">",
    "@",
    ",",
    ";",
    ":",
    "\\",
    '"',
    "/",
    "[",
    "]",
    "?",
    "=",
    "{",
    "}",
    " ",
    chr(9),
}
# token = any CHAR except CTLs or separators.
TOKEN = CHAR ^ CTL ^ SEPARATORS
|
||||
|
||||
|
||||
class noop:
    """Awaitable that yields control to the loop once and produces None."""

    def __await__(self) -> Generator[None, None, None]:
        yield None
|
||||
|
||||
|
||||
class BasicAuth(namedtuple("BasicAuth", ["login", "password", "encoding"])):
    """Http basic authentication helper."""

    def __new__(
        cls, login: str, password: str = "", encoding: str = "latin1"
    ) -> "BasicAuth":
        # Reject values that cannot be represented in a Basic credential.
        if login is None:
            raise ValueError("None is not allowed as login value")
        if password is None:
            raise ValueError("None is not allowed as password value")
        if ":" in login:
            raise ValueError('A ":" is not allowed in login (RFC 1945#section-11.1)')
        return super().__new__(cls, login, password, encoding)

    @classmethod
    def decode(cls, auth_header: str, encoding: str = "latin1") -> "BasicAuth":
        """Create a BasicAuth object from an Authorization HTTP header."""
        split = auth_header.split(" ", 1)
        if len(split) != 2:
            raise ValueError("Could not parse authorization header.")
        auth_type, encoded_credentials = split

        if auth_type.lower() != "basic":
            raise ValueError("Unknown authorization method %s" % auth_type)

        try:
            decoded = base64.b64decode(
                encoded_credentials.encode("ascii"), validate=True
            ).decode(encoding)
        except binascii.Error:
            raise ValueError("Invalid base64 encoding.")

        # RFC 2617 HTTP Authentication
        # https://www.ietf.org/rfc/rfc2617.txt
        # the colon must be present, but the username and password may be
        # otherwise blank.
        if ":" not in decoded:
            raise ValueError("Invalid credentials.")
        username, _, password = decoded.partition(":")

        return cls(username, password, encoding=encoding)

    @classmethod
    def from_url(cls, url: URL, *, encoding: str = "latin1") -> Optional["BasicAuth"]:
        """Create BasicAuth from url."""
        if not isinstance(url, URL):
            raise TypeError("url should be yarl.URL instance")
        if url.user is None:
            return None
        return cls(url.user, url.password or "", encoding=encoding)

    def encode(self) -> str:
        """Encode credentials."""
        raw = f"{self.login}:{self.password}".encode(self.encoding)
        return "Basic %s" % base64.b64encode(raw).decode(self.encoding)
|
||||
|
||||
|
||||
def strip_auth_from_url(url: URL) -> Tuple[URL, Optional[BasicAuth]]:
    """Split *url* into a credential-free URL plus its BasicAuth, if any."""
    auth = BasicAuth.from_url(url)
    if auth is None:
        return url, None
    return url.with_user(None), auth
|
||||
|
||||
|
||||
def netrc_from_env() -> Optional[netrc.netrc]:
    """Load netrc from file.

    Attempt to load it from the path specified by the env-var
    NETRC or in the default location in the user's home directory.

    Returns None if it couldn't be found or fails to parse.
    """
    netrc_env = os.environ.get("NETRC")

    if netrc_env is not None:
        netrc_path = Path(netrc_env)
    else:
        try:
            home_dir = Path.home()
        except RuntimeError as e:  # pragma: no cover
            # if pathlib can't resolve home, it may raise a RuntimeError
            client_logger.debug(
                "Could not resolve home directory when "
                "trying to look for .netrc file: %s",
                e,
            )
            return None

        # Windows historically uses "_netrc" rather than ".netrc".
        netrc_path = home_dir / ("_netrc" if IS_WINDOWS else ".netrc")

    try:
        return netrc.netrc(str(netrc_path))
    except netrc.NetrcParseError as e:
        client_logger.warning("Could not parse .netrc file: %s", e)
    except OSError as e:
        # we couldn't read the file (doesn't exist, permissions, etc.)
        if netrc_env or netrc_path.is_file():
            # only warn if the environment wanted us to load it,
            # or it appears like the default file does actually exist
            client_logger.warning("Could not read .netrc file: %s", e)

    # Parse/read failures are non-fatal: behave as if no netrc exists.
    return None
|
||||
|
||||
|
||||
@attr.s(auto_attribs=True, frozen=True, slots=True)
class ProxyInfo:
    # Proxy endpoint URL, credentials already stripped.
    proxy: URL
    # Credentials for the proxy, or None when unauthenticated.
    proxy_auth: Optional[BasicAuth]
|
||||
|
||||
|
||||
def proxies_from_env() -> Dict[str, ProxyInfo]:
    """Collect proxy configuration from the process environment.

    Returns a mapping of URL scheme to ProxyInfo, skipping TLS proxies
    (https/wss), which are not supported.
    """
    proxy_urls = {
        k: URL(v)
        for k, v in getproxies().items()
        if k in ("http", "https", "ws", "wss")
    }
    netrc_obj = netrc_from_env()
    stripped = {k: strip_auth_from_url(v) for k, v in proxy_urls.items()}
    ret = {}
    for proto, val in stripped.items():
        proxy, auth = val
        if proxy.scheme in ("https", "wss"):
            client_logger.warning(
                "%s proxies %s are not supported, ignoring", proxy.scheme.upper(), proxy
            )
            continue
        if netrc_obj and auth is None:
            # Fall back to netrc credentials when the proxy URL itself
            # carried none.
            auth_from_netrc = None
            if proxy.host is not None:
                auth_from_netrc = netrc_obj.authenticators(proxy.host)
            if auth_from_netrc is not None:
                # auth_from_netrc is a (`user`, `account`, `password`) tuple,
                # `user` and `account` both can be username,
                # if `user` is None, use `account`
                *logins, password = auth_from_netrc
                login = logins[0] if logins[0] else logins[-1]
                auth = BasicAuth(cast(str, login), cast(str, password))
        ret[proto] = ProxyInfo(proxy, auth)
    return ret
|
||||
|
||||
|
||||
def current_task(
    loop: Optional[asyncio.AbstractEventLoop] = None,
) -> "Optional[asyncio.Task[Any]]":
    """Return the running Task, using the pre-3.7 API when necessary."""
    if sys.version_info < (3, 7):
        return asyncio.Task.current_task(loop=loop)
    return asyncio.current_task(loop=loop)
|
||||
|
||||
|
||||
def get_running_loop(
    loop: Optional[asyncio.AbstractEventLoop] = None,
) -> asyncio.AbstractEventLoop:
    """Return *loop* or the current event loop, warning when not running.

    Creating aiohttp objects outside a running loop is deprecated; this
    still returns a loop for backwards compatibility but emits a
    DeprecationWarning (plus a log entry in debug mode).
    """
    if loop is None:
        loop = asyncio.get_event_loop()
    if not loop.is_running():
        warnings.warn(
            "The object should be created within an async function",
            DeprecationWarning,
            stacklevel=3,
        )
        if loop.get_debug():
            internal_logger.warning(
                "The object should be created within an async function", stack_info=True
            )
    return loop
|
||||
|
||||
|
||||
def isasyncgenfunction(obj: Any) -> bool:
    """True when *obj* is an async generator function (False pre-3.6)."""
    checker = getattr(inspect, "isasyncgenfunction", None)
    if checker is None:
        return False
    return checker(obj)  # type: ignore[no-any-return]
|
||||
|
||||
|
||||
def get_env_proxy_for_url(url: URL) -> Tuple[URL, Optional[BasicAuth]]:
    """Get a permitted proxy for the given URL from the env."""
    host = url.host
    if host is not None and proxy_bypass(host):
        raise LookupError(f"Proxying is disallowed for `{host!r}`")

    env_proxies = proxies_from_env()
    try:
        info = env_proxies[url.scheme]
    except KeyError:
        raise LookupError(f"No proxies found for `{url!s}` in the env")
    return info.proxy, info.proxy_auth
|
||||
|
||||
|
||||
@attr.s(auto_attribs=True, frozen=True, slots=True)
class MimeType:
    # Immutable result of parse_mimetype().
    # Major type, e.g. "text".
    type: str
    # Subtype, e.g. "html"; "" when absent.
    subtype: str
    # Structured-syntax suffix after "+", e.g. "xml"; "" when absent.
    suffix: str
    # Remaining ";"-separated parameters with lowercased keys.
    parameters: "MultiDictProxy[str]"
|
||||
|
||||
|
||||
@functools.lru_cache(maxsize=56)
def parse_mimetype(mimetype: str) -> MimeType:
    """Parses a MIME type into its components.

    mimetype is a MIME type string.

    Returns a MimeType object.

    Example:

    >>> parse_mimetype('text/html; charset=utf-8')
    MimeType(type='text', subtype='html', suffix='',
             parameters={'charset': 'utf-8'})

    """
    if not mimetype:
        return MimeType(
            type="", subtype="", suffix="", parameters=MultiDictProxy(MultiDict())
        )

    # Anything after the first ";" is a parameter list.
    head, *raw_params = mimetype.split(";")
    params: MultiDict[str] = MultiDict()
    for item in raw_params:
        if not item:
            continue
        if "=" in item:
            key, _, value = item.partition("=")
        else:
            key, value = item, ""
        params.add(key.lower().strip(), value.strip(' "'))

    fulltype = head.strip().lower()
    if fulltype == "*":
        fulltype = "*/*"

    if "/" in fulltype:
        mtype, _, stype = fulltype.partition("/")
    else:
        mtype, stype = fulltype, ""
    if "+" in stype:
        stype, _, suffix = stype.partition("+")
    else:
        suffix = ""

    return MimeType(
        type=mtype, subtype=stype, suffix=suffix, parameters=MultiDictProxy(params)
    )
|
||||
|
||||
|
||||
def guess_filename(obj: Any, default: Optional[str] = None) -> Optional[str]:
    """Derive a file name from *obj*'s ``name`` attribute, else *default*.

    Synthetic names such as "<stdin>" are rejected.
    """
    name = getattr(obj, "name", None)
    if not name or not isinstance(name, str):
        return default
    if name.startswith("<") or name.endswith(">"):
        return default
    return Path(name).name
|
||||
|
||||
|
||||
# Characters that must be backslash-escaped inside a quoted-string
# (anything outside qtext as defined by RFC 5322).
not_qtext_re = re.compile(r"[^\041\043-\133\135-\176]")
# Printable US-ASCII plus TAB: content allowed inside a quoted-string.
QCONTENT = {chr(i) for i in range(0x20, 0x7F)} | {"\t"}
|
||||
|
||||
|
||||
def quoted_string(content: str) -> str:
    """Return 7-bit content as quoted-string.

    Format content into a quoted-string as defined in RFC5322 for
    Internet Message Format. Notice that this is not the 8-bit HTTP
    format, but the 7-bit email format. Content must be in usascii or
    a ValueError is raised.
    """
    if not set(content) < QCONTENT:
        raise ValueError(f"bad content for quoted-string {content!r}")
    # Backslash-escape every character outside qtext.
    return not_qtext_re.sub(lambda match: "\\" + match.group(0), content)
|
||||
|
||||
|
||||
def content_disposition_header(
    disptype: str, quote_fields: bool = True, _charset: str = "utf-8", **params: str
) -> str:
    """Sets ``Content-Disposition`` header for MIME.

    This is the MIME payload Content-Disposition header from RFC 2183
    and RFC 7579 section 4.2, not the HTTP Content-Disposition from
    RFC 6266.

    disptype is a disposition type: inline, attachment, form-data.
    Should be valid extension token (see RFC 2183)

    quote_fields performs value quoting to 7-bit MIME headers
    according to RFC 7578. Set to quote_fields to False if recipient
    can take 8-bit file names and field values.

    _charset specifies the charset to use when quote_fields is True.

    params is a dict with disposition params.
    """
    if not disptype or not (TOKEN > set(disptype)):
        raise ValueError("bad content disposition type {!r}" "".format(disptype))

    value = disptype
    if params:
        lparams = []
        for key, val in params.items():
            # Parameter names must themselves be valid tokens.
            if not key or not (TOKEN > set(key)):
                raise ValueError(
                    "bad content disposition parameter" " {!r}={!r}".format(key, val)
                )
            if quote_fields:
                if key.lower() == "filename":
                    # filename is percent-encoded per RFC 7578.
                    qval = quote(val, "", encoding=_charset)
                    lparams.append((key, '"%s"' % qval))
                else:
                    try:
                        qval = quoted_string(val)
                    except ValueError:
                        # Non-ASCII value: fall back to the RFC 2231
                        # extended-parameter form key*=charset''value.
                        qval = "".join(
                            (_charset, "''", quote(val, "", encoding=_charset))
                        )
                        lparams.append((key + "*", qval))
                    else:
                        lparams.append((key, '"%s"' % qval))
            else:
                # 8-bit mode: only escape backslashes and double quotes.
                qval = val.replace("\\", "\\\\").replace('"', '\\"')
                lparams.append((key, '"%s"' % qval))
        sparams = "; ".join("=".join(pair) for pair in lparams)
        value = "; ".join((value, sparams))
    return value
|
||||
|
||||
|
||||
class _TSelf(Protocol, Generic[_T]):
    # Structural type for instances that carry a reify cache dict.
    _cache: Dict[str, _T]
|
||||
|
||||
|
||||
class reify(Generic[_T]):
    """Use as a class method decorator.

    It operates almost exactly like
    the Python `@property` decorator, but it puts the result of the
    method it decorates into the instance dict after the first call,
    effectively replacing the function it decorates with an instance
    variable. It is, in Python parlance, a data descriptor.
    """

    def __init__(self, wrapped: Callable[..., _T]) -> None:
        self.wrapped = wrapped
        self.__doc__ = wrapped.__doc__
        self.name = wrapped.__name__

    def __get__(self, inst: _TSelf[_T], owner: Optional[Type[Any]] = None) -> _T:
        try:
            try:
                # Fast path: value already computed and memoized.
                return inst._cache[self.name]
            except KeyError:
                # First access: compute once and store on the instance.
                val = self.wrapped(inst)
                inst._cache[self.name] = val
                return val
        except AttributeError:
            # Accessed via the class (inst is None): return the descriptor.
            if inst is None:
                return self
            raise

    def __set__(self, inst: _TSelf[_T], value: _T) -> None:
        raise AttributeError("reified property is read-only")
|
||||
|
||||
|
||||
# Keep the pure-Python implementation addressable, then prefer the
# C-accelerated version from ._helpers when extensions are enabled.
reify_py = reify

try:
    from ._helpers import reify as reify_c

    if not NO_EXTENSIONS:
        reify = reify_c  # type: ignore[misc,assignment]
except ImportError:
    pass
|
||||
|
||||
# Anchored regular expressions matching literal IPv4/IPv6 addresses,
# compiled for both str and bytes hosts.
_ipv4_pattern = (
    r"^(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}"
    r"(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$"
)
_ipv6_pattern = (
    r"^(?:(?:(?:[A-F0-9]{1,4}:){6}|(?=(?:[A-F0-9]{0,4}:){0,6}"
    r"(?:[0-9]{1,3}\.){3}[0-9]{1,3}$)(([0-9A-F]{1,4}:){0,5}|:)"
    r"((:[0-9A-F]{1,4}){1,5}:|:)|::(?:[A-F0-9]{1,4}:){5})"
    r"(?:(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9]?[0-9])\.){3}"
    r"(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9]?[0-9])|(?:[A-F0-9]{1,4}:){7}"
    r"[A-F0-9]{1,4}|(?=(?:[A-F0-9]{0,4}:){0,7}[A-F0-9]{0,4}$)"
    r"(([0-9A-F]{1,4}:){1,7}|:)((:[0-9A-F]{1,4}){1,7}|:)|(?:[A-F0-9]{1,4}:){7}"
    r":|:(:[A-F0-9]{1,4}){7})$"
)
_ipv4_regex = re.compile(_ipv4_pattern)
_ipv6_regex = re.compile(_ipv6_pattern, flags=re.IGNORECASE)
# bytes variants for hosts that arrive un-decoded.
_ipv4_regexb = re.compile(_ipv4_pattern.encode("ascii"))
_ipv6_regexb = re.compile(_ipv6_pattern.encode("ascii"), flags=re.IGNORECASE)
|
||||
|
||||
|
||||
def _is_ip_address(
    regex: Pattern[str], regexb: Pattern[bytes], host: Optional[Union[str, bytes]]
) -> bool:
    """Match *host* against the str pattern or the bytes pattern.

    None is treated as "not an address"; any other non-str/bytes type
    is a caller error.
    """
    if host is None:
        return False
    if isinstance(host, str):
        return bool(regex.match(host))
    elif isinstance(host, (bytes, bytearray, memoryview)):
        return bool(regexb.match(host))
    else:
        raise TypeError(f"{host} [{type(host)}] is not a str or bytes")
|
||||
|
||||
|
||||
# Predicates binding the compiled patterns above to the generic matcher.
is_ipv4_address = functools.partial(_is_ip_address, _ipv4_regex, _ipv4_regexb)
is_ipv6_address = functools.partial(_is_ip_address, _ipv6_regex, _ipv6_regexb)
|
||||
|
||||
|
||||
def is_ip_address(host: Optional[Union[str, bytes, bytearray, memoryview]]) -> bool:
    """Return True when *host* is a literal IPv4 or IPv6 address."""
    if is_ipv4_address(host):
        return True
    return is_ipv6_address(host)
|
||||
|
||||
|
||||
def next_whole_second() -> datetime.datetime:
    """Return current time rounded up to the next whole second."""
    # NOTE(review): adding timedelta(seconds=0) makes this truncate to the
    # *current* whole second, which contradicts the docstring's "rounded
    # up" -- confirm whether seconds=1 was intended before changing.
    return datetime.datetime.now(datetime.timezone.utc).replace(
        microsecond=0
    ) + datetime.timedelta(seconds=0)
|
||||
|
||||
|
||||
# Cache for rfc822_formatted_time(): the epoch second last formatted
# and the corresponding formatted string.
_cached_current_datetime: Optional[int] = None
_cached_formatted_datetime = ""
|
||||
|
||||
|
||||
def rfc822_formatted_time() -> str:
    """Return the current UTC time formatted as an RFC 822 / HTTP date.

    The result is cached per whole second, since servers may call this
    for every response.
    """
    global _cached_current_datetime
    global _cached_formatted_datetime

    now = int(time.time())
    if now != _cached_current_datetime:
        # Weekday and month names for HTTP date/time formatting;
        # always English!
        # Tuples are constants stored in codeobject!
        _weekdayname = ("Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun")
        _monthname = (
            "",  # Dummy so we can use 1-based month numbers
            "Jan",
            "Feb",
            "Mar",
            "Apr",
            "May",
            "Jun",
            "Jul",
            "Aug",
            "Sep",
            "Oct",
            "Nov",
            "Dec",
        )

        year, month, day, hh, mm, ss, wd, *tail = time.gmtime(now)
        _cached_formatted_datetime = "%s, %02d %3s %4d %02d:%02d:%02d GMT" % (
            _weekdayname[wd],
            day,
            _monthname[month],
            year,
            hh,
            mm,
            ss,
        )
        _cached_current_datetime = now
    return _cached_formatted_datetime
|
||||
|
||||
|
||||
def _weakref_handle(info: "Tuple[weakref.ref[object], str]") -> None:
    # Timer callback: invoke method *name* on the referent if it is still
    # alive; failures are deliberately swallowed (best-effort timeout).
    ref, name = info
    ob = ref()
    if ob is not None:
        with suppress(Exception):
            getattr(ob, name)()
|
||||
|
||||
|
||||
def weakref_handle(
    ob: object, name: str, timeout: float, loop: asyncio.AbstractEventLoop
) -> Optional[asyncio.TimerHandle]:
    """Schedule ``ob.name()`` after *timeout* seconds without keeping
    *ob* alive; returns None when the timeout is disabled."""
    if timeout is None or timeout <= 0:
        return None
    when = loop.time() + timeout
    # Long deadlines are rounded up so many timers share a whole second.
    if timeout >= 5:
        when = ceil(when)
    return loop.call_at(when, _weakref_handle, (weakref.ref(ob), name))
|
||||
|
||||
|
||||
def call_later(
    cb: Callable[[], Any], timeout: float, loop: asyncio.AbstractEventLoop
) -> Optional[asyncio.TimerHandle]:
    """Schedule *cb* after *timeout* seconds; None when timeout disabled."""
    if timeout is None or timeout <= 0:
        return None
    when = loop.time() + timeout
    # NOTE(review): strict "> 5" here vs ">= 5" in weakref_handle /
    # TimeoutHandle.start -- presumably equivalent in practice; confirm
    # before unifying.
    if timeout > 5:
        when = ceil(when)
    return loop.call_at(when, cb)
|
||||
|
||||
|
||||
class TimeoutHandle:
    """Timeout handle"""

    def __init__(
        self, loop: asyncio.AbstractEventLoop, timeout: Optional[float]
    ) -> None:
        self._timeout = timeout
        self._loop = loop
        # Callbacks invoked (once) when the timeout fires.
        self._callbacks: List[
            Tuple[Callable[..., None], Tuple[Any, ...], Dict[str, Any]]
        ] = []

    def register(
        self, callback: Callable[..., None], *args: Any, **kwargs: Any
    ) -> None:
        """Remember *callback* to be run when the timeout fires."""
        self._callbacks.append((callback, args, kwargs))

    def close(self) -> None:
        """Drop all registered callbacks without invoking them."""
        self._callbacks.clear()

    def start(self) -> Optional[asyncio.Handle]:
        """Arm the loop timer; None when no positive timeout is configured."""
        delay = self._timeout
        if delay is None or delay <= 0:
            return None
        when = self._loop.time() + delay
        # Long deadlines are rounded up to whole seconds.
        if delay >= 5:
            when = ceil(when)
        return self._loop.call_at(when, self.__call__)

    def timer(self) -> "BaseTimerContext":
        """Return a timer context bound to this handle (noop if disabled)."""
        if self._timeout is None or self._timeout <= 0:
            return TimerNoop()
        ctx = TimerContext(self._loop)
        self.register(ctx.timeout)
        return ctx

    def __call__(self) -> None:
        # Fire every callback once, ignoring individual failures.
        for cb, args, kwargs in self._callbacks:
            with suppress(Exception):
                cb(*args, **kwargs)

        self._callbacks.clear()
|
||||
|
||||
|
||||
class BaseTimerContext(ContextManager["BaseTimerContext"]):
    # Common base so callers can type against one context-manager
    # interface shared by TimerNoop and TimerContext.
    pass
|
||||
|
||||
|
||||
class TimerNoop(BaseTimerContext):
    """Timer context that does nothing; used when no timeout is set."""

    def __enter__(self) -> BaseTimerContext:
        return self

    def __exit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc_val: Optional[BaseException],
        exc_tb: Optional[TracebackType],
    ) -> None:
        return None
|
||||
|
||||
|
||||
class TimerContext(BaseTimerContext):
    """Low resolution timeout context manager"""

    def __init__(self, loop: asyncio.AbstractEventLoop) -> None:
        self._loop = loop
        # Stack of tasks currently inside the context (supports nesting).
        self._tasks: List[asyncio.Task[Any]] = []
        self._cancelled = False

    def __enter__(self) -> BaseTimerContext:
        task = current_task(loop=self._loop)

        if task is None:
            raise RuntimeError(
                "Timeout context manager should be used " "inside a task"
            )

        if self._cancelled:
            # The timeout already fired before entry: fail immediately.
            raise asyncio.TimeoutError from None

        self._tasks.append(task)
        return self

    def __exit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc_val: Optional[BaseException],
        exc_tb: Optional[TracebackType],
    ) -> Optional[bool]:
        if self._tasks:
            self._tasks.pop()

        # Translate our own cancellation into TimeoutError; unrelated
        # cancellations propagate unchanged.
        if exc_type is asyncio.CancelledError and self._cancelled:
            raise asyncio.TimeoutError from None
        return None

    def timeout(self) -> None:
        # Timer callback: cancel every task inside the context, once.
        if not self._cancelled:
            for task in set(self._tasks):
                task.cancel()

            self._cancelled = True
|
||||
|
||||
|
||||
def ceil_timeout(delay: Optional[float]) -> async_timeout.Timeout:
    """Like ``async_timeout.timeout`` but rounds long deadlines up to a
    whole second so many timers can share the same loop slot."""
    if delay is None or delay <= 0:
        return async_timeout.timeout(None)

    loop = get_running_loop()
    deadline = loop.time() + delay
    if delay > 5:
        deadline = ceil(deadline)
    return async_timeout.timeout_at(deadline)
|
||||
|
||||
|
||||
class HeadersMixin:
    """Parsed Content-Type / charset / Content-Length accessors.

    Subclasses must provide a ``_headers`` mapping; parsed results are
    cached until the raw Content-Type header value changes.
    """

    ATTRS = frozenset(["_content_type", "_content_dict", "_stored_content_type"])

    # Cached parse results; _stored_content_type remembers the raw header
    # the cache was built from (sentinel means "never parsed").
    _content_type: Optional[str] = None
    _content_dict: Optional[Dict[str, str]] = None
    _stored_content_type = sentinel

    def _parse_content_type(self, raw: str) -> None:
        # Parse *raw* and refresh the cached type/params.
        self._stored_content_type = raw
        if raw is None:
            # default value according to RFC 2616
            self._content_type = "application/octet-stream"
            self._content_dict = {}
        else:
            msg = HeaderParser().parsestr("Content-Type: " + raw)
            self._content_type = msg.get_content_type()
            params = msg.get_params()
            self._content_dict = dict(params[1:])  # First element is content type again

    @property
    def content_type(self) -> str:
        """The value of content part for Content-Type HTTP header."""
        raw = self._headers.get(hdrs.CONTENT_TYPE)  # type: ignore[attr-defined]
        if self._stored_content_type != raw:
            self._parse_content_type(raw)
        return self._content_type  # type: ignore[return-value]

    @property
    def charset(self) -> Optional[str]:
        """The value of charset part for Content-Type HTTP header."""
        raw = self._headers.get(hdrs.CONTENT_TYPE)  # type: ignore[attr-defined]
        if self._stored_content_type != raw:
            self._parse_content_type(raw)
        return self._content_dict.get("charset")  # type: ignore[union-attr]

    @property
    def content_length(self) -> Optional[int]:
        """The value of Content-Length HTTP header."""
        content_length = self._headers.get(  # type: ignore[attr-defined]
            hdrs.CONTENT_LENGTH
        )

        if content_length is not None:
            return int(content_length)
        else:
            return None
|
||||
|
||||
|
||||
def set_result(fut: "asyncio.Future[_T]", result: "_T") -> None:
    """Set *result* on *fut* unless the future is already done."""
    if fut.done():
        return
    fut.set_result(result)
|
||||
|
||||
|
||||
def set_exception(fut: "asyncio.Future[_T]", exc: BaseException) -> None:
    """Set *exc* on *fut* unless the future is already done."""
    if fut.done():
        return
    fut.set_exception(exc)
|
||||
|
||||
|
||||
class ChainMapProxy(Mapping[str, Any]):
    """Read-only view over an ordered sequence of mappings.

    Lookups probe the mappings in the order given; the first mapping
    containing the key wins.  Subclassing is explicitly forbidden.
    """

    __slots__ = ("_maps",)

    def __init__(self, maps: Iterable[Mapping[str, Any]]) -> None:
        self._maps = tuple(maps)

    def __init_subclass__(cls) -> None:
        raise TypeError(
            "Inheritance class {} from ChainMapProxy "
            "is forbidden".format(cls.__name__)
        )

    def __getitem__(self, key: str) -> Any:
        # The first layer that yields the key wins.
        for layer in self._maps:
            try:
                return layer[key]
            except KeyError:
                continue
        raise KeyError(key)

    def get(self, key: str, default: Any = None) -> Any:
        if key in self:
            return self[key]
        return default

    def __len__(self) -> int:
        # Number of distinct keys across all layers.
        return len({key for layer in self._maps for key in layer})

    def __iter__(self) -> Iterator[str]:
        merged: Dict[str, Any] = {}
        # Update from the lowest-priority layer upward so that the
        # resulting key order reflects the merged view.
        for layer in reversed(self._maps):
            merged.update(layer)
        return iter(merged)

    def __contains__(self, key: object) -> bool:
        for layer in self._maps:
            if key in layer:
                return True
        return False

    def __bool__(self) -> bool:
        # Truthy iff at least one layer is non-empty.
        for layer in self._maps:
            if layer:
                return True
        return False

    def __repr__(self) -> str:
        return "ChainMapProxy({})".format(", ".join(repr(m) for m in self._maps))
|
||||
|
||||
|
||||
# https://tools.ietf.org/html/rfc7232#section-2.3
# etagc: the characters RFC 7232 allows inside a quoted entity-tag.
_ETAGC = r"[!#-}\x80-\xff]+"
_ETAGC_RE = re.compile(_ETAGC)
# A single quoted etag, optionally prefixed with the weak marker W/.
_QUOTED_ETAG = rf'(W/)?"({_ETAGC})"'
QUOTED_ETAG_RE = re.compile(_QUOTED_ETAG)
# Comma-separated list of quoted etags; the trailing (.) alternative
# flags any character that does not belong to a valid list.
LIST_QUOTED_ETAG_RE = re.compile(rf"({_QUOTED_ETAG})(?:\s*,\s*|$)|(.)")

# Wildcard entity-tag ("If-Match: *") matching any representation.
ETAG_ANY = "*"
|
||||
|
||||
|
||||
@attr.s(auto_attribs=True, frozen=True, slots=True)
class ETag:
    """A parsed entity-tag: its opaque value plus the weak-validator flag."""

    # Tag content without the surrounding quotes.
    value: str
    # True for weak validators (serialized with a W/ prefix).
    is_weak: bool = False
|
||||
|
||||
|
||||
def validate_etag_value(value: str) -> None:
    """Raise ``ValueError`` unless *value* is ``*`` or a valid etag body."""
    if value == ETAG_ANY:
        return
    if _ETAGC_RE.fullmatch(value):
        return
    raise ValueError(
        f"Value {value!r} is not a valid etag. Maybe it contains '\"'?"
    )
|
||||
|
||||
|
||||
def parse_http_date(date_str: Optional[str]) -> Optional[datetime.datetime]:
    """Parse an HTTP date header value into an aware UTC datetime.

    Returns ``None`` when *date_str* is ``None`` or cannot be parsed.
    """
    if date_str is None:
        return None
    parsed = parsedate(date_str)
    if parsed is None:
        return None
    try:
        return datetime.datetime(*parsed[:6], tzinfo=datetime.timezone.utc)
    except ValueError:
        # Out-of-range field (e.g. day 0) — treat as unparseable.
        return None
|
|
@ -0,0 +1,70 @@
|
|||
import http.server
|
||||
import sys
|
||||
from typing import Mapping, Tuple
|
||||
|
||||
from . import __version__
|
||||
from .http_exceptions import HttpProcessingError as HttpProcessingError
|
||||
from .http_parser import (
|
||||
HeadersParser as HeadersParser,
|
||||
HttpParser as HttpParser,
|
||||
HttpRequestParser as HttpRequestParser,
|
||||
HttpResponseParser as HttpResponseParser,
|
||||
RawRequestMessage as RawRequestMessage,
|
||||
RawResponseMessage as RawResponseMessage,
|
||||
)
|
||||
from .http_websocket import (
|
||||
WS_CLOSED_MESSAGE as WS_CLOSED_MESSAGE,
|
||||
WS_CLOSING_MESSAGE as WS_CLOSING_MESSAGE,
|
||||
WS_KEY as WS_KEY,
|
||||
WebSocketError as WebSocketError,
|
||||
WebSocketReader as WebSocketReader,
|
||||
WebSocketWriter as WebSocketWriter,
|
||||
WSCloseCode as WSCloseCode,
|
||||
WSMessage as WSMessage,
|
||||
WSMsgType as WSMsgType,
|
||||
ws_ext_gen as ws_ext_gen,
|
||||
ws_ext_parse as ws_ext_parse,
|
||||
)
|
||||
from .http_writer import (
|
||||
HttpVersion as HttpVersion,
|
||||
HttpVersion10 as HttpVersion10,
|
||||
HttpVersion11 as HttpVersion11,
|
||||
StreamWriter as StreamWriter,
|
||||
)
|
||||
|
||||
# Public re-export surface of the low-level HTTP toolbox.
__all__ = (
    "HttpProcessingError",
    "RESPONSES",
    "SERVER_SOFTWARE",
    # .http_writer
    "StreamWriter",
    "HttpVersion",
    "HttpVersion10",
    "HttpVersion11",
    # .http_parser
    "HeadersParser",
    "HttpParser",
    "HttpRequestParser",
    "HttpResponseParser",
    "RawRequestMessage",
    "RawResponseMessage",
    # .http_websocket
    "WS_CLOSED_MESSAGE",
    "WS_CLOSING_MESSAGE",
    "WS_KEY",
    "WebSocketReader",
    "WebSocketWriter",
    "ws_ext_gen",
    "ws_ext_parse",
    "WSMessage",
    "WebSocketError",
    "WSMsgType",
    "WSCloseCode",
)


# Default software token, e.g. "Python/3.11 aiohttp/<version>".
SERVER_SOFTWARE: str = "Python/{0[0]}.{0[1]} aiohttp/{1}".format(
    sys.version_info, __version__
)

# Standard (short, long) reason phrases keyed by status code,
# borrowed from the stdlib http.server tables.
RESPONSES: Mapping[int, Tuple[str, str]] = http.server.BaseHTTPRequestHandler.responses
|
|
@ -0,0 +1,105 @@
|
|||
"""Low-level http related exceptions."""
|
||||
|
||||
|
||||
from typing import Optional, Union
|
||||
|
||||
from .typedefs import _CIMultiDict
|
||||
|
||||
__all__ = ("HttpProcessingError",)
|
||||
|
||||
|
||||
class HttpProcessingError(Exception):
    """HTTP error.

    Shortcut for raising HTTP errors with custom code, message and headers.

    code: HTTP Error code.
    message: (optional) Error message.
    headers: (optional) Headers to be sent in response, a list of pairs
    """

    # Class-level defaults; subclasses override these.
    code = 0
    message = ""
    headers = None

    def __init__(
        self,
        *,
        code: Optional[int] = None,
        message: str = "",
        headers: Optional[_CIMultiDict] = None,
    ) -> None:
        if code is not None:
            self.code = code
        self.message = message
        self.headers = headers

    def __str__(self) -> str:
        return "{}, message={!r}".format(self.code, self.message)

    def __repr__(self) -> str:
        return "<{}: {}>".format(self.__class__.__name__, self)
|
||||
|
||||
|
||||
class BadHttpMessage(HttpProcessingError):
    """Base for 400-level protocol violations detected while parsing."""

    code = 400
    message = "Bad Request"

    def __init__(self, message: str, *, headers: Optional[_CIMultiDict] = None) -> None:
        super().__init__(message=message, headers=headers)
        # Make the message appear in the standard Exception args tuple.
        self.args = (message,)
|
||||
|
||||
|
||||
class HttpBadRequest(BadHttpMessage):
    """400 Bad Request; carries the same code/message as BadHttpMessage."""

    code = 400
    message = "Bad Request"
|
||||
|
||||
|
||||
class PayloadEncodingError(BadHttpMessage):
    """Base class for payload errors"""


class ContentEncodingError(PayloadEncodingError):
    """Content encoding error."""


class TransferEncodingError(PayloadEncodingError):
    """Transfer encoding error."""


class ContentLengthError(PayloadEncodingError):
    """Not enough data to satisfy the Content-Length header."""
|
||||
|
||||
|
||||
class LineTooLong(BadHttpMessage):
    """A start line or header exceeded the configured size limit."""

    def __init__(
        self, line: str, limit: str = "Unknown", actual_size: str = "Unknown"
    ) -> None:
        detail = "Got more than {} bytes ({}) when reading {}.".format(
            limit, actual_size, line
        )
        super().__init__(detail)
        self.args = (line, limit, actual_size)
|
||||
|
||||
|
||||
class InvalidHeader(BadHttpMessage):
    """A malformed HTTP header name or value was encountered."""

    def __init__(self, hdr: Union[bytes, str]) -> None:
        # Normalize bytes input to text before building the message.
        text = hdr.decode("utf-8", "surrogateescape") if isinstance(hdr, bytes) else hdr
        super().__init__("Invalid HTTP Header: {}".format(text))
        self.hdr = text
        self.args = (text,)
|
||||
|
||||
|
||||
class BadStatusLine(BadHttpMessage):
    """The request line / status line could not be parsed."""

    def __init__(self, line: str = "") -> None:
        # Defensive: render non-str input via repr before formatting.
        if not isinstance(line, str):
            line = repr(line)
        super().__init__("Bad status line {!r}".format(line))
        self.args = (line,)
        self.line = line
|
||||
|
||||
|
||||
class InvalidURLError(BadHttpMessage):
    """The request target / URL could not be parsed."""

    pass
|
|
@ -0,0 +1,969 @@
|
|||
import abc
|
||||
import asyncio
|
||||
import collections
|
||||
import re
|
||||
import string
|
||||
import zlib
|
||||
from contextlib import suppress
|
||||
from enum import IntEnum
|
||||
from typing import (
|
||||
Any,
|
||||
Generic,
|
||||
List,
|
||||
NamedTuple,
|
||||
Optional,
|
||||
Pattern,
|
||||
Set,
|
||||
Tuple,
|
||||
Type,
|
||||
TypeVar,
|
||||
Union,
|
||||
cast,
|
||||
)
|
||||
|
||||
from multidict import CIMultiDict, CIMultiDictProxy, istr
|
||||
from yarl import URL
|
||||
|
||||
from . import hdrs
|
||||
from .base_protocol import BaseProtocol
|
||||
from .helpers import NO_EXTENSIONS, BaseTimerContext
|
||||
from .http_exceptions import (
|
||||
BadHttpMessage,
|
||||
BadStatusLine,
|
||||
ContentEncodingError,
|
||||
ContentLengthError,
|
||||
InvalidHeader,
|
||||
LineTooLong,
|
||||
TransferEncodingError,
|
||||
)
|
||||
from .http_writer import HttpVersion, HttpVersion10
|
||||
from .log import internal_logger
|
||||
from .streams import EMPTY_PAYLOAD, StreamReader
|
||||
from .typedefs import Final, RawHeaders
|
||||
|
||||
# Optional Brotli support: decoding the "br" content-coding requires the
# third-party ``brotli`` package; record its availability.
try:
    import brotli

    HAS_BROTLI = True
except ImportError:  # pragma: no cover
    HAS_BROTLI = False
|
||||
|
||||
|
||||
__all__ = (
|
||||
"HeadersParser",
|
||||
"HttpParser",
|
||||
"HttpRequestParser",
|
||||
"HttpResponseParser",
|
||||
"RawRequestMessage",
|
||||
"RawResponseMessage",
|
||||
)
|
||||
|
||||
# Printable ASCII characters; used to sanity-check parsed text.
ASCIISET: Final[Set[str]] = set(string.printable)

# See https://tools.ietf.org/html/rfc7230#section-3.1.1
# and https://tools.ietf.org/html/rfc7230#appendix-B
#
#     method = token
#     tchar = "!" / "#" / "$" / "%" / "&" / "'" / "*" / "+" / "-" / "." /
#             "^" / "_" / "`" / "|" / "~" / DIGIT / ALPHA
#     token = 1*tchar
METHRE: Final[Pattern[str]] = re.compile(r"[!#$%&'*+\-.^_`|~0-9A-Za-z]+")
# HTTP-version per RFC 7230 section 2.6 ("HTTP/" DIGIT "." DIGIT).
# The dot must be literal, so it is escaped; the previous pattern used a
# bare "." which matched any character (e.g. "HTTP/1x1" was accepted).
VERSRE: Final[Pattern[str]] = re.compile(r"HTTP/(\d+)\.(\d+)")
# Bytes forbidden inside a header field name (controls and separators).
HDRRE: Final[Pattern[bytes]] = re.compile(rb"[\x00-\x1F\x7F()<>@,;:\[\]={} \t\\\\\"]")
|
||||
|
||||
|
||||
class RawRequestMessage(NamedTuple):
    """Parsed request line plus headers, produced before payload handling."""

    # Request method token, e.g. "GET".
    method: str
    # Raw request target exactly as received.
    path: str
    version: HttpVersion
    headers: "CIMultiDictProxy[str]"
    # Headers as (name, value) byte pairs, undecoded.
    raw_headers: RawHeaders
    # True when the connection must be closed after this message.
    should_close: bool
    # Content-Encoding value if it is one we can decode, else None.
    compression: Optional[str]
    # True for "Connection: upgrade" requests.
    upgrade: bool
    # True when Transfer-Encoding: chunked applies.
    chunked: bool
    # Parsed form of *path*.
    url: URL
|
||||
|
||||
|
||||
# Parsed status line plus headers of a response; mirrors RawRequestMessage
# but carries (code, reason) instead of (method, path, url).
RawResponseMessage = collections.namedtuple(
    "RawResponseMessage",
    (
        "version code reason headers raw_headers "
        "should_close compression upgrade chunked"
    ),
)
|
||||
|
||||
|
||||
# Message type a parser yields: request or response variant.
_MsgT = TypeVar("_MsgT", RawRequestMessage, RawResponseMessage)
|
||||
|
||||
|
||||
class ParseState(IntEnum):
    """Payload framing mode selected once the headers are parsed."""

    PARSE_NONE = 0  # no body expected
    PARSE_LENGTH = 1  # body delimited by Content-Length
    PARSE_CHUNKED = 2  # chunked transfer-encoding
    PARSE_UNTIL_EOF = 3  # body runs until connection close
|
||||
|
||||
|
||||
class ChunkState(IntEnum):
    """Sub-states of the chunked transfer-encoding parser."""

    PARSE_CHUNKED_SIZE = 0  # expecting "<hex size>[;ext]\r\n"
    PARSE_CHUNKED_CHUNK = 1  # consuming chunk payload bytes
    PARSE_CHUNKED_CHUNK_EOF = 2  # expecting the \r\n after a chunk
    PARSE_MAYBE_TRAILERS = 3  # after the 0-size chunk; trailers may follow
    PARSE_TRAILERS = 4  # consuming trailer lines
|
||||
|
||||
|
||||
class HeadersParser:
    """Parse a pre-split header block into a multidict plus raw pairs.

    Enforces per-field size limits and supports obsolete line folding
    (continuation lines starting with SP or HTAB).
    """

    def __init__(
        self,
        max_line_size: int = 8190,
        max_headers: int = 32768,
        max_field_size: int = 8190,
    ) -> None:
        self.max_line_size = max_line_size
        self.max_headers = max_headers
        self.max_field_size = max_field_size

    def parse_headers(
        self, lines: List[bytes]
    ) -> Tuple["CIMultiDictProxy[str]", RawHeaders]:
        """Parse *lines* (start line at index 0, then header lines,
        terminated by an empty line) into (headers, raw_headers)."""
        headers: CIMultiDict[str] = CIMultiDict()
        raw_headers = []

        # Index 0 is the start line; headers begin at index 1.
        lines_idx = 1
        line = lines[1]
        line_count = len(lines)

        # The terminating empty line makes the loop condition false.
        while line:
            # Parse initial header name : value pair.
            try:
                bname, bvalue = line.split(b":", 1)
            except ValueError:
                raise InvalidHeader(line) from None

            bname = bname.strip(b" \t")
            bvalue = bvalue.lstrip()
            # Reject forbidden characters in the field name.
            if HDRRE.search(bname):
                raise InvalidHeader(bname)
            if len(bname) > self.max_field_size:
                raise LineTooLong(
                    "request header name {}".format(
                        bname.decode("utf8", "xmlcharrefreplace")
                    ),
                    str(self.max_field_size),
                    str(len(bname)),
                )

            header_length = len(bvalue)

            # next line
            lines_idx += 1
            line = lines[lines_idx]

            # consume continuation lines
            continuation = line and line[0] in (32, 9)  # (' ', '\t')

            if continuation:
                # Obsolete folding: accumulate pieces, still bounded by
                # max_field_size.
                bvalue_lst = [bvalue]
                while continuation:
                    header_length += len(line)
                    if header_length > self.max_field_size:
                        raise LineTooLong(
                            "request header field {}".format(
                                bname.decode("utf8", "xmlcharrefreplace")
                            ),
                            str(self.max_field_size),
                            str(header_length),
                        )
                    bvalue_lst.append(line)

                    # next line
                    lines_idx += 1
                    if lines_idx < line_count:
                        line = lines[lines_idx]
                        if line:
                            continuation = line[0] in (32, 9)  # (' ', '\t')
                    else:
                        line = b""
                        break
                bvalue = b"".join(bvalue_lst)
            else:
                if header_length > self.max_field_size:
                    raise LineTooLong(
                        "request header field {}".format(
                            bname.decode("utf8", "xmlcharrefreplace")
                        ),
                        str(self.max_field_size),
                        str(header_length),
                    )

            bvalue = bvalue.strip()
            # surrogateescape keeps undecodable bytes round-trippable.
            name = bname.decode("utf-8", "surrogateescape")
            value = bvalue.decode("utf-8", "surrogateescape")

            headers.add(name, value)
            raw_headers.append((bname, bvalue))

        return (CIMultiDictProxy(headers), tuple(raw_headers))
|
||||
|
||||
|
||||
class HttpParser(abc.ABC, Generic[_MsgT]):
    """Incremental HTTP/1.x message parser.

    Subclasses implement :meth:`parse_message` for the request or
    response start line; this base class handles line accumulation,
    header parsing, payload dispatch and connection upgrades.
    """

    def __init__(
        self,
        protocol: Optional[BaseProtocol] = None,
        loop: Optional[asyncio.AbstractEventLoop] = None,
        limit: int = 2**16,
        max_line_size: int = 8190,
        max_headers: int = 32768,
        max_field_size: int = 8190,
        timer: Optional[BaseTimerContext] = None,
        code: Optional[int] = None,
        method: Optional[str] = None,
        readall: bool = False,
        payload_exception: Optional[Type[BaseException]] = None,
        response_with_body: bool = True,
        read_until_eof: bool = False,
        auto_decompress: bool = True,
    ) -> None:
        self.protocol = protocol
        self.loop = loop
        self.max_line_size = max_line_size
        self.max_headers = max_headers
        self.max_field_size = max_field_size
        self.timer = timer
        self.code = code
        self.method = method
        self.readall = readall
        self.payload_exception = payload_exception
        self.response_with_body = response_with_body
        self.read_until_eof = read_until_eof

        # Accumulated start-line/header lines of the in-flight message.
        self._lines: List[bytes] = []
        # Bytes held over between feed_data() calls (incomplete line).
        self._tail = b""
        self._upgraded = False
        self._payload = None
        # Active body parser, or None while reading headers.
        self._payload_parser: Optional[HttpPayloadParser] = None
        self._auto_decompress = auto_decompress
        self._limit = limit
        self._headers_parser = HeadersParser(max_line_size, max_headers, max_field_size)

    @abc.abstractmethod
    def parse_message(self, lines: List[bytes]) -> _MsgT:
        """Parse the start line + header lines into a message object."""
        pass

    def feed_eof(self) -> Optional[_MsgT]:
        """Signal end of input; may return a final partial message."""
        if self._payload_parser is not None:
            self._payload_parser.feed_eof()
            self._payload_parser = None
        else:
            # try to extract partial message
            if self._tail:
                self._lines.append(self._tail)

            if self._lines:
                # NOTE(review): bytes-vs-str comparison — `self._lines[-1]`
                # is bytes, so `!= "\r\n"` is always True and the empty
                # terminator is appended unconditionally; confirm intent.
                if self._lines[-1] != "\r\n":
                    self._lines.append(b"")
                with suppress(Exception):
                    return self.parse_message(self._lines)
        return None

    def feed_data(
        self,
        data: bytes,
        SEP: bytes = b"\r\n",
        EMPTY: bytes = b"",
        CONTENT_LENGTH: istr = hdrs.CONTENT_LENGTH,
        METH_CONNECT: str = hdrs.METH_CONNECT,
        SEC_WEBSOCKET_KEY1: istr = hdrs.SEC_WEBSOCKET_KEY1,
    ) -> Tuple[List[Tuple[_MsgT, StreamReader]], bool, bytes]:
        """Feed raw bytes into the parser.

        Returns ``(messages, upgraded, tail)`` where *messages* is a list
        of ``(message, payload_stream)`` pairs completed by this call,
        *upgraded* reports connection-upgrade mode, and *tail* is any
        unconsumed bytes (non-empty only after an upgrade).
        """

        messages = []

        # Prepend bytes left over from the previous call.
        if self._tail:
            data, self._tail = self._tail + data, b""

        data_len = len(data)
        start_pos = 0
        loop = self.loop

        while start_pos < data_len:

            # read HTTP message (request/response line + headers), \r\n\r\n
            # and split by lines
            if self._payload_parser is None and not self._upgraded:
                pos = data.find(SEP, start_pos)
                # consume \r\n
                if pos == start_pos and not self._lines:
                    start_pos = pos + 2
                    continue

                if pos >= start_pos:
                    # line found
                    self._lines.append(data[start_pos:pos])
                    start_pos = pos + 2

                    # \r\n\r\n found
                    if self._lines[-1] == EMPTY:
                        try:
                            msg: _MsgT = self.parse_message(self._lines)
                        finally:
                            # Always reset, even when parsing raised.
                            self._lines.clear()

                        def get_content_length() -> Optional[int]:
                            # payload length
                            length_hdr = msg.headers.get(CONTENT_LENGTH)
                            if length_hdr is None:
                                return None

                            try:
                                length = int(length_hdr)
                            except ValueError:
                                raise InvalidHeader(CONTENT_LENGTH)

                            if length < 0:
                                raise InvalidHeader(CONTENT_LENGTH)

                            return length

                        length = get_content_length()
                        # do not support old websocket spec
                        if SEC_WEBSOCKET_KEY1 in msg.headers:
                            raise InvalidHeader(SEC_WEBSOCKET_KEY1)

                        self._upgraded = msg.upgrade

                        # Responses carry no method; fall back to the one
                        # the parser was configured with.
                        method = getattr(msg, "method", self.method)

                        assert self.protocol is not None
                        # calculate payload
                        # NOTE(review): `or` binds looser than `and`, so this
                        # reads as (length > 0) or (chunked and not upgrade);
                        # confirm the grouping is intended.
                        if (
                            (length is not None and length > 0)
                            or msg.chunked
                            and not msg.upgrade
                        ):
                            payload = StreamReader(
                                self.protocol,
                                timer=self.timer,
                                loop=loop,
                                limit=self._limit,
                            )
                            payload_parser = HttpPayloadParser(
                                payload,
                                length=length,
                                chunked=msg.chunked,
                                method=method,
                                compression=msg.compression,
                                code=self.code,
                                readall=self.readall,
                                response_with_body=self.response_with_body,
                                auto_decompress=self._auto_decompress,
                            )
                            if not payload_parser.done:
                                self._payload_parser = payload_parser
                        elif method == METH_CONNECT:
                            # CONNECT tunnels: treat the rest of the stream
                            # as an opaque payload and switch to upgraded mode.
                            assert isinstance(msg, RawRequestMessage)
                            payload = StreamReader(
                                self.protocol,
                                timer=self.timer,
                                loop=loop,
                                limit=self._limit,
                            )
                            self._upgraded = True
                            self._payload_parser = HttpPayloadParser(
                                payload,
                                method=msg.method,
                                compression=msg.compression,
                                readall=True,
                                auto_decompress=self._auto_decompress,
                            )
                        else:
                            # NOTE(review): `>= 199` admits status 199; the
                            # surrounding logic suggests "not informational"
                            # (i.e. >= 200) was meant — confirm.
                            if (
                                getattr(msg, "code", 100) >= 199
                                and length is None
                                and self.read_until_eof
                            ):
                                payload = StreamReader(
                                    self.protocol,
                                    timer=self.timer,
                                    loop=loop,
                                    limit=self._limit,
                                )
                                payload_parser = HttpPayloadParser(
                                    payload,
                                    length=length,
                                    chunked=msg.chunked,
                                    method=method,
                                    compression=msg.compression,
                                    code=self.code,
                                    readall=True,
                                    response_with_body=self.response_with_body,
                                    auto_decompress=self._auto_decompress,
                                )
                                if not payload_parser.done:
                                    self._payload_parser = payload_parser
                            else:
                                payload = EMPTY_PAYLOAD

                        messages.append((msg, payload))
                else:
                    # Incomplete line: stash the remainder for next call.
                    self._tail = data[start_pos:]
                    data = EMPTY
                    break

            # no parser, just store
            elif self._payload_parser is None and self._upgraded:
                assert not self._lines
                break

            # feed payload
            elif data and start_pos < data_len:
                assert not self._lines
                assert self._payload_parser is not None
                try:
                    eof, data = self._payload_parser.feed_data(data[start_pos:])
                except BaseException as exc:
                    # Route parse failures into the payload stream so the
                    # consumer sees them; optionally wrapped.
                    if self.payload_exception is not None:
                        self._payload_parser.payload.set_exception(
                            self.payload_exception(str(exc))
                        )
                    else:
                        self._payload_parser.payload.set_exception(exc)

                    eof = True
                    data = b""

                if eof:
                    # Body finished; remaining bytes start a new message.
                    start_pos = 0
                    data_len = len(data)
                    self._payload_parser = None
                    continue
                else:
                    break

        if data and start_pos < data_len:
            data = data[start_pos:]
        else:
            data = EMPTY

        return messages, self._upgraded, data

    def parse_headers(
        self, lines: List[bytes]
    ) -> Tuple[
        "CIMultiDictProxy[str]", RawHeaders, Optional[bool], Optional[str], bool, bool
    ]:
        """Parses RFC 5322 headers from a stream.

        Line continuations are supported. Returns list of header name
        and value pairs. Header name is in upper case.
        """
        headers, raw_headers = self._headers_parser.parse_headers(lines)
        close_conn = None
        encoding = None
        upgrade = False
        chunked = False

        # keep-alive
        conn = headers.get(hdrs.CONNECTION)
        if conn:
            v = conn.lower()
            if v == "close":
                close_conn = True
            elif v == "keep-alive":
                close_conn = False
            elif v == "upgrade":
                upgrade = True

        # encoding
        enc = headers.get(hdrs.CONTENT_ENCODING)
        if enc:
            enc = enc.lower()
            # Only codings we can actually decode are reported.
            if enc in ("gzip", "deflate", "br"):
                encoding = enc

        # chunking
        te = headers.get(hdrs.TRANSFER_ENCODING)
        if te is not None:
            if "chunked" == te.lower():
                chunked = True
            else:
                raise BadHttpMessage("Request has invalid `Transfer-Encoding`")

            # RFC 7230 3.3.3: the two length mechanisms must not coexist.
            if hdrs.CONTENT_LENGTH in headers:
                raise BadHttpMessage(
                    "Content-Length can't be present with Transfer-Encoding",
                )

        return (headers, raw_headers, close_conn, encoding, upgrade, chunked)

    def set_upgraded(self, val: bool) -> None:
        """Set connection upgraded (to websocket) mode.

        :param bool val: new state.
        """
        self._upgraded = val
|
||||
|
||||
|
||||
class HttpRequestParser(HttpParser[RawRequestMessage]):
    """Read request status line.

    Exception .http_exceptions.BadStatusLine
    could be raised in case of any errors in status line.
    Returns RawRequestMessage.
    """

    def parse_message(self, lines: List[bytes]) -> RawRequestMessage:
        # request line
        line = lines[0].decode("utf-8", "surrogateescape")
        try:
            method, path, version = line.split(None, 2)
        except ValueError:
            raise BadStatusLine(line) from None

        if len(path) > self.max_line_size:
            raise LineTooLong(
                "Status line is too long", str(self.max_line_size), str(len(path))
            )

        # method
        # NOTE(review): METHRE.match anchors only at the start, so a token
        # with trailing non-tchar bytes would still pass — confirm whether
        # fullmatch was intended.
        if not METHRE.match(method):
            raise BadStatusLine(method)

        # version
        try:
            if version.startswith("HTTP/"):
                n1, n2 = version[5:].split(".", 1)
                version_o = HttpVersion(int(n1), int(n2))
            else:
                raise BadStatusLine(version)
        except Exception:
            raise BadStatusLine(version)

        if method == "CONNECT":
            # authority-form,
            # https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.3
            url = URL.build(authority=path, encoded=True)
        elif path.startswith("/"):
            # origin-form,
            # https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.1
            path_part, _hash_separator, url_fragment = path.partition("#")
            path_part, _question_mark_separator, qs_part = path_part.partition("?")

            # NOTE: `yarl.URL.build()` is used to mimic what the Cython-based
            # NOTE: parser does, otherwise it results into the same
            # NOTE: HTTP Request-Line input producing different
            # NOTE: `yarl.URL()` objects
            url = URL.build(
                path=path_part,
                query_string=qs_part,
                fragment=url_fragment,
                encoded=True,
            )
        else:
            # absolute-form for proxy maybe,
            # https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.2
            url = URL(path, encoded=True)

        # read headers
        (
            headers,
            raw_headers,
            close,
            compression,
            upgrade,
            chunked,
        ) = self.parse_headers(lines)

        if close is None:  # then the headers weren't set in the request
            if version_o <= HttpVersion10:  # HTTP 1.0 must asks to not close
                close = True
            else:  # HTTP 1.1 must ask to close.
                close = False

        return RawRequestMessage(
            method,
            path,
            version_o,
            headers,
            raw_headers,
            close,
            compression,
            upgrade,
            chunked,
            url,
        )
|
||||
|
||||
|
||||
class HttpResponseParser(HttpParser[RawResponseMessage]):
    """Read response status line and headers.

    BadStatusLine could be raised in case of any errors in status line.
    Returns RawResponseMessage.
    """

    def parse_message(self, lines: List[bytes]) -> RawResponseMessage:
        line = lines[0].decode("utf-8", "surrogateescape")
        try:
            version, status = line.split(None, 1)
        except ValueError:
            raise BadStatusLine(line) from None

        try:
            status, reason = status.split(None, 1)
        except ValueError:
            # Reason phrase is optional per RFC 7230.
            reason = ""

        if len(reason) > self.max_line_size:
            raise LineTooLong(
                "Status line is too long", str(self.max_line_size), str(len(reason))
            )

        # version
        match = VERSRE.match(version)
        if match is None:
            raise BadStatusLine(line)
        version_o = HttpVersion(int(match.group(1)), int(match.group(2)))

        # The status code is a three-digit number
        try:
            status_i = int(status)
        except ValueError:
            raise BadStatusLine(line) from None

        if status_i > 999:
            raise BadStatusLine(line)

        # read headers
        (
            headers,
            raw_headers,
            close,
            compression,
            upgrade,
            chunked,
        ) = self.parse_headers(lines)

        if close is None:
            # No explicit Connection header: HTTP/1.0 defaults to close.
            close = version_o <= HttpVersion10

        return RawResponseMessage(
            version_o,
            status_i,
            reason.strip(),
            headers,
            raw_headers,
            close,
            compression,
            upgrade,
            chunked,
        )
|
||||
|
||||
|
||||
class HttpPayloadParser:
|
||||
def __init__(
|
||||
self,
|
||||
payload: StreamReader,
|
||||
length: Optional[int] = None,
|
||||
chunked: bool = False,
|
||||
compression: Optional[str] = None,
|
||||
code: Optional[int] = None,
|
||||
method: Optional[str] = None,
|
||||
readall: bool = False,
|
||||
response_with_body: bool = True,
|
||||
auto_decompress: bool = True,
|
||||
) -> None:
|
||||
self._length = 0
|
||||
self._type = ParseState.PARSE_NONE
|
||||
self._chunk = ChunkState.PARSE_CHUNKED_SIZE
|
||||
self._chunk_size = 0
|
||||
self._chunk_tail = b""
|
||||
self._auto_decompress = auto_decompress
|
||||
self.done = False
|
||||
|
||||
# payload decompression wrapper
|
||||
if response_with_body and compression and self._auto_decompress:
|
||||
real_payload: Union[StreamReader, DeflateBuffer] = DeflateBuffer(
|
||||
payload, compression
|
||||
)
|
||||
else:
|
||||
real_payload = payload
|
||||
|
||||
# payload parser
|
||||
if not response_with_body:
|
||||
# don't parse payload if it's not expected to be received
|
||||
self._type = ParseState.PARSE_NONE
|
||||
real_payload.feed_eof()
|
||||
self.done = True
|
||||
|
||||
elif chunked:
|
||||
self._type = ParseState.PARSE_CHUNKED
|
||||
elif length is not None:
|
||||
self._type = ParseState.PARSE_LENGTH
|
||||
self._length = length
|
||||
if self._length == 0:
|
||||
real_payload.feed_eof()
|
||||
self.done = True
|
||||
else:
|
||||
if readall and code != 204:
|
||||
self._type = ParseState.PARSE_UNTIL_EOF
|
||||
elif method in ("PUT", "POST"):
|
||||
internal_logger.warning( # pragma: no cover
|
||||
"Content-Length or Transfer-Encoding header is required"
|
||||
)
|
||||
self._type = ParseState.PARSE_NONE
|
||||
real_payload.feed_eof()
|
||||
self.done = True
|
||||
|
||||
self.payload = real_payload
|
||||
|
||||
def feed_eof(self) -> None:
|
||||
if self._type == ParseState.PARSE_UNTIL_EOF:
|
||||
self.payload.feed_eof()
|
||||
elif self._type == ParseState.PARSE_LENGTH:
|
||||
raise ContentLengthError(
|
||||
"Not enough data for satisfy content length header."
|
||||
)
|
||||
elif self._type == ParseState.PARSE_CHUNKED:
|
||||
raise TransferEncodingError(
|
||||
"Not enough data for satisfy transfer length header."
|
||||
)
|
||||
|
||||
def feed_data(
    self, chunk: bytes, SEP: bytes = b"\r\n", CHUNK_EXT: bytes = b";"
) -> Tuple[bool, bytes]:
    """Feed raw body bytes into the payload parser.

    Returns ``(done, tail)``: *done* is True once the payload is
    complete, and *tail* holds any leftover bytes that belong to the
    next message on the connection.
    """
    # Read specified amount of bytes
    if self._type == ParseState.PARSE_LENGTH:
        required = self._length
        chunk_len = len(chunk)

        if required >= chunk_len:
            # Whole chunk belongs to this payload.
            self._length = required - chunk_len
            self.payload.feed_data(chunk, chunk_len)
            if self._length == 0:
                self.payload.feed_eof()
                return True, b""
        else:
            # Payload ends inside this chunk; return the remainder.
            self._length = 0
            self.payload.feed_data(chunk[:required], required)
            self.payload.feed_eof()
            return True, chunk[required:]

    # Chunked transfer encoding parser
    elif self._type == ParseState.PARSE_CHUNKED:
        if self._chunk_tail:
            # Prepend bytes carried over from the previous call.
            chunk = self._chunk_tail + chunk
            self._chunk_tail = b""

        while chunk:

            # read next chunk size
            if self._chunk == ChunkState.PARSE_CHUNKED_SIZE:
                pos = chunk.find(SEP)
                if pos >= 0:
                    i = chunk.find(CHUNK_EXT, 0, pos)
                    if i >= 0:
                        size_b = chunk[:i]  # strip chunk-extensions
                    else:
                        size_b = chunk[:pos]

                    try:
                        # Chunk sizes are hexadecimal per the chunked coding.
                        size = int(bytes(size_b), 16)
                    except ValueError:
                        exc = TransferEncodingError(
                            chunk[:pos].decode("ascii", "surrogateescape")
                        )
                        self.payload.set_exception(exc)
                        raise exc from None

                    chunk = chunk[pos + 2 :]
                    if size == 0:  # eof marker
                        self._chunk = ChunkState.PARSE_MAYBE_TRAILERS
                    else:
                        self._chunk = ChunkState.PARSE_CHUNKED_CHUNK
                        self._chunk_size = size
                        self.payload.begin_http_chunk_receiving()
                else:
                    # Size line not complete yet; stash and wait for more.
                    self._chunk_tail = chunk
                    return False, b""

            # read chunk and feed buffer
            if self._chunk == ChunkState.PARSE_CHUNKED_CHUNK:
                required = self._chunk_size
                chunk_len = len(chunk)

                if required > chunk_len:
                    self._chunk_size = required - chunk_len
                    self.payload.feed_data(chunk, chunk_len)
                    return False, b""
                else:
                    self._chunk_size = 0
                    self.payload.feed_data(chunk[:required], required)
                    chunk = chunk[required:]
                    self._chunk = ChunkState.PARSE_CHUNKED_CHUNK_EOF
                    self.payload.end_http_chunk_receiving()

            # toss the CRLF at the end of the chunk
            if self._chunk == ChunkState.PARSE_CHUNKED_CHUNK_EOF:
                if chunk[:2] == SEP:
                    chunk = chunk[2:]
                    self._chunk = ChunkState.PARSE_CHUNKED_SIZE
                else:
                    self._chunk_tail = chunk
                    return False, b""

            # if stream does not contain trailer, after 0\r\n
            # we should get another \r\n otherwise
            # trailers needs to be skiped until \r\n\r\n
            if self._chunk == ChunkState.PARSE_MAYBE_TRAILERS:
                head = chunk[:2]
                if head == SEP:
                    # end of stream
                    self.payload.feed_eof()
                    return True, chunk[2:]
                # Both CR and LF, or only LF may not be received yet. It is
                # expected that CRLF or LF will be shown at the very first
                # byte next time, otherwise trailers should come. The last
                # CRLF which marks the end of response might not be
                # contained in the same TCP segment which delivered the
                # size indicator.
                if not head:
                    return False, b""
                if head == SEP[:1]:
                    self._chunk_tail = head
                    return False, b""
                self._chunk = ChunkState.PARSE_TRAILERS

            # read and discard trailer up to the CRLF terminator
            if self._chunk == ChunkState.PARSE_TRAILERS:
                pos = chunk.find(SEP)
                if pos >= 0:
                    chunk = chunk[pos + 2 :]
                    self._chunk = ChunkState.PARSE_MAYBE_TRAILERS
                else:
                    self._chunk_tail = chunk
                    return False, b""

    # Read all bytes until eof
    elif self._type == ParseState.PARSE_UNTIL_EOF:
        self.payload.feed_data(chunk, len(chunk))

    return False, b""
|
||||
|
||||
|
||||
class DeflateBuffer:
    """DeflateStream decompress stream and feed data into specified stream."""

    # Either a zlib decompressobj or the local BrotliDecoder wrapper.
    decompressor: Any

    def __init__(self, out: StreamReader, encoding: Optional[str]) -> None:
        self.out = out
        self.size = 0  # total compressed bytes fed so far
        self.encoding = encoding
        self._started_decoding = False

        if encoding == "br":
            if not HAS_BROTLI:  # pragma: no cover
                raise ContentEncodingError(
                    "Can not decode content-encoding: brotli (br). "
                    "Please install `Brotli`"
                )

            class BrotliDecoder:
                # Supports both 'brotlipy' and 'Brotli' packages
                # since they share an import name. The top branches
                # are for 'brotlipy' and bottom branches for 'Brotli'
                def __init__(self) -> None:
                    self._obj = brotli.Decompressor()

                def decompress(self, data: bytes) -> bytes:
                    if hasattr(self._obj, "decompress"):
                        return cast(bytes, self._obj.decompress(data))
                    return cast(bytes, self._obj.process(data))

                def flush(self) -> bytes:
                    if hasattr(self._obj, "flush"):
                        return cast(bytes, self._obj.flush())
                    return b""

            self.decompressor = BrotliDecoder()
        else:
            # wbits > 15 (i.e. 16 + MAX_WBITS) selects gzip framing;
            # plain MAX_WBITS selects zlib (deflate) framing.
            zlib_mode = 16 + zlib.MAX_WBITS if encoding == "gzip" else zlib.MAX_WBITS
            self.decompressor = zlib.decompressobj(wbits=zlib_mode)

    def set_exception(self, exc: BaseException) -> None:
        """Propagate *exc* to the wrapped output stream."""
        self.out.set_exception(exc)

    def feed_data(self, chunk: bytes, size: int) -> None:
        """Decompress *chunk* and forward the result to the output stream."""
        if not size:
            return

        self.size += size

        # RFC1950
        # bits 0..3 = CM = 0b1000 = 8 = "deflate"
        # bits 4..7 = CINFO = 1..7 = windows size.
        if (
            not self._started_decoding
            and self.encoding == "deflate"
            and chunk[0] & 0xF != 8
        ):
            # Change the decoder to decompress incorrectly compressed data
            # Actually we should issue a warning about non-RFC-compliant data.
            self.decompressor = zlib.decompressobj(wbits=-zlib.MAX_WBITS)

        try:
            chunk = self.decompressor.decompress(chunk)
        except Exception:
            raise ContentEncodingError(
                "Can not decode content-encoding: %s" % self.encoding
            )

        self._started_decoding = True

        if chunk:
            self.out.feed_data(chunk, len(chunk))

    def feed_eof(self) -> None:
        """Flush the decompressor and signal EOF to the output stream."""
        chunk = self.decompressor.flush()

        if chunk or self.size > 0:
            self.out.feed_data(chunk, len(chunk))
            # A deflate stream that never reached its end marker is truncated.
            if self.encoding == "deflate" and not self.decompressor.eof:
                raise ContentEncodingError("deflate")

        self.out.feed_eof()

    def begin_http_chunk_receiving(self) -> None:
        """Forward chunk-start notification to the output stream."""
        self.out.begin_http_chunk_receiving()

    def end_http_chunk_receiving(self) -> None:
        """Forward chunk-end notification to the output stream."""
        self.out.end_http_chunk_receiving()
|
||||
|
||||
|
||||
# Keep the pure-Python implementations reachable under *Py names even
# when the C-accelerated versions below rebind the public names.
HttpRequestParserPy = HttpRequestParser
HttpResponseParserPy = HttpResponseParser
RawRequestMessagePy = RawRequestMessage
RawResponseMessagePy = RawResponseMessage

try:
    if not NO_EXTENSIONS:
        # Prefer the C-extension parser when extensions are enabled.
        from ._http_parser import (  # type: ignore[import,no-redef]
            HttpRequestParser,
            HttpResponseParser,
            RawRequestMessage,
            RawResponseMessage,
        )

        # Also expose the accelerated variants under explicit *C names.
        HttpRequestParserC = HttpRequestParser
        HttpResponseParserC = HttpResponseParser
        RawRequestMessageC = RawRequestMessage
        RawResponseMessageC = RawResponseMessage
except ImportError:  # pragma: no cover
    # C extension unavailable: silently keep the pure-Python parsers.
    pass
|
|
@ -0,0 +1,701 @@
|
|||
"""WebSocket protocol versions 13 and 8."""
|
||||
|
||||
import asyncio
|
||||
import collections
|
||||
import json
|
||||
import random
|
||||
import re
|
||||
import sys
|
||||
import zlib
|
||||
from enum import IntEnum
|
||||
from struct import Struct
|
||||
from typing import Any, Callable, List, Optional, Pattern, Set, Tuple, Union, cast
|
||||
|
||||
from .base_protocol import BaseProtocol
|
||||
from .helpers import NO_EXTENSIONS
|
||||
from .streams import DataQueue
|
||||
from .typedefs import Final
|
||||
|
||||
__all__ = (
|
||||
"WS_CLOSED_MESSAGE",
|
||||
"WS_CLOSING_MESSAGE",
|
||||
"WS_KEY",
|
||||
"WebSocketReader",
|
||||
"WebSocketWriter",
|
||||
"WSMessage",
|
||||
"WebSocketError",
|
||||
"WSMsgType",
|
||||
"WSCloseCode",
|
||||
)
|
||||
|
||||
|
||||
class WSCloseCode(IntEnum):
    """Numeric close codes carried in websocket CLOSE frames.

    Values follow the IANA WebSocket close-code registry
    (RFC 6455 section 7.4.1).
    """

    OK = 1000  # normal closure
    GOING_AWAY = 1001  # endpoint is going away
    PROTOCOL_ERROR = 1002  # protocol violation detected
    UNSUPPORTED_DATA = 1003  # received a data type it cannot accept
    ABNORMAL_CLOSURE = 1006  # connection dropped without a close frame
    INVALID_TEXT = 1007  # payload was not valid UTF-8 where text was required
    POLICY_VIOLATION = 1008  # generic policy violation
    MESSAGE_TOO_BIG = 1009  # message too large to process
    MANDATORY_EXTENSION = 1010  # expected extension was not negotiated
    INTERNAL_ERROR = 1011  # unexpected condition on the endpoint
    SERVICE_RESTART = 1012  # service is restarting
    TRY_AGAIN_LATER = 1013  # temporary condition, retry later
    BAD_GATEWAY = 1014  # invalid response from an upstream server
|
||||
|
||||
|
||||
# Known close codes; used below to validate codes in incoming CLOSE frames.
ALLOWED_CLOSE_CODES: Final[Set[int]] = {int(i) for i in WSCloseCode}
|
||||
|
||||
|
||||
class WSMsgType(IntEnum):
    """Frame/message types yielded by the websocket reader and writer."""

    # websocket spec types
    CONTINUATION = 0x0
    TEXT = 0x1
    BINARY = 0x2
    PING = 0x9
    PONG = 0xA
    CLOSE = 0x8

    # aiohttp specific types
    CLOSING = 0x100
    CLOSED = 0x101
    ERROR = 0x102

    # lowercase aliases for the members above
    text = TEXT
    binary = BINARY
    ping = PING
    pong = PONG
    close = CLOSE
    closing = CLOSING
    closed = CLOSED
    error = ERROR
|
||||
|
||||
|
||||
# GUID defined by RFC 6455 for the Sec-WebSocket-Accept handshake digest.
WS_KEY: Final[bytes] = b"258EAFA5-E914-47DA-95CA-C5AB0DC85B11"


# Pre-compiled struct (un)packers for frame headers:
# !H = 16-bit and !Q = 64-bit big-endian extended payload lengths,
# !BB/!BBH/!BBQ = the three possible frame-header layouts.
UNPACK_LEN2 = Struct("!H").unpack_from
UNPACK_LEN3 = Struct("!Q").unpack_from
UNPACK_CLOSE_CODE = Struct("!H").unpack
PACK_LEN1 = Struct("!BB").pack
PACK_LEN2 = Struct("!BBH").pack
PACK_LEN3 = Struct("!BBQ").pack
PACK_CLOSE_CODE = Struct("!H").pack
# Above this size the writer sends header and payload separately.
MSG_SIZE: Final[int] = 2**14
# Default write-buffer high-water mark before draining.
DEFAULT_LIMIT: Final[int] = 2**16


# Base tuple for WSMessage: (type, data, extra).
_WSMessageBase = collections.namedtuple("_WSMessageBase", ["type", "data", "extra"])
|
||||
|
||||
|
||||
class WSMessage(_WSMessageBase):
    def json(self, *, loads: Callable[[Any], Any] = json.loads) -> Any:
        """Parse ``self.data`` as JSON and return the result.

        A custom *loads* callable may be supplied to replace
        :func:`json.loads`.

        .. versionadded:: 0.22
        """
        payload = self.data
        return loads(payload)
|
||||
|
||||
|
||||
# Shared sentinel messages for the closed/closing connection states.
WS_CLOSED_MESSAGE = WSMessage(WSMsgType.CLOSED, None, None)
WS_CLOSING_MESSAGE = WSMessage(WSMsgType.CLOSING, None, None)
|
||||
|
||||
|
||||
class WebSocketError(Exception):
    """WebSocket protocol parser error carrying a close code."""

    def __init__(self, code: int, message: str) -> None:
        # args stays (code, message) so repr/pickling behave like a
        # normal two-argument Exception; the code is also exposed
        # directly for callers.
        super().__init__(code, message)
        self.code = code

    def __str__(self) -> str:
        message = self.args[1]
        return cast(str, message)
|
||||
|
||||
|
||||
class WSHandshakeError(Exception):
    """WebSocket protocol handshake error (raised during extension negotiation)."""
|
||||
|
||||
|
||||
# Host byte order ("little" or "big"); presumably consumed by the C
# masking extension — not used in this module's Python code.
native_byteorder: Final[str] = sys.byteorder


# Used by _websocket_mask_python
# _XOR_TABLE[m][a] == a ^ m, so one bytes.translate() call XORs a whole
# stripe of the payload with a single mask byte.
_XOR_TABLE: Final[List[bytes]] = [bytes(a ^ b for a in range(256)) for b in range(256)]
|
||||
|
||||
|
||||
def _websocket_mask_python(mask: bytes, data: bytearray) -> None:
    """Apply the websocket XOR mask to *data* in place.

    `mask` is a `bytes` object of length 4; `data` is a `bytearray`
    object of any length, masked as specified in section 5.3 of
    RFC 6455. This function mutates `data`; a pure-python fallback
    that an optimized implementation may replace.
    """
    assert isinstance(data, bytearray), data
    assert len(mask) == 4, mask

    if not data:
        return

    # Each of the four interleaved stripes of the payload is XOR-ed with
    # one mask byte via a pre-built 256-byte translation table, letting
    # bytes.translate do the work in a single C-level pass per stripe.
    for offset, mask_byte in enumerate(mask):
        data[offset::4] = data[offset::4].translate(_XOR_TABLE[mask_byte])
|
||||
|
||||
|
||||
# Select the masking implementation: the Cython version when extensions
# are enabled and importable, the pure-Python fallback otherwise.
if NO_EXTENSIONS:  # pragma: no cover
    _websocket_mask = _websocket_mask_python
else:
    try:
        from ._websocket import _websocket_mask_cython  # type: ignore[import]

        _websocket_mask = _websocket_mask_cython
    except ImportError:  # pragma: no cover
        _websocket_mask = _websocket_mask_python
|
||||
|
||||
# The 4-byte tail every raw-deflate flush emits; stripped before sending
# and re-appended before decompressing (see the reader/writer below).
_WS_DEFLATE_TRAILING: Final[bytes] = bytes([0x00, 0x00, 0xFF, 0xFF])


# Parameters of a single permessage-deflate offer, as numbered groups:
# 1=server_no_context_takeover, 2=client_no_context_takeover,
# 3/4=server_max_window_bits(+value), 5/6=client_max_window_bits(+value).
_WS_EXT_RE: Final[Pattern[str]] = re.compile(
    r"^(?:;\s*(?:"
    r"(server_no_context_takeover)|"
    r"(client_no_context_takeover)|"
    r"(server_max_window_bits(?:=(\d+))?)|"
    r"(client_max_window_bits(?:=(\d+))?)))*$"
)

# Splits a Sec-WebSocket-Extensions header into permessage-deflate
# offers; group(1) captures each offer's parameter list.
_WS_EXT_RE_SPLIT: Final[Pattern[str]] = re.compile(r"permessage-deflate([^,]+)?")
|
||||
|
||||
|
||||
def ws_ext_parse(extstr: Optional[str], isserver: bool = False) -> Tuple[int, bool]:
    """Parse a Sec-WebSocket-Extensions header for permessage-deflate.

    Returns ``(compress, notakeover)`` where *compress* is the
    negotiated window-bits value (0 when compression is not offered)
    and *notakeover* indicates the no-context-takeover option.
    Raises WSHandshakeError on an invalid offer (client side).
    """
    if not extstr:
        return 0, False

    compress = 0
    notakeover = False
    for ext in _WS_EXT_RE_SPLIT.finditer(extstr):
        defext = ext.group(1)
        # Return compress = 15 when get `permessage-deflate`
        if not defext:
            compress = 15
            break
        match = _WS_EXT_RE.match(defext)
        if match:
            compress = 15
            if isserver:
                # Server never fail to detect compress handshake.
                # Server does not need to send max wbit to client
                if match.group(4):
                    compress = int(match.group(4))
                    # Group3 must match if group4 matches
                    # Compress wbit 8 does not support in zlib
                    # If compress level not support,
                    # CONTINUE to next extension
                    if compress > 15 or compress < 9:
                        compress = 0
                        continue
                if match.group(1):
                    notakeover = True
                # Ignore regex group 5 & 6 for client_max_window_bits
                break
            else:
                if match.group(6):
                    compress = int(match.group(6))
                    # Group5 must match if group6 matches
                    # Compress wbit 8 does not support in zlib
                    # If compress level not support,
                    # FAIL the parse progress
                    if compress > 15 or compress < 9:
                        raise WSHandshakeError("Invalid window size")
                if match.group(2):
                    notakeover = True
                # Ignore regex group 5 & 6 for client_max_window_bits
                break
        # Return Fail if client side and not match
        elif not isserver:
            raise WSHandshakeError("Extension for deflate not supported" + ext.group(1))

    return compress, notakeover
|
||||
|
||||
|
||||
def ws_ext_gen(
    compress: int = 15, isserver: bool = False, server_notakeover: bool = False
) -> str:
    """Build a Sec-WebSocket-Extensions value offering permessage-deflate.

    *compress* is the server window-bits value (9..15; zlib does not
    support wbits=8). Clients additionally offer client_max_window_bits;
    client_notakeover is not emitted for servers.
    """
    if not 9 <= compress <= 15:
        raise ValueError(
            "Compress wbits must between 9 and 15, " "zlib does not support wbits=8"
        )

    parts = ["permessage-deflate"]
    if not isserver:
        parts.append("client_max_window_bits")
    if compress < 15:
        parts.append("server_max_window_bits=" + str(compress))
    if server_notakeover:
        parts.append("server_no_context_takeover")
    return "; ".join(parts)
|
||||
|
||||
|
||||
class WSParserState(IntEnum):
    """States of the incremental frame parser in WebSocketReader."""

    READ_HEADER = 1  # waiting for the 2-byte frame header
    READ_PAYLOAD_LENGTH = 2  # waiting for the extended length field
    READ_PAYLOAD_MASK = 3  # waiting for the 4-byte masking key
    READ_PAYLOAD = 4  # accumulating payload bytes
|
||||
|
||||
|
||||
class WebSocketReader:
    """Incremental websocket frame parser.

    Feeds parsed WSMessage objects into *queue*; supports fragmented
    messages and (when *compress* is true) permessage-deflate frames.
    """

    def __init__(
        self, queue: DataQueue[WSMessage], max_msg_size: int, compress: bool = True
    ) -> None:
        self.queue = queue
        # 0 disables the size limit (checked as truthiness below).
        self._max_msg_size = max_msg_size

        self._exc: Optional[BaseException] = None
        self._partial = bytearray()  # accumulated fragmented-message payload
        self._state = WSParserState.READ_HEADER

        self._opcode: Optional[int] = None  # opcode of an in-progress fragmented msg
        self._frame_fin = False
        self._frame_opcode: Optional[int] = None
        self._frame_payload = bytearray()

        self._tail = b""  # bytes left over between feed_data calls
        self._has_mask = False
        self._frame_mask: Optional[bytes] = None
        self._payload_length = 0
        self._payload_length_flag = 0  # raw 7-bit length field (125/126/127 cases)
        self._compressed: Optional[bool] = None
        self._decompressobj: Any = None  # zlib.decompressobj actually
        self._compress = compress

    def feed_eof(self) -> None:
        """Signal end of stream to the message queue."""
        self.queue.feed_eof()

    def feed_data(self, data: bytes) -> Tuple[bool, bytes]:
        """Feed raw bytes; returns (error_occurred, unconsumed_tail)."""
        if self._exc:
            # Parser is poisoned after a previous error.
            return True, data

        try:
            return self._feed_data(data)
        except Exception as exc:
            self._exc = exc
            self.queue.set_exception(exc)
            return True, b""

    def _feed_data(self, data: bytes) -> Tuple[bool, bytes]:
        """Parse frames from *data* and dispatch complete messages."""
        for fin, opcode, payload, compressed in self.parse_frame(data):
            if compressed and not self._decompressobj:
                # Raw deflate stream (negative wbits => no zlib header).
                self._decompressobj = zlib.decompressobj(wbits=-zlib.MAX_WBITS)
            if opcode == WSMsgType.CLOSE:
                if len(payload) >= 2:
                    close_code = UNPACK_CLOSE_CODE(payload[:2])[0]
                    if close_code < 3000 and close_code not in ALLOWED_CLOSE_CODES:
                        raise WebSocketError(
                            WSCloseCode.PROTOCOL_ERROR,
                            f"Invalid close code: {close_code}",
                        )
                    try:
                        close_message = payload[2:].decode("utf-8")
                    except UnicodeDecodeError as exc:
                        raise WebSocketError(
                            WSCloseCode.INVALID_TEXT, "Invalid UTF-8 text message"
                        ) from exc
                    msg = WSMessage(WSMsgType.CLOSE, close_code, close_message)
                elif payload:
                    # A 1-byte close payload is malformed.
                    raise WebSocketError(
                        WSCloseCode.PROTOCOL_ERROR,
                        f"Invalid close frame: {fin} {opcode} {payload!r}",
                    )
                else:
                    msg = WSMessage(WSMsgType.CLOSE, 0, "")

                self.queue.feed_data(msg, 0)

            elif opcode == WSMsgType.PING:
                self.queue.feed_data(
                    WSMessage(WSMsgType.PING, payload, ""), len(payload)
                )

            elif opcode == WSMsgType.PONG:
                self.queue.feed_data(
                    WSMessage(WSMsgType.PONG, payload, ""), len(payload)
                )

            elif (
                opcode not in (WSMsgType.TEXT, WSMsgType.BINARY)
                and self._opcode is None
            ):
                raise WebSocketError(
                    WSCloseCode.PROTOCOL_ERROR, f"Unexpected opcode={opcode!r}"
                )
            else:
                # load text/binary
                if not fin:
                    # got partial frame payload
                    if opcode != WSMsgType.CONTINUATION:
                        self._opcode = opcode
                    self._partial.extend(payload)
                    if self._max_msg_size and len(self._partial) >= self._max_msg_size:
                        raise WebSocketError(
                            WSCloseCode.MESSAGE_TOO_BIG,
                            "Message size {} exceeds limit {}".format(
                                len(self._partial), self._max_msg_size
                            ),
                        )
                else:
                    # previous frame was non finished
                    # we should get continuation opcode
                    if self._partial:
                        if opcode != WSMsgType.CONTINUATION:
                            raise WebSocketError(
                                WSCloseCode.PROTOCOL_ERROR,
                                "The opcode in non-fin frame is expected "
                                "to be zero, got {!r}".format(opcode),
                            )

                    if opcode == WSMsgType.CONTINUATION:
                        assert self._opcode is not None
                        opcode = self._opcode
                        self._opcode = None

                    self._partial.extend(payload)
                    if self._max_msg_size and len(self._partial) >= self._max_msg_size:
                        raise WebSocketError(
                            WSCloseCode.MESSAGE_TOO_BIG,
                            "Message size {} exceeds limit {}".format(
                                len(self._partial), self._max_msg_size
                            ),
                        )

                    # Decompress process must to be done after all packets
                    # received.
                    if compressed:
                        # Re-append the flush tail stripped by the sender.
                        self._partial.extend(_WS_DEFLATE_TRAILING)
                        payload_merged = self._decompressobj.decompress(
                            self._partial, self._max_msg_size
                        )
                        if self._decompressobj.unconsumed_tail:
                            left = len(self._decompressobj.unconsumed_tail)
                            raise WebSocketError(
                                WSCloseCode.MESSAGE_TOO_BIG,
                                "Decompressed message size {} exceeds limit {}".format(
                                    self._max_msg_size + left, self._max_msg_size
                                ),
                            )
                    else:
                        payload_merged = bytes(self._partial)

                    self._partial.clear()

                    if opcode == WSMsgType.TEXT:
                        try:
                            text = payload_merged.decode("utf-8")
                            self.queue.feed_data(
                                WSMessage(WSMsgType.TEXT, text, ""), len(text)
                            )
                        except UnicodeDecodeError as exc:
                            raise WebSocketError(
                                WSCloseCode.INVALID_TEXT, "Invalid UTF-8 text message"
                            ) from exc
                    else:
                        self.queue.feed_data(
                            WSMessage(WSMsgType.BINARY, payload_merged, ""),
                            len(payload_merged),
                        )

        return False, b""

    def parse_frame(
        self, buf: bytes
    ) -> List[Tuple[bool, Optional[int], bytearray, Optional[bool]]]:
        """Return the next frame from the socket."""
        frames = []
        if self._tail:
            # Resume with bytes left over from the previous call.
            buf, self._tail = self._tail + buf, b""

        start_pos = 0
        buf_length = len(buf)

        while True:
            # read header
            if self._state == WSParserState.READ_HEADER:
                if buf_length - start_pos >= 2:
                    data = buf[start_pos : start_pos + 2]
                    start_pos += 2
                    first_byte, second_byte = data

                    fin = (first_byte >> 7) & 1
                    rsv1 = (first_byte >> 6) & 1
                    rsv2 = (first_byte >> 5) & 1
                    rsv3 = (first_byte >> 4) & 1
                    opcode = first_byte & 0xF

                    # frame-fin = %x0 ; more frames of this message follow
                    #           / %x1 ; final frame of this message
                    # frame-rsv1 = %x0 ;
                    #    1 bit, MUST be 0 unless negotiated otherwise
                    # frame-rsv2 = %x0 ;
                    #    1 bit, MUST be 0 unless negotiated otherwise
                    # frame-rsv3 = %x0 ;
                    #    1 bit, MUST be 0 unless negotiated otherwise
                    #
                    # Remove rsv1 from this test for deflate development
                    if rsv2 or rsv3 or (rsv1 and not self._compress):
                        raise WebSocketError(
                            WSCloseCode.PROTOCOL_ERROR,
                            "Received frame with non-zero reserved bits",
                        )

                    if opcode > 0x7 and fin == 0:
                        raise WebSocketError(
                            WSCloseCode.PROTOCOL_ERROR,
                            "Received fragmented control frame",
                        )

                    has_mask = (second_byte >> 7) & 1
                    length = second_byte & 0x7F

                    # Control frames MUST have a payload
                    # length of 125 bytes or less
                    if opcode > 0x7 and length > 125:
                        raise WebSocketError(
                            WSCloseCode.PROTOCOL_ERROR,
                            "Control frame payload cannot be " "larger than 125 bytes",
                        )

                    # Set compress status if last package is FIN
                    # OR set compress status if this is first fragment
                    # Raise error if not first fragment with rsv1 = 0x1
                    if self._frame_fin or self._compressed is None:
                        self._compressed = True if rsv1 else False
                    elif rsv1:
                        raise WebSocketError(
                            WSCloseCode.PROTOCOL_ERROR,
                            "Received frame with non-zero reserved bits",
                        )

                    self._frame_fin = bool(fin)
                    self._frame_opcode = opcode
                    self._has_mask = bool(has_mask)
                    self._payload_length_flag = length
                    self._state = WSParserState.READ_PAYLOAD_LENGTH
                else:
                    break

            # read payload length
            if self._state == WSParserState.READ_PAYLOAD_LENGTH:
                length = self._payload_length_flag
                if length == 126:
                    # 16-bit extended length follows.
                    if buf_length - start_pos >= 2:
                        data = buf[start_pos : start_pos + 2]
                        start_pos += 2
                        length = UNPACK_LEN2(data)[0]
                        self._payload_length = length
                        self._state = (
                            WSParserState.READ_PAYLOAD_MASK
                            if self._has_mask
                            else WSParserState.READ_PAYLOAD
                        )
                    else:
                        break
                elif length > 126:
                    # 64-bit extended length follows.
                    if buf_length - start_pos >= 8:
                        data = buf[start_pos : start_pos + 8]
                        start_pos += 8
                        length = UNPACK_LEN3(data)[0]
                        self._payload_length = length
                        self._state = (
                            WSParserState.READ_PAYLOAD_MASK
                            if self._has_mask
                            else WSParserState.READ_PAYLOAD
                        )
                    else:
                        break
                else:
                    self._payload_length = length
                    self._state = (
                        WSParserState.READ_PAYLOAD_MASK
                        if self._has_mask
                        else WSParserState.READ_PAYLOAD
                    )

            # read payload mask
            if self._state == WSParserState.READ_PAYLOAD_MASK:
                if buf_length - start_pos >= 4:
                    self._frame_mask = buf[start_pos : start_pos + 4]
                    start_pos += 4
                    self._state = WSParserState.READ_PAYLOAD
                else:
                    break

            if self._state == WSParserState.READ_PAYLOAD:
                length = self._payload_length
                payload = self._frame_payload

                chunk_len = buf_length - start_pos
                if length >= chunk_len:
                    # Frame continues past this buffer; take everything.
                    self._payload_length = length - chunk_len
                    payload.extend(buf[start_pos:])
                    start_pos = buf_length
                else:
                    self._payload_length = 0
                    payload.extend(buf[start_pos : start_pos + length])
                    start_pos = start_pos + length

                if self._payload_length == 0:
                    if self._has_mask:
                        assert self._frame_mask is not None
                        _websocket_mask(self._frame_mask, payload)

                    frames.append(
                        (self._frame_fin, self._frame_opcode, payload, self._compressed)
                    )

                    self._frame_payload = bytearray()
                    self._state = WSParserState.READ_HEADER
                else:
                    break

        # Keep any incomplete remainder for the next feed.
        self._tail = buf[start_pos:]

        return frames
|
||||
|
||||
|
||||
class WebSocketWriter:
    """Serializes websocket frames onto an asyncio transport.

    Handles optional permessage-deflate compression, client-side
    masking, and flow-control draining via the protocol.
    """

    def __init__(
        self,
        protocol: BaseProtocol,
        transport: asyncio.Transport,
        *,
        use_mask: bool = False,
        limit: int = DEFAULT_LIMIT,
        # NOTE: default evaluated once at definition time, so all writers
        # without an explicit argument share one Random instance.
        random: Any = random.Random(),
        compress: int = 0,  # window bits; 0 disables compression
        notakeover: bool = False,
    ) -> None:
        self.protocol = protocol
        self.transport = transport
        self.use_mask = use_mask
        self.randrange = random.randrange
        self.compress = compress
        self.notakeover = notakeover
        self._closing = False
        self._limit = limit  # bytes written before awaiting a drain
        self._output_size = 0
        self._compressobj: Any = None  # actually compressobj

    async def _send_frame(
        self, message: bytes, opcode: int, compress: Optional[int] = None
    ) -> None:
        """Send a frame over the websocket with message as its payload."""
        if self._closing and not (opcode & WSMsgType.CLOSE):
            raise ConnectionResetError("Cannot write to closing transport")

        rsv = 0

        # Only compress larger packets (disabled)
        # Does small packet needs to be compressed?
        # if self.compress and opcode < 8 and len(message) > 124:
        if (compress or self.compress) and opcode < 8:
            if compress:
                # Do not set self._compress if compressing is for this frame
                compressobj = zlib.compressobj(level=zlib.Z_BEST_SPEED, wbits=-compress)
            else:  # self.compress
                if not self._compressobj:
                    self._compressobj = zlib.compressobj(
                        level=zlib.Z_BEST_SPEED, wbits=-self.compress
                    )
                compressobj = self._compressobj

            message = compressobj.compress(message)
            message = message + compressobj.flush(
                zlib.Z_FULL_FLUSH if self.notakeover else zlib.Z_SYNC_FLUSH
            )
            # Strip the 0x00 0x00 0xFF 0xFF flush tail, as required by
            # permessage-deflate framing.
            if message.endswith(_WS_DEFLATE_TRAILING):
                message = message[:-4]
            rsv = rsv | 0x40  # set RSV1: payload is compressed

        msg_length = len(message)

        use_mask = self.use_mask
        if use_mask:
            mask_bit = 0x80
        else:
            mask_bit = 0

        # Pick the 7-bit / 16-bit / 64-bit length encoding.
        if msg_length < 126:
            header = PACK_LEN1(0x80 | rsv | opcode, msg_length | mask_bit)
        elif msg_length < (1 << 16):
            header = PACK_LEN2(0x80 | rsv | opcode, 126 | mask_bit, msg_length)
        else:
            header = PACK_LEN3(0x80 | rsv | opcode, 127 | mask_bit, msg_length)
        if use_mask:
            mask = self.randrange(0, 0xFFFFFFFF)
            mask = mask.to_bytes(4, "big")
            message = bytearray(message)
            _websocket_mask(mask, message)
            self._write(header + mask + message)
            self._output_size += len(header) + len(mask) + len(message)
        else:
            # Large payloads are written separately to avoid copying them
            # into a new header+payload buffer.
            if len(message) > MSG_SIZE:
                self._write(header)
                self._write(message)
            else:
                self._write(header + message)

            self._output_size += len(header) + len(message)

        if self._output_size > self._limit:
            self._output_size = 0
            await self.protocol._drain_helper()

    def _write(self, data: bytes) -> None:
        """Write raw bytes to the transport, failing fast when closed."""
        if self.transport is None or self.transport.is_closing():
            raise ConnectionResetError("Cannot write to closing transport")
        self.transport.write(data)

    async def pong(self, message: bytes = b"") -> None:
        """Send pong message."""
        if isinstance(message, str):
            message = message.encode("utf-8")
        await self._send_frame(message, WSMsgType.PONG)

    async def ping(self, message: bytes = b"") -> None:
        """Send ping message."""
        if isinstance(message, str):
            message = message.encode("utf-8")
        await self._send_frame(message, WSMsgType.PING)

    async def send(
        self,
        message: Union[str, bytes],
        binary: bool = False,
        compress: Optional[int] = None,
    ) -> None:
        """Send a frame over the websocket with message as its payload."""
        if isinstance(message, str):
            message = message.encode("utf-8")
        if binary:
            await self._send_frame(message, WSMsgType.BINARY, compress)
        else:
            await self._send_frame(message, WSMsgType.TEXT, compress)

    async def close(self, code: int = 1000, message: bytes = b"") -> None:
        """Close the websocket, sending the specified code and message."""
        if isinstance(message, str):
            message = message.encode("utf-8")
        try:
            await self._send_frame(
                PACK_CLOSE_CODE(code) + message, opcode=WSMsgType.CLOSE
            )
        finally:
            # Mark closing even if sending the close frame failed.
            self._closing = True
|
|
@ -0,0 +1,198 @@
|
|||
"""Http related parsers and protocol."""
|
||||
|
||||
import asyncio
|
||||
import zlib
|
||||
from typing import Any, Awaitable, Callable, NamedTuple, Optional, Union # noqa
|
||||
|
||||
from multidict import CIMultiDict
|
||||
|
||||
from .abc import AbstractStreamWriter
|
||||
from .base_protocol import BaseProtocol
|
||||
from .helpers import NO_EXTENSIONS
|
||||
|
||||
__all__ = ("StreamWriter", "HttpVersion", "HttpVersion10", "HttpVersion11")
|
||||
|
||||
|
||||
class HttpVersion(NamedTuple):
    """HTTP protocol version as an immutable (major, minor) pair."""

    major: int
    minor: int
|
||||
|
||||
|
||||
# Shared singletons for the two common protocol versions.
HttpVersion10 = HttpVersion(1, 0)
HttpVersion11 = HttpVersion(1, 1)


# Optional async callbacks invoked as each body chunk / the header block
# is sent (see StreamWriter below).
_T_OnChunkSent = Optional[Callable[[bytes], Awaitable[None]]]
_T_OnHeadersSent = Optional[Callable[["CIMultiDict[str]"], Awaitable[None]]]
|
||||
|
||||
|
||||
class StreamWriter(AbstractStreamWriter):
    """Writes an HTTP message (headers and body) to the underlying transport.

    Supports optional chunked transfer encoding (``enable_chunking``) and
    zlib/gzip compression (``enable_compression``).  The writer becomes
    unusable after ``write_eof()``.
    """

    def __init__(
        self,
        protocol: BaseProtocol,
        loop: asyncio.AbstractEventLoop,
        on_chunk_sent: _T_OnChunkSent = None,
        on_headers_sent: _T_OnHeadersSent = None,
    ) -> None:
        self._protocol = protocol

        self.loop = loop
        # Remaining Content-Length budget; None means no explicit length.
        self.length = None
        # True once chunked transfer encoding has been enabled.
        self.chunked = False
        # Bytes written since the last drain (reset by write() when it drains).
        self.buffer_size = 0
        # Total bytes written over the writer's lifetime (never reset).
        self.output_size = 0

        self._eof = False
        # zlib compressobj when compression is enabled, otherwise None.
        self._compress: Any = None
        self._drain_waiter = None

        # Optional tracing callbacks awaited before chunks/headers hit the wire.
        self._on_chunk_sent: _T_OnChunkSent = on_chunk_sent
        self._on_headers_sent: _T_OnHeadersSent = on_headers_sent

    @property
    def transport(self) -> Optional[asyncio.Transport]:
        """The protocol's current transport, or None when disconnected."""
        return self._protocol.transport

    @property
    def protocol(self) -> BaseProtocol:
        """The protocol instance this writer sends data through."""
        return self._protocol

    def enable_chunking(self) -> None:
        """Switch the writer to chunked transfer encoding."""
        self.chunked = True

    def enable_compression(
        self, encoding: str = "deflate", strategy: int = zlib.Z_DEFAULT_STRATEGY
    ) -> None:
        """Compress the body with *encoding* ("deflate" or "gzip")."""
        # wbits > MAX_WBITS makes zlib emit a gzip header/trailer instead of
        # a raw zlib-wrapped deflate stream.
        zlib_mode = 16 + zlib.MAX_WBITS if encoding == "gzip" else zlib.MAX_WBITS
        self._compress = zlib.compressobj(wbits=zlib_mode, strategy=strategy)

    def _write(self, chunk: bytes) -> None:
        # Low-level synchronous write: update byte counters and raise instead
        # of silently dropping data when the connection has gone away.
        size = len(chunk)
        self.buffer_size += size
        self.output_size += size
        transport = self.transport
        if not self._protocol.connected or transport is None or transport.is_closing():
            raise ConnectionResetError("Cannot write to closing transport")
        transport.write(chunk)

    async def write(
        self, chunk: bytes, *, drain: bool = True, LIMIT: int = 0x10000
    ) -> None:
        """Writes chunk of data to a stream.

        write_eof() indicates end of stream.
        writer can't be used after write_eof() method being called.
        write() return drain future.
        """
        if self._on_chunk_sent is not None:
            await self._on_chunk_sent(chunk)

        if isinstance(chunk, memoryview):
            if chunk.nbytes != len(chunk):
                # just reshape it: flatten multi-byte items to a 1-D byte view
                chunk = chunk.cast("c")

        if self._compress is not None:
            chunk = self._compress.compress(chunk)
            # The compressor may buffer everything internally; nothing to send.
            if not chunk:
                return

        if self.length is not None:
            chunk_len = len(chunk)
            if self.length >= chunk_len:
                self.length = self.length - chunk_len
            else:
                # Truncate anything beyond the declared Content-Length.
                chunk = chunk[: self.length]
                self.length = 0
                if not chunk:
                    return

        if chunk:
            if self.chunked:
                # Chunked framing: hex size line, CRLF, payload, CRLF.
                chunk_len_pre = ("%x\r\n" % len(chunk)).encode("ascii")
                chunk = chunk_len_pre + chunk + b"\r\n"

            self._write(chunk)

            if self.buffer_size > LIMIT and drain:
                self.buffer_size = 0
                await self.drain()

    async def write_headers(
        self, status_line: str, headers: "CIMultiDict[str]"
    ) -> None:
        """Write request/response status and headers."""
        if self._on_headers_sent is not None:
            await self._on_headers_sent(headers)

        # status + headers
        buf = _serialize_headers(status_line, headers)
        self._write(buf)

    async def write_eof(self, chunk: bytes = b"") -> None:
        """Finish the message, flushing compression and the chunked trailer.

        *chunk* is an optional final piece of body data.  Subsequent calls
        are no-ops.
        """
        if self._eof:
            return

        if chunk and self._on_chunk_sent is not None:
            await self._on_chunk_sent(chunk)

        if self._compress:
            if chunk:
                chunk = self._compress.compress(chunk)

            # flush() emits whatever the compressor still buffers internally.
            chunk = chunk + self._compress.flush()
            if chunk and self.chunked:
                chunk_len = ("%x\r\n" % len(chunk)).encode("ascii")
                # Final data chunk followed by the zero-length terminator.
                chunk = chunk_len + chunk + b"\r\n0\r\n\r\n"
        else:
            if self.chunked:
                if chunk:
                    chunk_len = ("%x\r\n" % len(chunk)).encode("ascii")
                    chunk = chunk_len + chunk + b"\r\n0\r\n\r\n"
                else:
                    # No trailing data: only the terminating zero-length chunk.
                    chunk = b"0\r\n\r\n"

        if chunk:
            self._write(chunk)

        await self.drain()

        self._eof = True

    async def drain(self) -> None:
        """Flush the write buffer.

        The intended use is to write

        await w.write(data)
        await w.drain()
        """
        if self._protocol.transport is not None:
            await self._protocol._drain_helper()
|
||||
|
||||
|
||||
def _safe_header(string: str) -> str:
|
||||
if "\r" in string or "\n" in string:
|
||||
raise ValueError(
|
||||
"Newline or carriage return detected in headers. "
|
||||
"Potential header injection attack."
|
||||
)
|
||||
return string
|
||||
|
||||
|
||||
def _py_serialize_headers(status_line: str, headers: "CIMultiDict[str]") -> bytes:
    """Serialize a status line plus headers into a raw UTF-8 header block.

    Every name and value is validated via _safe_header() so CR/LF cannot be
    smuggled in; the result ends with the blank line terminating the header
    section.
    """
    serialized = [
        _safe_header(name) + ": " + _safe_header(value)
        for name, value in headers.items()
    ]
    raw = "".join((status_line, "\r\n", "\r\n".join(serialized), "\r\n\r\n"))
    return raw.encode("utf-8")
|
||||
|
||||
|
||||
# Default to the pure-Python serializer; swapped for the C-extension version
# below when it is importable and extensions are not disabled.
_serialize_headers = _py_serialize_headers

try:
    import aiohttp._http_writer as _http_writer  # type: ignore[import]

    _c_serialize_headers = _http_writer._serialize_headers
    if not NO_EXTENSIONS:
        _serialize_headers = _c_serialize_headers
except ImportError:
    # C extension not built/installed; keep the pure-Python implementation.
    pass
|
|
@ -0,0 +1,41 @@
|
|||
import asyncio
|
||||
import collections
|
||||
from typing import Any, Deque, Optional
|
||||
|
||||
|
||||
class EventResultOrError:
    """asyncio.Event wrapper whose waiters can be woken normally or with an error.

    ``set()`` releases every task blocked in ``wait()``; when an exception
    instance is supplied, each waiter re-raises it instead of returning.
    ``cancel()`` cancels all currently pending waiters.

    thanks to @vorpalsmith for the simple design.
    """

    def __init__(self, loop: asyncio.AbstractEventLoop) -> None:
        self._loop = loop
        self._exc: Optional[BaseException] = None
        self._event = asyncio.Event()
        self._waiters: Deque[asyncio.Future[Any]] = collections.deque()

    def set(self, exc: Optional[BaseException] = None) -> None:
        """Wake all waiters; they re-raise *exc* when it is not None."""
        self._exc = exc
        self._event.set()

    async def wait(self) -> Any:
        """Block until set() is called, then return or raise the stored error."""
        task = self._loop.create_task(self._event.wait())
        self._waiters.append(task)
        try:
            result = await task
        finally:
            self._waiters.remove(task)

        exc = self._exc
        if exc is not None:
            raise exc

        return result

    def cancel(self) -> None:
        """Cancel all waiters"""
        for pending in self._waiters:
            pending.cancel()
|
|
@ -0,0 +1,8 @@
|
|||
import logging
|
||||
|
||||
# Per-subsystem loggers for aiohttp, all under the "aiohttp." namespace so
# applications can configure each one individually or via the parent logger.
access_logger = logging.getLogger("aiohttp.access")
client_logger = logging.getLogger("aiohttp.client")
internal_logger = logging.getLogger("aiohttp.internal")
server_logger = logging.getLogger("aiohttp.server")
web_logger = logging.getLogger("aiohttp.web")
ws_logger = logging.getLogger("aiohttp.websocket")
|
|
@ -0,0 +1,961 @@
|
|||
import base64
|
||||
import binascii
|
||||
import json
|
||||
import re
|
||||
import uuid
|
||||
import warnings
|
||||
import zlib
|
||||
from collections import deque
|
||||
from types import TracebackType
|
||||
from typing import (
|
||||
TYPE_CHECKING,
|
||||
Any,
|
||||
AsyncIterator,
|
||||
Deque,
|
||||
Dict,
|
||||
Iterator,
|
||||
List,
|
||||
Mapping,
|
||||
Optional,
|
||||
Sequence,
|
||||
Tuple,
|
||||
Type,
|
||||
Union,
|
||||
cast,
|
||||
)
|
||||
from urllib.parse import parse_qsl, unquote, urlencode
|
||||
|
||||
from multidict import CIMultiDict, CIMultiDictProxy, MultiMapping
|
||||
|
||||
from .hdrs import (
|
||||
CONTENT_DISPOSITION,
|
||||
CONTENT_ENCODING,
|
||||
CONTENT_LENGTH,
|
||||
CONTENT_TRANSFER_ENCODING,
|
||||
CONTENT_TYPE,
|
||||
)
|
||||
from .helpers import CHAR, TOKEN, parse_mimetype, reify
|
||||
from .http import HeadersParser
|
||||
from .payload import (
|
||||
JsonPayload,
|
||||
LookupError,
|
||||
Order,
|
||||
Payload,
|
||||
StringPayload,
|
||||
get_payload,
|
||||
payload_type,
|
||||
)
|
||||
from .streams import StreamReader
|
||||
|
||||
__all__ = (
|
||||
"MultipartReader",
|
||||
"MultipartWriter",
|
||||
"BodyPartReader",
|
||||
"BadContentDispositionHeader",
|
||||
"BadContentDispositionParam",
|
||||
"parse_content_disposition",
|
||||
"content_disposition_filename",
|
||||
)
|
||||
|
||||
|
||||
if TYPE_CHECKING: # pragma: no cover
|
||||
from .client_reqrep import ClientResponse
|
||||
|
||||
|
||||
class BadContentDispositionHeader(RuntimeWarning):
    """Warning emitted when a Content-Disposition header cannot be parsed."""

    pass
|
||||
|
||||
|
||||
class BadContentDispositionParam(RuntimeWarning):
    """Warning emitted when a single Content-Disposition parameter is malformed."""

    pass
|
||||
|
||||
|
||||
def parse_content_disposition(
    header: Optional[str],
) -> Tuple[Optional[str], Dict[str, str]]:
    """Parse a Content-Disposition header into ``(disposition_type, params)``.

    Returns ``(None, {})`` for a missing or malformed header, emitting a
    BadContentDispositionHeader warning; an individually malformed parameter
    is skipped with a BadContentDispositionParam warning.  Handles plain
    tokens, quoted strings, extended (``key*``, RFC 5987 style) and
    continuation (``key*N``) parameters.
    """

    def is_token(string: str) -> bool:
        # Non-empty and composed only of characters from the TOKEN set.
        return bool(string) and TOKEN >= set(string)

    def is_quoted(string: str) -> bool:
        return string[0] == string[-1] == '"'

    def is_rfc5987(string: str) -> bool:
        # Extended value shape: charset'language'value (exactly two quotes).
        return is_token(string) and string.count("'") == 2

    def is_extended_param(string: str) -> bool:
        return string.endswith("*")

    def is_continuous_param(string: str) -> bool:
        # Continuation parameter: key*<digits> or key*<digits>* .
        pos = string.find("*") + 1
        if not pos:
            return False
        substring = string[pos:-1] if string.endswith("*") else string[pos:]
        return substring.isdigit()

    def unescape(text: str, *, chars: str = "".join(map(re.escape, CHAR))) -> str:
        # Strip backslash-escaping of quoted-string characters.
        return re.sub(f"\\\\([{chars}])", "\\1", text)

    if not header:
        return None, {}

    disptype, *parts = header.split(";")
    if not is_token(disptype):
        warnings.warn(BadContentDispositionHeader(header))
        return None, {}

    params: Dict[str, str] = {}
    while parts:
        item = parts.pop(0)

        if "=" not in item:
            warnings.warn(BadContentDispositionHeader(header))
            return None, {}

        key, value = item.split("=", 1)
        key = key.lower().strip()
        value = value.lstrip()

        # Duplicate parameters make the whole header invalid.
        if key in params:
            warnings.warn(BadContentDispositionHeader(header))
            return None, {}

        if not is_token(key):
            warnings.warn(BadContentDispositionParam(item))
            continue

        elif is_continuous_param(key):
            if is_quoted(value):
                value = unescape(value[1:-1])
            elif not is_token(value):
                warnings.warn(BadContentDispositionParam(item))
                continue

        elif is_extended_param(key):
            if is_rfc5987(value):
                encoding, _, value = value.split("'", 2)
                encoding = encoding or "utf-8"
            else:
                warnings.warn(BadContentDispositionParam(item))
                continue

            try:
                # Percent-decode the extended value in its declared charset.
                value = unquote(value, encoding, "strict")
            except UnicodeDecodeError:  # pragma: nocover
                warnings.warn(BadContentDispositionParam(item))
                continue

        else:
            failed = True
            if is_quoted(value):
                failed = False
                value = unescape(value[1:-1].lstrip("\\/"))
            elif is_token(value):
                failed = False
            elif parts:
                # maybe just ; in filename, in any case this is just
                # one case fix, for proper fix we need to redesign parser
                _value = f"{value};{parts[0]}"
                if is_quoted(_value):
                    parts.pop(0)
                    value = unescape(_value[1:-1].lstrip("\\/"))
                    failed = False

            if failed:
                warnings.warn(BadContentDispositionHeader(header))
                return None, {}

        params[key] = value

    return disptype.lower(), params
|
||||
|
||||
|
||||
def content_disposition_filename(
    params: Mapping[str, str], name: str = "filename"
) -> Optional[str]:
    """Return the *name* parameter from parsed Content-Disposition params.

    Prefers the extended form (``name*``) over the plain form, and otherwise
    reassembles RFC 2231 continuations (``name*0``, ``name*1``, ...),
    percent-decoding the result when a charset prefix is present.  Returns
    None when no usable value exists.
    """
    extended = "%s*" % name
    if not params:
        return None
    if extended in params:
        return params[extended]
    if name in params:
        return params[name]

    # Continuation segments must be numbered consecutively from 0; stop at
    # the first gap.  (Keys may carry a trailing "*" marking encoded data.)
    segments = []
    candidates = sorted(
        (key, value) for key, value in params.items() if key.startswith(extended)
    )
    for index, (key, value) in enumerate(candidates):
        _, tail = key.split("*", 1)
        if tail.endswith("*"):
            tail = tail[:-1]
        if tail != str(index):
            break
        segments.append(value)

    if not segments:
        return None
    joined = "".join(segments)
    if "'" not in joined:
        return joined
    # charset'language'value — decode with the declared (or default) charset.
    encoding, _, joined = joined.split("'", 2)
    return unquote(joined, encoding or "utf-8", "strict")
|
||||
|
||||
|
||||
class MultipartResponseWrapper:
    """Couples a MultipartReader with its originating client response.

    Releases the underlying connection automatically once the multipart
    stream has been fully consumed.
    """

    def __init__(
        self,
        resp: "ClientResponse",
        stream: "MultipartReader",
    ) -> None:
        self.resp = resp
        self.stream = stream

    def __aiter__(self) -> "MultipartResponseWrapper":
        return self

    async def __anext__(
        self,
    ) -> Union["MultipartReader", "BodyPartReader"]:
        part = await self.next()
        if part is not None:
            return part
        raise StopAsyncIteration

    def at_eof(self) -> bool:
        """Returns True when all response data had been read."""
        return self.resp.content.at_eof()

    async def next(
        self,
    ) -> Optional[Union["MultipartReader", "BodyPartReader"]]:
        """Emits next multipart reader object."""
        item = await self.stream.next()
        # Once the multipart stream is exhausted, give the connection back.
        if self.stream.at_eof():
            await self.release()
        return item

    async def release(self) -> None:
        """Release the connection gracefully.

        All remaining content is read to the void.
        """
        await self.resp.release()
|
||||
|
||||
|
||||
class BodyPartReader:
    """Multipart reader for single body part."""

    # Default size (bytes) of a single read from the underlying stream.
    chunk_size = 8192

    def __init__(
        self, boundary: bytes, headers: "CIMultiDictProxy[str]", content: StreamReader
    ) -> None:
        self.headers = headers
        # Boundary delimiter (with leading "--") that terminates this part.
        self._boundary = boundary
        self._content = content
        self._at_eof = False
        # Declared size of this part, when a Content-Length header is present.
        length = self.headers.get(CONTENT_LENGTH, None)
        self._length = int(length) if length is not None else None
        self._read_bytes = 0
        # TODO: typeing.Deque is not supported by Python 3.5
        # Lines read past the part's end, replayed by readline().
        self._unread: Deque[bytes] = deque()
        # Look-behind chunk used by _read_chunk_from_stream for boundary search.
        self._prev_chunk: Optional[bytes] = None
        # Counts consecutive reads at stream EOF to detect runaway loops.
        self._content_eof = 0
        self._cache: Dict[str, Any] = {}

    def __aiter__(self) -> AsyncIterator["BodyPartReader"]:
        return self  # type: ignore[return-value]

    async def __anext__(self) -> bytes:
        part = await self.next()
        if part is None:
            raise StopAsyncIteration
        return part

    async def next(self) -> Optional[bytes]:
        """Return the remaining body data, or None when the part is exhausted."""
        item = await self.read()
        if not item:
            return None
        return item

    async def read(self, *, decode: bool = False) -> bytes:
        """Reads body part data.

        decode: Decodes data following by encoding
        method from Content-Encoding header. If it missed
        data remains untouched
        """
        if self._at_eof:
            return b""
        data = bytearray()
        while not self._at_eof:
            data.extend(await self.read_chunk(self.chunk_size))
        if decode:
            return self.decode(data)
        return data

    async def read_chunk(self, size: int = chunk_size) -> bytes:
        """Reads body part content chunk of the specified size.

        size: chunk size
        """
        if self._at_eof:
            return b""
        if self._length:
            # Content-Length known: simple bounded read.
            chunk = await self._read_chunk_from_length(size)
        else:
            # Unknown length: scan for the boundary while reading.
            chunk = await self._read_chunk_from_stream(size)

        self._read_bytes += len(chunk)
        if self._read_bytes == self._length:
            self._at_eof = True
        if self._at_eof:
            # Consume the CRLF separating the body from the next boundary line.
            clrf = await self._content.readline()
            assert (
                b"\r\n" == clrf
            ), "reader did not read all the data or it is malformed"
        return chunk

    async def _read_chunk_from_length(self, size: int) -> bytes:
        # Reads body part content chunk of the specified size.
        # The body part must has Content-Length header with proper value.
        assert self._length is not None, "Content-Length required for chunked read"
        chunk_size = min(size, self._length - self._read_bytes)
        chunk = await self._content.read(chunk_size)
        return chunk

    async def _read_chunk_from_stream(self, size: int) -> bytes:
        # Reads content chunk of body part with unknown length.
        # The Content-Length header for body part is not necessary.
        assert (
            size >= len(self._boundary) + 2
        ), "Chunk size must be greater or equal than boundary length + 2"
        first_chunk = self._prev_chunk is None
        if first_chunk:
            self._prev_chunk = await self._content.read(size)

        chunk = await self._content.read(size)
        self._content_eof += int(self._content.at_eof())
        assert self._content_eof < 3, "Reading after EOF"
        assert self._prev_chunk is not None
        # Search for the boundary across the seam of the two buffered chunks,
        # since it may straddle a read() border.
        window = self._prev_chunk + chunk
        sub = b"\r\n" + self._boundary
        if first_chunk:
            idx = window.find(sub)
        else:
            idx = window.find(sub, max(0, len(self._prev_chunk) - len(sub)))
        if idx >= 0:
            # pushing boundary back to content
            with warnings.catch_warnings():
                warnings.filterwarnings("ignore", category=DeprecationWarning)
                self._content.unread_data(window[idx:])
            if size > idx:
                self._prev_chunk = self._prev_chunk[:idx]
            chunk = window[len(self._prev_chunk) : idx]
            if not chunk:
                self._at_eof = True
        # Emit the older chunk; keep the newer one as look-behind for the
        # next call.
        result = self._prev_chunk
        self._prev_chunk = chunk
        return result

    async def readline(self) -> bytes:
        """Reads body part by line by line."""
        if self._at_eof:
            return b""

        if self._unread:
            line = self._unread.popleft()
        else:
            line = await self._content.readline()

        if line.startswith(self._boundary):
            # the very last boundary may not come with \r\n,
            # so set single rules for everyone
            sline = line.rstrip(b"\r\n")
            boundary = self._boundary
            last_boundary = self._boundary + b"--"
            # ensure that we read exactly the boundary, not something alike
            if sline == boundary or sline == last_boundary:
                self._at_eof = True
                self._unread.append(line)
                return b""
        else:
            # Peek one line ahead so the CRLF before a boundary is not
            # reported as part of the body.
            next_line = await self._content.readline()
            if next_line.startswith(self._boundary):
                line = line[:-2]  # strip CRLF but only once
            self._unread.append(next_line)

        return line

    async def release(self) -> None:
        """Like read(), but reads all the data to the void."""
        if self._at_eof:
            return
        while not self._at_eof:
            await self.read_chunk(self.chunk_size)

    async def text(self, *, encoding: Optional[str] = None) -> str:
        """Like read(), but assumes that body part contains text data."""
        data = await self.read(decode=True)
        # see https://www.w3.org/TR/html5/forms.html#multipart/form-data-encoding-algorithm # NOQA
        # and https://dvcs.w3.org/hg/xhr/raw-file/tip/Overview.html#dom-xmlhttprequest-send # NOQA
        encoding = encoding or self.get_charset(default="utf-8")
        return data.decode(encoding)

    async def json(self, *, encoding: Optional[str] = None) -> Optional[Dict[str, Any]]:
        """Like read(), but assumes that body parts contains JSON data."""
        data = await self.read(decode=True)
        if not data:
            return None
        encoding = encoding or self.get_charset(default="utf-8")
        return cast(Dict[str, Any], json.loads(data.decode(encoding)))

    async def form(self, *, encoding: Optional[str] = None) -> List[Tuple[str, str]]:
        """Like read(), but assumes that body parts contain form urlencoded data."""
        data = await self.read(decode=True)
        if not data:
            return []
        if encoding is not None:
            real_encoding = encoding
        else:
            real_encoding = self.get_charset(default="utf-8")
        return parse_qsl(
            data.rstrip().decode(real_encoding),
            keep_blank_values=True,
            encoding=real_encoding,
        )

    def at_eof(self) -> bool:
        """Returns True if the boundary was reached or False otherwise."""
        return self._at_eof

    def decode(self, data: bytes) -> bytes:
        """Decodes data.

        Decoding is done according the specified Content-Encoding
        or Content-Transfer-Encoding headers value.
        """
        if CONTENT_TRANSFER_ENCODING in self.headers:
            data = self._decode_content_transfer(data)
        if CONTENT_ENCODING in self.headers:
            return self._decode_content(data)
        return data

    def _decode_content(self, data: bytes) -> bytes:
        # Reverse the Content-Encoding (compression) applied to the part.
        encoding = self.headers.get(CONTENT_ENCODING, "").lower()

        if encoding == "deflate":
            # Negative wbits: raw deflate stream without a zlib header.
            return zlib.decompress(data, -zlib.MAX_WBITS)
        elif encoding == "gzip":
            # wbits > 15: expect a gzip header/trailer.
            return zlib.decompress(data, 16 + zlib.MAX_WBITS)
        elif encoding == "identity":
            return data
        else:
            raise RuntimeError(f"unknown content encoding: {encoding}")

    def _decode_content_transfer(self, data: bytes) -> bytes:
        # Reverse the MIME Content-Transfer-Encoding of the part.
        encoding = self.headers.get(CONTENT_TRANSFER_ENCODING, "").lower()

        if encoding == "base64":
            return base64.b64decode(data)
        elif encoding == "quoted-printable":
            return binascii.a2b_qp(data)
        elif encoding in ("binary", "8bit", "7bit"):
            return data
        else:
            raise RuntimeError(
                "unknown content transfer encoding: {}" "".format(encoding)
            )

    def get_charset(self, default: str) -> str:
        """Returns charset parameter from Content-Type header or default."""
        ctype = self.headers.get(CONTENT_TYPE, "")
        mimetype = parse_mimetype(ctype)
        return mimetype.parameters.get("charset", default)

    @reify
    def name(self) -> Optional[str]:
        """Returns name specified in Content-Disposition header.

        If the header is missing or malformed, returns None.
        """
        _, params = parse_content_disposition(self.headers.get(CONTENT_DISPOSITION))
        return content_disposition_filename(params, "name")

    @reify
    def filename(self) -> Optional[str]:
        """Returns filename specified in Content-Disposition header.

        Returns None if the header is missing or malformed.
        """
        _, params = parse_content_disposition(self.headers.get(CONTENT_DISPOSITION))
        return content_disposition_filename(params, "filename")
|
||||
|
||||
|
||||
@payload_type(BodyPartReader, order=Order.try_first)
class BodyPartReaderPayload(Payload):
    """Payload adapter that streams a BodyPartReader as an outgoing body."""

    def __init__(self, value: BodyPartReader, *args: Any, **kwargs: Any) -> None:
        super().__init__(value, *args, **kwargs)

        # Carry the part's name/filename over into a Content-Disposition header.
        params: Dict[str, str] = {}
        for attr in ("name", "filename"):
            attr_value = getattr(value, attr)
            if attr_value is not None:
                params[attr] = attr_value

        if params:
            self.set_content_disposition("attachment", True, **params)

    async def write(self, writer: Any) -> None:
        """Copy the body part to *writer*, decoding each 64 KiB chunk."""
        source = self._value
        while True:
            chunk = await source.read_chunk(size=2**16)
            if not chunk:
                break
            await writer.write(source.decode(chunk))
|
||||
|
||||
|
||||
class MultipartReader:
    """Multipart body reader."""

    #: Response wrapper, used when multipart readers constructs from response.
    response_wrapper_cls = MultipartResponseWrapper
    #: Multipart reader class, used to handle multipart/* body parts.
    #: None points to type(self)
    multipart_reader_cls = None
    #: Body part reader class for non multipart/* content types.
    part_reader_cls = BodyPartReader

    def __init__(self, headers: Mapping[str, str], content: StreamReader) -> None:
        self.headers = headers
        # Delimiter line derived from the Content-Type "boundary" parameter.
        self._boundary = ("--" + self._get_boundary()).encode()
        self._content = content
        self._last_part: Optional[Union["MultipartReader", BodyPartReader]] = None
        self._at_eof = False
        # True until the preamble / first boundary has been consumed.
        self._at_bof = True
        # Lines read past a part's end, replayed by _readline().
        self._unread: List[bytes] = []

    def __aiter__(
        self,
    ) -> AsyncIterator["BodyPartReader"]:
        return self  # type: ignore[return-value]

    async def __anext__(
        self,
    ) -> Optional[Union["MultipartReader", BodyPartReader]]:
        part = await self.next()
        if part is None:
            raise StopAsyncIteration
        return part

    @classmethod
    def from_response(
        cls,
        response: "ClientResponse",
    ) -> MultipartResponseWrapper:
        """Constructs reader instance from HTTP response.

        :param response: :class:`~aiohttp.client.ClientResponse` instance
        """
        obj = cls.response_wrapper_cls(
            response, cls(response.headers, response.content)
        )
        return obj

    def at_eof(self) -> bool:
        """Returns True if the final boundary was reached, false otherwise."""
        return self._at_eof

    async def next(
        self,
    ) -> Optional[Union["MultipartReader", BodyPartReader]]:
        """Emits the next multipart body part."""
        # So, if we're at BOF, we need to skip till the boundary.
        if self._at_eof:
            return None
        await self._maybe_release_last_part()
        if self._at_bof:
            await self._read_until_first_boundary()
            self._at_bof = False
        else:
            await self._read_boundary()
        if self._at_eof:  # we just read the last boundary, nothing to do there
            return None
        self._last_part = await self.fetch_next_part()
        return self._last_part

    async def release(self) -> None:
        """Reads all the body parts to the void till the final boundary."""
        while not self._at_eof:
            item = await self.next()
            if item is None:
                break
            await item.release()

    async def fetch_next_part(
        self,
    ) -> Union["MultipartReader", BodyPartReader]:
        """Returns the next body part reader."""
        headers = await self._read_headers()
        return self._get_part_reader(headers)

    def _get_part_reader(
        self,
        headers: "CIMultiDictProxy[str]",
    ) -> Union["MultipartReader", BodyPartReader]:
        """Dispatches the response by the `Content-Type` header.

        Returns a suitable reader instance.

        :param dict headers: Response headers
        """
        ctype = headers.get(CONTENT_TYPE, "")
        mimetype = parse_mimetype(ctype)

        if mimetype.type == "multipart":
            # Nested multipart: recurse using a (possibly custom) reader class.
            if self.multipart_reader_cls is None:
                return type(self)(headers, self._content)
            return self.multipart_reader_cls(headers, self._content)
        else:
            return self.part_reader_cls(self._boundary, headers, self._content)

    def _get_boundary(self) -> str:
        # Extract and validate the boundary parameter from our Content-Type.
        mimetype = parse_mimetype(self.headers[CONTENT_TYPE])

        assert mimetype.type == "multipart", "multipart/* content type expected"

        if "boundary" not in mimetype.parameters:
            raise ValueError(
                "boundary missed for Content-Type: %s" % self.headers[CONTENT_TYPE]
            )

        boundary = mimetype.parameters["boundary"]
        # RFC 2046 limits boundaries to 70 characters.
        if len(boundary) > 70:
            raise ValueError("boundary %r is too long (70 chars max)" % boundary)

        return boundary

    async def _readline(self) -> bytes:
        # Serve previously pushed-back lines before touching the stream.
        if self._unread:
            return self._unread.pop()
        return await self._content.readline()

    async def _read_until_first_boundary(self) -> None:
        # Skip the preamble until the opening (or terminating) boundary line.
        while True:
            chunk = await self._readline()
            if chunk == b"":
                raise ValueError(
                    "Could not find starting boundary %r" % (self._boundary)
                )
            chunk = chunk.rstrip()
            if chunk == self._boundary:
                return
            elif chunk == self._boundary + b"--":
                # Terminating boundary before any part: empty multipart body.
                self._at_eof = True
                return

    async def _read_boundary(self) -> None:
        chunk = (await self._readline()).rstrip()
        if chunk == self._boundary:
            pass
        elif chunk == self._boundary + b"--":
            self._at_eof = True
            epilogue = await self._readline()
            next_line = await self._readline()

            # the epilogue is expected and then either the end of input or the
            # parent multipart boundary, if the parent boundary is found then
            # it should be marked as unread and handed to the parent for
            # processing
            if next_line[:2] == b"--":
                self._unread.append(next_line)
            # otherwise the request is likely missing an epilogue and both
            # lines should be passed to the parent for processing
            # (this handles the old behavior gracefully)
            else:
                self._unread.extend([next_line, epilogue])
        else:
            raise ValueError(f"Invalid boundary {chunk!r}, expected {self._boundary!r}")

    async def _read_headers(self) -> "CIMultiDictProxy[str]":
        # Collect raw header lines up to the blank line, then parse them.
        lines = [b""]
        while True:
            chunk = await self._content.readline()
            chunk = chunk.strip()
            lines.append(chunk)
            if not chunk:
                break
        parser = HeadersParser()
        headers, raw_headers = parser.parse_headers(lines)
        return headers

    async def _maybe_release_last_part(self) -> None:
        """Ensures that the last read body part is read completely."""
        if self._last_part is not None:
            if not self._last_part.at_eof():
                await self._last_part.release()
            # Hand any over-read lines from the finished part back to us.
            self._unread.extend(self._last_part._unread)
            self._last_part = None
|
||||
|
||||
|
||||
# (payload, content-encoding, content-transfer-encoding) triple as stored in
# MultipartWriter._parts.
_Part = Tuple[Payload, str, str]
|
||||
|
||||
|
||||
class MultipartWriter(Payload):
|
||||
"""Multipart body writer."""
|
||||
|
||||
def __init__(self, subtype: str = "mixed", boundary: Optional[str] = None) -> None:
|
||||
boundary = boundary if boundary is not None else uuid.uuid4().hex
|
||||
# The underlying Payload API demands a str (utf-8), not bytes,
|
||||
# so we need to ensure we don't lose anything during conversion.
|
||||
# As a result, require the boundary to be ASCII only.
|
||||
# In both situations.
|
||||
|
||||
try:
|
||||
self._boundary = boundary.encode("ascii")
|
||||
except UnicodeEncodeError:
|
||||
raise ValueError("boundary should contain ASCII only chars") from None
|
||||
ctype = f"multipart/{subtype}; boundary={self._boundary_value}"
|
||||
|
||||
super().__init__(None, content_type=ctype)
|
||||
|
||||
self._parts: List[_Part] = []
|
||||
|
||||
def __enter__(self) -> "MultipartWriter":
|
||||
return self
|
||||
|
||||
def __exit__(
|
||||
self,
|
||||
exc_type: Optional[Type[BaseException]],
|
||||
exc_val: Optional[BaseException],
|
||||
exc_tb: Optional[TracebackType],
|
||||
) -> None:
|
||||
pass
|
||||
|
||||
def __iter__(self) -> Iterator[_Part]:
|
||||
return iter(self._parts)
|
||||
|
||||
def __len__(self) -> int:
|
||||
return len(self._parts)
|
||||
|
||||
def __bool__(self) -> bool:
|
||||
return True
|
||||
|
||||
_valid_tchar_regex = re.compile(rb"\A[!#$%&'*+\-.^_`|~\w]+\Z")
|
||||
_invalid_qdtext_char_regex = re.compile(rb"[\x00-\x08\x0A-\x1F\x7F]")
|
||||
|
||||
@property
|
||||
def _boundary_value(self) -> str:
|
||||
"""Wrap boundary parameter value in quotes, if necessary.
|
||||
|
||||
Reads self.boundary and returns a unicode sting.
|
||||
"""
|
||||
# Refer to RFCs 7231, 7230, 5234.
|
||||
#
|
||||
# parameter = token "=" ( token / quoted-string )
|
||||
# token = 1*tchar
|
||||
# quoted-string = DQUOTE *( qdtext / quoted-pair ) DQUOTE
|
||||
# qdtext = HTAB / SP / %x21 / %x23-5B / %x5D-7E / obs-text
|
||||
# obs-text = %x80-FF
|
||||
# quoted-pair = "\" ( HTAB / SP / VCHAR / obs-text )
|
||||
# tchar = "!" / "#" / "$" / "%" / "&" / "'" / "*"
|
||||
# / "+" / "-" / "." / "^" / "_" / "`" / "|" / "~"
|
||||
# / DIGIT / ALPHA
|
||||
# ; any VCHAR, except delimiters
|
||||
# VCHAR = %x21-7E
|
||||
value = self._boundary
|
||||
if re.match(self._valid_tchar_regex, value):
|
||||
return value.decode("ascii") # cannot fail
|
||||
|
||||
if re.search(self._invalid_qdtext_char_regex, value):
|
||||
raise ValueError("boundary value contains invalid characters")
|
||||
|
||||
# escape %x5C and %x22
|
||||
quoted_value_content = value.replace(b"\\", b"\\\\")
|
||||
quoted_value_content = quoted_value_content.replace(b'"', b'\\"')
|
||||
|
||||
return '"' + quoted_value_content.decode("ascii") + '"'
|
||||
|
||||
@property
|
||||
def boundary(self) -> str:
|
||||
return self._boundary.decode("ascii")
|
||||
|
||||
def append(self, obj: Any, headers: Optional["MultiMapping[str]"] = None) -> "Payload":
    """Append *obj* as a new body part, wrapping it in a Payload if needed."""
    if headers is None:
        headers = CIMultiDict()

    if isinstance(obj, Payload):
        # Already a payload: just merge the extra headers into it.
        obj.headers.update(headers)
        return self.append_payload(obj)

    try:
        wrapped = get_payload(obj, headers=headers)
    except LookupError:
        # No registered factory matched the object's type.
        raise TypeError("Cannot create payload from %r" % obj)
    return self.append_payload(wrapped)
|
||||
|
||||
def append_payload(self, payload: "Payload") -> "Payload":
    """Adds a new body part to multipart writer and returns it."""
    # Content-Encoding: only deflate/gzip/identity are understood.
    encoding: Optional[str] = payload.headers.get(CONTENT_ENCODING, "").lower()
    if encoding and encoding not in ("deflate", "gzip", "identity"):
        raise RuntimeError(f"unknown content encoding: {encoding}")
    if encoding == "identity":
        encoding = None  # identity means "no transformation at all"

    # Content-Transfer-Encoding: an empty string means the header is absent.
    te_encoding: Optional[str] = payload.headers.get(
        CONTENT_TRANSFER_ENCODING, ""
    ).lower()
    if te_encoding not in ("", "base64", "quoted-printable", "binary"):
        raise RuntimeError(f"unknown content transfer encoding: {te_encoding}")
    if te_encoding == "binary":
        te_encoding = None  # binary is a no-op transfer encoding

    # Content-Length is only meaningful when the body is not transformed.
    size = payload.size
    if size is not None and not (encoding or te_encoding):
        payload.headers[CONTENT_LENGTH] = str(size)

    self._parts.append((payload, encoding, te_encoding))  # type: ignore[arg-type]
    return payload
|
||||
|
||||
def append_json(
    self, obj: Any, headers: Optional["MultiMapping[str]"] = None
) -> "Payload":
    """Helper to append a JSON-serialized part."""
    if headers is None:
        headers = CIMultiDict()
    return self.append_payload(JsonPayload(obj, headers=headers))
|
||||
|
||||
def append_form(
    self,
    obj: Union[Sequence[Tuple[str, str]], Mapping[str, str]],
    headers: Optional["MultiMapping[str]"] = None,
) -> "Payload":
    """Helper to append a form urlencoded part."""
    assert isinstance(obj, (Sequence, Mapping))

    if headers is None:
        headers = CIMultiDict()

    # Normalize a mapping to a list of (key, value) pairs before encoding.
    pairs = list(obj.items()) if isinstance(obj, Mapping) else obj
    encoded = urlencode(pairs, doseq=True)

    return self.append_payload(
        StringPayload(
            encoded, headers=headers, content_type="application/x-www-form-urlencoded"
        )
    )
|
||||
|
||||
@property
def size(self) -> Optional[int]:
    """Total byte size of the multipart body, or None when not computable."""
    total = 0
    for part, encoding, te_encoding in self._parts:
        # Transformed parts have an unknown final size, as do sizeless payloads.
        if encoding or te_encoding or part.size is None:
            return None
        # Per part: b'--' + boundary + b'\r\n' + headers + body + b'\r\n'
        total += (
            2 + len(self._boundary) + 2 + part.size + len(part._binary_headers) + 2
        )
    # Closing delimiter: b'--' + boundary + b'--\r\n'
    return total + 2 + len(self._boundary) + 4
|
||||
|
||||
async def write(self, writer: Any, close_boundary: bool = True) -> None:
    """Write the multipart body, one boundary-framed part at a time."""
    for part, encoding, te_encoding in self._parts:
        await writer.write(b"--" + self._boundary + b"\r\n")
        await writer.write(part._binary_headers)

        if not (encoding or te_encoding):
            # Fast path: raw payload bytes, no transformation needed.
            await part.write(writer)
        else:
            # Wrap the writer so the part is compressed/encoded on the fly.
            wrapped = MultipartPayloadWriter(writer)
            if encoding:
                wrapped.enable_compression(encoding)
            if te_encoding:
                wrapped.enable_encoding(te_encoding)
            await part.write(wrapped)  # type: ignore[arg-type]
            await wrapped.write_eof()

        await writer.write(b"\r\n")

    if close_boundary:
        # The final delimiter marks the end of the whole multipart body.
        await writer.write(b"--" + self._boundary + b"--\r\n")
|
||||
|
||||
|
||||
class MultipartPayloadWriter:
    """Stream-writer adapter that compresses and/or transfer-encodes chunks."""

    def __init__(self, writer: Any) -> None:
        self._writer = writer
        self._encoding: Optional[str] = None  # "base64" or "quoted-printable"
        self._compress: Any = None  # zlib compressor when compression is enabled
        # Bytes that do not yet fill a whole 3-byte base64 quantum.
        self._encoding_buffer: Optional[bytearray] = None

    def enable_encoding(self, encoding: str) -> None:
        """Enable a Content-Transfer-Encoding; unknown names are ignored."""
        if encoding == "base64":
            self._encoding = encoding
            self._encoding_buffer = bytearray()
        elif encoding == "quoted-printable":
            self._encoding = "quoted-printable"

    def enable_compression(
        self, encoding: str = "deflate", strategy: int = zlib.Z_DEFAULT_STRATEGY
    ) -> None:
        """Enable gzip or raw-deflate compression for all written chunks."""
        # wbits+16 selects gzip framing; negative wbits selects raw deflate.
        zlib_mode = 16 + zlib.MAX_WBITS if encoding == "gzip" else -zlib.MAX_WBITS
        self._compress = zlib.compressobj(wbits=zlib_mode, strategy=strategy)

    async def write_eof(self) -> None:
        """Flush the compressor and any buffered base64 remainder."""
        if self._compress is not None:
            tail = self._compress.flush()
            if tail:
                # Drop the compressor so the flushed bytes skip compress().
                self._compress = None
                await self.write(tail)

        if self._encoding == "base64":
            if self._encoding_buffer:
                await self._writer.write(base64.b64encode(self._encoding_buffer))

    async def write(self, chunk: bytes) -> None:
        """Write *chunk* downstream, applying compression and encoding."""
        if self._compress is not None:
            if chunk:
                chunk = self._compress.compress(chunk)
                if not chunk:
                    return  # compressor buffered everything for now

        if self._encoding == "base64":
            buf = self._encoding_buffer
            assert buf is not None
            buf.extend(chunk)

            if buf:
                # Emit only whole 3-byte groups; keep the remainder buffered
                # so base64 output stays padding-free until write_eof().
                whole = (len(buf) // 3) * 3
                ready, self._encoding_buffer = buf[:whole], buf[whole:]
                if ready:
                    await self._writer.write(base64.b64encode(ready))
        elif self._encoding == "quoted-printable":
            await self._writer.write(binascii.b2a_qp(chunk))
        else:
            await self._writer.write(chunk)
|
|
@ -0,0 +1,465 @@
|
|||
import asyncio
|
||||
import enum
|
||||
import io
|
||||
import json
|
||||
import mimetypes
|
||||
import os
|
||||
import warnings
|
||||
from abc import ABC, abstractmethod
|
||||
from itertools import chain
|
||||
from typing import (
|
||||
IO,
|
||||
TYPE_CHECKING,
|
||||
Any,
|
||||
ByteString,
|
||||
Dict,
|
||||
Iterable,
|
||||
Optional,
|
||||
TextIO,
|
||||
Tuple,
|
||||
Type,
|
||||
Union,
|
||||
)
|
||||
|
||||
from multidict import CIMultiDict
|
||||
|
||||
from . import hdrs
|
||||
from .abc import AbstractStreamWriter
|
||||
from .helpers import (
|
||||
PY_36,
|
||||
content_disposition_header,
|
||||
guess_filename,
|
||||
parse_mimetype,
|
||||
sentinel,
|
||||
)
|
||||
from .streams import StreamReader
|
||||
from .typedefs import Final, JSONEncoder, _CIMultiDict
|
||||
|
||||
__all__ = (
|
||||
"PAYLOAD_REGISTRY",
|
||||
"get_payload",
|
||||
"payload_type",
|
||||
"Payload",
|
||||
"BytesPayload",
|
||||
"StringPayload",
|
||||
"IOBasePayload",
|
||||
"BytesIOPayload",
|
||||
"BufferedReaderPayload",
|
||||
"TextIOPayload",
|
||||
"StringIOPayload",
|
||||
"JsonPayload",
|
||||
"AsyncIterablePayload",
|
||||
)
|
||||
|
||||
TOO_LARGE_BYTES_BODY: Final[int] = 2**20 # 1 MB
|
||||
|
||||
if TYPE_CHECKING: # pragma: no cover
|
||||
from typing import List
|
||||
|
||||
|
||||
class LookupError(Exception):
    """Raised when no registered factory can handle the given data.

    Intentionally shadows the builtin name within this module.
    """
|
||||
|
||||
|
||||
class Order(str, enum.Enum):
    """Lookup priority buckets for registered payload factories."""

    normal = "normal"
    try_first = "try_first"
    try_last = "try_last"
|
||||
|
||||
|
||||
def get_payload(data: Any, *args: Any, **kwargs: Any) -> "Payload":
    """Build a Payload for *data* via the module-global registry."""
    return PAYLOAD_REGISTRY.get(data, *args, **kwargs)
|
||||
|
||||
|
||||
def register_payload(
    factory: Type["Payload"], type: Any, *, order: "Order" = Order.normal
) -> None:
    """Register *factory* for *type* in the module-global registry."""
    PAYLOAD_REGISTRY.register(factory, type, order=order)
|
||||
|
||||
|
||||
class payload_type:
    """Class decorator that registers the decorated Payload for *type*."""

    def __init__(self, type: Any, *, order: "Order" = Order.normal) -> None:
        self.type = type
        self.order = order

    def __call__(self, factory: Type["Payload"]) -> Type["Payload"]:
        # Register, then hand the class back unchanged so decoration
        # is transparent to the decorated type.
        register_payload(factory, self.type, order=self.order)
        return factory
|
||||
|
||||
|
||||
PayloadType = Type["Payload"]
|
||||
_PayloadRegistryItem = Tuple[PayloadType, Any]
|
||||
|
||||
|
||||
class PayloadRegistry:
|
||||
"""Payload registry.
|
||||
|
||||
note: we need zope.interface for more efficient adapter search
|
||||
"""
|
||||
|
||||
def __init__(self) -> None:
|
||||
self._first: List[_PayloadRegistryItem] = []
|
||||
self._normal: List[_PayloadRegistryItem] = []
|
||||
self._last: List[_PayloadRegistryItem] = []
|
||||
|
||||
def get(
|
||||
self,
|
||||
data: Any,
|
||||
*args: Any,
|
||||
_CHAIN: "Type[chain[_PayloadRegistryItem]]" = chain,
|
||||
**kwargs: Any,
|
||||
) -> "Payload":
|
||||
if isinstance(data, Payload):
|
||||
return data
|
||||
for factory, type in _CHAIN(self._first, self._normal, self._last):
|
||||
if isinstance(data, type):
|
||||
return factory(data, *args, **kwargs)
|
||||
|
||||
raise LookupError()
|
||||
|
||||
def register(
|
||||
self, factory: PayloadType, type: Any, *, order: Order = Order.normal
|
||||
) -> None:
|
||||
if order is Order.try_first:
|
||||
self._first.append((factory, type))
|
||||
elif order is Order.normal:
|
||||
self._normal.append((factory, type))
|
||||
elif order is Order.try_last:
|
||||
self._last.append((factory, type))
|
||||
else:
|
||||
raise ValueError(f"Unsupported order {order!r}")
|
||||
|
||||
|
||||
class Payload(ABC):
    """Abstract base for anything that can be sent as an HTTP body part."""

    _default_content_type: str = "application/octet-stream"
    _size: Optional[int] = None

    def __init__(
        self,
        value: Any,
        headers: Optional[
            Union["_CIMultiDict", Dict[str, str], Iterable[Tuple[str, str]]]
        ] = None,
        content_type: Optional[str] = sentinel,
        filename: Optional[str] = None,
        encoding: Optional[str] = None,
        **kwargs: Any,
    ) -> None:
        self._encoding = encoding
        self._filename = filename
        self._headers: "_CIMultiDict" = CIMultiDict()
        self._value = value
        # Content-Type resolution order: explicit argument, then a guess
        # from the filename, then the class default.
        if content_type is not sentinel and content_type is not None:
            self._headers[hdrs.CONTENT_TYPE] = content_type
        elif self._filename is not None:
            guessed = mimetypes.guess_type(self._filename)[0]
            self._headers[hdrs.CONTENT_TYPE] = (
                guessed if guessed is not None else self._default_content_type
            )
        else:
            self._headers[hdrs.CONTENT_TYPE] = self._default_content_type
        # Caller-supplied headers win over the derived Content-Type.
        self._headers.update(headers or {})

    @property
    def size(self) -> Optional[int]:
        """Size of the payload."""
        return self._size

    @property
    def filename(self) -> Optional[str]:
        """Filename of the payload."""
        return self._filename

    @property
    def headers(self) -> "_CIMultiDict":
        """Custom item headers"""
        return self._headers

    @property
    def _binary_headers(self) -> bytes:
        # "Name: value\r\n" per header, plus the blank line separating
        # headers from the body.
        rendered = [f"{name}: {value}\r\n" for name, value in self.headers.items()]
        return "".join(rendered).encode("utf-8") + b"\r\n"

    @property
    def encoding(self) -> Optional[str]:
        """Payload encoding"""
        return self._encoding

    @property
    def content_type(self) -> str:
        """Content type"""
        return self._headers[hdrs.CONTENT_TYPE]

    def set_content_disposition(
        self,
        disptype: str,
        quote_fields: bool = True,
        _charset: str = "utf-8",
        **params: Any,
    ) -> None:
        """Sets ``Content-Disposition`` header."""
        self._headers[hdrs.CONTENT_DISPOSITION] = content_disposition_header(
            disptype, quote_fields=quote_fields, _charset=_charset, **params
        )

    @abstractmethod
    async def write(self, writer: "AbstractStreamWriter") -> None:
        """Write payload.

        writer is an AbstractStreamWriter instance:
        """
|
||||
|
||||
|
||||
class BytesPayload(Payload):
    """Payload backed by an in-memory bytes-like object."""

    def __init__(self, value: ByteString, *args: Any, **kwargs: Any) -> None:
        if not isinstance(value, (bytes, bytearray, memoryview)):
            raise TypeError(f"value argument must be byte-ish, not {type(value)!r}")

        kwargs.setdefault("content_type", "application/octet-stream")
        super().__init__(value, *args, **kwargs)

        # memoryview length counts elements, not bytes; use nbytes there.
        self._size = value.nbytes if isinstance(value, memoryview) else len(value)

        if self._size > TOO_LARGE_BYTES_BODY:
            # ResourceWarning only accepts `source` on Python >= 3.6.
            warn_kwargs: Any = {"source": self} if PY_36 else {}
            warnings.warn(
                "Sending a large body directly with raw bytes might"
                " lock the event loop. You should probably pass an "
                "io.BytesIO object instead",
                ResourceWarning,
                **warn_kwargs,
            )

    async def write(self, writer: "AbstractStreamWriter") -> None:
        """Write the buffered bytes downstream in a single call."""
        await writer.write(self._value)
|
||||
|
||||
|
||||
class StringPayload(BytesPayload):
    """Text payload; encodes *value* with an explicit or derived charset."""

    def __init__(
        self,
        value: str,
        *args: Any,
        encoding: Optional[str] = None,
        content_type: Optional[str] = None,
        **kwargs: Any,
    ) -> None:
        if encoding is not None:
            # Explicit encoding wins; synthesize a content type if absent.
            if content_type is None:
                content_type = "text/plain; charset=%s" % encoding
            real_encoding = encoding
        elif content_type is None:
            # Neither given: default to UTF-8 plain text.
            real_encoding = "utf-8"
            content_type = "text/plain; charset=utf-8"
        else:
            # Derive the charset from the supplied content type.
            real_encoding = parse_mimetype(content_type).parameters.get(
                "charset", "utf-8"
            )

        super().__init__(
            value.encode(real_encoding),
            encoding=real_encoding,
            content_type=content_type,
            *args,
            **kwargs,
        )
|
||||
|
||||
|
||||
class StringIOPayload(StringPayload):
    """Payload that drains an in-memory text stream (e.g. ``io.StringIO``)."""

    def __init__(self, value: IO[str], *args: Any, **kwargs: Any) -> None:
        # Read eagerly: the whole text is materialized at construction time.
        super().__init__(value.read(), *args, **kwargs)
|
||||
|
||||
|
||||
class IOBasePayload(Payload):
    """Payload streaming from a binary file-like object."""

    _value: IO[Any]

    def __init__(
        self, value: IO[Any], disposition: str = "attachment", *args: Any, **kwargs: Any
    ) -> None:
        if "filename" not in kwargs:
            kwargs["filename"] = guess_filename(value)

        super().__init__(value, *args, **kwargs)

        if self._filename is not None and disposition is not None:
            # Never clobber a disposition the caller already provided.
            if hdrs.CONTENT_DISPOSITION not in self.headers:
                self.set_content_disposition(disposition, filename=self._filename)

    async def write(self, writer: "AbstractStreamWriter") -> None:
        """Copy the stream to *writer* in 64 KiB chunks; always close it."""
        loop = asyncio.get_event_loop()
        read = self._value.read  # blocking reads run in the default executor
        try:
            chunk = await loop.run_in_executor(None, read, 2**16)
            while chunk:
                await writer.write(chunk)
                chunk = await loop.run_in_executor(None, read, 2**16)
        finally:
            await loop.run_in_executor(None, self._value.close)
|
||||
|
||||
|
||||
class TextIOPayload(IOBasePayload):
    """Payload streaming from a text-mode file object, encoded on the fly."""

    _value: TextIO

    def __init__(
        self,
        value: TextIO,
        *args: Any,
        encoding: Optional[str] = None,
        content_type: Optional[str] = None,
        **kwargs: Any,
    ) -> None:
        if encoding is not None:
            # Explicit encoding wins; synthesize a content type if absent.
            if content_type is None:
                content_type = "text/plain; charset=%s" % encoding
        elif content_type is None:
            # Neither given: default to UTF-8 plain text.
            encoding = "utf-8"
            content_type = "text/plain; charset=utf-8"
        else:
            # Derive the charset from the supplied content type.
            encoding = parse_mimetype(content_type).parameters.get("charset", "utf-8")

        super().__init__(
            value,
            content_type=content_type,
            encoding=encoding,
            *args,
            **kwargs,
        )

    @property
    def size(self) -> Optional[int]:
        """Remaining size per fstat, or None when the stream has no fileno.

        NOTE(review): tell() on text streams returns an opaque cookie;
        subtracting it from st_size assumes it is a byte offset -- confirm.
        """
        try:
            return os.fstat(self._value.fileno()).st_size - self._value.tell()
        except OSError:
            return None

    async def write(self, writer: "AbstractStreamWriter") -> None:
        """Read text chunks in an executor, encode them, and write them out."""
        loop = asyncio.get_event_loop()
        read = self._value.read
        try:
            chunk = await loop.run_in_executor(None, read, 2**16)
            while chunk:
                # Fall back to the platform default only when no encoding is set.
                if self._encoding:
                    await writer.write(chunk.encode(encoding=self._encoding))
                else:
                    await writer.write(chunk.encode())
                chunk = await loop.run_in_executor(None, read, 2**16)
        finally:
            await loop.run_in_executor(None, self._value.close)
|
||||
|
||||
|
||||
class BytesIOPayload(IOBasePayload):
    """Payload over an in-memory binary stream; size computed without copying."""

    @property
    def size(self) -> int:
        """Number of bytes between the current position and EOF."""
        here = self._value.tell()
        end = self._value.seek(0, os.SEEK_END)
        self._value.seek(here)  # restore the caller's position
        return end - here
|
||||
|
||||
|
||||
class BufferedReaderPayload(IOBasePayload):
    """Payload over a buffered binary reader backed by a real file descriptor."""

    @property
    def size(self) -> Optional[int]:
        """Remaining bytes per fstat, or None when fileno() is unsupported."""
        try:
            return os.fstat(self._value.fileno()).st_size - self._value.tell()
        except OSError:
            # data.fileno() is not supported, e.g.
            # io.BufferedReader(io.BytesIO(b'data'))
            return None
|
||||
|
||||
|
||||
class JsonPayload(BytesPayload):
    """Payload serializing *value* to JSON bytes at construction time."""

    def __init__(
        self,
        value: Any,
        encoding: str = "utf-8",
        content_type: str = "application/json",
        dumps: "JSONEncoder" = json.dumps,
        *args: Any,
        **kwargs: Any,
    ) -> None:
        # Serialize eagerly so size/headers are known up front.
        super().__init__(
            dumps(value).encode(encoding),
            content_type=content_type,
            encoding=encoding,
            *args,
            **kwargs,
        )
|
||||
|
||||
|
||||
if TYPE_CHECKING:  # pragma: no cover
    from typing import AsyncIterable, AsyncIterator

    # For the type checker: iterators/iterables of bytes chunks.
    _AsyncIterator = AsyncIterator[bytes]
    _AsyncIterable = AsyncIterable[bytes]
else:
    # At runtime the plain ABCs suffice; subscripting is not needed.
    from collections.abc import AsyncIterable, AsyncIterator

    _AsyncIterator = AsyncIterator
    _AsyncIterable = AsyncIterable
|
||||
|
||||
|
||||
class AsyncIterablePayload(Payload):
    """Payload draining an async iterable of ``bytes`` chunks exactly once."""

    _iter: Optional[_AsyncIterator] = None

    def __init__(self, value: _AsyncIterable, *args: Any, **kwargs: Any) -> None:
        if not isinstance(value, AsyncIterable):
            # Fixed typo in the error message ("AsyncIterablebe").
            raise TypeError(
                "value argument must support "
                "collections.abc.AsyncIterable interface, "
                "got {!r}".format(type(value))
            )

        if "content_type" not in kwargs:
            kwargs["content_type"] = "application/octet-stream"

        super().__init__(value, *args, **kwargs)

        self._iter = value.__aiter__()

    async def write(self, writer: "AbstractStreamWriter") -> None:
        """Exhaust the iterator once; subsequent calls are no-ops."""
        if self._iter:
            try:
                # iter is not None check prevents rare cases
                # when the case iterable is used twice
                while True:
                    chunk = await self._iter.__anext__()
                    await writer.write(chunk)
            except StopAsyncIteration:
                self._iter = None
|
||||
|
||||
|
||||
class StreamReaderPayload(AsyncIterablePayload):
    """Payload adapting an aiohttp ``StreamReader`` via its ``iter_any()``."""

    def __init__(self, value: "StreamReader", *args: Any, **kwargs: Any) -> None:
        super().__init__(value.iter_any(), *args, **kwargs)
|
||||
|
||||
|
||||
# Global registry mapping Python types to Payload factories.
PAYLOAD_REGISTRY = PayloadRegistry()
PAYLOAD_REGISTRY.register(BytesPayload, (bytes, bytearray, memoryview))
PAYLOAD_REGISTRY.register(StringPayload, str)
PAYLOAD_REGISTRY.register(StringIOPayload, io.StringIO)
PAYLOAD_REGISTRY.register(TextIOPayload, io.TextIOBase)
PAYLOAD_REGISTRY.register(BytesIOPayload, io.BytesIO)
PAYLOAD_REGISTRY.register(BufferedReaderPayload, (io.BufferedReader, io.BufferedRandom))
PAYLOAD_REGISTRY.register(IOBasePayload, io.IOBase)
PAYLOAD_REGISTRY.register(StreamReaderPayload, StreamReader)
# try_last gives more specialized async iterables like
# multipart.BodyPartReaderPayload a chance to override the default.
PAYLOAD_REGISTRY.register(AsyncIterablePayload, AsyncIterable, order=Order.try_last)
|
|
@ -0,0 +1,75 @@
|
|||
"""
|
||||
Payload implementation for coroutines as data provider.
|
||||
|
||||
As a simple case, you can upload data from file::
|
||||
|
||||
@aiohttp.streamer
|
||||
async def file_sender(writer, file_name=None):
|
||||
with open(file_name, 'rb') as f:
|
||||
chunk = f.read(2**16)
|
||||
while chunk:
|
||||
await writer.write(chunk)
|
||||
|
||||
chunk = f.read(2**16)
|
||||
|
||||
Then you can use `file_sender` like this:
|
||||
|
||||
async with session.post('http://httpbin.org/post',
|
||||
data=file_sender(file_name='huge_file')) as resp:
|
||||
print(await resp.text())
|
||||
|
||||
.. note:: Coroutine must accept ``writer`` as the first argument
|
||||
|
||||
"""
|
||||
|
||||
import types
|
||||
import warnings
|
||||
from typing import Any, Awaitable, Callable, Dict, Tuple
|
||||
|
||||
from .abc import AbstractStreamWriter
|
||||
from .payload import Payload, payload_type
|
||||
|
||||
__all__ = ("streamer",)
|
||||
|
||||
|
||||
class _stream_wrapper:
    """Bind a streaming coroutine to its arguments; call later with a writer."""

    def __init__(
        self,
        coro: Callable[..., Awaitable[None]],
        args: Tuple[Any, ...],
        kwargs: Dict[str, Any],
    ) -> None:
        self.coro = types.coroutine(coro)
        self.args = args
        self.kwargs = kwargs

    async def __call__(self, writer: "AbstractStreamWriter") -> None:
        # The writer is always passed as the first positional argument.
        await self.coro(writer, *self.args, **self.kwargs)  # type: ignore[operator]


class streamer:
    """Deprecated decorator turning a coroutine into a streamable data source."""

    def __init__(self, coro: Callable[..., Awaitable[None]]) -> None:
        warnings.warn(
            "@streamer is deprecated, use async generators instead",
            DeprecationWarning,
            stacklevel=2,
        )
        self.coro = coro

    def __call__(self, *args: Any, **kwargs: Any) -> _stream_wrapper:
        # Defer execution: capture the arguments now, stream when awaited.
        return _stream_wrapper(self.coro, args, kwargs)
|
||||
|
||||
|
||||
@payload_type(_stream_wrapper)
class StreamWrapperPayload(Payload):
    """Payload whose value is a ``_stream_wrapper``; delegates writing to it."""

    async def write(self, writer: "AbstractStreamWriter") -> None:
        await self._value(writer)


@payload_type(streamer)
class StreamPayload(StreamWrapperPayload):
    """Payload for a bare ``streamer``; calling it yields the stream wrapper."""

    def __init__(self, value: Any, *args: Any, **kwargs: Any) -> None:
        # A `streamer` instance must be called (no args) to obtain the wrapper.
        super().__init__(value(), *args, **kwargs)

    # NOTE: a former `write` override duplicated the inherited implementation
    # verbatim and has been removed; behavior is unchanged.
|
|
@ -0,0 +1 @@
|
|||
Marker
|
|
@ -0,0 +1,391 @@
|
|||
import asyncio
|
||||
import contextlib
|
||||
import warnings
|
||||
from collections.abc import Callable
|
||||
from typing import Any, Awaitable, Callable, Dict, Generator, Optional, Union
|
||||
|
||||
import pytest
|
||||
|
||||
from aiohttp.helpers import PY_37, isasyncgenfunction
|
||||
from aiohttp.web import Application
|
||||
|
||||
from .test_utils import (
|
||||
BaseTestServer,
|
||||
RawTestServer,
|
||||
TestClient,
|
||||
TestServer,
|
||||
loop_context,
|
||||
setup_test_loop,
|
||||
teardown_test_loop,
|
||||
unused_port as _unused_port,
|
||||
)
|
||||
|
||||
try:
|
||||
import uvloop
|
||||
except ImportError: # pragma: no cover
|
||||
uvloop = None
|
||||
|
||||
try:
|
||||
import tokio
|
||||
except ImportError: # pragma: no cover
|
||||
tokio = None
|
||||
|
||||
AiohttpClient = Callable[[Union[Application, BaseTestServer]], Awaitable[TestClient]]
|
||||
|
||||
|
||||
def pytest_addoption(parser):  # type: ignore[no-untyped-def]
    """Register aiohttp's command line options with pytest."""
    flags = (
        (
            "--aiohttp-fast",
            {
                "action": "store_true",
                "default": False,
                "help": "run tests faster by disabling extra checks",
            },
        ),
        (
            "--aiohttp-loop",
            {
                "action": "store",
                "default": "pyloop",
                "help": "run tests with specific loop: pyloop, uvloop, tokio or all",
            },
        ),
        (
            "--aiohttp-enable-loop-debug",
            {
                "action": "store_true",
                "default": False,
                "help": "enable event loop debug mode",
            },
        ),
    )
    for flag, options in flags:
        parser.addoption(flag, **options)
|
||||
|
||||
|
||||
def pytest_fixture_setup(fixturedef):  # type: ignore[no-untyped-def]
    """Set up pytest fixture.

    Allow fixtures to be coroutines. Run coroutine fixtures in an event loop.
    """
    func = fixturedef.func

    if isasyncgenfunction(func):
        is_async_gen = True  # async generator fixture
    elif asyncio.iscoroutinefunction(func):
        is_async_gen = False  # plain coroutine fixture
    else:
        return  # synchronous fixture: nothing to adapt

    # The wrapper needs the `request` fixture; inject it if absent and
    # remember to hide it from the wrapped function again.
    strip_request = "request" not in fixturedef.argnames
    if strip_request:
        fixturedef.argnames += ("request",)

    def wrapper(*args, **kwargs):  # type: ignore[no-untyped-def]
        request = kwargs["request"]
        if strip_request:
            del kwargs["request"]

        # if neither the fixture nor the test use the 'loop' fixture,
        # 'getfixturevalue' will fail because the test is not parameterized
        # (this can be removed someday if 'loop' is no longer parameterized)
        if "loop" not in request.fixturenames:
            raise Exception(
                "Asynchronous fixtures must depend on the 'loop' fixture or "
                "be used in tests depending from it."
            )

        _loop = request.getfixturevalue("loop")

        if not is_async_gen:
            return _loop.run_until_complete(func(*args, **kwargs))

        # Async generator: the first __anext__ yields the fixture value; a
        # finalizer advances it once more to run the teardown code.
        gen = func(*args, **kwargs)

        def finalizer():  # type: ignore[no-untyped-def]
            try:
                return _loop.run_until_complete(gen.__anext__())
            except StopAsyncIteration:
                pass

        request.addfinalizer(finalizer)
        return _loop.run_until_complete(gen.__anext__())

    fixturedef.func = wrapper
|
||||
|
||||
|
||||
@pytest.fixture
def fast(request):  # type: ignore[no-untyped-def]
    """Expose the --aiohttp-fast config option as a fixture."""
    return request.config.getoption("--aiohttp-fast")
|
||||
|
||||
|
||||
@pytest.fixture
def loop_debug(request):  # type: ignore[no-untyped-def]
    """Expose the --aiohttp-enable-loop-debug config option as a fixture."""
    return request.config.getoption("--aiohttp-enable-loop-debug")
|
||||
|
||||
|
||||
@contextlib.contextmanager
def _runtime_warning_context():  # type: ignore[no-untyped-def]
    """Context manager which checks for RuntimeWarnings.

    This exists specifically to
    avoid "coroutine 'X' was never awaited" warnings being missed.

    If RuntimeWarnings occur in the context a RuntimeError is raised.
    """
    with warnings.catch_warnings(record=True) as caught:
        yield
        runtime_warnings = [
            "{w.filename}:{w.lineno}:{w.message}".format(w=w)
            for w in caught
            if w.category == RuntimeWarning
        ]
        if runtime_warnings:
            count = len(runtime_warnings)
            raise RuntimeError(
                "{} Runtime Warning{},\n{}".format(
                    count, "" if count == 1 else "s", "\n".join(runtime_warnings)
                )
            )
|
||||
|
||||
|
||||
@contextlib.contextmanager
def _passthrough_loop_context(loop, fast=False):  # type: ignore[no-untyped-def]
    """Passthrough loop context.

    Sets up and tears down a loop unless one is passed in via the loop
    argument when it's passed straight through.
    """
    if loop:
        # Caller-owned loop: hand it through and do not tear it down.
        yield loop
    else:
        fresh_loop = setup_test_loop()
        yield fresh_loop
        # Note: teardown intentionally runs only on a clean exit,
        # mirroring loop_context's behavior.
        teardown_test_loop(fresh_loop, fast=fast)
|
||||
|
||||
|
||||
def pytest_pycollect_makeitem(collector, name, obj):  # type: ignore[no-untyped-def]
    """Fix pytest collecting for coroutines."""
    if not (collector.funcnamefilter(name) and asyncio.iscoroutinefunction(obj)):
        return None  # not a collectible coroutine test: let pytest decide
    return list(collector._genfunctions(name, obj))
|
||||
|
||||
|
||||
def pytest_pyfunc_call(pyfuncitem):  # type: ignore[no-untyped-def]
    """Run coroutines in an event loop instead of a normal function call."""
    if not asyncio.iscoroutinefunction(pyfuncitem.function):
        return None  # plain function: let pytest call it normally

    fast = pyfuncitem.config.getoption("--aiohttp-fast")
    # Prefer an already-created loop fixture (proactor first) when present.
    existing_loop = pyfuncitem.funcargs.get("proactor_loop") or pyfuncitem.funcargs.get(
        "loop", None
    )
    with _runtime_warning_context(), _passthrough_loop_context(
        existing_loop, fast=fast
    ) as test_loop:
        call_args = {
            name: pyfuncitem.funcargs[name]
            for name in pyfuncitem._fixtureinfo.argnames
        }
        test_loop.run_until_complete(pyfuncitem.obj(**call_args))
    return True
|
||||
|
||||
|
||||
def pytest_generate_tests(metafunc):  # type: ignore[no-untyped-def]
    """Parametrize the ``loop_factory`` fixture from the --aiohttp-loop option."""
    if "loop_factory" not in metafunc.fixturenames:
        return

    loops = metafunc.config.option.aiohttp_loop
    avail_factories = {"pyloop": asyncio.DefaultEventLoopPolicy}

    if uvloop is not None:  # pragma: no cover
        avail_factories["uvloop"] = uvloop.EventLoopPolicy

    if tokio is not None:  # pragma: no cover
        avail_factories["tokio"] = tokio.EventLoopPolicy

    if loops == "all":
        # A trailing '?' marks a loop as optional: silently skipped if missing.
        loops = "pyloop,uvloop?,tokio?"

    factories = {}  # type: ignore[var-annotated]
    for name in loops.split(","):
        required = not name.endswith("?")
        name = name.strip(" ?")
        if name not in avail_factories:  # pragma: no cover
            if required:
                # BUG FIX: report the loops that are actually available,
                # not the partially built selection in `factories`.
                raise ValueError(
                    "Unknown loop '%s', available loops: %s"
                    % (name, list(avail_factories.keys()))
                )
            else:
                continue
        factories[name] = avail_factories[name]
    metafunc.parametrize(
        "loop_factory", list(factories.values()), ids=list(factories.keys())
    )
|
||||
|
||||
|
||||
@pytest.fixture
def loop(loop_factory, fast, loop_debug):  # type: ignore[no-untyped-def]
    """Return an instance of the event loop."""
    # Install the selected policy before creating the loop.
    asyncio.set_event_loop_policy(loop_factory())
    with loop_context(fast=fast) as _loop:
        if loop_debug:
            _loop.set_debug(True)  # pragma: no cover
        asyncio.set_event_loop(_loop)
        yield _loop
|
||||
|
||||
|
||||
@pytest.fixture
def proactor_loop():  # type: ignore[no-untyped-def]
    """Windows proactor loop fixture (pre-3.7 falls back to a factory patch)."""
    if PY_37:
        policy = asyncio.WindowsProactorEventLoopPolicy()  # type: ignore[attr-defined]
    else:
        # No dedicated proactor policy before 3.7: patch the default policy.
        policy = asyncio.get_event_loop_policy()
        policy._loop_factory = asyncio.ProactorEventLoop  # type: ignore[attr-defined]
    asyncio.set_event_loop_policy(policy)

    with loop_context(policy.new_event_loop) as _loop:
        asyncio.set_event_loop(_loop)
        yield _loop
|
||||
|
||||
|
||||
@pytest.fixture
def unused_port(aiohttp_unused_port):  # type: ignore[no-untyped-def] # pragma: no cover
    """Deprecated alias for the ``aiohttp_unused_port`` fixture."""
    warnings.warn(
        "Deprecated, use aiohttp_unused_port fixture instead",
        DeprecationWarning,
        stacklevel=2,
    )
    return aiohttp_unused_port
|
||||
|
||||
|
||||
@pytest.fixture
def aiohttp_unused_port():  # type: ignore[no-untyped-def]
    """Return a port that is unused on the current host."""
    # Hands back the helper itself so tests can request several ports.
    return _unused_port
|
||||
|
||||
|
||||
@pytest.fixture
def aiohttp_server(loop):  # type: ignore[no-untyped-def]
    """Factory to create a TestServer instance, given an app.

    aiohttp_server(app, **kwargs)
    """
    started = []

    async def go(app, *, port=None, **kwargs):  # type: ignore[no-untyped-def]
        server = TestServer(app, port=port)
        await server.start_server(loop=loop, **kwargs)
        started.append(server)
        return server

    yield go

    # Teardown: close every server this test created, last-started first.
    async def finalize() -> None:
        while started:
            await started.pop().close()

    loop.run_until_complete(finalize())
|
||||
|
||||
|
||||
@pytest.fixture
def test_server(aiohttp_server):  # type: ignore[no-untyped-def] # pragma: no cover
    """Deprecated alias for the ``aiohttp_server`` fixture."""
    warnings.warn(
        "Deprecated, use aiohttp_server fixture instead",
        DeprecationWarning,
        stacklevel=2,
    )
    return aiohttp_server
|
||||
|
||||
|
||||
@pytest.fixture
def aiohttp_raw_server(loop):  # type: ignore[no-untyped-def]
    """Factory to create a RawTestServer instance, given a web handler.

    aiohttp_raw_server(handler, **kwargs)
    """
    started = []

    async def go(handler, *, port=None, **kwargs):  # type: ignore[no-untyped-def]
        server = RawTestServer(handler, port=port)
        await server.start_server(loop=loop, **kwargs)
        started.append(server)
        return server

    yield go

    # Teardown: close every raw server this test created.
    async def finalize() -> None:
        while started:
            await started.pop().close()

    loop.run_until_complete(finalize())
|
||||
|
||||
|
||||
@pytest.fixture
def raw_test_server(  # type: ignore[no-untyped-def]  # pragma: no cover
    aiohttp_raw_server,
):
    """Deprecated alias for the ``aiohttp_raw_server`` fixture."""
    msg = "Deprecated, use aiohttp_raw_server fixture instead"
    warnings.warn(msg, DeprecationWarning, stacklevel=2)
    return aiohttp_raw_server
|
||||
|
||||
|
||||
@pytest.fixture
def aiohttp_client(
    loop: asyncio.AbstractEventLoop,
) -> Generator[AiohttpClient, None, None]:
    """Factory to create a TestClient instance.

    aiohttp_client(app, **kwargs)
    aiohttp_client(server, **kwargs)
    aiohttp_client(raw_server, **kwargs)
    """
    # Clients created through the factory; all closed on teardown.
    clients = []

    async def go(
        __param: Union[Application, BaseTestServer],
        *args: Any,
        server_kwargs: Optional[Dict[str, Any]] = None,
        **kwargs: Any
    ) -> TestClient:

        # A bare callable (app factory) is invoked with the loop and any
        # positional/keyword args to produce the actual Application/server.
        if isinstance(__param, Callable) and not isinstance(  # type: ignore[arg-type]
            __param, (Application, BaseTestServer)
        ):
            __param = __param(loop, *args, **kwargs)
            # kwargs were consumed by the factory, not by TestClient.
            kwargs = {}
        else:
            assert not args, "args should be empty"

        if isinstance(__param, Application):
            # Wrap the app in a TestServer first, then a TestClient.
            server_kwargs = server_kwargs or {}
            server = TestServer(__param, loop=loop, **server_kwargs)
            client = TestClient(server, loop=loop, **kwargs)
        elif isinstance(__param, BaseTestServer):
            client = TestClient(__param, loop=loop, **kwargs)
        else:
            raise ValueError("Unknown argument type: %r" % type(__param))

        await client.start_server()
        clients.append(client)
        return client

    yield go

    async def finalize() -> None:
        while clients:
            await clients.pop().close()

    loop.run_until_complete(finalize())
|
||||
|
||||
|
||||
@pytest.fixture
def test_client(aiohttp_client):  # type: ignore[no-untyped-def]  # pragma: no cover
    """Deprecated alias for the ``aiohttp_client`` fixture."""
    msg = "Deprecated, use aiohttp_client fixture instead"
    warnings.warn(msg, DeprecationWarning, stacklevel=2)
    return aiohttp_client
|
|
@ -0,0 +1,160 @@
|
|||
import asyncio
|
||||
import socket
|
||||
from typing import Any, Dict, List, Optional, Type, Union
|
||||
|
||||
from .abc import AbstractResolver
|
||||
from .helpers import get_running_loop
|
||||
|
||||
__all__ = ("ThreadedResolver", "AsyncResolver", "DefaultResolver")
|
||||
|
||||
try:
    import aiodns

    # aiodns_default = hasattr(aiodns.DNSResolver, 'gethostbyname')
except ImportError:  # pragma: no cover
    # aiodns is an optional dependency; when absent the thread-based
    # resolver is the only implementation available.
    aiodns = None

# Async resolution stays opt-in: this flag keeps ThreadedResolver as the
# default even when aiodns is importable (see DefaultResolver below).
aiodns_default = False
|
||||
|
||||
|
||||
class ThreadedResolver(AbstractResolver):
    """Threaded resolver.

    Uses an Executor for synchronous getaddrinfo() calls.
    concurrent.futures.ThreadPoolExecutor is used by default.
    """

    def __init__(self, loop: Optional[asyncio.AbstractEventLoop] = None) -> None:
        self._loop = get_running_loop(loop)

    async def resolve(
        self, hostname: str, port: int = 0, family: int = socket.AF_INET
    ) -> List[Dict[str, Any]]:
        """Resolve *hostname* and return a list of getaddrinfo-style dicts."""
        infos = await self._loop.getaddrinfo(
            hostname,
            port,
            type=socket.SOCK_STREAM,
            family=family,
            flags=socket.AI_ADDRCONFIG,
        )

        hosts = []
        # NOTE(review): the loop deliberately rebinds `family` and `port`,
        # shadowing the parameters; each entry carries the values from its
        # own getaddrinfo() record.
        for family, _, proto, _, address in infos:
            if family == socket.AF_INET6:
                if len(address) < 3:
                    # IPv6 is not supported by Python build,
                    # or IPv6 is not enabled in the host
                    continue
                if address[3]:  # type: ignore[misc]
                    # This is essential for link-local IPv6 addresses.
                    # LL IPv6 is a VERY rare case. Strictly speaking, we should use
                    # getnameinfo() unconditionally, but performance makes sense.
                    host, _port = socket.getnameinfo(
                        address, socket.NI_NUMERICHOST | socket.NI_NUMERICSERV
                    )
                    port = int(_port)
                else:
                    host, port = address[:2]
            else:  # IPv4
                assert family == socket.AF_INET
                host, port = address  # type: ignore[misc]
            hosts.append(
                {
                    "hostname": hostname,
                    "host": host,
                    "port": port,
                    "family": family,
                    "proto": proto,
                    "flags": socket.AI_NUMERICHOST | socket.AI_NUMERICSERV,
                }
            )

        return hosts

    async def close(self) -> None:
        # Nothing to release: getaddrinfo runs in the loop's default executor.
        pass
|
||||
|
||||
|
||||
class AsyncResolver(AbstractResolver):
    """Use the `aiodns` package to make asynchronous DNS lookups"""

    def __init__(
        self,
        loop: Optional[asyncio.AbstractEventLoop] = None,
        *args: Any,
        **kwargs: Any
    ) -> None:
        """Create a resolver backed by aiodns.

        Raises:
            RuntimeError: if the optional aiodns dependency is not installed.
        """
        if aiodns is None:
            raise RuntimeError("Resolver requires aiodns library")

        self._loop = get_running_loop(loop)
        # NOTE(review): the original `loop` argument (possibly None) is
        # forwarded to aiodns, matching previous behavior — confirm aiodns
        # falls back to the running loop itself.
        self._resolver = aiodns.DNSResolver(*args, loop=loop, **kwargs)

        if not hasattr(self._resolver, "gethostbyname"):
            # aiodns 1.1 is not available, fallback to DNSResolver.query
            self.resolve = self._resolve_with_query  # type: ignore

    async def resolve(
        self, host: str, port: int = 0, family: int = socket.AF_INET
    ) -> List[Dict[str, Any]]:
        """Resolve *host* and return a list of getaddrinfo-style dicts."""
        try:
            resp = await self._resolver.gethostbyname(host, family)
        except aiodns.error.DNSError as exc:
            # DNSError carries (error_code, error_message).  The guard must be
            # `>= 2` (not `>= 1` as before): with a single-element args tuple,
            # indexing args[1] raised IndexError inside the handler.
            msg = exc.args[1] if len(exc.args) >= 2 else "DNS lookup failed"
            raise OSError(msg) from exc
        hosts = []
        for address in resp.addresses:
            hosts.append(
                {
                    "hostname": host,
                    "host": address,
                    "port": port,
                    "family": family,
                    "proto": 0,
                    "flags": socket.AI_NUMERICHOST | socket.AI_NUMERICSERV,
                }
            )

        if not hosts:
            raise OSError("DNS lookup failed")

        return hosts

    async def _resolve_with_query(
        self, host: str, port: int = 0, family: int = socket.AF_INET
    ) -> List[Dict[str, Any]]:
        """Fallback resolution path for aiodns < 1.1 using DNSResolver.query()."""
        if family == socket.AF_INET6:
            qtype = "AAAA"
        else:
            qtype = "A"

        try:
            resp = await self._resolver.query(host, qtype)
        except aiodns.error.DNSError as exc:
            # Same defensive bound fix as in resolve() above.
            msg = exc.args[1] if len(exc.args) >= 2 else "DNS lookup failed"
            raise OSError(msg) from exc

        hosts = []
        for rr in resp:
            hosts.append(
                {
                    "hostname": host,
                    "host": rr.host,
                    "port": port,
                    "family": family,
                    "proto": 0,
                    "flags": socket.AI_NUMERICHOST,
                }
            )

        if not hosts:
            raise OSError("DNS lookup failed")

        return hosts

    async def close(self) -> None:
        # Cancel every outstanding aiodns query.
        self._resolver.cancel()
|
||||
|
||||
|
||||
_DefaultType = Type[Union[AsyncResolver, ThreadedResolver]]
# aiodns_default is hard-coded False above, so ThreadedResolver is selected.
DefaultResolver: _DefaultType = AsyncResolver if aiodns_default else ThreadedResolver
|
|
@ -0,0 +1,660 @@
|
|||
import asyncio
|
||||
import collections
|
||||
import warnings
|
||||
from typing import Awaitable, Callable, Deque, Generic, List, Optional, Tuple, TypeVar
|
||||
|
||||
from .base_protocol import BaseProtocol
|
||||
from .helpers import BaseTimerContext, set_exception, set_result
|
||||
from .log import internal_logger
|
||||
from .typedefs import Final
|
||||
|
||||
__all__ = (
|
||||
"EMPTY_PAYLOAD",
|
||||
"EofStream",
|
||||
"StreamReader",
|
||||
"DataQueue",
|
||||
"FlowControlDataQueue",
|
||||
)
|
||||
|
||||
_T = TypeVar("_T")
|
||||
|
||||
|
||||
class EofStream(Exception):
    """eof stream indication.

    Raised by DataQueue.read() when the queue is drained after feed_eof().
    """
|
||||
|
||||
|
||||
class AsyncStreamIterator(Generic[_T]):
    """Async-iterator adapter over a zero-argument read coroutine.

    Iteration stops when the read raises ``EofStream`` or yields ``b""``.
    """

    def __init__(self, read_func: Callable[[], Awaitable[_T]]) -> None:
        self.read_func = read_func

    def __aiter__(self) -> "AsyncStreamIterator[_T]":
        return self

    async def __anext__(self) -> _T:
        try:
            item = await self.read_func()
        except EofStream:
            raise StopAsyncIteration
        else:
            if item == b"":
                raise StopAsyncIteration
            return item
|
||||
|
||||
|
||||
class ChunkTupleAsyncStreamIterator:
    """Async iterator yielding ``(data, end_of_http_chunk)`` tuples."""

    def __init__(self, stream: "StreamReader") -> None:
        self._stream = stream

    def __aiter__(self) -> "ChunkTupleAsyncStreamIterator":
        return self

    async def __anext__(self) -> Tuple[bytes, bool]:
        chunk = await self._stream.readchunk()
        if chunk == (b"", False):
            # (b"", False) is the reader's end-of-stream sentinel.
            raise StopAsyncIteration
        return chunk
|
||||
|
||||
|
||||
class AsyncStreamReaderMixin:
    """Adds ``async for`` iteration helpers on top of a reader's coroutines."""

    def __aiter__(self) -> AsyncStreamIterator[bytes]:
        return AsyncStreamIterator(self.readline)  # type: ignore[attr-defined]

    def iter_chunked(self, n: int) -> AsyncStreamIterator[bytes]:
        """Returns an asynchronous iterator that yields chunks of size n.

        Python-3.5 available for Python 3.5+ only
        """

        async def read_chunk() -> bytes:
            return await self.read(n)  # type: ignore[attr-defined,no-any-return]

        return AsyncStreamIterator(read_chunk)

    def iter_any(self) -> AsyncStreamIterator[bytes]:
        """Yield all available data as soon as it is received.

        Python-3.5 available for Python 3.5+ only
        """
        return AsyncStreamIterator(self.readany)  # type: ignore[attr-defined]

    def iter_chunks(self) -> ChunkTupleAsyncStreamIterator:
        """Yield chunks of data as they are received by the server.

        The yielded objects are tuples
        of (bytes, bool) as returned by the StreamReader.readchunk method.

        Python-3.5 available for Python 3.5+ only
        """
        return ChunkTupleAsyncStreamIterator(self)  # type: ignore[arg-type]
|
||||
|
||||
|
||||
class StreamReader(AsyncStreamReaderMixin):
    """An enhancement of asyncio.StreamReader.

    Supports asynchronous iteration by line, chunk or as available::

        async for line in reader:
            ...
        async for chunk in reader.iter_chunked(1024):
            ...
        async for slice in reader.iter_any():
            ...

    """

    # Running count of all bytes ever fed into this reader.
    total_bytes = 0

    def __init__(
        self,
        protocol: BaseProtocol,
        limit: int,
        *,
        timer: Optional[BaseTimerContext] = None,
        loop: Optional[asyncio.AbstractEventLoop] = None,
    ) -> None:
        self._protocol = protocol
        # Flow-control watermarks: resume reading below low, pause above high.
        self._low_water = limit
        self._high_water = limit * 2
        if loop is None:
            loop = asyncio.get_event_loop()
        self._loop = loop
        self._size = 0
        self._cursor = 0
        self._http_chunk_splits: Optional[List[int]] = None
        self._buffer: Deque[bytes] = collections.deque()
        self._buffer_offset = 0
        self._eof = False
        self._waiter: Optional[asyncio.Future[None]] = None
        self._eof_waiter: Optional[asyncio.Future[None]] = None
        self._exception: Optional[BaseException] = None
        self._timer = timer
        self._eof_callbacks: List[Callable[[], None]] = []

    def __repr__(self) -> str:
        info = [self.__class__.__name__]
        if self._size:
            info.append("%d bytes" % self._size)
        if self._eof:
            info.append("eof")
        if self._low_water != 2**16:  # default limit
            info.append("low=%d high=%d" % (self._low_water, self._high_water))
        if self._waiter:
            info.append("w=%r" % self._waiter)
        if self._exception:
            info.append("e=%r" % self._exception)
        return "<%s>" % " ".join(info)

    def get_read_buffer_limits(self) -> Tuple[int, int]:
        """Return the (low, high) flow-control watermarks."""
        return (self._low_water, self._high_water)

    def exception(self) -> Optional[BaseException]:
        return self._exception

    def set_exception(self, exc: BaseException) -> None:
        """Store *exc* and wake any parked reader/EOF waiter with it."""
        self._exception = exc
        self._eof_callbacks.clear()

        waiter = self._waiter
        if waiter is not None:
            self._waiter = None
            set_exception(waiter, exc)

        waiter = self._eof_waiter
        if waiter is not None:
            self._eof_waiter = None
            set_exception(waiter, exc)

    def on_eof(self, callback: Callable[[], None]) -> None:
        """Register *callback* to run at EOF (immediately if already at EOF)."""
        if self._eof:
            try:
                callback()
            except Exception:
                internal_logger.exception("Exception in eof callback")
        else:
            self._eof_callbacks.append(callback)

    def feed_eof(self) -> None:
        """Mark EOF, wake all waiters and fire registered EOF callbacks."""
        self._eof = True

        waiter = self._waiter
        if waiter is not None:
            self._waiter = None
            set_result(waiter, None)

        waiter = self._eof_waiter
        if waiter is not None:
            self._eof_waiter = None
            set_result(waiter, None)

        for cb in self._eof_callbacks:
            try:
                cb()
            except Exception:
                internal_logger.exception("Exception in eof callback")

        self._eof_callbacks.clear()

    def is_eof(self) -> bool:
        """Return True if 'feed_eof' was called."""
        return self._eof

    def at_eof(self) -> bool:
        """Return True if the buffer is empty and 'feed_eof' was called."""
        return self._eof and not self._buffer

    async def wait_eof(self) -> None:
        """Block until feed_eof() is called (returns immediately at EOF)."""
        if self._eof:
            return

        assert self._eof_waiter is None
        self._eof_waiter = self._loop.create_future()
        try:
            await self._eof_waiter
        finally:
            self._eof_waiter = None

    def unread_data(self, data: bytes) -> None:
        """rollback reading some data from stream, inserting it to buffer head."""
        warnings.warn(
            "unread_data() is deprecated "
            "and will be removed in future releases (#3260)",
            DeprecationWarning,
            stacklevel=2,
        )
        if not data:
            return

        if self._buffer_offset:
            # Normalize the head chunk so the prepend lands at offset 0.
            self._buffer[0] = self._buffer[0][self._buffer_offset :]
            self._buffer_offset = 0
        self._size += len(data)
        self._cursor -= len(data)
        self._buffer.appendleft(data)
        self._eof_counter = 0

    # TODO: size is ignored, remove the param later
    def feed_data(self, data: bytes, size: int = 0) -> None:
        """Append *data* to the buffer, waking the reader; may pause the protocol."""
        assert not self._eof, "feed_data after feed_eof"

        if not data:
            return

        self._size += len(data)
        self._buffer.append(data)
        self.total_bytes += len(data)

        waiter = self._waiter
        if waiter is not None:
            self._waiter = None
            set_result(waiter, None)

        if self._size > self._high_water and not self._protocol._reading_paused:
            self._protocol.pause_reading()

    def begin_http_chunk_receiving(self) -> None:
        if self._http_chunk_splits is None:
            if self.total_bytes:
                # BUGFIX: the two string fragments were previously joined
                # without a separating space ("...whensome data...").
                raise RuntimeError(
                    "Called begin_http_chunk_receiving when "
                    "some data was already fed"
                )
            self._http_chunk_splits = []

    def end_http_chunk_receiving(self) -> None:
        if self._http_chunk_splits is None:
            raise RuntimeError(
                "Called end_chunk_receiving without calling "
                "begin_chunk_receiving first"
            )

        # self._http_chunk_splits contains logical byte offsets from start of
        # the body transfer. Each offset is the offset of the end of a chunk.
        # "Logical" means bytes, accessible for a user.
        # If no chunks containing logical data were received, current position
        # is definitely zero.
        pos = self._http_chunk_splits[-1] if self._http_chunk_splits else 0

        if self.total_bytes == pos:
            # We should not add empty chunks here. So we check for that.
            # Note, when chunked + gzip is used, we can receive a chunk
            # of compressed data, but that data may not be enough for gzip FSM
            # to yield any uncompressed data. That's why current position may
            # not change after receiving a chunk.
            return

        self._http_chunk_splits.append(self.total_bytes)

        # wake up readchunk when end of http chunk received
        waiter = self._waiter
        if waiter is not None:
            self._waiter = None
            set_result(waiter, None)

    async def _wait(self, func_name: str) -> None:
        # StreamReader uses a future to link the protocol feed_data() method
        # to a read coroutine. Running two read coroutines at the same time
        # would have an unexpected behaviour. It would not possible to know
        # which coroutine would get the next data.
        if self._waiter is not None:
            raise RuntimeError(
                "%s() called while another coroutine is "
                "already waiting for incoming data" % func_name
            )

        waiter = self._waiter = self._loop.create_future()
        try:
            if self._timer:
                with self._timer:
                    await waiter
            else:
                await waiter
        finally:
            self._waiter = None

    async def readline(self) -> bytes:
        return await self.readuntil()

    async def readuntil(self, separator: bytes = b"\n") -> bytes:
        # NOTE(review): for multi-byte separators the `find(...) + 1` offset
        # reads only through the first byte of the separator — confirm this
        # matches upstream behavior at this version before changing.
        seplen = len(separator)
        if seplen == 0:
            raise ValueError("Separator should be at least one-byte string")

        if self._exception is not None:
            raise self._exception

        chunk = b""
        chunk_size = 0
        not_enough = True

        while not_enough:
            while self._buffer and not_enough:
                offset = self._buffer_offset
                ichar = self._buffer[0].find(separator, offset) + 1
                # Read from current offset to found separator or to the end.
                data = self._read_nowait_chunk(ichar - offset if ichar else -1)
                chunk += data
                chunk_size += len(data)
                if ichar:
                    not_enough = False

                if chunk_size > self._high_water:
                    raise ValueError("Chunk too big")

            if self._eof:
                break

            if not_enough:
                await self._wait("readuntil")

        return chunk

    async def read(self, n: int = -1) -> bytes:
        if self._exception is not None:
            raise self._exception

        # migration problem; with DataQueue you have to catch
        # EofStream exception, so common way is to run payload.read() inside
        # infinite loop. what can cause real infinite loop with StreamReader
        # lets keep this code one major release.
        if __debug__:
            if self._eof and not self._buffer:
                self._eof_counter = getattr(self, "_eof_counter", 0) + 1
                if self._eof_counter > 5:
                    internal_logger.warning(
                        "Multiple access to StreamReader in eof state, "
                        "might be infinite loop.",
                        stack_info=True,
                    )

        if not n:
            return b""

        if n < 0:
            # This used to just loop creating a new waiter hoping to
            # collect everything in self._buffer, but that would
            # deadlock if the subprocess sends more than self.limit
            # bytes. So just call self.readany() until EOF.
            blocks = []
            while True:
                block = await self.readany()
                if not block:
                    break
                blocks.append(block)
            return b"".join(blocks)

        # TODO: should be `if` instead of `while`
        # because waiter maybe triggered on chunk end,
        # without feeding any data
        while not self._buffer and not self._eof:
            await self._wait("read")

        return self._read_nowait(n)

    async def readany(self) -> bytes:
        if self._exception is not None:
            raise self._exception

        # TODO: should be `if` instead of `while`
        # because waiter maybe triggered on chunk end,
        # without feeding any data
        while not self._buffer and not self._eof:
            await self._wait("readany")

        return self._read_nowait(-1)

    async def readchunk(self) -> Tuple[bytes, bool]:
        """Returns a tuple of (data, end_of_http_chunk).

        When chunked transfer
        encoding is used, end_of_http_chunk is a boolean indicating if the end
        of the data corresponds to the end of a HTTP chunk , otherwise it is
        always False.
        """
        while True:
            if self._exception is not None:
                raise self._exception

            while self._http_chunk_splits:
                pos = self._http_chunk_splits.pop(0)
                if pos == self._cursor:
                    return (b"", True)
                if pos > self._cursor:
                    return (self._read_nowait(pos - self._cursor), True)
                internal_logger.warning(
                    "Skipping HTTP chunk end due to data "
                    "consumption beyond chunk boundary"
                )

            if self._buffer:
                return (self._read_nowait_chunk(-1), False)
                # return (self._read_nowait(-1), False)

            if self._eof:
                # Special case for signifying EOF.
                # (b'', True) is not a final return value actually.
                return (b"", False)

            await self._wait("readchunk")

    async def readexactly(self, n: int) -> bytes:
        if self._exception is not None:
            raise self._exception

        blocks: List[bytes] = []
        while n > 0:
            block = await self.read(n)
            if not block:
                partial = b"".join(blocks)
                raise asyncio.IncompleteReadError(partial, len(partial) + n)
            blocks.append(block)
            n -= len(block)

        return b"".join(blocks)

    def read_nowait(self, n: int = -1) -> bytes:
        # default was changed to be consistent with .read(-1)
        #
        # I believe the most users don't know about the method and
        # they are not affected.
        if self._exception is not None:
            raise self._exception

        if self._waiter and not self._waiter.done():
            raise RuntimeError(
                "Called while some coroutine is waiting for incoming data."
            )

        return self._read_nowait(n)

    def _read_nowait_chunk(self, n: int) -> bytes:
        """Consume up to *n* bytes (whole head chunk if n == -1)."""
        first_buffer = self._buffer[0]
        offset = self._buffer_offset
        if n != -1 and len(first_buffer) - offset > n:
            data = first_buffer[offset : offset + n]
            self._buffer_offset += n

        elif offset:
            self._buffer.popleft()
            data = first_buffer[offset:]
            self._buffer_offset = 0

        else:
            data = self._buffer.popleft()

        self._size -= len(data)
        self._cursor += len(data)

        chunk_splits = self._http_chunk_splits
        # Prevent memory leak: drop useless chunk splits
        while chunk_splits and chunk_splits[0] < self._cursor:
            chunk_splits.pop(0)

        if self._size < self._low_water and self._protocol._reading_paused:
            self._protocol.resume_reading()
        return data

    def _read_nowait(self, n: int) -> bytes:
        """Read not more than n bytes, or whole buffer if n == -1"""
        chunks = []

        while self._buffer:
            chunk = self._read_nowait_chunk(n)
            chunks.append(chunk)
            if n != -1:
                n -= len(chunk)
                if n == 0:
                    break

        return b"".join(chunks) if chunks else b""
|
||||
|
||||
|
||||
class EmptyStreamReader(StreamReader):  # lgtm [py/missing-call-to-init]
    """Always-at-EOF StreamReader stand-in that never yields data."""

    def __init__(self) -> None:
        # Deliberately skips StreamReader.__init__: no protocol, loop or
        # buffer state is needed because every read returns empty data.
        pass

    def exception(self) -> Optional[BaseException]:
        return None

    def set_exception(self, exc: BaseException) -> None:
        pass

    def on_eof(self, callback: Callable[[], None]) -> None:
        # EOF has conceptually already happened, so run the callback now.
        try:
            callback()
        except Exception:
            internal_logger.exception("Exception in eof callback")

    def feed_eof(self) -> None:
        pass

    def is_eof(self) -> bool:
        return True

    def at_eof(self) -> bool:
        return True

    async def wait_eof(self) -> None:
        return

    def feed_data(self, data: bytes, n: int = 0) -> None:
        pass

    async def readline(self) -> bytes:
        return b""

    async def read(self, n: int = -1) -> bytes:
        return b""

    # TODO add async def readuntil

    async def readany(self) -> bytes:
        return b""

    async def readchunk(self) -> Tuple[bytes, bool]:
        return (b"", True)

    async def readexactly(self, n: int) -> bytes:
        # Matches StreamReader's contract: nothing can ever be read.
        raise asyncio.IncompleteReadError(b"", n)

    def read_nowait(self, n: int = -1) -> bytes:
        return b""
|
||||
|
||||
|
||||
# Shared singleton used wherever a message has no body payload.
EMPTY_PAYLOAD: Final[StreamReader] = EmptyStreamReader()
|
||||
|
||||
|
||||
class DataQueue(Generic[_T]):
    """DataQueue is a general-purpose blocking queue with one reader."""

    def __init__(self, loop: asyncio.AbstractEventLoop) -> None:
        self._loop = loop
        self._eof = False
        # Future the single reader parks on while the buffer is empty.
        self._waiter: Optional[asyncio.Future[None]] = None
        self._exception: Optional[BaseException] = None
        self._size = 0
        self._buffer: Deque[Tuple[_T, int]] = collections.deque()

    def __len__(self) -> int:
        return len(self._buffer)

    def is_eof(self) -> bool:
        return self._eof

    def at_eof(self) -> bool:
        return self._eof and not self._buffer

    def exception(self) -> Optional[BaseException]:
        return self._exception

    def set_exception(self, exc: BaseException) -> None:
        # Mark EOF so the reader stops waiting for more data, then
        # propagate the error to a currently-parked reader, if any.
        self._eof = True
        self._exception = exc

        waiter = self._waiter
        if waiter is not None:
            self._waiter = None
            set_exception(waiter, exc)

    def feed_data(self, data: _T, size: int = 0) -> None:
        self._size += size
        self._buffer.append((data, size))

        # Wake the reader if one is parked on the waiter future.
        waiter = self._waiter
        if waiter is not None:
            self._waiter = None
            set_result(waiter, None)

    def feed_eof(self) -> None:
        self._eof = True

        waiter = self._waiter
        if waiter is not None:
            self._waiter = None
            set_result(waiter, None)

    async def read(self) -> _T:
        if not self._buffer and not self._eof:
            # Single-reader invariant: no other coroutine may be waiting.
            assert not self._waiter
            self._waiter = self._loop.create_future()
            try:
                await self._waiter
            except (asyncio.CancelledError, asyncio.TimeoutError):
                self._waiter = None
                raise

        if self._buffer:
            data, size = self._buffer.popleft()
            self._size -= size
            return data
        else:
            # Buffer drained: surface a stored error, otherwise signal EOF.
            if self._exception is not None:
                raise self._exception
            else:
                raise EofStream

    def __aiter__(self) -> AsyncStreamIterator[_T]:
        return AsyncStreamIterator(self.read)
|
||||
|
||||
|
||||
class FlowControlDataQueue(DataQueue[_T]):
    """FlowControlDataQueue resumes and pauses an underlying stream.

    It is a destination for parsed data.
    """

    def __init__(
        self, protocol: BaseProtocol, limit: int, *, loop: asyncio.AbstractEventLoop
    ) -> None:
        super().__init__(loop=loop)

        self._protocol = protocol
        # High-water mark: pause the protocol once buffered size exceeds it.
        self._limit = limit * 2

    def feed_data(self, data: _T, size: int = 0) -> None:
        super().feed_data(data, size)

        if self._size > self._limit and not self._protocol._reading_paused:
            self._protocol.pause_reading()

    async def read(self) -> _T:
        try:
            return await super().read()
        finally:
            # Resume the protocol once the backlog drains below the limit.
            if self._size < self._limit and self._protocol._reading_paused:
                self._protocol.resume_reading()
|
|
@ -0,0 +1,37 @@
|
|||
"""Helper methods to tune a TCP connection"""
|
||||
|
||||
import asyncio
|
||||
import socket
|
||||
from contextlib import suppress
|
||||
from typing import Optional # noqa
|
||||
|
||||
__all__ = ("tcp_keepalive", "tcp_nodelay")
|
||||
|
||||
|
||||
if hasattr(socket, "SO_KEEPALIVE"):

    def tcp_keepalive(transport: asyncio.Transport) -> None:
        """Enable SO_KEEPALIVE on the transport's underlying socket, if any."""
        sock = transport.get_extra_info("socket")
        if sock is not None:
            sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)

else:

    def tcp_keepalive(transport: asyncio.Transport) -> None:  # pragma: no cover
        # Platform lacks SO_KEEPALIVE: keep the same API but do nothing.
        pass
|
||||
|
||||
|
||||
def tcp_nodelay(transport: asyncio.Transport, value: bool) -> None:
    """Set or clear TCP_NODELAY on the transport's underlying socket.

    Silently does nothing when the transport exposes no socket or the
    socket is not an IPv4/IPv6 one.
    """
    sock = transport.get_extra_info("socket")
    if sock is None:
        return

    if sock.family in (socket.AF_INET, socket.AF_INET6):
        # socket may be closed already, on windows OSError get raised
        with suppress(OSError):
            sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, bool(value))
|
Some files were not shown because too many files have changed in this diff Show More
Ładowanie…
Reference in New Issue