Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 2 additions & 2 deletions src/ahttpx/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,12 +3,12 @@
from ._content import * # Content, File, Files, Form, HTML, JSON, MultiPart, Text
from ._headers import * # Headers
from ._network import * # NetworkBackend, NetworkStream, timeout
from ._parsers import * # HTTPParser, ProtocolError
from ._parsers import * # HTTPParser, HTTPStream, ProtocolError
from ._pool import * # Connection, ConnectionPool, Transport
from ._quickstart import * # get, post, put, patch, delete
from ._response import * # Response
from ._request import * # Request
from ._streams import * # ByteStream, DuplexStream, FileStream, HTTPStream, Stream
from ._streams import * # ByteStream, DuplexStream, FileStream, Stream
from ._server import * # serve_http, run
from ._urlencode import * # quote, unquote, urldecode, urlencode
from ._urls import * # QueryParams, URL
Expand Down
48 changes: 47 additions & 1 deletion src/ahttpx/_parsers.py
Original file line number Diff line number Diff line change
@@ -1,8 +1,10 @@
import enum
import io
import typing

from ._streams import Stream

__all__ = ['HTTPParser', 'Mode', 'ProtocolError']
__all__ = ['HTTPParser', 'HTTPStream', 'Mode', 'ProtocolError']


# TODO...
Expand Down Expand Up @@ -436,6 +438,50 @@ def __repr__(self) -> str:
return f'<HTTPParser [{detail}]>'


class HTTPStream(Stream):
    """Read-only stream over the body of an HTTP message.

    Body chunks are pulled lazily from the wrapped `HTTPParser`; any bytes
    read beyond the requested `size` are kept in an internal buffer so that
    partial reads behave like a regular file object. An optional `callback`
    is awaited once, when the stream is closed.
    """

    def __init__(self, parser: HTTPParser, callback: typing.Callable | None = None):
        self._parser = parser
        self._buffer = io.BytesIO()
        self._callback = callback

    async def read(self, size=-1) -> bytes:
        """Return up to `size` bytes, or the entire remaining body if size < 0."""
        # Start with whatever a previous over-read left behind, then reset
        # the buffer to empty.
        leftover = self._buffer.read()
        self._buffer.seek(0)
        self._buffer.truncate(0)
        parts = [leftover] if leftover else []
        total = len(leftover)

        # Pull chunks from the parser until we have enough, or hit
        # end-of-body (signalled by an empty chunk).
        while size < 0 or total < size:
            chunk = await self._parser.recv_body()
            parts.append(chunk)
            total += len(chunk)
            if not chunk:
                break

        data = b''.join(parts)
        # Stash anything beyond the requested size for the next call.
        if size > -1 and len(data) > size:
            self._buffer.write(data[size:])
            self._buffer.seek(0)
            data = data[:size]

        return data

    async def close(self) -> None:
        """Release the buffer, finalise the parser, and fire the callback."""
        # The callback must run even if cleanup raises.
        try:
            self._buffer.close()
            await self._parser.complete()
        finally:
            callback = self._callback
            if callback is not None:
                await callback()


class ReadAheadParser:
"""
A buffered I/O stream, with methods for read-ahead parsing.
Expand Down
7 changes: 3 additions & 4 deletions src/ahttpx/_pool.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,10 +5,10 @@
from ._content import Content
from ._headers import Headers
from ._network import Lock, NetworkBackend, Semaphore
from ._parsers import HTTPParser
from ._parsers import HTTPParser, HTTPStream
from ._response import Response
from ._request import Request
from ._streams import HTTPStream, Stream
from ._streams import Stream
from ._urls import URL


Expand Down Expand Up @@ -170,7 +170,7 @@ async def send(self, request: Request) -> Response:
await self._send_head(request)
await self._send_body(request)
code, headers = await self._recv_head()
stream = HTTPStream(self._recv_body, self._complete)
stream = HTTPStream(self._parser, callback=self._complete)
# TODO...
return Response(code, headers=headers, content=stream)
# finally:
Expand Down Expand Up @@ -237,7 +237,6 @@ async def _recv_body(self) -> bytes:

# Request/response cycle complete...
async def _complete(self) -> None:
await self._parser.complete()
self._idle_expiry = time.monotonic() + self._keepalive_duration

async def _close(self) -> None:
Expand Down
6 changes: 2 additions & 4 deletions src/ahttpx/_server.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,11 +3,10 @@
import time

from ._content import Text
from ._parsers import HTTPParser
from ._parsers import HTTPParser, HTTPStream
from ._request import Request
from ._response import Response
from ._network import NetworkBackend, sleep
from ._streams import HTTPStream

__all__ = [
"serve_http", "run"
Expand All @@ -33,7 +32,7 @@ async def handle_requests(self):
try:
while not await self._parser.recv_close():
method, url, headers = await self._recv_head()
stream = HTTPStream(self._recv_body, self._complete)
stream = HTTPStream(self._parser, callback=self._complete)
# TODO: Handle endpoint exceptions
async with Request(method, url, headers=headers, content=stream) as request:
try:
Expand Down Expand Up @@ -89,7 +88,6 @@ async def _send_body(self, response: Response):

# Start it all over again...
async def _complete(self):
await self._parser.complete()
self._idle_expiry = time.monotonic() + self._keepalive_duration


Expand Down
44 changes: 3 additions & 41 deletions src/ahttpx/_streams.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,9 @@
import os


__all__ = ['Stream', 'ByteStream', 'DuplexStream', 'FileStream', 'MultiPartStream']


class Stream:
async def read(self, size: int=-1) -> bytes:
raise NotImplementedError()
Expand Down Expand Up @@ -103,47 +106,6 @@ async def __aenter__(self):
return self


class HTTPStream(Stream):
    """Read-only stream over an HTTP message body.

    Chunks are pulled lazily via the `next_chunk` callable; any bytes read
    beyond the requested `size` are buffered for the next `read()` call.
    """

    def __init__(self, next_chunk, complete):
        # next_chunk: async callable returning the next body chunk (b'' at end-of-body).
        # complete: optional async callable invoked once on close().
        self._next_chunk = next_chunk
        self._complete = complete
        self._buffer = io.BytesIO()

    async def read(self, size=-1) -> bytes:
        """Return up to `size` bytes, or the entire remaining body if size < 0."""
        sections = []
        length = 0

        # If we have any data in the buffer read that and clear the buffer.
        buffered = self._buffer.read()
        if buffered:
            sections.append(buffered)
            length += len(buffered)
            self._buffer.seek(0)
            self._buffer.truncate(0)

        # Read each chunk in turn.
        while (size < 0) or (length < size):
            section = await self._next_chunk()
            sections.append(section)
            length += len(section)
            if section == b'':
                # Empty chunk marks end-of-body.
                break

        # If we've more data than requested, then push some back into the buffer.
        output = b''.join(sections)
        if size > -1 and len(output) > size:
            output, remainder = output[:size], output[size:]
            self._buffer.write(remainder)
            self._buffer.seek(0)

        return output

    async def close(self) -> None:
        """Release the buffer and run the completion callback, if any."""
        self._buffer.close()
        if self._complete is not None:
            await self._complete()

class MultiPartStream(Stream):
def __init__(self, form: list[tuple[str, str]], files: list[tuple[str, str]], boundary=''):
self._form = list(form)
Expand Down
4 changes: 2 additions & 2 deletions src/httpx/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,12 +3,12 @@
from ._content import * # Content, File, Files, Form, HTML, JSON, MultiPart, Text
from ._headers import * # Headers
from ._network import * # NetworkBackend, NetworkStream, timeout
from ._parsers import * # HTTPParser, ProtocolError
from ._parsers import * # HTTPParser, HTTPStream, ProtocolError
from ._pool import * # Connection, ConnectionPool, Transport
from ._quickstart import * # get, post, put, patch, delete
from ._response import * # Response
from ._request import * # Request
from ._streams import * # ByteStream, DuplexStream, FileStream, HTTPStream, Stream
from ._streams import * # ByteStream, DuplexStream, FileStream, Stream
from ._server import * # serve_http, run
from ._urlencode import * # quote, unquote, urldecode, urlencode
from ._urls import * # QueryParams, URL
Expand Down
48 changes: 47 additions & 1 deletion src/httpx/_parsers.py
Original file line number Diff line number Diff line change
@@ -1,8 +1,10 @@
import enum
import io
import typing

from ._streams import Stream

__all__ = ['HTTPParser', 'Mode', 'ProtocolError']
__all__ = ['HTTPParser', 'HTTPStream', 'Mode', 'ProtocolError']


# TODO...
Expand Down Expand Up @@ -436,6 +438,50 @@ def __repr__(self) -> str:
return f'<HTTPParser [{detail}]>'


class HTTPStream(Stream):
    """Read-only stream over the body of an HTTP message.

    Body chunks are pulled lazily from the wrapped `HTTPParser`; any bytes
    read beyond the requested `size` are kept in an internal buffer so that
    partial reads behave like a regular file object. An optional `callback`
    is invoked once, when the stream is closed.
    """

    def __init__(self, parser: HTTPParser, callback: typing.Callable | None = None):
        self._parser = parser
        self._buffer = io.BytesIO()
        self._callback = callback

    def read(self, size=-1) -> bytes:
        """Return up to `size` bytes, or the entire remaining body if size < 0."""
        # Start with whatever a previous over-read left behind, then reset
        # the buffer to empty.
        leftover = self._buffer.read()
        self._buffer.seek(0)
        self._buffer.truncate(0)
        parts = [leftover] if leftover else []
        total = len(leftover)

        # Pull chunks from the parser until we have enough, or hit
        # end-of-body (signalled by an empty chunk).
        while size < 0 or total < size:
            chunk = self._parser.recv_body()
            parts.append(chunk)
            total += len(chunk)
            if not chunk:
                break

        data = b''.join(parts)
        # Stash anything beyond the requested size for the next call.
        if size > -1 and len(data) > size:
            self._buffer.write(data[size:])
            self._buffer.seek(0)
            data = data[:size]

        return data

    def close(self) -> None:
        """Release the buffer, finalise the parser, and fire the callback."""
        # The callback must run even if cleanup raises.
        try:
            self._buffer.close()
            self._parser.complete()
        finally:
            callback = self._callback
            if callback is not None:
                callback()

class ReadAheadParser:
"""
A buffered I/O stream, with methods for read-ahead parsing.
Expand Down
7 changes: 3 additions & 4 deletions src/httpx/_pool.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,10 +5,10 @@
from ._content import Content
from ._headers import Headers
from ._network import Lock, NetworkBackend, Semaphore
from ._parsers import HTTPParser
from ._parsers import HTTPParser, HTTPStream
from ._response import Response
from ._request import Request
from ._streams import HTTPStream, Stream
from ._streams import Stream
from ._urls import URL


Expand Down Expand Up @@ -170,7 +170,7 @@ def send(self, request: Request) -> Response:
self._send_head(request)
self._send_body(request)
code, headers = self._recv_head()
stream = HTTPStream(self._recv_body, self._complete)
stream = HTTPStream(self._parser, callback=self._complete)
# TODO...
return Response(code, headers=headers, content=stream)
# finally:
Expand Down Expand Up @@ -237,7 +237,6 @@ def _recv_body(self) -> bytes:

# Request/response cycle complete...
def _complete(self) -> None:
self._parser.complete()
self._idle_expiry = time.monotonic() + self._keepalive_duration

def _close(self) -> None:
Expand Down
6 changes: 2 additions & 4 deletions src/httpx/_server.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,11 +3,10 @@
import time

from ._content import Text
from ._parsers import HTTPParser
from ._parsers import HTTPParser, HTTPStream
from ._request import Request
from ._response import Response
from ._network import NetworkBackend, sleep
from ._streams import HTTPStream

__all__ = [
"serve_http", "run"
Expand All @@ -33,7 +32,7 @@ def handle_requests(self):
try:
while not self._parser.recv_close():
method, url, headers = self._recv_head()
stream = HTTPStream(self._recv_body, self._complete)
stream = HTTPStream(self._parser, callback=self._complete)
# TODO: Handle endpoint exceptions
with Request(method, url, headers=headers, content=stream) as request:
try:
Expand Down Expand Up @@ -89,7 +88,6 @@ def _send_body(self, response: Response):

# Start it all over again...
def _complete(self):
self._parser.complete()
self._idle_expiry = time.monotonic() + self._keepalive_duration


Expand Down
44 changes: 3 additions & 41 deletions src/httpx/_streams.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,9 @@
import os


__all__ = ['Stream', 'ByteStream', 'DuplexStream', 'FileStream', 'MultiPartStream']


class Stream:
def read(self, size: int=-1) -> bytes:
raise NotImplementedError()
Expand Down Expand Up @@ -103,47 +106,6 @@ def __enter__(self):
return self


class HTTPStream(Stream):
    """Read-only stream over an HTTP message body.

    Chunks are pulled lazily via the `next_chunk` callable; any bytes read
    beyond the requested `size` are buffered for the next `read()` call.
    """

    def __init__(self, next_chunk, complete):
        # next_chunk: callable returning the next body chunk (b'' at end-of-body).
        # complete: optional callable invoked once on close().
        self._next_chunk = next_chunk
        self._complete = complete
        self._buffer = io.BytesIO()

    def read(self, size=-1) -> bytes:
        """Return up to `size` bytes, or the entire remaining body if size < 0."""
        sections = []
        length = 0

        # If we have any data in the buffer read that and clear the buffer.
        buffered = self._buffer.read()
        if buffered:
            sections.append(buffered)
            length += len(buffered)
            self._buffer.seek(0)
            self._buffer.truncate(0)

        # Read each chunk in turn.
        while (size < 0) or (length < size):
            section = self._next_chunk()
            sections.append(section)
            length += len(section)
            if section == b'':
                # Empty chunk marks end-of-body.
                break

        # If we've more data than requested, then push some back into the buffer.
        output = b''.join(sections)
        if size > -1 and len(output) > size:
            output, remainder = output[:size], output[size:]
            self._buffer.write(remainder)
            self._buffer.seek(0)

        return output

    def close(self) -> None:
        """Release the buffer and run the completion callback, if any."""
        self._buffer.close()
        if self._complete is not None:
            self._complete()


class MultiPartStream(Stream):
def __init__(self, form: list[tuple[str, str]], files: list[tuple[str, str]], boundary=''):
self._form = list(form)
Expand Down
Loading