httpie-cli (mirror of https://github.com/httpie/cli.git)
Fix --raw with --chunked (#1254)
* Fix --raw with --chunked
* Better naming / annotations
* More annotations
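In user-facing terms, the change lets a raw request body be sent with `Transfer-Encoding: chunked`. The sketch below exercises the new library-level branch the diff adds for a raw, non-file-like body; it assumes these helpers live in `httpie.uploads` (the file path is not shown on this page) and that the installed httpie already contains this commit.

# Sketch only: drive the new prepare_request_body() branch for a raw,
# non-file-like body with chunked=True (module path is an assumption).
from httpie.uploads import ChunkedUploadStream, prepare_request_body

seen = []
body = prepare_request_body(
    raw_body='{"hello": "world"}',   # what --raw supplies
    body_read_callback=seen.append,  # upload-progress hook
    chunked=True,                    # what --chunked requests
)

assert isinstance(body, ChunkedUploadStream)
print(list(body))  # [b'{"hello": "world"}']  (the whole body as one chunk)
print(seen)        # the callback observed that same chunk

Iterating the stream is what `requests` ends up doing when it sends the chunked request.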
@@ -1,5 +1,6 @@
 import zlib
-from typing import Callable, IO, Iterable, Tuple, Union, TYPE_CHECKING
+import functools
+from typing import Any, Callable, IO, Iterable, Optional, Tuple, Union, TYPE_CHECKING
 from urllib.parse import urlencode
 
 import requests
@@ -11,7 +12,12 @@ if TYPE_CHECKING:
 from .cli.dicts import MultipartRequestDataDict, RequestDataDict
 
 
-class ChunkedUploadStream:
+class ChunkedStream:
+    def __iter__(self) -> Iterable[Union[str, bytes]]:
+        raise NotImplementedError
+
+
+class ChunkedUploadStream(ChunkedStream):
     def __init__(self, stream: Iterable, callback: Callable):
         self.callback = callback
         self.stream = stream
@@ -22,7 +28,7 @@ class ChunkedUploadStream:
             yield chunk
 
 
-class ChunkedMultipartUploadStream:
+class ChunkedMultipartUploadStream(ChunkedStream):
     chunk_size = 100 * 1024
 
     def __init__(self, encoder: 'MultipartEncoder'):
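Both stream classes above only implement `__iter__`, which leans on a behaviour of `requests`: a body that is an iterator with no predetermined length is sent with `Transfer-Encoding: chunked`. A standalone illustration of that contract; the generator and the pie.dev endpoint are placeholders, not part of this diff.

import requests

def chunks():
    # An iterator of bytes makes requests use chunked transfer encoding.
    yield b'first chunk\n'
    yield b'second chunk\n'

# Placeholder endpoint (an httpbin-style echo service).
response = requests.post('https://pie.dev/post', data=chunks())
print(response.status_code)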
@@ -36,69 +42,101 @@ class ChunkedMultipartUploadStream:
             yield chunk
 
 
+def as_bytes(data: Union[str, bytes]) -> bytes:
+    if isinstance(data, str):
+        return data.encode()
+    else:
+        return data
+
+
+CallbackT = Callable[[bytes], bytes]
+
+
+def _wrap_function_with_callback(
+    func: Callable[..., Any],
+    callback: CallbackT
+) -> Callable[..., Any]:
+    @functools.wraps(func)
+    def wrapped(*args, **kwargs):
+        chunk = func(*args, **kwargs)
+        callback(chunk)
+        return chunk
+    return wrapped
+
+
+def _prepare_file_for_upload(
+    file: Union[IO, 'MultipartEncoder'],
+    callback: CallbackT,
+    chunked: bool = False,
+    content_length_header_value: Optional[int] = None,
+) -> Union[bytes, IO, ChunkedStream]:
+    if not super_len(file):
+        # Zero-length -> assume stdin.
+        if content_length_header_value is None and not chunked:
+            # Read the whole stdin to determine `Content-Length`.
+            #
+            # TODO: Instead of opt-in --chunked, consider making
+            # `Transfer-Encoding: chunked` for STDIN opt-out via
+            # something like --no-chunked.
+            # This would be backwards-incompatible so wait until v3.0.0.
+            #
+            file = as_bytes(file.read())
+        else:
+            file.read = _wrap_function_with_callback(
+                file.read,
+                callback
+            )
+
+    if chunked:
+        from requests_toolbelt import MultipartEncoder
+        if isinstance(file, MultipartEncoder):
+            return ChunkedMultipartUploadStream(
+                encoder=file,
+            )
+        else:
+            return ChunkedUploadStream(
+                stream=file,
+                callback=callback,
+            )
+    else:
+        return file
+
+
 def prepare_request_body(
-    body: Union[str, bytes, IO, 'MultipartEncoder', RequestDataDict],
-    body_read_callback: Callable[[bytes], bytes],
-    content_length_header_value: int = None,
-    chunked=False,
-    offline=False,
-) -> Union[str, bytes, IO, 'MultipartEncoder', ChunkedUploadStream]:
-
-    is_file_like = hasattr(body, 'read')
-
-    if isinstance(body, RequestDataDict):
-        body = urlencode(body, doseq=True)
+    raw_body: Union[str, bytes, IO, 'MultipartEncoder', RequestDataDict],
+    body_read_callback: CallbackT,
+    offline: bool = False,
+    chunked: bool = False,
+    content_length_header_value: Optional[int] = None,
+) -> Union[bytes, IO, 'MultipartEncoder', ChunkedStream]:
+    is_file_like = hasattr(raw_body, 'read')
+    if isinstance(raw_body, (bytes, str)):
+        body = as_bytes(raw_body)
+    elif isinstance(raw_body, RequestDataDict):
+        body = as_bytes(urlencode(raw_body, doseq=True))
+    else:
+        body = raw_body
 
     if offline:
         if is_file_like:
-            return body.read()
+            return as_bytes(raw_body.read())
         return body
 
-    if not is_file_like:
-        if chunked:
-            body = ChunkedUploadStream(
-                # Pass the entire body as one chunk.
-                stream=(chunk.encode() for chunk in [body]),
-                callback=body_read_callback,
-            )
-    else:
-        # File-like object.
-
-        if not super_len(body):
-            # Zero-length -> assume stdin.
-            if content_length_header_value is None and not chunked:
-                #
-                # Read the whole stdin to determine `Content-Length`.
-                #
-                # TODO: Instead of opt-in --chunked, consider making
-                # `Transfer-Encoding: chunked` for STDIN opt-out via
-                # something like --no-chunked.
-                # This would be backwards-incompatible so wait until v3.0.0.
-                #
-                body = body.read()
-            else:
-                orig_read = body.read
-
-                def new_read(*args):
-                    chunk = orig_read(*args)
-                    body_read_callback(chunk)
-                    return chunk
-
-                body.read = new_read
-
-        if chunked:
-            from requests_toolbelt import MultipartEncoder
-            if isinstance(body, MultipartEncoder):
-                body = ChunkedMultipartUploadStream(
-                    encoder=body,
-                )
-            else:
-                body = ChunkedUploadStream(
-                    stream=body,
-                    callback=body_read_callback,
-                )
-
-    return body
+    if is_file_like:
+        return _prepare_file_for_upload(
+            body,
+            chunked=chunked,
+            callback=body_read_callback,
+            content_length_header_value=content_length_header_value
+        )
+    elif chunked:
+        return ChunkedUploadStream(
+            stream=iter([body]),
+            callback=body_read_callback
+        )
+    else:
+        return body
 
 
 def get_multipart_data_and_content_type(
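Note on the refactor above: the inline `new_read` closure from the old code is replaced by the reusable `_wrap_function_with_callback` helper, which `_prepare_file_for_upload` assigns back onto `file.read` when the body comes from stdin with an unknown length. A self-contained sketch of that wrapping technique, using a hypothetical in-memory stream instead of stdin:

import functools
import io
from typing import Any, Callable


def _wrap_function_with_callback(
    func: Callable[..., Any],
    callback: Callable[[bytes], Any],
) -> Callable[..., Any]:
    # Same shape as the helper in the diff: forward the call and
    # report every chunk that passes through to the callback.
    @functools.wraps(func)
    def wrapped(*args, **kwargs):
        chunk = func(*args, **kwargs)
        callback(chunk)
        return chunk
    return wrapped


# Hypothetical stand-in for stdin: a small in-memory byte stream.
stream = io.BytesIO(b'hello chunked world')
progress = []
read = _wrap_function_with_callback(stream.read, progress.append)

while True:
    chunk = read(5)   # the diff assigns the wrapper back onto file.read
    if not chunk:
        break

print(progress)  # [b'hello', b' chun', b'ked w', b'orld', b'']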