Fix --raw with --chunked (#1254)
* Fix --raw with --chunked
* Better naming / annotations
* More annotations
CHANGELOG.md
@@ -21,6 +21,7 @@ This project adheres to [Semantic Versioning](https://semver.org/).
 - Broken plugins will no longer crash the whole application. ([#1204](https://github.com/httpie/httpie/issues/1204))
 - Fixed auto addition of XML declaration to every formatted XML response. ([#1156](https://github.com/httpie/httpie/issues/1156))
 - Fixed highlighting when `Content-Type` specifies `charset`. ([#1242](https://github.com/httpie/httpie/issues/1242))
+- Fixed an unexpected crash when `--raw` is used with `--chunked`. ([#1253](https://github.com/httpie/httpie/issues/1253))

 ## [2.6.0](https://github.com/httpie/httpie/compare/2.5.0...2.6.0) (2021-10-14)

httpie/client.py
@@ -4,7 +4,7 @@ import json
 import sys
 from contextlib import contextmanager
 from pathlib import Path
-from typing import Callable, Iterable
+from typing import Any, Dict, Callable, Iterable
 from urllib.parse import urlparse, urlunparse

 import requests
@@ -273,6 +273,24 @@ def make_send_kwargs_mergeable_from_env(args: argparse.Namespace) -> dict:
     }


+def json_dict_to_request_body(data: Dict[str, Any]) -> str:
+    # Propagate the top-level list if there is only one
+    # item in the object, with an empty key.
+    if len(data) == 1:
+        [(key, value)] = data.items()
+        if key == '' and isinstance(value, list):
+            data = value
+
+    if data:
+        data = json.dumps(data)
+    else:
+        # We need to set data to an empty string to prevent requests
+        # from assigning an empty list to `response.request.data`.
+        data = ''
+
+    return data
+
+
 def make_request_kwargs(
     args: argparse.Namespace,
     base_headers: HTTPHeadersDict = None,
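For reference, a rough sketch of what the extracted helper returns for a few inputs (illustrative values only, assuming `json_dict_to_request_body` from the hunk above):

    json_dict_to_request_body({'a': 1})         # '{"a": 1}'
    json_dict_to_request_body({'': [1, 2, 3]})  # '[1, 2, 3]'  -- a lone empty-key list is promoted to the top level
    json_dict_to_request_body({})               # ''           -- empty string, so requests never sees an empty list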
@@ -287,19 +305,7 @@ def make_request_kwargs(
     data = args.data
     auto_json = data and not args.form
     if (args.json or auto_json) and isinstance(data, dict):
-        # Propagate the top-level list if there is only one
-        # item in the object, with an empty key.
-        if len(data) == 1:
-            [(key, value)] = data.items()
-            if key == '' and isinstance(value, list):
-                data = value
-
-        if data:
-            data = json.dumps(data)
-        else:
-            # We need to set data to an empty string to prevent requests
-            # from assigning an empty list to `response.request.data`.
-            data = ''
+        data = json_dict_to_request_body(data)

     # Finalize headers.
     headers = make_default_headers(args)
@@ -324,7 +330,7 @@ def make_request_kwargs(
         'url': args.url,
         'headers': headers,
         'data': prepare_request_body(
-            body=data,
+            data,
             body_read_callback=request_body_read_callback,
             chunked=args.chunked,
             offline=args.offline,
httpie/uploads.py
@@ -1,5 +1,6 @@
 import zlib
+import functools
-from typing import Callable, IO, Iterable, Tuple, Union, TYPE_CHECKING
+from typing import Any, Callable, IO, Iterable, Optional, Tuple, Union, TYPE_CHECKING
 from urllib.parse import urlencode

 import requests
@@ -11,7 +12,12 @@ if TYPE_CHECKING:
     from .cli.dicts import MultipartRequestDataDict, RequestDataDict


-class ChunkedUploadStream:
+class ChunkedStream:
+    def __iter__(self) -> Iterable[Union[str, bytes]]:
+        raise NotImplementedError
+
+
+class ChunkedUploadStream(ChunkedStream):
     def __init__(self, stream: Iterable, callback: Callable):
         self.callback = callback
         self.stream = stream
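As a rough illustration of the renamed classes: `ChunkedStream` only fixes the common iteration interface, while `ChunkedUploadStream` wraps an arbitrary iterable of chunks. Assuming (as in the upstream implementation) that each yielded chunk is also passed to the callback, usage looks roughly like:

    chunks = [b'hello, ', b'world']
    stream = ChunkedUploadStream(
        stream=iter(chunks),
        callback=lambda chunk: print(f'sent {len(chunk)} bytes'),  # e.g. drives the upload progress display
    )
    assert list(stream) == chunks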
@@ -22,7 +28,7 @@ class ChunkedUploadStream:
             yield chunk


-class ChunkedMultipartUploadStream:
+class ChunkedMultipartUploadStream(ChunkedStream):
     chunk_size = 100 * 1024

     def __init__(self, encoder: 'MultipartEncoder'):
@@ -36,38 +42,37 @@ class ChunkedMultipartUploadStream:
             yield chunk


-def prepare_request_body(
-    body: Union[str, bytes, IO, 'MultipartEncoder', RequestDataDict],
-    body_read_callback: Callable[[bytes], bytes],
-    content_length_header_value: int = None,
-    chunked=False,
-    offline=False,
-) -> Union[str, bytes, IO, 'MultipartEncoder', ChunkedUploadStream]:
-
-    is_file_like = hasattr(body, 'read')
-
-    if isinstance(body, RequestDataDict):
-        body = urlencode(body, doseq=True)
-
-    if offline:
-        if is_file_like:
-            return body.read()
-        return body
-
-    if not is_file_like:
-        if chunked:
-            body = ChunkedUploadStream(
-                # Pass the entire body as one chunk.
-                stream=(chunk.encode() for chunk in [body]),
-                callback=body_read_callback,
-            )
-    else:
-        # File-like object.
-
-        if not super_len(body):
-            # Zero-length -> assume stdin.
-            if content_length_header_value is None and not chunked:
-                #
-                # Read the whole stdin to determine `Content-Length`.
-                #
-                # TODO: Instead of opt-in --chunked, consider making
+def as_bytes(data: Union[str, bytes]) -> bytes:
+    if isinstance(data, str):
+        return data.encode()
+    else:
+        return data
+
+
+CallbackT = Callable[[bytes], bytes]
+
+
+def _wrap_function_with_callback(
+    func: Callable[..., Any],
+    callback: CallbackT
+) -> Callable[..., Any]:
+    @functools.wraps(func)
+    def wrapped(*args, **kwargs):
+        chunk = func(*args, **kwargs)
+        callback(chunk)
+        return chunk
+    return wrapped
+
+
+def _prepare_file_for_upload(
+    file: Union[IO, 'MultipartEncoder'],
+    callback: CallbackT,
+    chunked: bool = False,
+    content_length_header_value: Optional[int] = None,
+) -> Union[bytes, IO, ChunkedStream]:
+    if not super_len(file):
+        # Zero-length -> assume stdin.
+        if content_length_header_value is None and not chunked:
+            # Read the whole stdin to determine `Content-Length`.
+            #
+            # TODO: Instead of opt-in --chunked, consider making
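`_wrap_function_with_callback` replaces the old ad-hoc `new_read` closure: it decorates a read-like callable so every chunk it returns is also reported to a callback. A minimal usage sketch (hypothetical values, assuming the helper from the hunk above):

    seen = []

    def read_chunk() -> bytes:
        return b'example payload'

    read = _wrap_function_with_callback(read_chunk, seen.append)
    assert read() == b'example payload'   # behaves like the wrapped callable
    assert seen == [b'example payload']   # ...and the callback observed the same chunk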
@@ -75,29 +80,62 @@ def prepare_request_body(
             # something like --no-chunked.
             # This would be backwards-incompatible so wait until v3.0.0.
             #
-                body = body.read()
-            else:
-                orig_read = body.read
-
-                def new_read(*args):
-                    chunk = orig_read(*args)
-                    body_read_callback(chunk)
-                    return chunk
-
-                body.read = new_read
-
-    if chunked:
-        from requests_toolbelt import MultipartEncoder
-        if isinstance(body, MultipartEncoder):
-            body = ChunkedMultipartUploadStream(
-                encoder=body,
-            )
-        else:
-            body = ChunkedUploadStream(
-                stream=body,
-                callback=body_read_callback,
-            )
-
-    return body
+            file = as_bytes(file.read())
+        else:
+            file.read = _wrap_function_with_callback(
+                file.read,
+                callback
+            )
+
+    if chunked:
+        from requests_toolbelt import MultipartEncoder
+        if isinstance(file, MultipartEncoder):
+            return ChunkedMultipartUploadStream(
+                encoder=file,
+            )
+        else:
+            return ChunkedUploadStream(
+                stream=file,
+                callback=callback,
+            )
+    else:
+        return file
+
+
+def prepare_request_body(
+    raw_body: Union[str, bytes, IO, 'MultipartEncoder', RequestDataDict],
+    body_read_callback: CallbackT,
+    offline: bool = False,
+    chunked: bool = False,
+    content_length_header_value: Optional[int] = None,
+) -> Union[bytes, IO, 'MultipartEncoder', ChunkedStream]:
+    is_file_like = hasattr(raw_body, 'read')
+    if isinstance(raw_body, (bytes, str)):
+        body = as_bytes(raw_body)
+    elif isinstance(raw_body, RequestDataDict):
+        body = as_bytes(urlencode(raw_body, doseq=True))
+    else:
+        body = raw_body
+
+    if offline:
+        if is_file_like:
+            return as_bytes(raw_body.read())
+        else:
+            return body
+
+    if is_file_like:
+        return _prepare_file_for_upload(
+            body,
+            chunked=chunked,
+            callback=body_read_callback,
+            content_length_header_value=content_length_header_value
+        )
+    elif chunked:
+        return ChunkedUploadStream(
+            stream=iter([body]),
+            callback=body_read_callback
+        )
+    else:
+        return body
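Taken together, the rewritten `prepare_request_body` first normalizes the raw body (str/bytes via `as_bytes`, form dicts via `urlencode`) and only then picks between offline, file-based, chunked, and plain uploads. A hedged sketch of the `--raw` plus `--chunked` path this commit fixes, using the signature added above:

    body = prepare_request_body(
        '{"a": 1}',                              # what --raw supplies
        body_read_callback=lambda chunk: chunk,  # no-op callback for the sketch
        chunked=True,
    )
    # A non-file-like body combined with --chunked now becomes a ChunkedUploadStream
    # over the already-encoded bytes instead of crashing.
    assert isinstance(body, ChunkedUploadStream)
    assert list(body) == [b'{"a": 1}']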
tests/test_uploads.py
@@ -1,4 +1,5 @@
 import os
+import json

 import pytest
@@ -70,6 +71,18 @@ def test_chunked_stdin_multiple_chunks(httpbin_with_chunked_support):
     assert r.count(FILE_CONTENT) == 4


+def test_chunked_raw(httpbin_with_chunked_support):
+    r = http(
+        '--verbose',
+        '--chunked',
+        httpbin_with_chunked_support + '/post',
+        '--raw',
+        json.dumps({'a': 1, 'b': '2fafds', 'c': '🥰'}),
+    )
+    assert HTTP_OK in r
+    assert 'Transfer-Encoding: chunked' in r
+
+
 class TestMultipartFormDataFileUpload:

     def test_non_existent_file_raises_parse_error(self, httpbin):