Mirror of https://github.com/httpie/cli.git (synced 2024-11-24 08:22:22 +02:00)
Modernize the code base with f-strings (#1068)
This commit is contained in:
parent 39314887c4
commit 0ff0874fa3
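The commit mechanically replaces printf-style '%' interpolation and str.format() calls with f-strings (available since Python 3.6). A minimal sketch of the equivalence the diff below relies on; the names and values here are illustrative only, not taken from HTTPie:

    name, count = 'item', 3
    old_percent = 'found %d of "%s"' % (count, name)        # printf-style
    old_format = 'found {0} of "{1}"'.format(count, name)   # str.format()
    new = f'found {count} of "{name}"'                      # f-string
    assert old_percent == old_format == new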
@@ -284,8 +284,7 @@ class HTTPieArgumentParser(argparse.ArgumentParser):
                 invalid.append(option)
 
         if invalid:
-            msg = 'unrecognized arguments: %s'
-            self.error(msg % ' '.join(invalid))
+            self.error(f'unrecognized arguments: {" ".join(invalid)}')
 
     def _body_from_file(self, fd):
         """Read the data from a file-like object.
@@ -381,8 +380,8 @@ class HTTPieArgumentParser(argparse.ArgumentParser):
         for key, file in self.args.files.items():
             if key != '':
                 self.error(
-                    'Invalid file fields (perhaps you meant --form?): %s'
-                    % ','.join(self.args.files.keys()))
+                    'Invalid file fields (perhaps you meant --form?):'
+                    f' {",".join(self.args.files.keys())}')
             if request_file is not None:
                 self.error("Can't read request from multiple files")
             request_file = file
@@ -407,10 +406,7 @@ class HTTPieArgumentParser(argparse.ArgumentParser):
         def check_options(value, option):
             unknown = set(value) - OUTPUT_OPTIONS
             if unknown:
-                self.error('Unknown output options: {0}={1}'.format(
-                    option,
-                    ','.join(unknown)
-                ))
+                self.error(f'Unknown output options: {option}={",".join(unknown)}')
 
         if self.args.verbose:
             self.args.all = True
@@ -30,7 +30,7 @@ from ..ssl import AVAILABLE_SSL_VERSION_ARG_MAPPING, DEFAULT_SSL_CIPHERS
 
 parser = HTTPieArgumentParser(
     prog='http',
-    description='%s <https://httpie.org>' % __doc__.strip(),
+    description=f'{__doc__.strip()} <https://httpie.org>',
     epilog=dedent('''
     For every --OPTION there is also a --no-OPTION that reverts OPTION
     to its default value.
@@ -267,7 +267,7 @@ output_processing.add_argument(
     '''.format(
         default=DEFAULT_STYLE,
         available_styles='\n'.join(
-            '{0}{1}'.format(8 * ' ', line.strip())
+            f'        {line.strip()}'
            for line in wrap(', '.join(sorted(AVAILABLE_STYLES)), 60)
         ).strip(),
         auto_style=AUTO_STYLE,
@@ -330,7 +330,7 @@ output_processing.add_argument(
 
     '''.format(
         option_list='\n'.join(
-            (8 * ' ') + option for option in DEFAULT_FORMAT_OPTIONS).strip()
+            f'        {option}' for option in DEFAULT_FORMAT_OPTIONS).strip()
     )
 )
 
@@ -383,12 +383,12 @@ output_options.add_argument(
     '--verbose', '-v',
     dest='verbose',
     action='store_true',
-    help='''
+    help=f'''
     Verbose output. Print the whole request as well as the response. Also print
     any intermediary requests/responses (such as redirects).
-    It's a shortcut for: --all --print={0}
+    It's a shortcut for: --all --print={''.join(OUTPUT_OPTIONS)}
 
-    '''.format(''.join(OUTPUT_OPTIONS))
+    '''
 )
 output_options.add_argument(
     '--all',
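Note on the hunk above: the f prefix applies to triple-quoted literals too, so the former '''...'''.format(''.join(OUTPUT_OPTIONS)) call can be folded into the help string itself, with the value interpolated once, when the module-level literal is evaluated. A small standalone sketch of the pattern; OUTPUT_OPTIONS here is a placeholder tuple, not HTTPie's real constant:

    OUTPUT_OPTIONS = ('H', 'B', 'h', 'b')  # placeholder for the example
    help_text = f'''
    It's a shortcut for: --all --print={''.join(OUTPUT_OPTIONS)}
    '''
    print(help_text)  # the joined value is baked in at definition time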
@@ -562,7 +562,7 @@ auth.add_argument(
         name=plugin.name,
         package=(
             '' if issubclass(plugin, BuiltinAuthPlugin)
-            else ' (provided by %s)' % plugin.package_name
+            else f' (provided by {plugin.package_name})'
         ),
         description=(
             '' if not plugin.description else
@@ -89,13 +89,11 @@ def process_header_arg(arg: KeyValueArg) -> Optional[str]:
 
 
 def process_empty_header_arg(arg: KeyValueArg) -> str:
-    if arg.value:
-        raise ParseError(
-            'Invalid item "%s" '
-            '(to specify an empty header use `Header;`)'
-            % arg.orig
-        )
-    return arg.value
+    if not arg.value:
+        return arg.value
+    raise ParseError(
+        f'Invalid item {arg.orig!r} (to specify an empty header use `Header;`)'
+    )
 
 
 def process_query_param_arg(arg: KeyValueArg) -> str:
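One behavioural nuance in the hunk above: the old message wrapped the item in hand-written double quotes ('Invalid item "%s"'), while the new one uses the !r conversion, which renders the value with repr() and therefore usually with single quotes. A tiny comparison with a made-up value:

    orig = 'X-Foo;bar'
    print('Invalid item "%s"' % orig)  # Invalid item "X-Foo;bar"
    print(f'Invalid item {orig!r}')    # Invalid item 'X-Foo;bar'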
@@ -109,7 +107,7 @@ def process_file_upload_arg(arg: KeyValueArg) -> Tuple[str, IO, str]:
     try:
         f = open(os.path.expanduser(filename), 'rb')
     except IOError as e:
-        raise ParseError('"%s": %s' % (arg.orig, e))
+        raise ParseError(f'{arg.orig!r}: {e}')
     return (
         os.path.basename(filename),
         f,
@@ -142,12 +140,11 @@ def load_text_file(item: KeyValueArg) -> str:
         with open(os.path.expanduser(path), 'rb') as f:
             return f.read().decode()
     except IOError as e:
-        raise ParseError('"%s": %s' % (item.orig, e))
+        raise ParseError(f'{item.orig!r}: {e}')
     except UnicodeDecodeError:
         raise ParseError(
-            '"%s": cannot embed the content of "%s",'
+            f'{item.orig!r}: cannot embed the content of {item.value!r},'
             ' not a UTF8 or ASCII-encoded text file'
-            % (item.orig, item.value)
         )
 
 
@@ -155,4 +152,4 @@ def load_json(arg: KeyValueArg, contents: str) -> JSONType:
     try:
         return load_json_preserve_order(contents)
     except ValueError as e:
-        raise ParseError('"%s": %s' % (arg.orig, e))
+        raise ParseError(f'{arg.orig!r}: {e}')
@@ -205,10 +205,9 @@ def program(args: argparse.Namespace, env: Environment) -> ExitStatus:
             if downloader.interrupted:
                 exit_status = ExitStatus.ERROR
                 env.log_error(
-                    'Incomplete download: size=%d; downloaded=%d' % (
-                        downloader.status.total_size,
-                        downloader.status.downloaded
-                    ))
+                    f'Incomplete download: size={downloader.status.total_size};'
+                    f' downloaded={downloader.status.downloaded}'
+                )
             return exit_status
 
     finally:
@@ -64,7 +64,7 @@ def parse_content_range(content_range: str, resumed_from: int) -> int:
 
     if not match:
         raise ContentRangeError(
-            'Invalid Content-Range format %r' % content_range)
+            f'Invalid Content-Range format {content_range!r}')
 
     content_range_dict = match.groupdict()
     first_byte_pos = int(content_range_dict['first_byte_pos'])
@@ -85,16 +85,15 @@ def parse_content_range(content_range: str, resumed_from: int) -> int:
             or (instance_length is not None
                 and instance_length <= last_byte_pos)):
         raise ContentRangeError(
-            'Invalid Content-Range returned: %r' % content_range)
+            f'Invalid Content-Range returned: {content_range!r}')
 
     if (first_byte_pos != resumed_from
             or (instance_length is not None
                 and last_byte_pos + 1 != instance_length)):
         # Not what we asked for.
         raise ContentRangeError(
-            'Unexpected Content-Range returned (%r)'
-            ' for the requested Range ("bytes=%d-")'
-            % (content_range, resumed_from)
+            f'Unexpected Content-Range returned ({content_range!r})'
+            f' for the requested Range ("bytes={resumed_from}-")'
         )
 
     return last_byte_pos + 1
@@ -112,7 +111,7 @@ def filename_from_content_disposition(
     """
     # attachment; filename=jakubroztocil-httpie-0.4.1-20-g40bd8f6.tar.gz
 
-    msg = Message('Content-Disposition: %s' % content_disposition)
+    msg = Message(f'Content-Disposition: {content_disposition}')
     filename = msg.get_filename()
     if filename:
         # Basic sanitation.
@@ -177,7 +176,7 @@ def trim_filename_if_needed(filename: str, directory='.', extra=0) -> str:
 def get_unique_filename(filename: str, exists=os.path.exists) -> str:
     attempt = 0
     while True:
-        suffix = '-' + str(attempt) if attempt > 0 else ''
+        suffix = f'-{attempt}' if attempt > 0 else ''
         try_filename = trim_filename_if_needed(filename, extra=len(suffix))
         try_filename += suffix
         if not exists(try_filename):
@@ -226,7 +225,7 @@ class Downloader:
         if bytes_have:
             # Set ``Range`` header to resume the download
             # TODO: Use "If-Range: mtime" to make sure it's fresh?
-            request_headers['Range'] = 'bytes=%d-' % bytes_have
+            request_headers['Range'] = f'bytes={bytes_have}-'
             self._resumed_from = bytes_have
 
     def start(
@@ -288,12 +287,8 @@ class Downloader:
         )
 
         self._progress_reporter.output.write(
-            'Downloading %sto "%s"\n' % (
-                (humanize_bytes(total_size) + ' '
-                 if total_size is not None
-                 else ''),
-                self._output_file.name
-            )
+            f'Downloading {humanize_bytes(total_size) + " " if total_size is not None else ""}'
+            f'to "{self._output_file.name}"\n'
         )
         self._progress_reporter.start()
 
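The download banner above also shows that arbitrary expressions, including conditional expressions, are allowed inside replacement fields. A self-contained sketch of the same pattern with placeholder values and a stand-in for humanize_bytes():

    def humanize_bytes_stub(n):  # stand-in for HTTPie's humanize_bytes()
        return f'{n / 1024:.2f} kB'

    total_size = 2048                 # illustrative values only
    output_file_name = 'example.bin'
    line = (
        f'Downloading {humanize_bytes_stub(total_size) + " " if total_size is not None else ""}'
        f'to "{output_file_name}"\n'
    )
    assert line == 'Downloading 2.00 kB to "example.bin"\n'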
@@ -442,7 +437,7 @@ class ProgressReporterThread(threading.Thread):
             s = int((self.status.total_size - downloaded) / speed)
             h, s = divmod(s, 60 * 60)
             m, s = divmod(s, 60)
-            eta = '{0}:{1:0>2}:{2:0>2}'.format(h, m, s)
+            eta = f'{h}:{m:0>2}:{s:0>2}'
 
         self._status_line = PROGRESS.format(
             percentage=percentage,
@@ -455,11 +450,7 @@
         self._prev_bytes = downloaded
 
         self.output.write(
-            CLEAR_LINE
-            + ' '
-            + SPINNER[self._spinner_pos]
-            + ' '
-            + self._status_line
+            f'{CLEAR_LINE} {SPINNER[self._spinner_pos]} {self._status_line}'
         )
         self.output.flush()
 
@@ -102,7 +102,7 @@ class HTTPRequest(HTTPMessage):
         request_line = '{method} {path}{query} HTTP/1.1'.format(
             method=self._orig.method,
             path=url.path or '/',
-            query='?' + url.query if url.query else ''
+            query=f'?{url.query}' if url.query else ''
         )
 
         headers = dict(self._orig.headers)
@@ -110,10 +110,7 @@
             headers['Host'] = url.netloc.split('@')[-1]
 
         headers = [
-            '%s: %s' % (
-                name,
-                value if isinstance(value, str) else value.decode('utf8')
-            )
+            f'{name}: {value if isinstance(value, str) else value.decode("utf-8")}'
             for name, value in headers.items()
         ]
 
@@ -120,8 +120,8 @@ def get_lexer(
             subtype_name, subtype_suffix = subtype.split('+', 1)
             lexer_names.extend([subtype_name, subtype_suffix])
             mime_types.extend([
-                '%s/%s' % (type_, subtype_name),
-                '%s/%s' % (type_, subtype_suffix)
+                f'{type_}/{subtype_name}',
+                f'{type_}/{subtype_suffix}',
             ])
 
     # As a last resort, if no lexer feels responsible, and
@@ -29,9 +29,9 @@ class HTTPBasicAuth(requests.auth.HTTPBasicAuth):
 
     @staticmethod
     def make_header(username: str, password: str) -> str:
-        credentials = u'%s:%s' % (username, password)
-        token = b64encode(credentials.encode('utf8')).strip().decode('latin1')
-        return 'Basic %s' % token
+        credentials = f'{username}:{password}'
+        token = b64encode(credentials.encode('utf-8')).strip().decode('latin1')
+        return f'Basic {token}'
 
 
 class BasicAuthPlugin(BuiltinAuthPlugin):
@@ -62,7 +62,7 @@ def humanize_bytes(n, precision=2):
             break
 
     # noinspection PyUnboundLocalVariable
-    return '%.*f %s' % (precision, n / factor, suffix)
+    return f'{n / factor:.{precision}f} {suffix}'
 
 
 class ExplicitNullAuth(requests.auth.AuthBase):
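The humanize_bytes() change above is the least mechanical one in the commit: the dynamic '*' precision of '%.*f' becomes a nested replacement field inside the f-string's format spec. A quick equivalence check with sample numbers:

    n, factor, suffix, precision = 1536.0, 1024, 'kB', 2
    old = '%.*f %s' % (precision, n / factor, suffix)
    new = f'{n / factor:.{precision}f} {suffix}'
    assert old == new == '1.50 kB'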
@@ -85,7 +85,7 @@ def get_content_type(filename):
     if mime:
         content_type = mime
         if encoding:
-            content_type = '%s; charset=%s' % (mime, encoding)
+            content_type = f'{mime}; charset={encoding}'
     return content_type
 
 