
add stats fields to json output. #676

Kelly Brazil
2026-03-09 17:35:33 -07:00
parent 441bcbde80
commit e01287b329
8 changed files with 275 additions and 24 deletions
CHANGELOG  +2 -1
@@ -1,6 +1,7 @@
jc changelog
-20260306 v1.25.7
+20260309 v1.25.7
+- Enhance `rsync` and `rsync-s` parsers to add `--stats` or `--info=stats[1-3]` fields
- Fix `proc-pid-smaps` proc parser when unknown VmFlags are output
- Fix `iptables` command parser when Target is blank and verbose output is used
jc/parsers/rsync.py  +102 -12
@@ -4,6 +4,8 @@ Supports the `-i` or `--itemize-changes` options with all levels of
verbosity. This parser will process the `STDOUT` output or a log file
generated with the `--log-file` option.
+The `--stats` or `--info=stats[1-3]` options are also supported.
Usage (cli):
$ rsync -i -a source/ dest | jc --rsync
@@ -37,7 +39,21 @@ Schema:
"false_alarms": integer,
"data": integer,
"bytes_sec": float,
"speedup": float
"speedup": float,
"total_files": integer,
"regular_files": integer,
"dir_files": integer,
"total_created_files": integer,
"created_regular_files": integer,
"created_dir_files": integer,
"deleted_files": integer,
"transferred_files": integer,
"transferred_file_size": integer,
"literal_data": integer,
"matched_data": integer,
"file_list_size": integer,
"file_list_generation_time": float,
"file_list_transfer_time": float,
},
"files": [
{
@@ -62,6 +78,8 @@ Schema:
}
]
+Size values are in bytes.
[0] 'file sent', 'file received', 'local change or creation',
'hard link', 'not updated', 'message'
[1] 'file', 'directory', 'symlink', 'device', 'special file'
@@ -137,7 +155,7 @@ import jc.utils
class info():
"""Provides parser metadata (version, author, etc.)"""
-version = '1.2'
+version = '1.3'
description = '`rsync` command parser'
author = 'Kelly Brazil'
author_email = 'kellyjonbrazil@gmail.com'
@@ -163,10 +181,16 @@ def _process(proc_data: List[Dict]) -> List[Dict]:
"""
int_list = {
'process', 'sent', 'received', 'total_size', 'matches', 'hash_hits',
-'false_alarms', 'data'
+'false_alarms', 'data', 'total_files', 'regular_files', 'dir_files',
+'total_created_files', 'created_regular_files', 'created_dir_files',
+'deleted_files', 'transferred_files', 'transferred_file_size',
+'literal_data', 'matched_data', 'file_list_size'
}
-float_list = {'bytes_sec', 'speedup'}
+float_list = {
+'bytes_sec', 'speedup', 'file_list_generation_time',
+'file_list_transfer_time'
+}
for item in proc_data:
for key in item['summary']:
@@ -338,6 +362,17 @@ def parse(
stat2_line_log_v_re = re.compile(r'(?P<date>\d\d\d\d/\d\d/\d\d)\s+(?P<time>\d\d:\d\d:\d\d)\s+\[(?P<process>\d+)\]\s+sent\s+(?P<sent>[\d,]+)\s+bytes\s+received\s+(?P<received>[\d,]+)\s+bytes\s+(?P<bytes_sec>[\d,.]+)\s+bytes/sec')
stat3_line_log_v_re = re.compile(r'(?P<date>\d\d\d\d/\d\d/\d\d)\s+(?P<time>\d\d:\d\d:\d\d)\s+\[(?P<process>\d+)]\s+total\s+size\s+is\s+(?P<total_size>[\d,]+)\s+speedup\s+is\s+(?P<speedup>[\d,.]+)')
+stat_ex_files_number_re = re.compile(r'Number\sof\sfiles:\s(?P<files_total>[,0123456789]+)\s\(reg:\s(?P<files_regular>[,0123456789]+),\sdir:\s(?P<files_dir>[,0123456789]+)\)$')
+stat_ex_files_created_re = re.compile(r'Number\sof\screated\sfiles:\s(?P<files_created_total>[,0123456789]+)\s\(reg:\s(?P<files_created_regular>[,0123456789]+),\sdir:\s(?P<files_created_dir>[,0123456789]+)\)$')
+stat_ex_files_deleted_re = re.compile(r'Number\sof\sdeleted\sfiles:\s(?P<files_deleted>[,0123456789]+)$')
+stat_ex_files_transferred_re = re.compile(r'Number\sof\sregular\sfiles\stransferred:\s(?P<files_transferred>[,0123456789]+)$')
+stat_ex_files_transferred_size_re = re.compile(r'Total\sfile\ssize:\s(?P<files_transferred_size>[,.0123456789]+\S?)\sbytes$')
+stat_ex_literal_data_re = re.compile(r'Literal\sdata:\s(?P<literal_data>[,.0123456789]+\S?)\sbytes$')
+stat_ex_matched_data_re = re.compile(r'Matched\sdata:\s(?P<matched_data>[,.0123456789]+\S?)\sbytes$')
+stat_ex_file_list_size_re = re.compile(r'File\slist\ssize:\s(?P<file_list_size>[,.0123456789]+\S?)$')
+stat_ex_file_list_generation_time_re = re.compile(r'File\slist\sgeneration\stime:\s(?P<file_list_generation_time>[,.0123456789]+\S?)\sseconds$')
+stat_ex_file_list_transfer_time_re = re.compile(r'File\slist\stransfer\stime:\s(?P<file_list_transfer_time>[,.0123456789]+\S?)\sseconds$')
if jc.utils.has_data(data):
for line in filter(None, data.splitlines()):
@@ -451,11 +486,11 @@ def parse(
stat1_line = stat1_line_re.match(line)
if stat1_line:
-rsync_run['summary'] = {
+rsync_run['summary'].update({
'sent': stat1_line.group('sent'),
'received': stat1_line.group('received'),
'bytes_sec': stat1_line.group('bytes_sec')
-}
+})
continue
stat2_line = stat2_line_re.match(line)
@@ -466,11 +501,11 @@ def parse(
stat1_line_simple = stat1_line_simple_re.match(line)
if stat1_line_simple:
-rsync_run['summary'] = {
+rsync_run['summary'].update({
'sent': stat1_line_simple.group('sent'),
'received': stat1_line_simple.group('received'),
'bytes_sec': stat1_line_simple.group('bytes_sec')
-}
+})
continue
stat2_line_simple = stat2_line_simple_re.match(line)
@@ -481,19 +516,19 @@ def parse(
stat_line_log = stat_line_log_re.match(line)
if stat_line_log:
-rsync_run['summary'] = {
+rsync_run['summary'].update({
'date': stat_line_log.group('date'),
'time': stat_line_log.group('time'),
'process': stat_line_log.group('process'),
'sent': stat_line_log.group('sent'),
'received': stat_line_log.group('received'),
'total_size': stat_line_log.group('total_size')
-}
+})
continue
stat1_line_log_v = stat1_line_log_v_re.match(line)
if stat1_line_log_v:
-rsync_run['summary'] = {
+rsync_run['summary'].update({
'date': stat1_line_log_v.group('date'),
'time': stat1_line_log_v.group('time'),
'process': stat1_line_log_v.group('process'),
@@ -501,7 +536,7 @@ def parse(
'hash_hits': stat1_line_log_v.group('hash_hits'),
'false_alarms': stat1_line_log_v.group('false_alarms'),
'data': stat1_line_log_v.group('data')
-}
+})
continue
stat2_line_log_v = stat2_line_log_v_re.match(line)
@@ -517,6 +552,61 @@ def parse(
rsync_run['summary']['speedup'] = stat3_line_log_v.group('speedup')
continue
+# extra stats lines when using rsync --stats or --info=stats[1-3]
+stat_ex_files_number_v = stat_ex_files_number_re.match(line)
+if stat_ex_files_number_v:
+rsync_run['summary']['total_files'] = stat_ex_files_number_v.group('files_total')
+rsync_run['summary']['regular_files'] = stat_ex_files_number_v.group('files_regular')
+rsync_run['summary']['dir_files'] = stat_ex_files_number_v.group('files_dir')
+continue
+stat_ex_files_created_v = stat_ex_files_created_re.match(line)
+if stat_ex_files_created_v:
+rsync_run['summary']['total_created_files'] = stat_ex_files_created_v.group('files_created_total')
+rsync_run['summary']['created_regular_files'] = stat_ex_files_created_v.group('files_created_regular')
+rsync_run['summary']['created_dir_files'] = stat_ex_files_created_v.group('files_created_dir')
+continue
+stat_ex_files_deleted_v = stat_ex_files_deleted_re.match(line)
+if stat_ex_files_deleted_v:
+rsync_run['summary']['deleted_files'] = stat_ex_files_deleted_v.group('files_deleted')
+continue
+stat_ex_files_transferred_v = stat_ex_files_transferred_re.match(line)
+if stat_ex_files_transferred_v:
+rsync_run['summary']['transferred_files'] = stat_ex_files_transferred_v.group('files_transferred')
+continue
+stat_ex_files_transferred_size_v = stat_ex_files_transferred_size_re.match(line)
+if stat_ex_files_transferred_size_v:
+rsync_run['summary']['transferred_file_size'] = stat_ex_files_transferred_size_v.group('files_transferred_size')
+continue
+stat_ex_literal_data_v = stat_ex_literal_data_re.match(line)
+if stat_ex_literal_data_v:
+rsync_run['summary']['literal_data'] = stat_ex_literal_data_v.group('literal_data')
+continue
+stat_ex_matched_data_v = stat_ex_matched_data_re.match(line)
+if stat_ex_matched_data_v:
+rsync_run['summary']['matched_data'] = stat_ex_matched_data_v.group('matched_data')
+continue
+stat_ex_file_list_size_v = stat_ex_file_list_size_re.match(line)
+if stat_ex_file_list_size_v:
+rsync_run['summary']['file_list_size'] = stat_ex_file_list_size_v.group('file_list_size')
+continue
+stat_ex_file_list_generation_time_v = stat_ex_file_list_generation_time_re.match(line)
+if stat_ex_file_list_generation_time_v:
+rsync_run['summary']['file_list_generation_time'] = stat_ex_file_list_generation_time_v.group('file_list_generation_time')
+continue
+stat_ex_file_list_transfer_time_v = stat_ex_file_list_transfer_time_re.match(line)
+if stat_ex_file_list_transfer_time_v:
+rsync_run['summary']['file_list_transfer_time'] = stat_ex_file_list_transfer_time_v.group('file_list_transfer_time')
+continue
raw_output.append(rsync_run)
# cleanup blank entries
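Each of the eleven new `stat_ex_*` regexes targets one line of rsync's `--stats` block. As a quick standalone check (not part of the commit), here is the first of them, copied verbatim from the diff above and run against a line from the fixture added later in this commit:

import re

# Pattern copied verbatim from the diff above
stat_ex_files_number_re = re.compile(
    r'Number\sof\sfiles:\s(?P<files_total>[,0123456789]+)'
    r'\s\(reg:\s(?P<files_regular>[,0123456789]+)'
    r',\sdir:\s(?P<files_dir>[,0123456789]+)\)$'
)

# Sample line from the rsync-i-stats.out fixture below
m = stat_ex_files_number_re.match('Number of files: 23,784 (reg: 23,191, dir: 593)')
assert m is not None
assert m.group('files_total') == '23,784'
assert m.group('files_regular') == '23,191'
assert m.group('files_dir') == '593'

Note that the captured values keep rsync's thousands separators; conversion to real numbers happens later in `_process` via the expanded `int_list`/`float_list` sets.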
jc/parsers/rsync_s.py  +90 -11
@@ -7,6 +7,8 @@ Supports the `-i` or `--itemize-changes` options with all levels of
verbosity. This parser will process the `STDOUT` output or a log file
generated with the `--log-file` option.
+The `--stats` or `--info=stats[1-3]` options are also supported.
Usage (cli):
$ rsync -i -a source/ dest | jc --rsync-s
@@ -64,6 +66,8 @@ Schema:
}
}
+Size values are in bytes.
[0] 'file sent', 'file received', 'local change or creation',
'hard link', 'not updated', 'message'
[1] 'file', 'directory', 'symlink', 'device', 'special file'
@@ -88,7 +92,7 @@ from jc.streaming import (
class info():
"""Provides parser metadata (version, author, etc.)"""
-version = '1.3'
+version = '1.4'
description = '`rsync` command streaming parser'
author = 'Kelly Brazil'
author_email = 'kellyjonbrazil@gmail.com'
@@ -114,10 +118,16 @@ def _process(proc_data: Dict) -> Dict:
"""
int_list = {
'process', 'sent', 'received', 'total_size', 'matches', 'hash_hits',
-'false_alarms', 'data'
+'false_alarms', 'data', 'total_files', 'regular_files', 'dir_files',
+'total_created_files', 'created_regular_files', 'created_dir_files',
+'deleted_files', 'transferred_files', 'transferred_file_size',
+'literal_data', 'matched_data', 'file_list_size'
}
-float_list = {'bytes_sec', 'speedup'}
+float_list = {
+'bytes_sec', 'speedup', 'file_list_generation_time',
+'file_list_transfer_time'
+}
for key in proc_data.copy():
if key in int_list:
@@ -281,6 +291,17 @@ def parse(
stat2_line_log_v_re = re.compile(r'(?P<date>\d\d\d\d/\d\d/\d\d)\s+(?P<time>\d\d:\d\d:\d\d)\s+\[(?P<process>\d+)\]\s+sent\s+(?P<sent>[\d,]+)\s+bytes\s+received\s+(?P<received>[\d,]+)\s+bytes\s+(?P<bytes_sec>[\d,.]+)\s+bytes/sec')
stat3_line_log_v_re = re.compile(r'(?P<date>\d\d\d\d/\d\d/\d\d)\s+(?P<time>\d\d:\d\d:\d\d)\s+\[(?P<process>\d+)]\s+total\s+size\s+is\s+(?P<total_size>[\d,]+)\s+speedup\s+is\s+(?P<speedup>[\d,.]+)')
+stat_ex_files_number_re = re.compile(r'Number\sof\sfiles:\s(?P<files_total>[,0123456789]+)\s\(reg:\s(?P<files_regular>[,0123456789]+),\sdir:\s(?P<files_dir>[,0123456789]+)\)$')
+stat_ex_files_created_re = re.compile(r'Number\sof\screated\sfiles:\s(?P<files_created_total>[,0123456789]+)\s\(reg:\s(?P<files_created_regular>[,0123456789]+),\sdir:\s(?P<files_created_dir>[,0123456789]+)\)$')
+stat_ex_files_deleted_re = re.compile(r'Number\sof\sdeleted\sfiles:\s(?P<files_deleted>[,0123456789]+)$')
+stat_ex_files_transferred_re = re.compile(r'Number\sof\sregular\sfiles\stransferred:\s(?P<files_transferred>[,0123456789]+)$')
+stat_ex_files_transferred_size_re = re.compile(r'Total\sfile\ssize:\s(?P<files_transferred_size>[,.0123456789]+\S?)\sbytes$')
+stat_ex_literal_data_re = re.compile(r'Literal\sdata:\s(?P<literal_data>[,.0123456789]+\S?)\sbytes$')
+stat_ex_matched_data_re = re.compile(r'Matched\sdata:\s(?P<matched_data>[,.0123456789]+\S?)\sbytes$')
+stat_ex_file_list_size_re = re.compile(r'File\slist\ssize:\s(?P<file_list_size>[,.0123456789]+\S?)$')
+stat_ex_file_list_generation_time_re = re.compile(r'File\slist\sgeneration\stime:\s(?P<file_list_generation_time>[,.0123456789]+\S?)\sseconds$')
+stat_ex_file_list_transfer_time_re = re.compile(r'File\slist\stransfer\stime:\s(?P<file_list_transfer_time>[,.0123456789]+\S?)\sseconds$')
for line in data:
try:
streaming_line_input_type_check(line)
@@ -408,12 +429,12 @@ def parse(
stat1_line = stat1_line_re.match(line)
if stat1_line:
-summary = {
+summary.update({
'type': 'summary',
'sent': stat1_line.group('sent'),
'received': stat1_line.group('received'),
'bytes_sec': stat1_line.group('bytes_sec')
-}
+})
continue
stat2_line = stat2_line_re.match(line)
@@ -424,12 +445,12 @@ def parse(
stat1_line_simple = stat1_line_simple_re.match(line)
if stat1_line_simple:
-summary = {
+summary.update({
'type': 'summary',
'sent': stat1_line_simple.group('sent'),
'received': stat1_line_simple.group('received'),
'bytes_sec': stat1_line_simple.group('bytes_sec')
-}
+})
continue
stat2_line_simple = stat2_line_simple_re.match(line)
@@ -440,7 +461,7 @@ def parse(
stat_line_log = stat_line_log_re.match(line)
if stat_line_log:
-summary = {
+summary.update({
'type': 'summary',
'date': stat_line_log.group('date'),
'time': stat_line_log.group('time'),
@@ -448,12 +469,12 @@ def parse(
'sent': stat_line_log.group('sent'),
'received': stat_line_log.group('received'),
'total_size': stat_line_log.group('total_size')
-}
+})
continue
stat1_line_log_v = stat1_line_log_v_re.match(line)
if stat1_line_log_v:
-summary = {
+summary.update({
'type': 'summary',
'date': stat1_line_log_v.group('date'),
'time': stat1_line_log_v.group('time'),
@@ -462,7 +483,7 @@ def parse(
'hash_hits': stat1_line_log_v.group('hash_hits'),
'false_alarms': stat1_line_log_v.group('false_alarms'),
'data': stat1_line_log_v.group('data')
-}
+})
continue
stat2_line_log_v = stat2_line_log_v_re.match(line)
@@ -478,6 +499,61 @@ def parse(
summary['speedup'] = stat3_line_log_v.group('speedup')
continue
+# extra stats lines when using rsync --stats or --info=stats[1-3]
+stat_ex_files_number_v = stat_ex_files_number_re.match(line)
+if stat_ex_files_number_v:
+summary['total_files'] = stat_ex_files_number_v.group('files_total')
+summary['regular_files'] = stat_ex_files_number_v.group('files_regular')
+summary['dir_files'] = stat_ex_files_number_v.group('files_dir')
+continue
+stat_ex_files_created_v = stat_ex_files_created_re.match(line)
+if stat_ex_files_created_v:
+summary['total_created_files'] = stat_ex_files_created_v.group('files_created_total')
+summary['created_regular_files'] = stat_ex_files_created_v.group('files_created_regular')
+summary['created_dir_files'] = stat_ex_files_created_v.group('files_created_dir')
+continue
+stat_ex_files_deleted_v = stat_ex_files_deleted_re.match(line)
+if stat_ex_files_deleted_v:
+summary['deleted_files'] = stat_ex_files_deleted_v.group('files_deleted')
+continue
+stat_ex_files_transferred_v = stat_ex_files_transferred_re.match(line)
+if stat_ex_files_transferred_v:
+summary['transferred_files'] = stat_ex_files_transferred_v.group('files_transferred')
+continue
+stat_ex_files_transferred_size_v = stat_ex_files_transferred_size_re.match(line)
+if stat_ex_files_transferred_size_v:
+summary['transferred_file_size'] = stat_ex_files_transferred_size_v.group('files_transferred_size')
+continue
+stat_ex_literal_data_v = stat_ex_literal_data_re.match(line)
+if stat_ex_literal_data_v:
+summary['literal_data'] = stat_ex_literal_data_v.group('literal_data')
+continue
+stat_ex_matched_data_v = stat_ex_matched_data_re.match(line)
+if stat_ex_matched_data_v:
+summary['matched_data'] = stat_ex_matched_data_v.group('matched_data')
+continue
+stat_ex_file_list_size_v = stat_ex_file_list_size_re.match(line)
+if stat_ex_file_list_size_v:
+summary['file_list_size'] = stat_ex_file_list_size_v.group('file_list_size')
+continue
+stat_ex_file_list_generation_time_v = stat_ex_file_list_generation_time_re.match(line)
+if stat_ex_file_list_generation_time_v:
+summary['file_list_generation_time'] = stat_ex_file_list_generation_time_v.group('file_list_generation_time')
+continue
+stat_ex_file_list_transfer_time_v = stat_ex_file_list_transfer_time_re.match(line)
+if stat_ex_file_list_transfer_time_v:
+summary['file_list_transfer_time'] = stat_ex_file_list_transfer_time_v.group('file_list_transfer_time')
+continue
except Exception as e:
yield raise_or_yield(ignore_exceptions, e, line)
@@ -488,3 +564,6 @@ def parse(
except Exception as e:
yield raise_or_yield(ignore_exceptions, e, '')
+# unused return for Mypy
+return []
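Both `_process` functions only have their membership sets shown in these hunks; the conversion code itself is outside the diff. The fixtures below imply it must strip thousands separators and scale rsync's K/M/G/T suffixes (`6.88T` becomes `6880000000000`, `98.10K` becomes `98100`). A minimal illustrative sketch of that conversion idea, with an assumed function name and multiplier table, not jc's actual helper:

from decimal import Decimal

# Illustrative only: mimics the suffix-aware numeric conversion the
# fixtures imply (e.g. '6.88T' -> 6880000000000). jc's real code may differ.
def convert_size(value: str) -> Decimal:
    multipliers = {'K': 10**3, 'M': 10**6, 'G': 10**9, 'T': 10**12}
    value = value.replace(',', '')  # strip thousands separators
    if value and value[-1] in multipliers:
        return Decimal(value[:-1]) * multipliers[value[-1]]
    return Decimal(value)

assert int(convert_size('6.88T')) == 6880000000000
assert int(convert_size('98.10K')) == 98100
assert float(convert_size('5,311,650.06')) == 5311650.06

Decimal is used here rather than float so that values like '98.10K' scale exactly instead of truncating to 98099.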
tests/fixtures/generic/rsync-i-stats-streaming.json  +1
@@ -0,0 +1 @@
[{"total_files":23784,"regular_files":23191,"dir_files":593,"total_created_files":2651,"created_regular_files":2611,"created_dir_files":40,"deleted_files":0,"transferred_files":2629,"transferred_file_size":6880000000000,"literal_data":0,"matched_data":0,"file_list_size":98100,"file_list_generation_time":0.001,"file_list_transfer_time":0.0,"type":"summary","sent":8990,"received":1290000,"bytes_sec":370210.0,"total_size":6880000000000,"speedup":5311650.06}]
tests/fixtures/generic/rsync-i-stats.json  +1
@@ -0,0 +1 @@
[{"summary":{"total_files":23784,"regular_files":23191,"dir_files":593,"total_created_files":2651,"created_regular_files":2611,"created_dir_files":40,"deleted_files":0,"transferred_files":2629,"transferred_file_size":6880000000000,"literal_data":0,"matched_data":0,"file_list_size":98100,"file_list_generation_time":0.001,"file_list_transfer_time":0.0,"sent":8990,"received":1290000,"bytes_sec":370210.0,"total_size":6880000000000,"speedup":5311650.06},"files":[]}]
tests/fixtures/generic/rsync-i-stats.out  +56
@@ -0,0 +1,56 @@
rsync[1817530] (server sender) heap statistics:
arena: 1204224 (bytes from sbrk)
ordblks: 46 (chunks not in use)
rsync[1007426] (receiver) heap statistics:
arena: 9244672 (bytes from sbrk)
ordblks: 57 (chunks not in use)
smblks: 1 (free fastbin blocks)
hblks: 1 (chunks from mmap)
hblkhd: 266240 (bytes from mmap)
allmem: 9510912 (bytes from sbrk + mmap)
usmblks: 0 (always 0)
fsmblks: 96 (bytes in freed fastbin blocks)
uordblks: 486480 (bytes used)
fordblks: 8758192 (bytes free)
keepcost: 133856 (bytes in releasable chunk)
smblks: 2 (free fastbin blocks)
hblks: 1 (chunks from mmap)
hblkhd: 266240 (bytes from mmap)
allmem: 1470464 (bytes from sbrk + mmap)
usmblks: 0 (always 0)
fsmblks: 192 (bytes in freed fastbin blocks)
uordblks: 478288 (bytes used)
fordblks: 725936 (bytes free)
keepcost: 427216 (bytes in releasable chunk)
rsync[1007424] (generator) heap statistics:
arena: 1384448 (bytes from sbrk)
ordblks: 6 (chunks not in use)
smblks: 1 (free fastbin blocks)
hblks: 1 (chunks from mmap)
hblkhd: 266240 (bytes from mmap)
allmem: 1650688 (bytes from sbrk + mmap)
usmblks: 0 (always 0)
fsmblks: 96 (bytes in freed fastbin blocks)
uordblks: 486160 (bytes used)
fordblks: 898288 (bytes free)
keepcost: 132272 (bytes in releasable chunk)
Number of files: 23,784 (reg: 23,191, dir: 593)
Number of created files: 2,651 (reg: 2,611, dir: 40)
Number of deleted files: 0
Number of regular files transferred: 2,629
Total file size: 6.88T bytes
Total transferred file size: 759.17G bytes
Literal data: 0 bytes
Matched data: 0 bytes
File list size: 98.10K
File list generation time: 0.001 seconds
File list transfer time: 0.000 seconds
Total bytes sent: 8.99K
Total bytes received: 1.29M
sent 8.99K bytes received 1.29M bytes 370.21K bytes/sec
total size is 6.88T speedup is 5,311,650.06 (DRY RUN)
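Tying the pieces together: the two JSON fixtures above are what the parsers are expected to produce from this capture. A round-trip sketch mirroring the new tests (assuming jc v1.25.7+ is installed and this capture is saved locally as `rsync-i-stats.out`):

import jc.parsers.rsync
import jc.parsers.rsync_s

# hypothetical local copy of the fixture shown above
with open('rsync-i-stats.out', 'r', encoding='utf-8') as f:
    data = f.read()

# standard parser: one run with a nested 'summary' object
result = jc.parsers.rsync.parse(data, quiet=True)
print(result[0]['summary']['total_files'])  # 23784

# streaming parser: takes an iterable of lines, yields flat dicts
s_result = list(jc.parsers.rsync_s.parse(data.splitlines(), quiet=True))
print(s_result[0]['transferred_file_size'])  # 6880000000000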
tests/test_rsync.py  +12
@@ -45,6 +45,9 @@ class MyTests(unittest.TestCase):
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/osx-10.14.6/rsync-i-vvv-logfile-nochange.out'), 'r', encoding='utf-8') as f:
osx_10_14_6_rsync_i_vvv_logfile_nochange = f.read()
+with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/generic/rsync-i-stats.out'), 'r', encoding='utf-8') as f:
+generic_rsync_i_stats = f.read()
# output
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/centos-7.7/rsync-i.json'), 'r', encoding='utf-8') as f:
centos_7_7_rsync_i_json = json.loads(f.read())
@@ -82,6 +85,9 @@ class MyTests(unittest.TestCase):
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/osx-10.14.6/rsync-i-vvv-logfile-nochange.json'), 'r', encoding='utf-8') as f:
osx_10_14_6_rsync_i_vvv_logfile_nochange_json = json.loads(f.read())
+with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/generic/rsync-i-stats.json'), 'r', encoding='utf-8') as f:
+generic_rsync_i_stats_json = json.loads(f.read())
def test_rsync_nodata(self):
"""
@@ -173,6 +179,12 @@ total size is 221.79G speedup is 25,388.23
expected = [{"summary":{"sent":8710000,"received":29880,"bytes_sec":10990.0,"total_size":221790000000,"speedup":25388.23},"files":[]}]
self.assertEqual(jc.parsers.rsync.parse(data, quiet=True), expected)
+def test_rsync_with_stats(self):
+"""
+Test 'rsync -i --stats' or 'rsync -i --info=stats[1-3]'
+"""
+self.assertEqual(jc.parsers.rsync.parse(self.generic_rsync_i_stats, quiet=True), self.generic_rsync_i_stats_json)
if __name__ == '__main__':
unittest.main()
tests/test_rsync_s.py  +11
@@ -49,6 +49,9 @@ class MyTests(unittest.TestCase):
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/osx-10.14.6/rsync-i-vvv-logfile-nochange.out'), 'r', encoding='utf-8') as f:
osx_10_14_6_rsync_i_vvv_logfile_nochange = f.read()
+with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/generic/rsync-i-stats.out'), 'r', encoding='utf-8') as f:
+generic_rsync_i_stats = f.read()
# output
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/centos-7.7/rsync-i-streaming.json'), 'r', encoding='utf-8') as f:
centos_7_7_rsync_i_streaming_json = json.loads(f.read())
@@ -86,6 +89,9 @@ class MyTests(unittest.TestCase):
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/osx-10.14.6/rsync-i-vvv-logfile-nochange-streaming.json'), 'r', encoding='utf-8') as f:
osx_10_14_6_rsync_i_vvv_logfile_nochange_streaming_json = json.loads(f.read())
+with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/generic/rsync-i-stats-streaming.json'), 'r', encoding='utf-8') as f:
+generic_rsync_i_stats_streaming_json = json.loads(f.read())
def test_rsync_s_nodata(self):
"""
@@ -177,6 +183,11 @@ total size is 221.79G speedup is 25,388.23
expected = [{"type":"summary","sent":8710000,"received":29880,"bytes_sec":10990.0,"total_size":221790000000,"speedup":25388.23}]
self.assertEqual(list(jc.parsers.rsync_s.parse(data.splitlines(), quiet=True)), expected)
+def test_rsync_s_i_stats(self):
+"""
+Test 'rsync -i --stats' or 'rsync -i --info=stats[1-3]'
+"""
+self.assertEqual(list(jc.parsers.rsync_s.parse(self.generic_rsync_i_stats.splitlines(), quiet=True)), self.generic_rsync_i_stats_streaming_json)
if __name__ == '__main__':
unittest.main()