mirror of
https://github.com/kellyjonbrazil/jc.git
synced 2026-04-03 17:44:07 +02:00
Compare commits
44 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
3f2cd81ca7 | ||
|
|
6c8aafa2c5 | ||
|
|
41e2e16436 | ||
|
|
20b625860e | ||
|
|
0d703cd3a8 | ||
|
|
7b3d345d62 | ||
|
|
53b6f1b329 | ||
|
|
ecc267b3cb | ||
|
|
5d2496b5f7 | ||
|
|
810eeba724 | ||
|
|
fb5c1b9c94 | ||
|
|
f797127ca9 | ||
|
|
56bd860a5e | ||
|
|
9fe659d625 | ||
|
|
128e36404d | ||
|
|
2f9377cb67 | ||
|
|
585ff83a2e | ||
|
|
45b23e8b3c | ||
|
|
b9eec1a5cd | ||
|
|
47545833ed | ||
|
|
6f8e4fb2ed | ||
|
|
4f7821ac8e | ||
|
|
7f2722ff96 | ||
|
|
1d19de3005 | ||
|
|
e01287b329 | ||
|
|
441bcbde80 | ||
|
|
936432d879 | ||
|
|
51543437d7 | ||
|
|
dd9229f161 | ||
|
|
3d9554baec | ||
|
|
e33a81269c | ||
|
|
f3352352ed | ||
|
|
1c0a35dff8 | ||
|
|
8ba75794a6 | ||
|
|
77af5ac9d3 | ||
|
|
4067bfed9f | ||
|
|
0363ddcc6a | ||
|
|
04303efa75 | ||
|
|
956f74358b | ||
|
|
4fe3377029 | ||
|
|
1944a7145e | ||
|
|
3d698e50c0 | ||
|
|
5945ded816 | ||
|
|
ecd0e03c66 |
115
.github/workflows/pythonapp.yml
vendored
115
.github/workflows/pythonapp.yml
vendored
@@ -9,69 +9,14 @@ on:
|
||||
- "**/*.py"
|
||||
|
||||
jobs:
|
||||
very_old_python:
|
||||
if: github.event.pull_request.draft == false
|
||||
runs-on: ${{ matrix.os }}
|
||||
strategy:
|
||||
matrix:
|
||||
os: [macos-13, windows-2022]
|
||||
python-version: ["3.6"]
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- name: "Set up timezone to America/Los_Angeles"
|
||||
uses: szenius/set-timezone@v1.2
|
||||
with:
|
||||
timezoneLinux: "America/Los_Angeles"
|
||||
timezoneMacos: "America/Los_Angeles"
|
||||
timezoneWindows: "Pacific Standard Time"
|
||||
- name: Set up Python ${{ matrix.python-version }}
|
||||
uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
python -m pip install --upgrade pip
|
||||
pip install -r requirements.txt
|
||||
- name: Test with unittest
|
||||
run: |
|
||||
python -m unittest discover tests
|
||||
|
||||
old_python:
|
||||
if: github.event.pull_request.draft == false
|
||||
runs-on: ${{ matrix.os }}
|
||||
strategy:
|
||||
matrix:
|
||||
os: [macos-13, ubuntu-22.04, windows-2022]
|
||||
python-version: ["3.7", "3.8", "3.9", "3.10"]
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- name: "Set up timezone to America/Los_Angeles"
|
||||
uses: szenius/set-timezone@v1.2
|
||||
with:
|
||||
timezoneLinux: "America/Los_Angeles"
|
||||
timezoneMacos: "America/Los_Angeles"
|
||||
timezoneWindows: "Pacific Standard Time"
|
||||
- name: Set up Python ${{ matrix.python-version }}
|
||||
uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
python -m pip install --upgrade pip
|
||||
pip install -r requirements.txt
|
||||
- name: Test with unittest
|
||||
run: |
|
||||
python -m unittest discover tests
|
||||
|
||||
latest_python:
|
||||
if: github.event.pull_request.draft == false
|
||||
runs-on: ${{ matrix.os }}
|
||||
strategy:
|
||||
matrix:
|
||||
os: [macos-latest, ubuntu-latest, windows-latest]
|
||||
python-version: ["3.11", "3.12"]
|
||||
os: [macos-15-intel, macos-latest, ubuntu-latest, ubuntu-24.04-arm, windows-latest]
|
||||
python-version: ["3.11", "3.12", "3.13", "3.14"]
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
@@ -92,3 +37,59 @@ jobs:
|
||||
- name: Test with unittest
|
||||
run: |
|
||||
python -m unittest discover tests
|
||||
|
||||
# very_old_python:
|
||||
# if: github.event.pull_request.draft == false
|
||||
# runs-on: ${{ matrix.os }}
|
||||
# strategy:
|
||||
# matrix:
|
||||
# os: [macos-13, windows-2022]
|
||||
# python-version: ["3.6"]
|
||||
|
||||
# steps:
|
||||
# - uses: actions/checkout@v3
|
||||
# - name: "Set up timezone to America/Los_Angeles"
|
||||
# uses: szenius/set-timezone@v1.2
|
||||
# with:
|
||||
# timezoneLinux: "America/Los_Angeles"
|
||||
# timezoneMacos: "America/Los_Angeles"
|
||||
# timezoneWindows: "Pacific Standard Time"
|
||||
# - name: Set up Python ${{ matrix.python-version }}
|
||||
# uses: actions/setup-python@v4
|
||||
# with:
|
||||
# python-version: ${{ matrix.python-version }}
|
||||
# - name: Install dependencies
|
||||
# run: |
|
||||
# python -m pip install --upgrade pip
|
||||
# pip install -r requirements.txt
|
||||
# - name: Test with unittest
|
||||
# run: |
|
||||
# python -m unittest discover tests
|
||||
|
||||
# old_python:
|
||||
# if: github.event.pull_request.draft == false
|
||||
# runs-on: ${{ matrix.os }}
|
||||
# strategy:
|
||||
# matrix:
|
||||
# os: [macos-13, ubuntu-22.04, windows-2022]
|
||||
# python-version: ["3.7", "3.8", "3.9", "3.10"]
|
||||
|
||||
# steps:
|
||||
# - uses: actions/checkout@v3
|
||||
# - name: "Set up timezone to America/Los_Angeles"
|
||||
# uses: szenius/set-timezone@v1.2
|
||||
# with:
|
||||
# timezoneLinux: "America/Los_Angeles"
|
||||
# timezoneMacos: "America/Los_Angeles"
|
||||
# timezoneWindows: "Pacific Standard Time"
|
||||
# - name: Set up Python ${{ matrix.python-version }}
|
||||
# uses: actions/setup-python@v4
|
||||
# with:
|
||||
# python-version: ${{ matrix.python-version }}
|
||||
# - name: Install dependencies
|
||||
# run: |
|
||||
# python -m pip install --upgrade pip
|
||||
# pip install -r requirements.txt
|
||||
# - name: Test with unittest
|
||||
# run: |
|
||||
# python -m unittest discover tests
|
||||
|
||||
13
CHANGELOG
13
CHANGELOG
@@ -1,6 +1,17 @@
|
||||
jc changelog
|
||||
|
||||
202501012 v1.25.6
|
||||
20260330 v1.25.7
|
||||
- Add `typeset` and `declare` Bash internal command parser to convert variables
|
||||
simple arrays, and associative arrays along with object metadata
|
||||
- Enhance `pip-show` command parser to add `-f` show files support
|
||||
- Enhance `rsync` and `rsync-s` parsers to add `--stats` or `--info=stats[1-3]` fields
|
||||
- Fix `hashsum` command parser to correctly parse the `mode` indicator
|
||||
- Fix `dir` command parser for incorrect stripping of the `D:` drive letter
|
||||
- Fix `proc-pid-smaps` proc parser when unknown VmFlags are output
|
||||
- Fix `ifconfig` command parser for incorrect stripping of leading zeros in some hex numbers
|
||||
- Fix `iptables` command parser when Target is blank and verbose output is used
|
||||
|
||||
20251012 v1.25.6
|
||||
- Add `net-localgroup` Windows command parser
|
||||
- Add `net-user` Windows command parser
|
||||
- Add `route-print` Windows command parser
|
||||
|
||||
@@ -123,6 +123,7 @@ pip3 install jc
|
||||
| FreeBSD | `portsnap fetch update && cd /usr/ports/textproc/py-jc && make install clean` |
|
||||
| Ansible filter plugin | `ansible-galaxy collection install community.general` |
|
||||
| FortiSOAR connector | Install from FortiSOAR Connector Marketplace |
|
||||
| Mise-en-place (Linux/MacOS) | `mise use -g jc@latest` |
|
||||
|
||||
> For more OS Packages, see https://repology.org/project/jc/versions.
|
||||
|
||||
|
||||
12
jc/cli.py
12
jc/cli.py
@@ -300,8 +300,8 @@ class JcCli():
|
||||
Pages the parser documentation if a parser is found in the arguments,
|
||||
otherwise the general help text is printed.
|
||||
"""
|
||||
self.indent = 4
|
||||
self.pad = 22
|
||||
self.indent = 2
|
||||
self.pad = 21
|
||||
|
||||
if self.show_categories:
|
||||
utils._safe_print(self.parser_categories_text())
|
||||
@@ -569,7 +569,11 @@ class JcCli():
|
||||
if self.debug:
|
||||
raise
|
||||
|
||||
if e.errno:
|
||||
error_msg = os.strerror(e.errno)
|
||||
else:
|
||||
error_msg = "no further information provided"
|
||||
|
||||
utils.error_message([
|
||||
f'"{file}" file could not be opened: {error_msg}.'
|
||||
])
|
||||
@@ -594,7 +598,11 @@ class JcCli():
|
||||
if self.debug:
|
||||
raise
|
||||
|
||||
if e.errno:
|
||||
error_msg = os.strerror(e.errno)
|
||||
else:
|
||||
error_msg = "no further information provided"
|
||||
|
||||
utils.error_message([
|
||||
f'"{self.magic_run_command_str}" command could not be run: {error_msg}.'
|
||||
])
|
||||
|
||||
@@ -10,7 +10,7 @@ from jc import appdirs
|
||||
from jc import utils
|
||||
|
||||
|
||||
__version__ = '1.25.6'
|
||||
__version__ = '1.25.7'
|
||||
|
||||
parsers: List[str] = [
|
||||
'acpi',
|
||||
@@ -216,6 +216,7 @@ parsers: List[str] = [
|
||||
'traceroute',
|
||||
'traceroute-s',
|
||||
'tune2fs',
|
||||
'typeset',
|
||||
'udevadm',
|
||||
'ufw',
|
||||
'ufw-appinfo',
|
||||
|
||||
@@ -121,7 +121,7 @@ import jc.utils
|
||||
|
||||
class info():
|
||||
"""Provides parser metadata (version, author, etc.)"""
|
||||
version = '1.6'
|
||||
version = '1.7'
|
||||
description = '`dir` command parser'
|
||||
author = 'Rasheed Elsaleh'
|
||||
author_email = 'rasheed@rebelliondefense.com'
|
||||
@@ -184,7 +184,7 @@ def parse(data, raw=False, quiet=False):
|
||||
|
||||
for line in data.splitlines():
|
||||
if line.startswith(" Directory of "):
|
||||
parent_dir = line.lstrip(" Directory of ")
|
||||
parent_dir = line[len(" Directory of "):]
|
||||
continue
|
||||
# skip lines that don't start with a date
|
||||
if not re.match(r'^\d{2}/\d{2}/\d{4}', line):
|
||||
|
||||
@@ -28,6 +28,7 @@ Schema:
|
||||
[
|
||||
{
|
||||
"filename": string,
|
||||
"mode": string,
|
||||
"hash": string,
|
||||
}
|
||||
]
|
||||
@@ -38,37 +39,44 @@ Examples:
|
||||
[
|
||||
{
|
||||
"filename": "devtoolset-3-gcc-4.9.2-6.el7.x86_64.rpm",
|
||||
"mode": "text",
|
||||
"hash": "65fc958c1add637ec23c4b137aecf3d3"
|
||||
},
|
||||
{
|
||||
"filename": "digout",
|
||||
"mode": "text",
|
||||
"hash": "5b9312ee5aff080927753c63a347707d"
|
||||
},
|
||||
{
|
||||
"filename": "dmidecode.out",
|
||||
"mode": "text",
|
||||
"hash": "716fd11c2ac00db109281f7110b8fb9d"
|
||||
},
|
||||
{
|
||||
"filename": "file with spaces in the name",
|
||||
"mode": "text",
|
||||
"hash": "d41d8cd98f00b204e9800998ecf8427e"
|
||||
},
|
||||
{
|
||||
"filename": "id-centos.out",
|
||||
"mode": "text",
|
||||
"hash": "4295be239a14ad77ef3253103de976d2"
|
||||
},
|
||||
{
|
||||
"filename": "ifcfg.json",
|
||||
"mode": "text",
|
||||
"hash": "01fda0d9ba9a75618b072e64ff512b43"
|
||||
},
|
||||
...
|
||||
]
|
||||
"""
|
||||
import re
|
||||
import jc.utils
|
||||
|
||||
|
||||
class info():
|
||||
"""Provides parser metadata (version, author, etc.)"""
|
||||
version = '1.2'
|
||||
version = '1.3'
|
||||
description = 'hashsum command parser (`md5sum`, `shasum`, etc.)'
|
||||
author = 'Kelly Brazil'
|
||||
author_email = 'kellyjonbrazil@gmail.com'
|
||||
@@ -81,6 +89,15 @@ class info():
|
||||
|
||||
__version__ = info.version
|
||||
|
||||
_mode_friendly_names = {
|
||||
" ": "text",
|
||||
"*": "binary",
|
||||
# Perl shasum -- specific
|
||||
"U": "universal",
|
||||
"^": "bits",
|
||||
# BSD-style format only supports binary mode
|
||||
None: "binary"
|
||||
}
|
||||
|
||||
def _process(proc_data):
|
||||
"""
|
||||
@@ -95,7 +112,9 @@ def _process(proc_data):
|
||||
List of Dictionaries. Structured data to conform to the schema.
|
||||
"""
|
||||
|
||||
# no further processing for this parser
|
||||
for entry in proc_data:
|
||||
entry['mode'] = _mode_friendly_names.get(entry['mode'],entry['mode'])
|
||||
|
||||
return proc_data
|
||||
|
||||
|
||||
@@ -127,18 +146,20 @@ def parse(data, raw=False, quiet=False):
|
||||
file_name = line.split('=', maxsplit=1)[0].strip()
|
||||
file_name = file_name[5:]
|
||||
file_name = file_name[:-1]
|
||||
# filler, legacy md5 always uses binary mode
|
||||
file_mode = None
|
||||
# standard md5sum and shasum command output
|
||||
else:
|
||||
file_hash = line.split(maxsplit=1)[0]
|
||||
file_name = line.split(maxsplit=1)[1]
|
||||
m = re.match('([0-9a-f]+) (.)(.*)$', line)
|
||||
if not m:
|
||||
raise ValueError(f'Invalid line format: "{line}"')
|
||||
file_hash, file_mode, file_name = m.groups()
|
||||
|
||||
item = {
|
||||
'filename': file_name,
|
||||
'mode': file_mode,
|
||||
'hash': file_hash
|
||||
}
|
||||
raw_output.append(item)
|
||||
|
||||
if raw:
|
||||
return raw_output
|
||||
else:
|
||||
return _process(raw_output)
|
||||
return raw_output if raw else _process(raw_output)
|
||||
|
||||
@@ -219,7 +219,7 @@ import jc.utils
|
||||
|
||||
class info():
|
||||
"""Provides parser metadata (version, author, etc.)"""
|
||||
version = '2.4'
|
||||
version = '2.5'
|
||||
description = '`ifconfig` command parser'
|
||||
author = 'Kelly Brazil'
|
||||
author_email = 'kellyjonbrazil@gmail.com'
|
||||
@@ -264,7 +264,7 @@ def _process(proc_data: List[JSONDictType]) -> List[JSONDictType]:
|
||||
try:
|
||||
if entry['ipv4_mask'].startswith('0x'):
|
||||
new_mask = entry['ipv4_mask']
|
||||
new_mask = new_mask.lstrip('0x')
|
||||
new_mask = new_mask[2:]
|
||||
new_mask = '.'.join(str(int(i, 16)) for i in [new_mask[i:i + 2] for i in range(0, len(new_mask), 2)])
|
||||
entry['ipv4_mask'] = new_mask
|
||||
except (ValueError, TypeError, AttributeError):
|
||||
@@ -289,7 +289,7 @@ def _process(proc_data: List[JSONDictType]) -> List[JSONDictType]:
|
||||
try:
|
||||
if ip_address['mask'].startswith('0x'):
|
||||
new_mask = ip_address['mask']
|
||||
new_mask = new_mask.lstrip('0x')
|
||||
new_mask = new_mask[2:]
|
||||
new_mask = '.'.join(str(int(i, 16)) for i in [new_mask[i:i + 2] for i in range(0, len(new_mask), 2)])
|
||||
ip_address['mask'] = new_mask
|
||||
except (ValueError, TypeError, AttributeError):
|
||||
|
||||
@@ -173,7 +173,7 @@ import jc.utils
|
||||
|
||||
class info():
|
||||
"""Provides parser metadata (version, author, etc.)"""
|
||||
version = '1.12'
|
||||
version = '1.13'
|
||||
description = '`iptables` command parser'
|
||||
author = 'Kelly Brazil'
|
||||
author_email = 'kellyjonbrazil@gmail.com'
|
||||
@@ -294,9 +294,16 @@ def parse(data, raw=False, quiet=False):
|
||||
|
||||
else:
|
||||
# sometimes the "target" column is blank. Stuff in a dummy character
|
||||
if headers[0] == 'target' and line.startswith(' '):
|
||||
opt_values = {'--', '-f', '!f'}
|
||||
line_split = line.split()
|
||||
if headers[0] == 'target' and line.startswith(' '): # standard output
|
||||
line = '\u2063' + line
|
||||
|
||||
elif headers[0] == 'pkts' and line_split[3] in opt_values: # verbose output
|
||||
first_section = line_split[:2]
|
||||
second_section = line_split[2:]
|
||||
line = ' '.join(first_section) + ' \u2063 ' + ' '.join(second_section)
|
||||
|
||||
rule = line.split(maxsplit=len(headers) - 1)
|
||||
temp_rule = dict(zip(headers, rule))
|
||||
if temp_rule:
|
||||
|
||||
@@ -26,7 +26,10 @@ Schema:
|
||||
"license": string,
|
||||
"location": string,
|
||||
"requires": string,
|
||||
"required_by": string
|
||||
"required_by": string,
|
||||
"files": [
|
||||
string
|
||||
]
|
||||
}
|
||||
]
|
||||
|
||||
@@ -60,13 +63,13 @@ Examples:
|
||||
}
|
||||
]
|
||||
"""
|
||||
from typing import List, Dict, Optional
|
||||
from typing import List, Dict
|
||||
import jc.utils
|
||||
|
||||
|
||||
class info():
|
||||
"""Provides parser metadata (version, author, etc.)"""
|
||||
version = '1.5'
|
||||
version = '1.6'
|
||||
description = '`pip show` command parser'
|
||||
author = 'Kelly Brazil'
|
||||
author_email = 'kellyjonbrazil@gmail.com'
|
||||
@@ -120,6 +123,22 @@ def parse(
|
||||
last_key: str = ''
|
||||
last_key_data: List = []
|
||||
|
||||
def flush_last_key_data() -> None:
|
||||
"""Append buffered continuation lines to the previous field."""
|
||||
nonlocal last_key_data
|
||||
|
||||
if not last_key_data:
|
||||
return
|
||||
|
||||
if last_key == 'files':
|
||||
package[last_key].extend(last_key_data)
|
||||
else:
|
||||
if not isinstance(package[last_key], str):
|
||||
package[last_key] = ''
|
||||
package[last_key] = package[last_key] + '\n' + '\n'.join(last_key_data)
|
||||
|
||||
last_key_data = []
|
||||
|
||||
# Clear any blank lines
|
||||
cleandata = list(filter(None, data.splitlines()))
|
||||
|
||||
@@ -127,8 +146,7 @@ def parse(
|
||||
|
||||
for row in cleandata:
|
||||
if row.startswith('---'):
|
||||
if last_key_data:
|
||||
package[last_key] = package[last_key] + '\n' + '\n'.join(last_key_data)
|
||||
flush_last_key_data()
|
||||
|
||||
raw_output.append(package)
|
||||
package = {}
|
||||
@@ -137,17 +155,17 @@ def parse(
|
||||
continue
|
||||
|
||||
if not row.startswith(' '):
|
||||
item_key = row.split(': ', maxsplit=1)[0].lower().replace('-', '_')
|
||||
item_value: Optional[str] = row.split(': ', maxsplit=1)[1]
|
||||
item_key, item_value = row.split(':', maxsplit=1)
|
||||
item_key = item_key.lower().replace('-', '_')
|
||||
item_value = item_value.lstrip()
|
||||
|
||||
if item_value == '':
|
||||
if item_key == 'files':
|
||||
item_value = []
|
||||
elif item_value == '':
|
||||
item_value = None
|
||||
|
||||
if last_key_data and last_key != item_key:
|
||||
if not isinstance(package[last_key], str):
|
||||
package[last_key] = ''
|
||||
package[last_key] = package[last_key] + '\n' + '\n'.join(last_key_data)
|
||||
last_key_data = []
|
||||
flush_last_key_data()
|
||||
|
||||
package[item_key] = item_value
|
||||
last_key = item_key
|
||||
@@ -158,8 +176,7 @@ def parse(
|
||||
continue
|
||||
|
||||
if package:
|
||||
if last_key_data:
|
||||
package[last_key] = package[last_key] + '\n' + '\n'.join(last_key_data)
|
||||
flush_last_key_data()
|
||||
|
||||
raw_output.append(package)
|
||||
|
||||
|
||||
@@ -168,7 +168,7 @@ import jc.utils
|
||||
|
||||
class info():
|
||||
"""Provides parser metadata (version, author, etc.)"""
|
||||
version = '1.0'
|
||||
version = '1.1'
|
||||
description = '`/proc/<pid>/smaps` file parser'
|
||||
author = 'Kelly Brazil'
|
||||
author_email = 'kellyjonbrazil@gmail.com'
|
||||
@@ -225,13 +225,26 @@ def _process(proc_data: List[Dict]) -> List[Dict]:
|
||||
'ac': 'area is accountable',
|
||||
'nr': 'swap space is not reserved for the area',
|
||||
'ht': 'area uses huge tlb pages',
|
||||
'sf': 'perform synchronous page faults',
|
||||
'nl': 'non-linear mapping',
|
||||
'ar': 'architecture specific flag',
|
||||
'wf': 'wipe on fork',
|
||||
'dd': 'do not include area into core dump',
|
||||
'sd': 'soft-dirty flag',
|
||||
'mm': 'mixed map area',
|
||||
'hg': 'huge page advise flag',
|
||||
'nh': 'no-huge page advise flag',
|
||||
'mg': 'mergable advise flag'
|
||||
'mg': 'mergable advise flag',
|
||||
'bt': 'arm64 BTI guarded page',
|
||||
'mt': 'arm64 MTE allocation tags are enabled',
|
||||
'um': 'userfaultfd missing pages tracking',
|
||||
'uw': 'userfaultfd wprotect pages tracking',
|
||||
'ui': 'userfaultfd minor fault',
|
||||
'ss': 'shadow/guarded control stack page',
|
||||
'sl': 'sealed',
|
||||
'lf': 'lock on fault pages',
|
||||
'dp': 'always lazily freeable mapping',
|
||||
'gu': 'maybe contains guard regions'
|
||||
}
|
||||
|
||||
for entry in proc_data:
|
||||
@@ -245,7 +258,7 @@ def _process(proc_data: List[Dict]) -> List[Dict]:
|
||||
|
||||
if 'VmFlags' in entry:
|
||||
entry['VmFlags'] = entry['VmFlags'].split()
|
||||
entry['VmFlags_pretty'] = [vmflags_map[x] for x in entry['VmFlags']]
|
||||
entry['VmFlags_pretty'] = [vmflags_map.get(x, x) for x in entry['VmFlags']]
|
||||
|
||||
return proc_data
|
||||
|
||||
|
||||
@@ -4,6 +4,8 @@ Supports the `-i` or `--itemize-changes` options with all levels of
|
||||
verbosity. This parser will process the `STDOUT` output or a log file
|
||||
generated with the `--log-file` option.
|
||||
|
||||
The `--stats` or `--info=stats[1-3]` options are also supported.
|
||||
|
||||
Usage (cli):
|
||||
|
||||
$ rsync -i -a source/ dest | jc --rsync
|
||||
@@ -37,7 +39,21 @@ Schema:
|
||||
"false_alarms": integer,
|
||||
"data": integer,
|
||||
"bytes_sec": float,
|
||||
"speedup": float
|
||||
"speedup": float,
|
||||
"total_files": integer,
|
||||
"regular_files": integer,
|
||||
"dir_files": integer,
|
||||
"total_created_files": integer,
|
||||
"created_regular_files": integer,
|
||||
"created_dir_files": integer,
|
||||
"deleted_files": integer,
|
||||
"transferred_files": integer,
|
||||
"transferred_file_size": integer,
|
||||
"literal_data": integer,
|
||||
"matched_data": integer,
|
||||
"file_list_size": integer,
|
||||
"file_list_generation_time": float,
|
||||
"file_list_transfer_time": float,
|
||||
},
|
||||
"files": [
|
||||
{
|
||||
@@ -62,6 +78,8 @@ Schema:
|
||||
}
|
||||
]
|
||||
|
||||
Size values are in bytes.
|
||||
|
||||
[0] 'file sent', 'file received', 'local change or creation',
|
||||
'hard link', 'not updated', 'message'
|
||||
[1] 'file', 'directory', 'symlink', 'device', 'special file'
|
||||
@@ -137,7 +155,7 @@ import jc.utils
|
||||
|
||||
class info():
|
||||
"""Provides parser metadata (version, author, etc.)"""
|
||||
version = '1.2'
|
||||
version = '1.3'
|
||||
description = '`rsync` command parser'
|
||||
author = 'Kelly Brazil'
|
||||
author_email = 'kellyjonbrazil@gmail.com'
|
||||
@@ -163,10 +181,16 @@ def _process(proc_data: List[Dict]) -> List[Dict]:
|
||||
"""
|
||||
int_list = {
|
||||
'process', 'sent', 'received', 'total_size', 'matches', 'hash_hits',
|
||||
'false_alarms', 'data'
|
||||
'false_alarms', 'data', 'total_files', 'regular_files', 'dir_files',
|
||||
'total_created_files', 'created_regular_files', 'created_dir_files',
|
||||
'deleted_files', 'transferred_files', 'transferred_file_size',
|
||||
'literal_data', 'matched_data', 'file_list_size'
|
||||
}
|
||||
|
||||
float_list = {'bytes_sec', 'speedup'}
|
||||
float_list = {
|
||||
'bytes_sec', 'speedup', 'file_list_generation_time',
|
||||
'file_list_transfer_time'
|
||||
}
|
||||
|
||||
for item in proc_data:
|
||||
for key in item['summary']:
|
||||
@@ -338,6 +362,17 @@ def parse(
|
||||
stat2_line_log_v_re = re.compile(r'(?P<date>\d\d\d\d/\d\d/\d\d)\s+(?P<time>\d\d:\d\d:\d\d)\s+\[(?P<process>\d+)\]\s+sent\s+(?P<sent>[\d,]+)\s+bytes\s+received\s+(?P<received>[\d,]+)\s+bytes\s+(?P<bytes_sec>[\d,.]+)\s+bytes/sec')
|
||||
stat3_line_log_v_re = re.compile(r'(?P<date>\d\d\d\d/\d\d/\d\d)\s+(?P<time>\d\d:\d\d:\d\d)\s+\[(?P<process>\d+)]\s+total\s+size\s+is\s+(?P<total_size>[\d,]+)\s+speedup\s+is\s+(?P<speedup>[\d,.]+)')
|
||||
|
||||
stat_ex_files_number_re = re.compile(r'Number\sof\sfiles:\s(?P<files_total>[,0123456789]+)\s\(reg:\s(?P<files_regular>[,0123456789]+),\sdir:\s(?P<files_dir>[,0123456789]+)\)$')
|
||||
stat_ex_files_created_re = re.compile(r'Number\sof\screated\sfiles:\s(?P<files_created_total>[,0123456789]+)\s\(reg:\s(?P<files_created_regular>[,0123456789]+),\sdir:\s(?P<files_created_dir>[,0123456789]+)\)$')
|
||||
stat_ex_files_deleted_re = re.compile(r'Number\sof\sdeleted\sfiles:\s(?P<files_deleted>[,0123456789]+)$')
|
||||
stat_ex_files_transferred_re = re.compile(r'Number\sof\sregular\sfiles\stransferred:\s(?P<files_transferred>[,0123456789]+)$')
|
||||
stat_ex_files_transferred_size_re = re.compile(r'Total\sfile\ssize:\s(?P<files_transferred_size>[,.0123456789]+\S?)\sbytes$')
|
||||
stat_ex_literal_data_re = re.compile(r'Literal\sdata:\s(?P<literal_data>[,.0123456789]+\S?)\sbytes$')
|
||||
stat_ex_matched_data_re = re.compile(r'Matched\sdata:\s(?P<matched_data>[,.0123456789]+\S?)\sbytes$')
|
||||
stat_ex_file_list_size_re = re.compile(r'File\slist\ssize:\s(?P<file_list_size>[,.0123456789]+\S?)$')
|
||||
stat_ex_file_list_generation_time_re = re.compile(r'File\slist\sgeneration\stime:\s(?P<file_list_generation_time>[,.0123456789]+\S?)\sseconds$')
|
||||
stat_ex_file_list_transfer_time_re = re.compile(r'File\slist\stransfer\stime:\s(?P<file_list_transfer_time>[,.0123456789]+\S?)\sseconds$')
|
||||
|
||||
if jc.utils.has_data(data):
|
||||
for line in filter(None, data.splitlines()):
|
||||
|
||||
@@ -451,11 +486,11 @@ def parse(
|
||||
|
||||
stat1_line = stat1_line_re.match(line)
|
||||
if stat1_line:
|
||||
rsync_run['summary'] = {
|
||||
rsync_run['summary'].update({
|
||||
'sent': stat1_line.group('sent'),
|
||||
'received': stat1_line.group('received'),
|
||||
'bytes_sec': stat1_line.group('bytes_sec')
|
||||
}
|
||||
})
|
||||
continue
|
||||
|
||||
stat2_line = stat2_line_re.match(line)
|
||||
@@ -466,11 +501,11 @@ def parse(
|
||||
|
||||
stat1_line_simple = stat1_line_simple_re.match(line)
|
||||
if stat1_line_simple:
|
||||
rsync_run['summary'] = {
|
||||
rsync_run['summary'].update({
|
||||
'sent': stat1_line_simple.group('sent'),
|
||||
'received': stat1_line_simple.group('received'),
|
||||
'bytes_sec': stat1_line_simple.group('bytes_sec')
|
||||
}
|
||||
})
|
||||
continue
|
||||
|
||||
stat2_line_simple = stat2_line_simple_re.match(line)
|
||||
@@ -481,19 +516,19 @@ def parse(
|
||||
|
||||
stat_line_log = stat_line_log_re.match(line)
|
||||
if stat_line_log:
|
||||
rsync_run['summary'] = {
|
||||
rsync_run['summary'].update({
|
||||
'date': stat_line_log.group('date'),
|
||||
'time': stat_line_log.group('time'),
|
||||
'process': stat_line_log.group('process'),
|
||||
'sent': stat_line_log.group('sent'),
|
||||
'received': stat_line_log.group('received'),
|
||||
'total_size': stat_line_log.group('total_size')
|
||||
}
|
||||
})
|
||||
continue
|
||||
|
||||
stat1_line_log_v = stat1_line_log_v_re.match(line)
|
||||
if stat1_line_log_v:
|
||||
rsync_run['summary'] = {
|
||||
rsync_run['summary'].update({
|
||||
'date': stat1_line_log_v.group('date'),
|
||||
'time': stat1_line_log_v.group('time'),
|
||||
'process': stat1_line_log_v.group('process'),
|
||||
@@ -501,7 +536,7 @@ def parse(
|
||||
'hash_hits': stat1_line_log_v.group('hash_hits'),
|
||||
'false_alarms': stat1_line_log_v.group('false_alarms'),
|
||||
'data': stat1_line_log_v.group('data')
|
||||
}
|
||||
})
|
||||
continue
|
||||
|
||||
stat2_line_log_v = stat2_line_log_v_re.match(line)
|
||||
@@ -517,6 +552,61 @@ def parse(
|
||||
rsync_run['summary']['speedup'] = stat3_line_log_v.group('speedup')
|
||||
continue
|
||||
|
||||
# extra stats lines when using rsync --stats or --info=stats[1-3]
|
||||
stat_ex_files_number_v = stat_ex_files_number_re.match(line)
|
||||
if stat_ex_files_number_v:
|
||||
rsync_run['summary']['total_files'] = stat_ex_files_number_v.group('files_total')
|
||||
rsync_run['summary']['regular_files'] = stat_ex_files_number_v.group('files_regular')
|
||||
rsync_run['summary']['dir_files'] = stat_ex_files_number_v.group('files_dir')
|
||||
continue
|
||||
|
||||
stat_ex_files_created_v = stat_ex_files_created_re.match(line)
|
||||
if stat_ex_files_created_v:
|
||||
rsync_run['summary']['total_created_files'] = stat_ex_files_created_v.group('files_created_total')
|
||||
rsync_run['summary']['created_regular_files'] = stat_ex_files_created_v.group('files_created_regular')
|
||||
rsync_run['summary']['created_dir_files'] = stat_ex_files_created_v.group('files_created_dir')
|
||||
continue
|
||||
|
||||
stat_ex_files_deleted_v = stat_ex_files_deleted_re.match(line)
|
||||
if stat_ex_files_deleted_v:
|
||||
rsync_run['summary']['deleted_files'] = stat_ex_files_deleted_v.group('files_deleted')
|
||||
continue
|
||||
|
||||
stat_ex_files_transferred_v = stat_ex_files_transferred_re.match(line)
|
||||
if stat_ex_files_transferred_v:
|
||||
rsync_run['summary']['transferred_files'] = stat_ex_files_transferred_v.group('files_transferred')
|
||||
continue
|
||||
|
||||
stat_ex_files_transferred_size_v = stat_ex_files_transferred_size_re.match(line)
|
||||
if stat_ex_files_transferred_size_v:
|
||||
rsync_run['summary']['transferred_file_size'] = stat_ex_files_transferred_size_v.group('files_transferred_size')
|
||||
continue
|
||||
|
||||
stat_ex_literal_data_v = stat_ex_literal_data_re.match(line)
|
||||
if stat_ex_literal_data_v:
|
||||
rsync_run['summary']['literal_data'] = stat_ex_literal_data_v.group('literal_data')
|
||||
continue
|
||||
|
||||
stat_ex_matched_data_v = stat_ex_matched_data_re.match(line)
|
||||
if stat_ex_matched_data_v:
|
||||
rsync_run['summary']['matched_data'] = stat_ex_matched_data_v.group('matched_data')
|
||||
continue
|
||||
|
||||
stat_ex_file_list_size_v = stat_ex_file_list_size_re.match(line)
|
||||
if stat_ex_file_list_size_v:
|
||||
rsync_run['summary']['file_list_size'] = stat_ex_file_list_size_v.group('file_list_size')
|
||||
continue
|
||||
|
||||
stat_ex_file_list_generation_time_v = stat_ex_file_list_generation_time_re.match(line)
|
||||
if stat_ex_file_list_generation_time_v:
|
||||
rsync_run['summary']['file_list_generation_time'] = stat_ex_file_list_generation_time_v.group('file_list_generation_time')
|
||||
continue
|
||||
|
||||
stat_ex_file_list_transfer_time_v = stat_ex_file_list_transfer_time_re.match(line)
|
||||
if stat_ex_file_list_transfer_time_v:
|
||||
rsync_run['summary']['file_list_transfer_time'] = stat_ex_file_list_transfer_time_v.group('file_list_transfer_time')
|
||||
continue
|
||||
|
||||
raw_output.append(rsync_run)
|
||||
|
||||
# cleanup blank entries
|
||||
|
||||
@@ -7,6 +7,8 @@ Supports the `-i` or `--itemize-changes` options with all levels of
|
||||
verbosity. This parser will process the `STDOUT` output or a log file
|
||||
generated with the `--log-file` option.
|
||||
|
||||
The `--stats` or `--info=stats[1-3]` options are also supported.
|
||||
|
||||
Usage (cli):
|
||||
|
||||
$ rsync -i -a source/ dest | jc --rsync-s
|
||||
@@ -64,6 +66,8 @@ Schema:
|
||||
}
|
||||
}
|
||||
|
||||
Size values are in bytes.
|
||||
|
||||
[0] 'file sent', 'file received', 'local change or creation',
|
||||
'hard link', 'not updated', 'message'
|
||||
[1] 'file', 'directory', 'symlink', 'device', 'special file'
|
||||
@@ -88,7 +92,7 @@ from jc.streaming import (
|
||||
|
||||
class info():
|
||||
"""Provides parser metadata (version, author, etc.)"""
|
||||
version = '1.3'
|
||||
version = '1.4'
|
||||
description = '`rsync` command streaming parser'
|
||||
author = 'Kelly Brazil'
|
||||
author_email = 'kellyjonbrazil@gmail.com'
|
||||
@@ -114,10 +118,16 @@ def _process(proc_data: Dict) -> Dict:
|
||||
"""
|
||||
int_list = {
|
||||
'process', 'sent', 'received', 'total_size', 'matches', 'hash_hits',
|
||||
'false_alarms', 'data'
|
||||
'false_alarms', 'data', 'total_files', 'regular_files', 'dir_files',
|
||||
'total_created_files', 'created_regular_files', 'created_dir_files',
|
||||
'deleted_files', 'transferred_files', 'transferred_file_size',
|
||||
'literal_data', 'matched_data', 'file_list_size'
|
||||
}
|
||||
|
||||
float_list = {'bytes_sec', 'speedup'}
|
||||
float_list = {
|
||||
'bytes_sec', 'speedup', 'file_list_generation_time',
|
||||
'file_list_transfer_time'
|
||||
}
|
||||
|
||||
for key in proc_data.copy():
|
||||
if key in int_list:
|
||||
@@ -281,6 +291,17 @@ def parse(
|
||||
stat2_line_log_v_re = re.compile(r'(?P<date>\d\d\d\d/\d\d/\d\d)\s+(?P<time>\d\d:\d\d:\d\d)\s+\[(?P<process>\d+)\]\s+sent\s+(?P<sent>[\d,]+)\s+bytes\s+received\s+(?P<received>[\d,]+)\s+bytes\s+(?P<bytes_sec>[\d,.]+)\s+bytes/sec')
|
||||
stat3_line_log_v_re = re.compile(r'(?P<date>\d\d\d\d/\d\d/\d\d)\s+(?P<time>\d\d:\d\d:\d\d)\s+\[(?P<process>\d+)]\s+total\s+size\s+is\s+(?P<total_size>[\d,]+)\s+speedup\s+is\s+(?P<speedup>[\d,.]+)')
|
||||
|
||||
stat_ex_files_number_re = re.compile(r'Number\sof\sfiles:\s(?P<files_total>[,0123456789]+)\s\(reg:\s(?P<files_regular>[,0123456789]+),\sdir:\s(?P<files_dir>[,0123456789]+)\)$')
|
||||
stat_ex_files_created_re = re.compile(r'Number\sof\screated\sfiles:\s(?P<files_created_total>[,0123456789]+)\s\(reg:\s(?P<files_created_regular>[,0123456789]+),\sdir:\s(?P<files_created_dir>[,0123456789]+)\)$')
|
||||
stat_ex_files_deleted_re = re.compile(r'Number\sof\sdeleted\sfiles:\s(?P<files_deleted>[,0123456789]+)$')
|
||||
stat_ex_files_transferred_re = re.compile(r'Number\sof\sregular\sfiles\stransferred:\s(?P<files_transferred>[,0123456789]+)$')
|
||||
stat_ex_files_transferred_size_re = re.compile(r'Total\sfile\ssize:\s(?P<files_transferred_size>[,.0123456789]+\S?)\sbytes$')
|
||||
stat_ex_literal_data_re = re.compile(r'Literal\sdata:\s(?P<literal_data>[,.0123456789]+\S?)\sbytes$')
|
||||
stat_ex_matched_data_re = re.compile(r'Matched\sdata:\s(?P<matched_data>[,.0123456789]+\S?)\sbytes$')
|
||||
stat_ex_file_list_size_re = re.compile(r'File\slist\ssize:\s(?P<file_list_size>[,.0123456789]+\S?)$')
|
||||
stat_ex_file_list_generation_time_re = re.compile(r'File\slist\sgeneration\stime:\s(?P<file_list_generation_time>[,.0123456789]+\S?)\sseconds$')
|
||||
stat_ex_file_list_transfer_time_re = re.compile(r'File\slist\stransfer\stime:\s(?P<file_list_transfer_time>[,.0123456789]+\S?)\sseconds$')
|
||||
|
||||
for line in data:
|
||||
try:
|
||||
streaming_line_input_type_check(line)
|
||||
@@ -408,12 +429,12 @@ def parse(
|
||||
|
||||
stat1_line = stat1_line_re.match(line)
|
||||
if stat1_line:
|
||||
summary = {
|
||||
summary.update({
|
||||
'type': 'summary',
|
||||
'sent': stat1_line.group('sent'),
|
||||
'received': stat1_line.group('received'),
|
||||
'bytes_sec': stat1_line.group('bytes_sec')
|
||||
}
|
||||
})
|
||||
continue
|
||||
|
||||
stat2_line = stat2_line_re.match(line)
|
||||
@@ -424,12 +445,12 @@ def parse(
|
||||
|
||||
stat1_line_simple = stat1_line_simple_re.match(line)
|
||||
if stat1_line_simple:
|
||||
summary = {
|
||||
summary.update({
|
||||
'type': 'summary',
|
||||
'sent': stat1_line_simple.group('sent'),
|
||||
'received': stat1_line_simple.group('received'),
|
||||
'bytes_sec': stat1_line_simple.group('bytes_sec')
|
||||
}
|
||||
})
|
||||
continue
|
||||
|
||||
stat2_line_simple = stat2_line_simple_re.match(line)
|
||||
@@ -440,7 +461,7 @@ def parse(
|
||||
|
||||
stat_line_log = stat_line_log_re.match(line)
|
||||
if stat_line_log:
|
||||
summary = {
|
||||
summary.update({
|
||||
'type': 'summary',
|
||||
'date': stat_line_log.group('date'),
|
||||
'time': stat_line_log.group('time'),
|
||||
@@ -448,12 +469,12 @@ def parse(
|
||||
'sent': stat_line_log.group('sent'),
|
||||
'received': stat_line_log.group('received'),
|
||||
'total_size': stat_line_log.group('total_size')
|
||||
}
|
||||
})
|
||||
continue
|
||||
|
||||
stat1_line_log_v = stat1_line_log_v_re.match(line)
|
||||
if stat1_line_log_v:
|
||||
summary = {
|
||||
summary.update({
|
||||
'type': 'summary',
|
||||
'date': stat1_line_log_v.group('date'),
|
||||
'time': stat1_line_log_v.group('time'),
|
||||
@@ -462,7 +483,7 @@ def parse(
|
||||
'hash_hits': stat1_line_log_v.group('hash_hits'),
|
||||
'false_alarms': stat1_line_log_v.group('false_alarms'),
|
||||
'data': stat1_line_log_v.group('data')
|
||||
}
|
||||
})
|
||||
continue
|
||||
|
||||
stat2_line_log_v = stat2_line_log_v_re.match(line)
|
||||
@@ -478,6 +499,61 @@ def parse(
|
||||
summary['speedup'] = stat3_line_log_v.group('speedup')
|
||||
continue
|
||||
|
||||
# extra stats lines when using rsync --stats or --info=stats[1-3]
|
||||
stat_ex_files_number_v = stat_ex_files_number_re.match(line)
|
||||
if stat_ex_files_number_v:
|
||||
summary['total_files'] = stat_ex_files_number_v.group('files_total')
|
||||
summary['regular_files'] = stat_ex_files_number_v.group('files_regular')
|
||||
summary['dir_files'] = stat_ex_files_number_v.group('files_dir')
|
||||
continue
|
||||
|
||||
stat_ex_files_created_v = stat_ex_files_created_re.match(line)
|
||||
if stat_ex_files_created_v:
|
||||
summary['total_created_files'] = stat_ex_files_created_v.group('files_created_total')
|
||||
summary['created_regular_files'] = stat_ex_files_created_v.group('files_created_regular')
|
||||
summary['created_dir_files'] = stat_ex_files_created_v.group('files_created_dir')
|
||||
continue
|
||||
|
||||
stat_ex_files_deleted_v = stat_ex_files_deleted_re.match(line)
|
||||
if stat_ex_files_deleted_v:
|
||||
summary['deleted_files'] = stat_ex_files_deleted_v.group('files_deleted')
|
||||
continue
|
||||
|
||||
stat_ex_files_transferred_v = stat_ex_files_transferred_re.match(line)
|
||||
if stat_ex_files_transferred_v:
|
||||
summary['transferred_files'] = stat_ex_files_transferred_v.group('files_transferred')
|
||||
continue
|
||||
|
||||
stat_ex_files_transferred_size_v = stat_ex_files_transferred_size_re.match(line)
|
||||
if stat_ex_files_transferred_size_v:
|
||||
summary['transferred_file_size'] = stat_ex_files_transferred_size_v.group('files_transferred_size')
|
||||
continue
|
||||
|
||||
stat_ex_literal_data_v = stat_ex_literal_data_re.match(line)
|
||||
if stat_ex_literal_data_v:
|
||||
summary['literal_data'] = stat_ex_literal_data_v.group('literal_data')
|
||||
continue
|
||||
|
||||
stat_ex_matched_data_v = stat_ex_matched_data_re.match(line)
|
||||
if stat_ex_matched_data_v:
|
||||
summary['matched_data'] = stat_ex_matched_data_v.group('matched_data')
|
||||
continue
|
||||
|
||||
stat_ex_file_list_size_v = stat_ex_file_list_size_re.match(line)
|
||||
if stat_ex_file_list_size_v:
|
||||
summary['file_list_size'] = stat_ex_file_list_size_v.group('file_list_size')
|
||||
continue
|
||||
|
||||
stat_ex_file_list_generation_time_v = stat_ex_file_list_generation_time_re.match(line)
|
||||
if stat_ex_file_list_generation_time_v:
|
||||
summary['file_list_generation_time'] = stat_ex_file_list_generation_time_v.group('file_list_generation_time')
|
||||
continue
|
||||
|
||||
stat_ex_file_list_transfer_time_v = stat_ex_file_list_transfer_time_re.match(line)
|
||||
if stat_ex_file_list_transfer_time_v:
|
||||
summary['file_list_transfer_time'] = stat_ex_file_list_transfer_time_v.group('file_list_transfer_time')
|
||||
continue
|
||||
|
||||
except Exception as e:
|
||||
yield raise_or_yield(ignore_exceptions, e, line)
|
||||
|
||||
@@ -488,3 +564,6 @@ def parse(
|
||||
|
||||
except Exception as e:
|
||||
yield raise_or_yield(ignore_exceptions, e, '')
|
||||
|
||||
# unused return for Mypy
|
||||
return []
|
||||
|
||||
335
jc/parsers/typeset.py
Normal file
335
jc/parsers/typeset.py
Normal file
@@ -0,0 +1,335 @@
|
||||
r"""jc - JSON Convert `typeset` and `declare` Bash internal command output parser
|
||||
|
||||
Convert `typeset` and `declare` bash internal commands with no options or the
|
||||
following: `-a`, `-A`, `-i`, `-l`, `-p`, `-r`, `-u`, and `-x`
|
||||
|
||||
Note: function parsing is not supported (e.g. `-f` or `-F`)
|
||||
|
||||
Usage (cli):
|
||||
|
||||
$ typeset | jc --typeset
|
||||
|
||||
Usage (module):
|
||||
|
||||
import jc
|
||||
result = jc.parse('typeset', typeset_command_output)
|
||||
|
||||
Schema:
|
||||
|
||||
[
|
||||
{
|
||||
"name": string,
|
||||
"value": string/integer/array/object/null, # [0]
|
||||
"type": string, # [1]
|
||||
"readonly": boolean/null,
|
||||
"integer": boolean/null,
|
||||
"lowercase": boolean/null,
|
||||
"uppercase": boolean/null,
|
||||
"exported": boolean/null
|
||||
}
|
||||
]
|
||||
|
||||
Key/value pairs other than `name`, `value`, and `type` will only be non-null
|
||||
when the information is available from the `typeset` or `declare` output.
|
||||
|
||||
If declare options are not given to `jc` within the `typeset` output, then
|
||||
it will assume all arrays are simple `array` type.
|
||||
|
||||
[0] Based on type. `variable` type is null if not set, a string when the
|
||||
bash variable is set unless the `integer` field is set to `True`, then
|
||||
the type is integer. `array` type is an array of strings or integers as
|
||||
above. `associative` type is an object of key/value pairs where values
|
||||
are strings or integers as above. Objects have the schema of:
|
||||
|
||||
{
|
||||
"<key1>": string/integer,
|
||||
"<key2>": string/integer
|
||||
}
|
||||
|
||||
[1] Possible values: `variable`, `array`, or `associative`
|
||||
|
||||
Examples:
|
||||
|
||||
$ typeset -p | jc --typeset -p
|
||||
[
|
||||
{
|
||||
"name": "associative_array",
|
||||
"value": {
|
||||
"key2": "abc",
|
||||
"key3": "1 2 3",
|
||||
"key1": "hello \"world\""
|
||||
},
|
||||
"type": "associative",
|
||||
"readonly": false,
|
||||
"integer": false,
|
||||
"lowercase": false,
|
||||
"uppercase": false,
|
||||
"exported": false
|
||||
},
|
||||
{
|
||||
"name": "integers_associative_array",
|
||||
"value": {
|
||||
"one": 1,
|
||||
"two": 500,
|
||||
"three": 999
|
||||
},
|
||||
"type": "associative",
|
||||
"readonly": false,
|
||||
"integer": true,
|
||||
"lowercase": false,
|
||||
"uppercase": false,
|
||||
"exported": false
|
||||
}
|
||||
]
|
||||
|
||||
$ typeset -p | jc --typeset -p -r
|
||||
[
|
||||
{
|
||||
"name": "associative_array",
|
||||
"value": {
|
||||
"key2": "abc",
|
||||
"key3": "1 2 3",
|
||||
"key1": "hello \"world\""
|
||||
},
|
||||
"type": "associative",
|
||||
"readonly": false,
|
||||
"integer": false,
|
||||
"lowercase": false,
|
||||
"uppercase": false,
|
||||
"exported": false
|
||||
},
|
||||
{
|
||||
"name": "integers_associative_array",
|
||||
"value": {
|
||||
"one": "1",
|
||||
"two": "500",
|
||||
"three": "999"
|
||||
},
|
||||
"type": "associative",
|
||||
"readonly": false,
|
||||
"integer": true,
|
||||
"lowercase": false,
|
||||
"uppercase": false,
|
||||
"exported": false
|
||||
}
|
||||
]
|
||||
"""
|
||||
import shlex
|
||||
import re
|
||||
from typing import List, Dict
|
||||
from jc.jc_types import JSONDictType
|
||||
import jc.utils
|
||||
|
||||
|
||||
class info():
    """
    Provides parser metadata (version, author, etc.)

    These class attributes are read by the jc framework to register the
    parser and report its version and platform compatibility.
    """
    version = '1.0'
    description = '`typeset` and `declare` command parser'
    author = 'Kelly Brazil'
    author_email = 'kellyjonbrazil@gmail.com'
    # platforms this parser is marked compatible with
    compatible = ['linux', 'darwin', 'cygwin', 'win32', 'aix', 'freebsd']
    # category tags used by jc to group parsers
    tags = ['command']
|
||||
|
||||
|
||||
__version__ = info.version
|
||||
|
||||
# name=value where the value is not an array body (does not start with "(" or "[")
VAR_DEF_PATTERN = re.compile(r'(?P<name>[a-zA-Z_][a-zA-Z0-9_]*)=(?P<val>[^(][^[].+)$')
# name=([0]="x" [1]="y" ...) — indexed (simple) array definition
SIMPLE_ARRAY_DEF_PATTERN = re.compile(r'(?P<name>[a-zA-Z_][a-zA-Z0-9_]*)=(?P<body>\(\[\d+\]=.+\))$')
# name=([key]="x" ...) — associative array definition with identifier keys
ASSOCIATIVE_ARRAY_DEF_PATTERN = re.compile(r'(?P<name>[a-zA-Z_][a-zA-Z0-9_]*)=(?P<body>\(\[[a-zA-Z_][a-zA-Z0-9_]*\]=.+\))$')
# name=() — array declared with no elements
EMPTY_ARRAY_DEF_PATTERN = re.compile(r'(?P<name>[a-zA-Z_][a-zA-Z0-9_]*)=\(\)$')
# "declare <opts> name" with no "=value" — variable declared but unset
EMPTY_VAR_DEF_PATTERN = re.compile(r'declare\s.+\s(?P<name>[a-zA-Z_][a-zA-Z0-9_]*)$')
# captures the option string (e.g. "-aru") that follows the "declare" keyword
DECLARE_OPTS_PATTERN = re.compile(r'declare\s(?P<options>.+?)\s[a-zA-Z_][a-zA-Z0-9_]*')
|
||||
|
||||
|
||||
def _process(proc_data: List[JSONDictType]) -> List[JSONDictType]:
    """
    Final processing to conform to the schema.

    Parameters:

        proc_data: (List of Dictionaries) raw structured data to process

    Returns:

        List of Dictionaries. Structured to conform to the schema.
    """
    for entry in proc_data:
        # only entries flagged as integer need their values converted
        if not entry['integer']:
            continue

        val = entry['value']

        if entry['type'] == 'variable':
            entry['value'] = jc.utils.convert_to_int(val)

        elif entry['type'] == 'array' and isinstance(val, list):
            entry['value'] = [jc.utils.convert_to_int(num) for num in val]

        # associative arrays, or arrays whose values were parsed as a mapping
        elif entry['type'] == 'associative' \
                or (entry['type'] == 'array' and isinstance(val, dict)):
            entry['value'] = {key: jc.utils.convert_to_int(num)
                              for key, num in val.items()}

    return proc_data
|
||||
|
||||
|
||||
def _get_simple_array_vals(body: str) -> List[str]:
    """Convert an indexed-array body like `([0]="a" [1]="b")` into a list of values."""
    elements = shlex.split(_remove_bookends(body))
    result: List[str] = []
    for element in elements:
        # each element looks like [N]="value"; keep only the value part
        _, raw_val = element.split('=', maxsplit=1)
        result.append(_remove_quotes(raw_val))
    return result
|
||||
|
||||
|
||||
def _get_associative_array_vals(body: str) -> Dict[str, str]:
    """Convert an associative-array body like `([k]="v" ...)` into a dictionary."""
    result: Dict = {}
    for element in shlex.split(_remove_bookends(body)):
        # each element looks like [key]="value"
        raw_key, raw_val = element.split('=', maxsplit=1)
        result[_remove_bookends(raw_key, '[', ']')] = raw_val
    return result
|
||||
|
||||
|
||||
def _get_declare_options(line: str, type_hint: str = 'variable') -> Dict:
    """
    Extract variable attribute flags from a `declare` line.

    Parameters:

        line:      (string) the full `declare ...` output line
        type_hint: (string) type to assume when no `-a`/`-A` option is seen

    Returns:

        Dictionary of `type` plus the boolean attribute fields. Attribute
        values stay `None` when the line has no `declare` option prefix
        (plain `typeset` output carries no attribute information).
    """
    flag_names = {
        'r': 'readonly',
        'i': 'integer',
        'l': 'lowercase',
        'u': 'uppercase',
        'x': 'exported'
    }

    result: Dict = {'type': type_hint}
    for name in flag_names.values():
        result[name] = None

    opts_match = re.match(DECLARE_OPTS_PATTERN, line)
    if opts_match:
        option_chars = opts_match['options']

        for char in option_chars:
            if char in flag_names:
                result[flag_names[char]] = True

        if 'a' in option_chars:
            result['type'] = 'array'
        elif 'A' in option_chars:
            result['type'] = 'associative'

        # a declare prefix was present, so any flag not seen is known False
        for name in flag_names.values():
            if result[name] is None:
                result[name] = False

    return result
|
||||
|
||||
|
||||
def _remove_bookends(data: str, start_char: str = '(', end_char: str = ')') -> str:
|
||||
if data.startswith(start_char) and data.endswith(end_char):
|
||||
return data[1:-1]
|
||||
return data
|
||||
|
||||
|
||||
def _remove_quotes(data: str, remove_char: str ='"') -> str:
|
||||
if data.startswith(remove_char) and data.endswith(remove_char):
|
||||
return data[1:-1]
|
||||
return data
|
||||
|
||||
|
||||
def parse(
    data: str,
    raw: bool = False,
    quiet: bool = False
) -> List[JSONDictType]:
    """
    Main text parsing function

    Parameters:

        data:        (string) text data to parse
        raw:         (boolean) unprocessed output if True
        quiet:       (boolean) suppress warning messages if True

    Returns:

        List of Dictionaries. Raw or processed structured data.
    """
    jc.utils.compatibility(__name__, info.compatible, quiet)
    jc.utils.input_type_check(data)

    raw_output: List[Dict] = []

    if jc.utils.has_data(data):

        for line in filter(None, data.splitlines()):

            # start every entry with the full schema so unmatched attribute
            # fields remain None in the output
            entry: Dict = {
                "name": '',
                "value": '',
                "type": None,
                "readonly": None,
                "integer": None,
                "lowercase": None,
                "uppercase": None,
                "exported": None
            }

            # regular variable (name=value)
            match = re.search(VAR_DEF_PATTERN, line)
            if match:
                entry['name'] = match['name']
                entry['value'] = _remove_quotes(match['val'])
                entry.update(_get_declare_options(line, 'variable'))
                raw_output.append(entry)
                continue

            # declared but unset variable
            match = re.search(EMPTY_VAR_DEF_PATTERN, line)
            if match:
                entry['name'] = match['name']
                entry['value'] = None
                entry.update(_get_declare_options(line, 'variable'))
                raw_output.append(entry)
                continue

            # indexed (simple) array
            match = re.search(SIMPLE_ARRAY_DEF_PATTERN, line)
            if match:
                entry['name'] = match['name']
                entry['value'] = _get_simple_array_vals(match['body'])
                entry.update(_get_declare_options(line, 'array'))
                raw_output.append(entry)
                continue

            # associative array
            match = re.search(ASSOCIATIVE_ARRAY_DEF_PATTERN, line)
            if match:
                entry['name'] = match['name']
                entry['value'] = _get_associative_array_vals(match['body'])
                entry.update(_get_declare_options(line, 'associative'))
                raw_output.append(entry)
                continue

            # array declared with no elements
            match = re.search(EMPTY_ARRAY_DEF_PATTERN, line)
            if match:
                entry['name'] = match['name']
                entry['value'] = []
                entry.update(_get_declare_options(line, 'array'))
                raw_output.append(entry)
                continue

    return raw_output if raw else _process(raw_output)
|
||||
3
setup.py
3
setup.py
@@ -1,11 +1,12 @@
|
||||
import setuptools
|
||||
|
||||
|
||||
with open('README.md', 'r') as f:
|
||||
long_description = f.read()
|
||||
|
||||
setuptools.setup(
|
||||
name='jc',
|
||||
version='1.25.6',
|
||||
version='1.25.7',
|
||||
author='Kelly Brazil',
|
||||
author_email='kellyjonbrazil@gmail.com',
|
||||
description='Converts the output of popular command-line tools and file-types to JSON.',
|
||||
|
||||
1
tests/fixtures/centos-7.7/md5sum-raw.json
vendored
Normal file
1
tests/fixtures/centos-7.7/md5sum-raw.json
vendored
Normal file
File diff suppressed because one or more lines are too long
2
tests/fixtures/centos-7.7/md5sum.json
vendored
2
tests/fixtures/centos-7.7/md5sum.json
vendored
File diff suppressed because one or more lines are too long
1
tests/fixtures/centos-7.7/sha256sum-raw.json
vendored
Normal file
1
tests/fixtures/centos-7.7/sha256sum-raw.json
vendored
Normal file
File diff suppressed because one or more lines are too long
2
tests/fixtures/centos-7.7/sha256sum.json
vendored
2
tests/fixtures/centos-7.7/sha256sum.json
vendored
File diff suppressed because one or more lines are too long
1
tests/fixtures/centos-7.7/sha384sum-raw.json
vendored
Normal file
1
tests/fixtures/centos-7.7/sha384sum-raw.json
vendored
Normal file
File diff suppressed because one or more lines are too long
2
tests/fixtures/centos-7.7/sha384sum.json
vendored
2
tests/fixtures/centos-7.7/sha384sum.json
vendored
File diff suppressed because one or more lines are too long
1
tests/fixtures/generic/iptables-no-jump2.json
vendored
Normal file
1
tests/fixtures/generic/iptables-no-jump2.json
vendored
Normal file
@@ -0,0 +1 @@
|
||||
[{"chain":"INPUT","default_policy":"ACCEPT","default_packets":0,"default_bytes":0,"rules":[{"pkts":17,"bytes":1172,"target":null,"prot":"all","opt":null,"in":"*","out":"*","source":"0.0.0.0/0","destination":"0.0.0.0/0"}]}]
|
||||
3
tests/fixtures/generic/iptables-no-jump2.out
vendored
Normal file
3
tests/fixtures/generic/iptables-no-jump2.out
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
Chain INPUT (policy ACCEPT 0 packets, 0 bytes)
|
||||
pkts bytes target prot opt in out source destination
|
||||
17 1172 all -- * * 0.0.0.0/0 0.0.0.0/0
|
||||
1
tests/fixtures/generic/rsync-i-stats-streaming.json
vendored
Normal file
1
tests/fixtures/generic/rsync-i-stats-streaming.json
vendored
Normal file
@@ -0,0 +1 @@
|
||||
[{"total_files":23784,"regular_files":23191,"dir_files":593,"total_created_files":2651,"created_regular_files":2611,"created_dir_files":40,"deleted_files":0,"transferred_files":2629,"transferred_file_size":6880000000000,"literal_data":0,"matched_data":0,"file_list_size":98100,"file_list_generation_time":0.001,"file_list_transfer_time":0.0,"type":"summary","sent":8990,"received":1290000,"bytes_sec":370210.0,"total_size":6880000000000,"speedup":5311650.06}]
|
||||
1
tests/fixtures/generic/rsync-i-stats.json
vendored
Normal file
1
tests/fixtures/generic/rsync-i-stats.json
vendored
Normal file
@@ -0,0 +1 @@
|
||||
[{"summary":{"total_files":23784,"regular_files":23191,"dir_files":593,"total_created_files":2651,"created_regular_files":2611,"created_dir_files":40,"deleted_files":0,"transferred_files":2629,"transferred_file_size":6880000000000,"literal_data":0,"matched_data":0,"file_list_size":98100,"file_list_generation_time":0.001,"file_list_transfer_time":0.0,"sent":8990,"received":1290000,"bytes_sec":370210.0,"total_size":6880000000000,"speedup":5311650.06},"files":[]}]
|
||||
56
tests/fixtures/generic/rsync-i-stats.out
vendored
Normal file
56
tests/fixtures/generic/rsync-i-stats.out
vendored
Normal file
@@ -0,0 +1,56 @@
|
||||
rsync[1817530] (server sender) heap statistics:
|
||||
arena: 1204224 (bytes from sbrk)
|
||||
ordblks: 46 (chunks not in use)
|
||||
|
||||
rsync[1007426] (receiver) heap statistics:
|
||||
arena: 9244672 (bytes from sbrk)
|
||||
ordblks: 57 (chunks not in use)
|
||||
smblks: 1 (free fastbin blocks)
|
||||
hblks: 1 (chunks from mmap)
|
||||
hblkhd: 266240 (bytes from mmap)
|
||||
allmem: 9510912 (bytes from sbrk + mmap)
|
||||
usmblks: 0 (always 0)
|
||||
fsmblks: 96 (bytes in freed fastbin blocks)
|
||||
uordblks: 486480 (bytes used)
|
||||
fordblks: 8758192 (bytes free)
|
||||
keepcost: 133856 (bytes in releasable chunk)
|
||||
smblks: 2 (free fastbin blocks)
|
||||
hblks: 1 (chunks from mmap)
|
||||
hblkhd: 266240 (bytes from mmap)
|
||||
allmem: 1470464 (bytes from sbrk + mmap)
|
||||
usmblks: 0 (always 0)
|
||||
fsmblks: 192 (bytes in freed fastbin blocks)
|
||||
uordblks: 478288 (bytes used)
|
||||
fordblks: 725936 (bytes free)
|
||||
keepcost: 427216 (bytes in releasable chunk)
|
||||
|
||||
rsync[1007424] (generator) heap statistics:
|
||||
arena: 1384448 (bytes from sbrk)
|
||||
ordblks: 6 (chunks not in use)
|
||||
smblks: 1 (free fastbin blocks)
|
||||
hblks: 1 (chunks from mmap)
|
||||
hblkhd: 266240 (bytes from mmap)
|
||||
allmem: 1650688 (bytes from sbrk + mmap)
|
||||
usmblks: 0 (always 0)
|
||||
fsmblks: 96 (bytes in freed fastbin blocks)
|
||||
uordblks: 486160 (bytes used)
|
||||
fordblks: 898288 (bytes free)
|
||||
keepcost: 132272 (bytes in releasable chunk)
|
||||
|
||||
Number of files: 23,784 (reg: 23,191, dir: 593)
|
||||
Number of created files: 2,651 (reg: 2,611, dir: 40)
|
||||
Number of deleted files: 0
|
||||
Number of regular files transferred: 2,629
|
||||
Total file size: 6.88T bytes
|
||||
Total transferred file size: 759.17G bytes
|
||||
Literal data: 0 bytes
|
||||
Matched data: 0 bytes
|
||||
File list size: 98.10K
|
||||
File list generation time: 0.001 seconds
|
||||
File list transfer time: 0.000 seconds
|
||||
Total bytes sent: 8.99K
|
||||
Total bytes received: 1.29M
|
||||
|
||||
sent 8.99K bytes received 1.29M bytes 370.21K bytes/sec
|
||||
total size is 6.88T speedup is 5,311,650.06 (DRY RUN)
|
||||
|
||||
1
tests/fixtures/generic/typeset--a.json
vendored
Normal file
1
tests/fixtures/generic/typeset--a.json
vendored
Normal file
@@ -0,0 +1 @@
|
||||
[{"name":"BASH_ARGC","value":[],"type":"array","readonly":false,"integer":false,"lowercase":false,"uppercase":false,"exported":false},{"name":"BASH_ARGV","value":[],"type":"array","readonly":false,"integer":false,"lowercase":false,"uppercase":false,"exported":false},{"name":"BASH_LINENO","value":[],"type":"array","readonly":false,"integer":false,"lowercase":false,"uppercase":false,"exported":false},{"name":"BASH_SOURCE","value":[],"type":"array","readonly":false,"integer":false,"lowercase":false,"uppercase":false,"exported":false},{"name":"BASH_VERSINFO","value":["5","3","9","1","release","aarch64-apple-darwin24.6.0"],"type":"array","readonly":true,"integer":false,"lowercase":false,"uppercase":false,"exported":false},{"name":"DIRSTACK","value":[],"type":"array","readonly":false,"integer":false,"lowercase":false,"uppercase":false,"exported":false},{"name":"FUNCNAME","value":null,"type":"array","readonly":false,"integer":false,"lowercase":false,"uppercase":false,"exported":false},{"name":"GROUPS","value":[],"type":"array","readonly":false,"integer":false,"lowercase":false,"uppercase":false,"exported":false},{"name":"PIPESTATUS","value":["0"],"type":"array","readonly":false,"integer":false,"lowercase":false,"uppercase":false,"exported":false},{"name":"integers_array","value":[1,100,999],"type":"array","readonly":false,"integer":true,"lowercase":false,"uppercase":false,"exported":false},{"name":"simple_array","value":["hello \"world\"","abc","1 2 3"],"type":"array","readonly":false,"integer":false,"lowercase":false,"uppercase":false,"exported":false},{"name":"uppercase_array","value":["ABC","123","XYZ"],"type":"array","readonly":true,"integer":false,"lowercase":false,"uppercase":true,"exported":false}]
|
||||
12
tests/fixtures/generic/typeset--a.out
vendored
Normal file
12
tests/fixtures/generic/typeset--a.out
vendored
Normal file
@@ -0,0 +1,12 @@
|
||||
declare -a BASH_ARGC=()
|
||||
declare -a BASH_ARGV=()
|
||||
declare -a BASH_LINENO=()
|
||||
declare -a BASH_SOURCE=()
|
||||
declare -ar BASH_VERSINFO=([0]="5" [1]="3" [2]="9" [3]="1" [4]="release" [5]="aarch64-apple-darwin24.6.0")
|
||||
declare -a DIRSTACK=()
|
||||
declare -a FUNCNAME
|
||||
declare -a GROUPS=()
|
||||
declare -a PIPESTATUS=([0]="0")
|
||||
declare -ai integers_array=([0]="1" [1]="100" [2]="999")
|
||||
declare -a simple_array=([0]="hello \"world\"" [1]="abc" [2]="1 2 3")
|
||||
declare -aru uppercase_array=([0]="ABC" [1]="123" [2]="XYZ")
|
||||
1
tests/fixtures/generic/typeset--capital-a.json
vendored
Normal file
1
tests/fixtures/generic/typeset--capital-a.json
vendored
Normal file
@@ -0,0 +1 @@
|
||||
[{"name":"BASH_ALIASES","value":[],"type":"associative","readonly":false,"integer":false,"lowercase":false,"uppercase":false,"exported":false},{"name":"BASH_CMDS","value":[],"type":"associative","readonly":false,"integer":false,"lowercase":false,"uppercase":false,"exported":false},{"name":"associative_array","value":{"key2":"abc","key3":"1 2 3","key1":"hello \"world\""},"type":"associative","readonly":false,"integer":false,"lowercase":false,"uppercase":false,"exported":false},{"name":"integers_associative_array","value":{"one":1,"two":500,"three":999},"type":"associative","readonly":false,"integer":true,"lowercase":false,"uppercase":false,"exported":false}]
|
||||
4
tests/fixtures/generic/typeset--capital-a.out
vendored
Normal file
4
tests/fixtures/generic/typeset--capital-a.out
vendored
Normal file
@@ -0,0 +1,4 @@
|
||||
declare -A BASH_ALIASES=()
|
||||
declare -A BASH_CMDS=()
|
||||
declare -A associative_array=([key2]="abc" [key3]="1 2 3" [key1]="hello \"world\"" )
|
||||
declare -Ai integers_associative_array=([one]="1" [two]="500" [three]="999" )
|
||||
1
tests/fixtures/generic/typeset--p.json
vendored
Normal file
1
tests/fixtures/generic/typeset--p.json
vendored
Normal file
File diff suppressed because one or more lines are too long
32
tests/fixtures/generic/typeset--p.out
vendored
Normal file
32
tests/fixtures/generic/typeset--p.out
vendored
Normal file
@@ -0,0 +1,32 @@
|
||||
declare -- BASH="/opt/homebrew/bin/bash"
|
||||
declare -r BASHOPTS="checkwinsize:cmdhist:complete_fullquote:expand_aliases:extquote:force_fignore:globasciiranges:globskipdots:hostcomplete:interactive_comments:patsub_replacement:progcomp:promptvars:sourcepath"
|
||||
declare -i BASHPID
|
||||
declare -A BASH_ALIASES=()
|
||||
declare -a BASH_ARGC=()
|
||||
declare -- BASH_ARGV0
|
||||
declare -- BASH_LOADABLES_PATH="/opt/homebrew/lib/bash:/usr/local/lib/bash:/usr/lib/bash:/opt/local/lib/bash:/usr/pkg/lib/bash:/opt/pkg/lib/bash:."
|
||||
declare -ar BASH_VERSINFO=([0]="5" [1]="3" [2]="9" [3]="1" [4]="release" [5]="aarch64-apple-darwin24.6.0")
|
||||
declare -- COLUMNS="92"
|
||||
declare -ir EUID="501"
|
||||
declare -a FUNCNAME
|
||||
declare -i HISTCMD
|
||||
declare -x HOME="/Users/kbrazil"
|
||||
declare -- IFS=$' \t\n'
|
||||
declare -x JC_COLORS="cyan,default,default,default"
|
||||
declare -x OLDPWD
|
||||
declare -a PIPESTATUS=([0]="0")
|
||||
declare -ir PPID="50074"
|
||||
declare -- PS1="\\s-\\v\\\$ "
|
||||
declare -- PS2="> "
|
||||
declare -- PS4="+ "
|
||||
declare -i RANDOM
|
||||
declare -r SHELLOPTS="braceexpand:emacs:hashall:histexpand:history:interactive-comments:monitor"
|
||||
declare -- _="-p"
|
||||
declare -x __CFBundleIdentifier="com.apple.Terminal"
|
||||
declare -ai integers_array=([0]="1" [1]="100" [2]="999")
|
||||
declare -a simple_array=([0]="hello \"world\"" [1]="abc" [2]="1 2 3")
|
||||
declare -r readonly_var="hello"
|
||||
declare -aru uppercase_array=([0]="ABC" [1]="123" [2]="XYZ")
|
||||
declare -a num_string_array=([0]="1" [1]="2" [2]="3")
|
||||
declare -A associative_array=([key2]="abc" [key3]="1 2 3" [key1]="hello \"world\"" )
|
||||
declare -Ai integers_associative_array=([one]="1" [two]="500" [three]="999" )
|
||||
1
tests/fixtures/generic/typeset--plain.json
vendored
Normal file
1
tests/fixtures/generic/typeset--plain.json
vendored
Normal file
@@ -0,0 +1 @@
|
||||
[{"name":"BASHOPTS","value":"checkwinsize:cmdhist:complete_fullquote:expand_aliases:extquote:force_fignore:globasciiranges:globskipdots:hostcomplete:interactive_comments:patsub_replacement:progcomp:promptvars:sourcepath","type":"variable","readonly":null,"integer":null,"lowercase":null,"uppercase":null,"exported":null},{"name":"BASH_ALIASES","value":[],"type":"array","readonly":null,"integer":null,"lowercase":null,"uppercase":null,"exported":null},{"name":"BASH_VERSINFO","value":["5","3","9","1","release","aarch64-apple-darwin24.6.0"],"type":"array","readonly":null,"integer":null,"lowercase":null,"uppercase":null,"exported":null},{"name":"BASH_VERSION","value":"'5.3.9(1)-release'","type":"variable","readonly":null,"integer":null,"lowercase":null,"uppercase":null,"exported":null},{"name":"IFS","value":"$' \\t\\n'","type":"variable","readonly":null,"integer":null,"lowercase":null,"uppercase":null,"exported":null},{"name":"INFOPATH","value":"/opt/homebrew/share/info:","type":"variable","readonly":null,"integer":null,"lowercase":null,"uppercase":null,"exported":null},{"name":"JC_COLORS","value":"cyan,default,default,default","type":"variable","readonly":null,"integer":null,"lowercase":null,"uppercase":null,"exported":null},{"name":"PS1","value":"'\\s-\\v\\$ '","type":"variable","readonly":null,"integer":null,"lowercase":null,"uppercase":null,"exported":null},{"name":"PS2","value":"'> '","type":"variable","readonly":null,"integer":null,"lowercase":null,"uppercase":null,"exported":null},{"name":"PS4","value":"'+ 
'","type":"variable","readonly":null,"integer":null,"lowercase":null,"uppercase":null,"exported":null},{"name":"TERM_PROGRAM_VERSION","value":"455.1","type":"variable","readonly":null,"integer":null,"lowercase":null,"uppercase":null,"exported":null},{"name":"TERM_SESSION_ID","value":"E5896C5D-9C9A-4178-9246-00158A3F832F","type":"variable","readonly":null,"integer":null,"lowercase":null,"uppercase":null,"exported":null},{"name":"XPC_FLAGS","value":"0x0","type":"variable","readonly":null,"integer":null,"lowercase":null,"uppercase":null,"exported":null},{"name":"associative_array","value":{"key2":"abc","key3":"1 2 3","key1":"hello \"world\""},"type":"associative","readonly":null,"integer":null,"lowercase":null,"uppercase":null,"exported":null},{"name":"integers_array","value":["1","100","999"],"type":"array","readonly":null,"integer":null,"lowercase":null,"uppercase":null,"exported":null},{"name":"simple_array","value":["hello \"world\"","abc","1 2 3"],"type":"array","readonly":null,"integer":null,"lowercase":null,"uppercase":null,"exported":null},{"name":"uppercase_array","value":["ABC","123","XYZ"],"type":"array","readonly":null,"integer":null,"lowercase":null,"uppercase":null,"exported":null}]
|
||||
19
tests/fixtures/generic/typeset--plain.out
vendored
Normal file
19
tests/fixtures/generic/typeset--plain.out
vendored
Normal file
@@ -0,0 +1,19 @@
|
||||
BASHOPTS=checkwinsize:cmdhist:complete_fullquote:expand_aliases:extquote:force_fignore:globasciiranges:globskipdots:hostcomplete:interactive_comments:patsub_replacement:progcomp:promptvars:sourcepath
|
||||
BASH_ALIASES=()
|
||||
BASH_VERSINFO=([0]="5" [1]="3" [2]="9" [3]="1" [4]="release" [5]="aarch64-apple-darwin24.6.0")
|
||||
BASH_VERSION='5.3.9(1)-release'
|
||||
COLUMNS=92
|
||||
IFS=$' \t\n'
|
||||
INFOPATH=/opt/homebrew/share/info:
|
||||
JC_COLORS=cyan,default,default,default
|
||||
PS1='\s-\v\$ '
|
||||
PS2='> '
|
||||
PS4='+ '
|
||||
TERM_PROGRAM_VERSION=455.1
|
||||
TERM_SESSION_ID=E5896C5D-9C9A-4178-9246-00158A3F832F
|
||||
XPC_FLAGS=0x0
|
||||
_=-a
|
||||
associative_array=([key2]="abc" [key3]="1 2 3" [key1]="hello \"world\"" )
|
||||
integers_array=([0]="1" [1]="100" [2]="999")
|
||||
simple_array=([0]="hello \"world\"" [1]="abc" [2]="1 2 3")
|
||||
uppercase_array=([0]="ABC" [1]="123" [2]="XYZ")
|
||||
1
tests/fixtures/generic/typeset--r.json
vendored
Normal file
1
tests/fixtures/generic/typeset--r.json
vendored
Normal file
@@ -0,0 +1 @@
|
||||
[{"name":"BASHOPTS","value":"checkwinsize:cmdhist:complete_fullquote:expand_aliases:extquote:force_fignore:globasciiranges:globskipdots:hostcomplete:interactive_comments:patsub_replacement:progcomp:promptvars:sourcepath","type":"variable","readonly":true,"integer":false,"lowercase":false,"uppercase":false,"exported":false},{"name":"BASH_VERSINFO","value":["5","3","9","1","release","aarch64-apple-darwin24.6.0"],"type":"array","readonly":true,"integer":false,"lowercase":false,"uppercase":false,"exported":false},{"name":"EUID","value":501,"type":"variable","readonly":true,"integer":true,"lowercase":false,"uppercase":false,"exported":false},{"name":"PPID","value":50074,"type":"variable","readonly":true,"integer":true,"lowercase":false,"uppercase":false,"exported":false},{"name":"SHELLOPTS","value":"braceexpand:emacs:hashall:histexpand:history:interactive-comments:monitor","type":"variable","readonly":true,"integer":false,"lowercase":false,"uppercase":false,"exported":false},{"name":"UID","value":501,"type":"variable","readonly":true,"integer":true,"lowercase":false,"uppercase":false,"exported":false},{"name":"readonly_var","value":"hello","type":"variable","readonly":true,"integer":false,"lowercase":false,"uppercase":false,"exported":false},{"name":"uppercase_array","value":["ABC","123","XYZ"],"type":"array","readonly":true,"integer":false,"lowercase":false,"uppercase":true,"exported":false}]
|
||||
8
tests/fixtures/generic/typeset--r.out
vendored
Normal file
8
tests/fixtures/generic/typeset--r.out
vendored
Normal file
@@ -0,0 +1,8 @@
|
||||
declare -r BASHOPTS="checkwinsize:cmdhist:complete_fullquote:expand_aliases:extquote:force_fignore:globasciiranges:globskipdots:hostcomplete:interactive_comments:patsub_replacement:progcomp:promptvars:sourcepath"
|
||||
declare -ar BASH_VERSINFO=([0]="5" [1]="3" [2]="9" [3]="1" [4]="release" [5]="aarch64-apple-darwin24.6.0")
|
||||
declare -ir EUID="501"
|
||||
declare -ir PPID="50074"
|
||||
declare -r SHELLOPTS="braceexpand:emacs:hashall:histexpand:history:interactive-comments:monitor"
|
||||
declare -ir UID="501"
|
||||
declare -r readonly_var="hello"
|
||||
declare -aru uppercase_array=([0]="ABC" [1]="123" [2]="XYZ")
|
||||
46
tests/fixtures/linux-proc/pid_smaps_unknown_flag
vendored
Normal file
46
tests/fixtures/linux-proc/pid_smaps_unknown_flag
vendored
Normal file
@@ -0,0 +1,46 @@
|
||||
55a9e753c000-55a9e7570000 r--p 00000000 fd:00 798126 /usr/lib/systemd/systemd
|
||||
Size: 208 kB
|
||||
KernelPageSize: 4 kB
|
||||
MMUPageSize: 4 kB
|
||||
Rss: 208 kB
|
||||
Pss: 104 kB
|
||||
Shared_Clean: 208 kB
|
||||
Shared_Dirty: 0 kB
|
||||
Private_Clean: 0 kB
|
||||
Private_Dirty: 0 kB
|
||||
Referenced: 208 kB
|
||||
Anonymous: 0 kB
|
||||
LazyFree: 0 kB
|
||||
AnonHugePages: 0 kB
|
||||
ShmemPmdMapped: 0 kB
|
||||
FilePmdMapped: 0 kB
|
||||
Shared_Hugetlb: 0 kB
|
||||
Private_Hugetlb: 0 kB
|
||||
Swap: 0 kB
|
||||
SwapPss: 0 kB
|
||||
Locked: 0 kB
|
||||
THPeligible: 0
|
||||
VmFlags: rd mr mw me dw sd zz
|
||||
55a9e7570000-55a9e763a000 r-xp 00034000 fd:00 798126 /usr/lib/systemd/systemd
|
||||
Size: 808 kB
|
||||
KernelPageSize: 4 kB
|
||||
MMUPageSize: 4 kB
|
||||
Rss: 800 kB
|
||||
Pss: 378 kB
|
||||
Shared_Clean: 800 kB
|
||||
Shared_Dirty: 0 kB
|
||||
Private_Clean: 0 kB
|
||||
Private_Dirty: 0 kB
|
||||
Referenced: 800 kB
|
||||
Anonymous: 0 kB
|
||||
LazyFree: 0 kB
|
||||
AnonHugePages: 0 kB
|
||||
ShmemPmdMapped: 0 kB
|
||||
FilePmdMapped: 0 kB
|
||||
Shared_Hugetlb: 0 kB
|
||||
Private_Hugetlb: 0 kB
|
||||
Swap: 0 kB
|
||||
SwapPss: 0 kB
|
||||
Locked: 0 kB
|
||||
THPeligible: 0
|
||||
VmFlags: rd ex mr mw me dw sd yy
|
||||
1
tests/fixtures/linux-proc/pid_smaps_unknown_flag.json
vendored
Normal file
1
tests/fixtures/linux-proc/pid_smaps_unknown_flag.json
vendored
Normal file
@@ -0,0 +1 @@
|
||||
[{"start":"55a9e753c000","end":"55a9e7570000","perms":["read","private"],"offset":"00000000","maj":"fd","min":"00","inode":798126,"pathname":"/usr/lib/systemd/systemd","Size":208,"KernelPageSize":4,"MMUPageSize":4,"Rss":208,"Pss":104,"Shared_Clean":208,"Shared_Dirty":0,"Private_Clean":0,"Private_Dirty":0,"Referenced":208,"Anonymous":0,"LazyFree":0,"AnonHugePages":0,"ShmemPmdMapped":0,"FilePmdMapped":0,"Shared_Hugetlb":0,"Private_Hugetlb":0,"Swap":0,"SwapPss":0,"Locked":0,"THPeligible":0,"VmFlags":["rd","mr","mw","me","dw","sd","zz"],"VmFlags_pretty":["readable","may read","may write","may execute","disabled write to the mapped file","soft-dirty flag","zz"]},{"start":"55a9e7570000","end":"55a9e763a000","perms":["read","execute","private"],"offset":"00034000","maj":"fd","min":"00","inode":798126,"pathname":"/usr/lib/systemd/systemd","Size":808,"KernelPageSize":4,"MMUPageSize":4,"Rss":800,"Pss":378,"Shared_Clean":800,"Shared_Dirty":0,"Private_Clean":0,"Private_Dirty":0,"Referenced":800,"Anonymous":0,"LazyFree":0,"AnonHugePages":0,"ShmemPmdMapped":0,"FilePmdMapped":0,"Shared_Hugetlb":0,"Private_Hugetlb":0,"Swap":0,"SwapPss":0,"Locked":0,"THPeligible":0,"VmFlags":["rd","ex","mr","mw","me","dw","sd","yy"],"VmFlags_pretty":["readable","executable","may read","may write","may execute","disabled write to the mapped file","soft-dirty flag","yy"]}]
|
||||
1
tests/fixtures/osx-10.14.6/md5-raw.json
vendored
Normal file
1
tests/fixtures/osx-10.14.6/md5-raw.json
vendored
Normal file
File diff suppressed because one or more lines are too long
2
tests/fixtures/osx-10.14.6/md5.json
vendored
2
tests/fixtures/osx-10.14.6/md5.json
vendored
File diff suppressed because one or more lines are too long
1
tests/fixtures/osx-10.14.6/shasum-raw.json
vendored
Normal file
1
tests/fixtures/osx-10.14.6/shasum-raw.json
vendored
Normal file
File diff suppressed because one or more lines are too long
2
tests/fixtures/osx-10.14.6/shasum.json
vendored
2
tests/fixtures/osx-10.14.6/shasum.json
vendored
File diff suppressed because one or more lines are too long
1
tests/fixtures/ubuntu-18.04/shasum-portable-raw.json
vendored
Normal file
1
tests/fixtures/ubuntu-18.04/shasum-portable-raw.json
vendored
Normal file
@@ -0,0 +1 @@
|
||||
[{"filename": "out.txt", "mode": "?", "hash": "5a3c9b9e4594dd4a8a5e963a6e917deb844458e6"}]
|
||||
1
tests/fixtures/ubuntu-18.04/shasum-portable.json
vendored
Normal file
1
tests/fixtures/ubuntu-18.04/shasum-portable.json
vendored
Normal file
@@ -0,0 +1 @@
|
||||
[{"filename": "out.txt", "mode": "?", "hash": "5a3c9b9e4594dd4a8a5e963a6e917deb844458e6"}]
|
||||
1
tests/fixtures/ubuntu-18.04/shasum-portable.out
vendored
Normal file
1
tests/fixtures/ubuntu-18.04/shasum-portable.out
vendored
Normal file
@@ -0,0 +1 @@
|
||||
5a3c9b9e4594dd4a8a5e963a6e917deb844458e6 ?out.txt
|
||||
1
tests/fixtures/ubuntu-24.04/sha256sum-binary-raw.json
vendored
Normal file
1
tests/fixtures/ubuntu-24.04/sha256sum-binary-raw.json
vendored
Normal file
@@ -0,0 +1 @@
|
||||
[{"filename": "openssl-3.6.0.tar.gz", "mode": "*", "hash": "b6a5f44b7eb69e3fa35dbf15524405b44837a481d43d81daddde3ff21fcbb8e9"}]
|
||||
1
tests/fixtures/ubuntu-24.04/sha256sum-binary.json
vendored
Normal file
1
tests/fixtures/ubuntu-24.04/sha256sum-binary.json
vendored
Normal file
@@ -0,0 +1 @@
|
||||
[{"filename": "openssl-3.6.0.tar.gz", "mode": "binary", "hash": "b6a5f44b7eb69e3fa35dbf15524405b44837a481d43d81daddde3ff21fcbb8e9"}]
|
||||
1
tests/fixtures/ubuntu-24.04/sha256sum-binary.out
vendored
Normal file
1
tests/fixtures/ubuntu-24.04/sha256sum-binary.out
vendored
Normal file
@@ -0,0 +1 @@
|
||||
b6a5f44b7eb69e3fa35dbf15524405b44837a481d43d81daddde3ff21fcbb8e9 *openssl-3.6.0.tar.gz
|
||||
1
tests/fixtures/ubuntu-24.04/shasum-universal-bits-raw.json
vendored
Normal file
1
tests/fixtures/ubuntu-24.04/shasum-universal-bits-raw.json
vendored
Normal file
@@ -0,0 +1 @@
|
||||
[{"filename": "out.txt", "mode": "U", "hash": "6fe4d572948d4c132d1b1b0ab91e89de4be01efd"}, {"filename": "out.txt", "mode": "^", "hash": "68382a729a930a2219f0bd10c5c4d61eec856a96"}]
|
||||
1
tests/fixtures/ubuntu-24.04/shasum-universal-bits.json
vendored
Normal file
1
tests/fixtures/ubuntu-24.04/shasum-universal-bits.json
vendored
Normal file
@@ -0,0 +1 @@
|
||||
[{"filename": "out.txt", "mode": "universal", "hash": "6fe4d572948d4c132d1b1b0ab91e89de4be01efd"}, {"filename": "out.txt", "mode": "bits", "hash": "68382a729a930a2219f0bd10c5c4d61eec856a96"}]
|
||||
2
tests/fixtures/ubuntu-24.04/shasum-universal-bits.out
vendored
Normal file
2
tests/fixtures/ubuntu-24.04/shasum-universal-bits.out
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
6fe4d572948d4c132d1b1b0ab91e89de4be01efd Uout.txt
|
||||
68382a729a930a2219f0bd10c5c4d61eec856a96 ^out.txt
|
||||
@@ -95,6 +95,25 @@ class MyTests(unittest.TestCase):
|
||||
self.assertEqual(jc.parsers.dir.parse(self.windows_10_dir_S, quiet=True),
|
||||
self.windows_10_dir_S_json)
|
||||
|
||||
def test_dir_drive_letter_d(self):
|
||||
"""
|
||||
Test that the D: drive letter is not stripped from the parent path.
|
||||
Regression test: lstrip(" Directory of ") strips any char in the set
|
||||
{' ','D','i','r','e','c','t','o','y','f'}, which incorrectly removes
|
||||
the 'D' from 'D:\\'.
|
||||
"""
|
||||
data = (
|
||||
' Volume in drive D has no label.\r\n'
|
||||
' Volume Serial Number is 1234-5678\r\n'
|
||||
'\r\n'
|
||||
' Directory of D:\\Users\\testuser\r\n'
|
||||
'\r\n'
|
||||
'03/24/2021 03:15 PM <DIR> .\r\n'
|
||||
'03/24/2021 03:15 PM <DIR> ..\r\n'
|
||||
)
|
||||
result = jc.parsers.dir.parse(data, quiet=True)
|
||||
self.assertEqual(result[0]['parent'], 'D:\\Users\\testuser')
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main()
|
||||
|
||||
@@ -18,6 +18,15 @@ class MyTests(unittest.TestCase):
|
||||
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/centos-7.7/sha384sum.out'), 'r', encoding='utf-8') as f:
|
||||
centos_7_7_sha384sum = f.read()
|
||||
|
||||
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/ubuntu-18.04/shasum-portable.out'), 'r', encoding='utf-8') as f:
|
||||
ubuntu_18_04_shasum_portable = f.read()
|
||||
|
||||
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/ubuntu-24.04/sha256sum-binary.out'), 'r', encoding='utf-8') as f:
|
||||
ubuntu_24_04_sha256sum_binary = f.read()
|
||||
|
||||
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/ubuntu-24.04/shasum-universal-bits.out'), 'r', encoding='utf-8') as f:
|
||||
ubuntu_24_04_shasum_universal_bits = f.read()
|
||||
|
||||
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/osx-10.14.6/md5.out'), 'r', encoding='utf-8') as f:
|
||||
osx_10_14_6_md5 = f.read()
|
||||
|
||||
@@ -28,18 +37,51 @@ class MyTests(unittest.TestCase):
|
||||
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/centos-7.7/md5sum.json'), 'r', encoding='utf-8') as f:
|
||||
centos_7_7_md5sum_json = json.loads(f.read())
|
||||
|
||||
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/centos-7.7/md5sum-raw.json'), 'r', encoding='utf-8') as f:
|
||||
centos_7_7_md5sum_raw_json = json.loads(f.read())
|
||||
|
||||
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/centos-7.7/sha256sum.json'), 'r', encoding='utf-8') as f:
|
||||
centos_7_7_sha256sum_json = json.loads(f.read())
|
||||
|
||||
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/centos-7.7/sha256sum-raw.json'), 'r', encoding='utf-8') as f:
|
||||
centos_7_7_sha256sum_raw_json = json.loads(f.read())
|
||||
|
||||
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/centos-7.7/sha384sum.json'), 'r', encoding='utf-8') as f:
|
||||
centos_7_7_sha384sum_json = json.loads(f.read())
|
||||
|
||||
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/centos-7.7/sha384sum-raw.json'), 'r', encoding='utf-8') as f:
|
||||
centos_7_7_sha384sum_raw_json = json.loads(f.read())
|
||||
|
||||
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/ubuntu-18.04/shasum-portable.json'), 'r', encoding='utf-8') as f:
|
||||
ubuntu_18_04_shasum_portable_json = json.loads(f.read())
|
||||
|
||||
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/ubuntu-18.04/shasum-portable-raw.json'), 'r', encoding='utf-8') as f:
|
||||
ubuntu_18_04_shasum_portable_raw_json = json.loads(f.read())
|
||||
|
||||
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/ubuntu-24.04/sha256sum-binary.json'), 'r', encoding='utf-8') as f:
|
||||
ubuntu_24_04_sha256sum_json = json.loads(f.read())
|
||||
|
||||
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/ubuntu-24.04/sha256sum-binary-raw.json'), 'r', encoding='utf-8') as f:
|
||||
ubuntu_24_04_sha256sum_raw_json = json.loads(f.read())
|
||||
|
||||
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/ubuntu-24.04/shasum-universal-bits.json'), 'r', encoding='utf-8') as f:
|
||||
ubuntu_24_04_shasum_json = json.loads(f.read())
|
||||
|
||||
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/ubuntu-24.04/shasum-universal-bits-raw.json'), 'r', encoding='utf-8') as f:
|
||||
ubuntu_24_04_shasum_raw_json = json.loads(f.read())
|
||||
|
||||
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/osx-10.14.6/md5.json'), 'r', encoding='utf-8') as f:
|
||||
osx_10_14_6_md5_json = json.loads(f.read())
|
||||
|
||||
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/osx-10.14.6/md5-raw.json'), 'r', encoding='utf-8') as f:
|
||||
osx_10_14_6_md5_raw_json = json.loads(f.read())
|
||||
|
||||
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/osx-10.14.6/shasum.json'), 'r', encoding='utf-8') as f:
|
||||
osx_10_14_6_shasum_json = json.loads(f.read())
|
||||
|
||||
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/osx-10.14.6/shasum-raw.json'), 'r', encoding='utf-8') as f:
|
||||
osx_10_14_6_shasum_raw_json = json.loads(f.read())
|
||||
|
||||
|
||||
def test_hashsum_nodata(self):
|
||||
"""
|
||||
@@ -53,30 +95,113 @@ class MyTests(unittest.TestCase):
|
||||
"""
|
||||
self.assertEqual(jc.parsers.hashsum.parse(self.centos_7_7_md5sum, quiet=True), self.centos_7_7_md5sum_json)
|
||||
|
||||
def test_md5sum_centos_7_7_raw(self):
|
||||
"""
|
||||
Test 'md5sum' on Centos 7.7, raw output
|
||||
"""
|
||||
self.assertEqual(
|
||||
jc.parsers.hashsum.parse(self.centos_7_7_md5sum, quiet=True, raw=True),
|
||||
self.centos_7_7_md5sum_raw_json)
|
||||
|
||||
def test_sha256sum_centos_7_7(self):
|
||||
"""
|
||||
Test 'sha256sum' on Centos 7.7
|
||||
"""
|
||||
self.assertEqual(jc.parsers.hashsum.parse(self.centos_7_7_sha256sum, quiet=True), self.centos_7_7_sha256sum_json)
|
||||
|
||||
def test_sha256sum_centos_7_7_raw(self):
|
||||
"""
|
||||
Test 'sha256sum' on Centos 7.7, raw output
|
||||
"""
|
||||
self.assertEqual(
|
||||
jc.parsers.hashsum.parse(self.centos_7_7_sha256sum, quiet=True, raw=True),
|
||||
self.centos_7_7_sha256sum_raw_json)
|
||||
|
||||
def test_sha384sum_centos_7_7(self):
|
||||
"""
|
||||
Test 'sha384sum' on Centos 7.7
|
||||
"""
|
||||
self.assertEqual(jc.parsers.hashsum.parse(self.centos_7_7_sha384sum, quiet=True), self.centos_7_7_sha384sum_json)
|
||||
|
||||
def test_sha384sum_centos_7_7_raw(self):
|
||||
"""
|
||||
Test 'sha384sum' on Centos 7.7, raw output
|
||||
"""
|
||||
self.assertEqual(jc.parsers.hashsum.parse(
|
||||
self.centos_7_7_sha384sum, quiet=True, raw=True),
|
||||
self.centos_7_7_sha384sum_raw_json)
|
||||
|
||||
def test_sha256sum_ubuntu_18_04_unsupported_mode(self):
|
||||
"""
|
||||
Test 'sha256sum' on Ubuntu 18.04, portable mode (no firendly name)
|
||||
"""
|
||||
self.assertEqual(jc.parsers.hashsum.parse(
|
||||
self.ubuntu_18_04_shasum_portable, quiet=True),
|
||||
self.ubuntu_18_04_shasum_portable_json)
|
||||
|
||||
def test_sha256sum_ubuntu_18_04_unsupported_mode_raw(self):
|
||||
"""
|
||||
Test 'sha256sum' on Ubuntu 18.04, portable mode (no firendly name), raw output
|
||||
"""
|
||||
self.assertEqual(jc.parsers.hashsum.parse(
|
||||
self.ubuntu_18_04_shasum_portable, quiet=True, raw=True),
|
||||
self.ubuntu_18_04_shasum_portable_raw_json)
|
||||
|
||||
def test_sha256sum_ubuntu_24_04_binary(self):
|
||||
"""
|
||||
Test 'sha256sum' on Ubuntu 24.04, binary mode
|
||||
"""
|
||||
self.assertEqual(jc.parsers.hashsum.parse(self.ubuntu_24_04_sha256sum_binary, quiet=True), self.ubuntu_24_04_sha256sum_json)
|
||||
|
||||
def test_sha256sum_ubuntu_24_04_binary_raw(self):
|
||||
"""
|
||||
Test 'sha256sum' on Ubuntu 24.04, binary mode, raw output
|
||||
"""
|
||||
self.assertEqual(jc.parsers.hashsum.parse(
|
||||
self.ubuntu_24_04_sha256sum_binary, quiet=True, raw=True),
|
||||
self.ubuntu_24_04_sha256sum_raw_json)
|
||||
|
||||
def test_shasum_ubuntu_24_04_universal_bits(self):
|
||||
"""
|
||||
Test 'shasum' on Ubuntu 24.04, universal and bits modes
|
||||
"""
|
||||
self.assertEqual(jc.parsers.hashsum.parse(self.ubuntu_24_04_shasum_universal_bits, quiet=True), self.ubuntu_24_04_shasum_json)
|
||||
|
||||
def test_shasum_ubuntu_24_04_raw(self):
|
||||
"""
|
||||
Test 'shasum' on Ubuntu 24.04, universal and bits modes, raw output
|
||||
"""
|
||||
self.assertEqual(
|
||||
jc.parsers.hashsum.parse(self.ubuntu_24_04_shasum_universal_bits, quiet=True, raw=True),
|
||||
self.ubuntu_24_04_shasum_raw_json)
|
||||
|
||||
def test_md5_osx_10_14_6(self):
|
||||
"""
|
||||
Test 'md5' on OSX 10.14.6
|
||||
"""
|
||||
self.assertEqual(jc.parsers.hashsum.parse(self.osx_10_14_6_md5, quiet=True), self.osx_10_14_6_md5_json)
|
||||
|
||||
def test_md5_osx_10_14_6_raw(self):
|
||||
"""
|
||||
Test 'md5' on OSX 10.14.6, raw output
|
||||
"""
|
||||
self.assertEqual(
|
||||
jc.parsers.hashsum.parse(self.osx_10_14_6_md5, quiet=True, raw=True),
|
||||
self.osx_10_14_6_md5_raw_json)
|
||||
|
||||
def test_shasum_osx_10_14_6(self):
|
||||
"""
|
||||
Test 'shasum' on OSX 10.14.6
|
||||
"""
|
||||
self.assertEqual(jc.parsers.hashsum.parse(self.osx_10_14_6_shasum, quiet=True), self.osx_10_14_6_shasum_json)
|
||||
|
||||
def test_shasum_osx_10_14_6_raw(self):
|
||||
"""
|
||||
Test 'shasum' on OSX 10.14.6, raw output
|
||||
"""
|
||||
self.assertEqual(
|
||||
jc.parsers.hashsum.parse(self.osx_10_14_6_shasum, quiet=True, raw=True),
|
||||
self.osx_10_14_6_shasum_raw_json)
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main()
|
||||
|
||||
@@ -148,6 +148,21 @@ class MyTests(unittest.TestCase):
|
||||
"""
|
||||
self.assertEqual(jc.parsers.ifconfig.parse(self.osx_freebsd12_ifconfig_extra_fields4, quiet=True), self.freebsd12_ifconfig_extra_fields4_json)
|
||||
|
||||
def test_ifconfig_hex_mask_all_zeros(self):
|
||||
"""
|
||||
Test 'ifconfig' with 0x00000000 netmask (FreeBSD/macOS hex format).
|
||||
Regression test: lstrip('0x') incorrectly strips leading '0' chars
|
||||
from the hex digits, producing wrong mask for all-zero masks.
|
||||
"""
|
||||
data = (
|
||||
'lo0: flags=8049<UP,LOOPBACK,RUNNING,MULTICAST> mtu 16384\n'
|
||||
'\toptions=1203<RXCSUM,TXCSUM,TXSTATUS,SW_TIMESTAMP>\n'
|
||||
'\tinet 192.168.1.1 netmask 0x00000000\n'
|
||||
)
|
||||
result = jc.parsers.ifconfig.parse(data, quiet=True)
|
||||
self.assertEqual(result[0]['ipv4_mask'], '0.0.0.0')
|
||||
self.assertEqual(result[0]['ipv4'][0]['mask'], '0.0.0.0')
|
||||
|
||||
def test_ifconfig_utun_ipv4(self):
|
||||
"""
|
||||
Test 'ifconfig' with ipv4 utun addresses (macOS)
|
||||
|
||||
@@ -48,6 +48,9 @@ class MyTests(unittest.TestCase):
|
||||
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/generic/iptables-no-jump.out'), 'r', encoding='utf-8') as f:
|
||||
generic_iptables_no_jump = f.read()
|
||||
|
||||
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/generic/iptables-no-jump2.out'), 'r', encoding='utf-8') as f:
|
||||
generic_iptables_no_jump2 = f.read()
|
||||
|
||||
# output
|
||||
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/centos-7.7/iptables-filter.json'), 'r', encoding='utf-8') as f:
|
||||
centos_7_7_iptables_filter_json = json.loads(f.read())
|
||||
@@ -88,6 +91,9 @@ class MyTests(unittest.TestCase):
|
||||
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/generic/iptables-no-jump.json'), 'r', encoding='utf-8') as f:
|
||||
generic_iptables_no_jump_json = json.loads(f.read())
|
||||
|
||||
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/generic/iptables-no-jump2.json'), 'r', encoding='utf-8') as f:
|
||||
generic_iptables_no_jump2_json = json.loads(f.read())
|
||||
|
||||
|
||||
def test_iptables_nodata(self):
|
||||
"""
|
||||
@@ -173,6 +179,12 @@ class MyTests(unittest.TestCase):
|
||||
"""
|
||||
self.assertEqual(jc.parsers.iptables.parse(self.generic_iptables_no_jump, quiet=True), self.generic_iptables_no_jump_json)
|
||||
|
||||
def test_iptables_no_jump2_generic(self):
|
||||
"""
|
||||
Test 'sudo iptables' with no jump target and verbose output
|
||||
"""
|
||||
self.assertEqual(jc.parsers.iptables.parse(self.generic_iptables_no_jump2, quiet=True), self.generic_iptables_no_jump2_json)
|
||||
|
||||
def test_iptables_x_option_format(self):
|
||||
"""
|
||||
Test iptables -x
|
||||
|
||||
@@ -89,6 +89,58 @@ class MyTests(unittest.TestCase):
|
||||
"""
|
||||
self.assertEqual(jc.parsers.pip_show.parse(self.generic_pip_show_multiline_license_first_blank, quiet=True), self.generic_pip_show_multiline_license_first_blank_json)
|
||||
|
||||
def test_pip_show_files_section(self):
|
||||
"""
|
||||
Test 'pip show -f' output with a files section
|
||||
"""
|
||||
data = """\
|
||||
Name: jc
|
||||
Version: 1.25.4
|
||||
Summary: Converts the output of popular command-line tools and file-types to JSON.
|
||||
Home-page: https://github.com/kellyjonbrazil/jc
|
||||
Author: Kelly Brazil
|
||||
Author-email: kelly@gmail.com
|
||||
License: MIT
|
||||
Location: /home/pi/.local/lib/python3.11/site-packages
|
||||
Requires: Pygments, ruamel.yaml, xmltodict
|
||||
Required-by: pypiwifi
|
||||
Files:
|
||||
../../../bin/jc
|
||||
jc-1.25.4.dist-info/RECORD
|
||||
"""
|
||||
expected = [{
|
||||
'name': 'jc',
|
||||
'version': '1.25.4',
|
||||
'summary': 'Converts the output of popular command-line tools and file-types to JSON.',
|
||||
'home_page': 'https://github.com/kellyjonbrazil/jc',
|
||||
'author': 'Kelly Brazil',
|
||||
'author_email': 'kelly@gmail.com',
|
||||
'license': 'MIT',
|
||||
'location': '/home/pi/.local/lib/python3.11/site-packages',
|
||||
'requires': 'Pygments, ruamel.yaml, xmltodict',
|
||||
'required_by': 'pypiwifi',
|
||||
'files': ['../../../bin/jc', 'jc-1.25.4.dist-info/RECORD']
|
||||
}]
|
||||
self.assertEqual(jc.parsers.pip_show.parse(data, quiet=True), expected)
|
||||
|
||||
def test_pip_show_files_section_with_following_field(self):
|
||||
"""
|
||||
Test 'pip show -f' output when the files section is followed by a new field
|
||||
"""
|
||||
data = """\
|
||||
Name: jc
|
||||
Files:
|
||||
../../../bin/jc
|
||||
jc-1.25.4.dist-info/RECORD
|
||||
Foo: bar
|
||||
"""
|
||||
expected = [{
|
||||
'name': 'jc',
|
||||
'files': ['../../../bin/jc', 'jc-1.25.4.dist-info/RECORD'],
|
||||
'foo': 'bar'
|
||||
}]
|
||||
self.assertEqual(jc.parsers.pip_show.parse(data, quiet=True), expected)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main()
|
||||
|
||||
@@ -16,7 +16,10 @@ class MyTests(unittest.TestCase):
|
||||
fixtures = {
|
||||
'proc_pid_smaps': (
|
||||
'fixtures/linux-proc/pid_smaps',
|
||||
'fixtures/linux-proc/pid_smaps.json')
|
||||
'fixtures/linux-proc/pid_smaps.json'),
|
||||
'proc_pid_smaps_unknown_flag': (
|
||||
'fixtures/linux-proc/pid_smaps_unknown_flag',
|
||||
'fixtures/linux-proc/pid_smaps_unknown_flag.json')
|
||||
}
|
||||
|
||||
for file, filepaths in fixtures.items():
|
||||
@@ -39,6 +42,13 @@ class MyTests(unittest.TestCase):
|
||||
self.assertEqual(jc.parsers.proc_pid_smaps.parse(self.f_in['proc_pid_smaps'], quiet=True),
|
||||
self.f_json['proc_pid_smaps'])
|
||||
|
||||
def test_proc_pid_smaps_unknown_flag(self):
|
||||
"""
|
||||
Test '/proc/<pid>/smaps' with an unknown flag
|
||||
"""
|
||||
self.assertEqual(jc.parsers.proc_pid_smaps.parse(self.f_in['proc_pid_smaps_unknown_flag'], quiet=True),
|
||||
self.f_json['proc_pid_smaps_unknown_flag'])
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main()
|
||||
|
||||
@@ -45,6 +45,9 @@ class MyTests(unittest.TestCase):
|
||||
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/osx-10.14.6/rsync-i-vvv-logfile-nochange.out'), 'r', encoding='utf-8') as f:
|
||||
osx_10_14_6_rsync_i_vvv_logfile_nochange = f.read()
|
||||
|
||||
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/generic/rsync-i-stats.out'), 'r', encoding='utf-8') as f:
|
||||
generic_rsync_i_stats = f.read()
|
||||
|
||||
# output
|
||||
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/centos-7.7/rsync-i.json'), 'r', encoding='utf-8') as f:
|
||||
centos_7_7_rsync_i_json = json.loads(f.read())
|
||||
@@ -82,6 +85,9 @@ class MyTests(unittest.TestCase):
|
||||
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/osx-10.14.6/rsync-i-vvv-logfile-nochange.json'), 'r', encoding='utf-8') as f:
|
||||
osx_10_14_6_rsync_i_vvv_logfile_nochange_json = json.loads(f.read())
|
||||
|
||||
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/generic/rsync-i-stats.json'), 'r', encoding='utf-8') as f:
|
||||
generic_rsync_i_stats_json = json.loads(f.read())
|
||||
|
||||
|
||||
def test_rsync_nodata(self):
|
||||
"""
|
||||
@@ -173,6 +179,12 @@ total size is 221.79G speedup is 25,388.23
|
||||
expected = [{"summary":{"sent":8710000,"received":29880,"bytes_sec":10990.0,"total_size":221790000000,"speedup":25388.23},"files":[]}]
|
||||
self.assertEqual(jc.parsers.rsync.parse(data, quiet=True), expected)
|
||||
|
||||
def test_rsync_with_stats(self):
|
||||
"""
|
||||
Test 'rsync -i --stats' or 'rsync -i --info=stats[1-3]'
|
||||
"""
|
||||
self.assertEqual(jc.parsers.rsync.parse(self.generic_rsync_i_stats, quiet=True), self.generic_rsync_i_stats_json)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main()
|
||||
|
||||
@@ -49,6 +49,9 @@ class MyTests(unittest.TestCase):
|
||||
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/osx-10.14.6/rsync-i-vvv-logfile-nochange.out'), 'r', encoding='utf-8') as f:
|
||||
osx_10_14_6_rsync_i_vvv_logfile_nochange = f.read()
|
||||
|
||||
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/generic/rsync-i-stats.out'), 'r', encoding='utf-8') as f:
|
||||
generic_rsync_i_stats = f.read()
|
||||
|
||||
# output
|
||||
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/centos-7.7/rsync-i-streaming.json'), 'r', encoding='utf-8') as f:
|
||||
centos_7_7_rsync_i_streaming_json = json.loads(f.read())
|
||||
@@ -86,6 +89,9 @@ class MyTests(unittest.TestCase):
|
||||
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/osx-10.14.6/rsync-i-vvv-logfile-nochange-streaming.json'), 'r', encoding='utf-8') as f:
|
||||
osx_10_14_6_rsync_i_vvv_logfile_nochange_streaming_json = json.loads(f.read())
|
||||
|
||||
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/generic/rsync-i-stats-streaming.json'), 'r', encoding='utf-8') as f:
|
||||
generic_rsync_i_stats_streaming_json = json.loads(f.read())
|
||||
|
||||
|
||||
def test_rsync_s_nodata(self):
|
||||
"""
|
||||
@@ -177,6 +183,11 @@ total size is 221.79G speedup is 25,388.23
|
||||
expected = [{"type":"summary","sent":8710000,"received":29880,"bytes_sec":10990.0,"total_size":221790000000,"speedup":25388.23}]
|
||||
self.assertEqual(list(jc.parsers.rsync_s.parse(data.splitlines(), quiet=True)), expected)
|
||||
|
||||
def test_rsync_s_i_stats(self):
|
||||
"""
|
||||
Test 'rsync -i --stats' or 'rsync -i --info=stats[1-3]'
|
||||
"""
|
||||
self.assertEqual(list(jc.parsers.rsync_s.parse(self.generic_rsync_i_stats.splitlines(), quiet=True)), self.generic_rsync_i_stats_streaming_json)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
|
||||
31
tests/test_typeset.py
Normal file
31
tests/test_typeset.py
Normal file
@@ -0,0 +1,31 @@
|
||||
import unittest
|
||||
import os
|
||||
import sys
|
||||
sys.path.append(os.getcwd())
|
||||
from tests import utils_for_test as test_utils
|
||||
sys.path.pop()
|
||||
|
||||
# Execute these steps for standard tests:
|
||||
# - Save this file as `test_{parser_name}.py` since the helper methods extract parser names from the filename.
|
||||
# - Organize fixtures in `tests/fixtures` for optimal structure.
|
||||
# - Format fixtures as follows (using double dashes):
|
||||
# - `{parser_name}--{some_test_description}.out` for command output.
|
||||
# - `{parser_name}--{some_test_description}.json` for expected JSON after parsing.
|
||||
|
||||
class MyTests(unittest.TestCase):
|
||||
|
||||
def test_foo_nodata(self):
|
||||
"""
|
||||
Test 'foo' with no data
|
||||
"""
|
||||
test_utils.run_no_data(self, __file__, [])
|
||||
|
||||
def test_foo_all_fixtures(self):
|
||||
"""
|
||||
Test 'foo' with various fixtures
|
||||
"""
|
||||
test_utils.run_all_fixtures(self, __file__)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main()
|
||||
Reference in New Issue
Block a user