fix for no data
@@ -17,6 +17,11 @@ jc changelog

- Update route parser to fix error on parsing empty data
- Update systemctl parser to fix error on parsing empty data
- Update systemctl_lj parser to fix error on parsing empty data
- Update systemctl_ls parser to fix error on parsing empty data
- Update systemctl_luf parser to fix error on parsing empty data
- Update uptime parser to fix error on parsing empty data
- Update w parser to fix error on parsing empty data
- Update xml parser to fix error on parsing empty data
- Add tests to all parsers for no data condition
- Update ss parser to fix integer fields
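Taken together, the changelog entries above describe one defensive pattern applied across the parsers: return an empty result instead of raising (typically an IndexError from indexing into an empty list) when the input is empty or whitespace-only. Below is a minimal sketch of that pattern using a hypothetical parser, not any specific jc module:

    def parse(data, raw=False, quiet=False):
        """Sketch of the no-data guard used across the updated parsers."""
        raw_output = []

        # filter(None, ...) drops blank lines; empty input yields an empty list
        linedata = list(filter(None, data.splitlines()))

        if linedata:
            # only index into the data when there is something to parse
            header_list = linedata[0].lower().split()
            for entry in linedata[1:]:
                raw_output.append(dict(zip(header_list, entry.split())))

        # with no input, this is still defined and simply empty
        return raw_output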
@@ -125,11 +125,12 @@ def parse(data, raw=False, quiet=False):
     linedata = data.splitlines()
     # Clear any blank lines
     linedata = list(filter(None, linedata))
-    # clean up non-ascii characters, if any
+    raw_output = []

+    if linedata:
         cleandata = []

+        # clean up non-ascii characters, if any
         for entry in linedata:
             cleandata.append(entry.encode('ascii', errors='ignore').decode())
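The key line in this hunk is list(filter(None, linedata)): filter(None, ...) drops falsy items, so blank lines disappear and empty input produces an empty list, which in turn skips the new "if linedata:" block entirely. A quick illustration in plain Python, independent of jc:

    print(list(filter(None, ''.splitlines())))          # []
    print(list(filter(None, '\n\n'.splitlines())))      # [] - blank lines are falsy
    print(list(filter(None, 'a\n\nb\n'.splitlines())))  # ['a', 'b']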
@@ -34,7 +34,7 @@ import jc.utils


 class info():
-    version = '1.1'
+    version = '1.2'
     description = 'systemctl list-sockets command parser'
     author = 'Kelly Brazil'
     author_email = 'kellyjonbrazil@gmail.com'
@@ -91,24 +91,27 @@ def parse(data, raw=False, quiet=False):
     linedata = data.splitlines()
     # Clear any blank lines
     linedata = list(filter(None, linedata))
-    # clean up non-ascii characters, if any
-    cleandata = []
-    for entry in linedata:
-        cleandata.append(entry.encode('ascii', errors='ignore').decode())
-
-    header_text = cleandata[0].lower()
-    header_list = header_text.split()
-
-    raw_output = []
-
-    for entry in cleandata[1:]:
-        if 'sockets listed.' in entry:
-            break
+    if linedata:
+        cleandata = []
+        # clean up non-ascii characters, if any
+        for entry in linedata:
+            cleandata.append(entry.encode('ascii', errors='ignore').decode())

-        else:
-            entry_list = entry.rsplit(maxsplit=2)
-            output_line = dict(zip(header_list, entry_list))
-            raw_output.append(output_line)
+        header_text = cleandata[0].lower()
+        header_list = header_text.split()
+
+        raw_output = []
+
+        for entry in cleandata[1:]:
+            if 'sockets listed.' in entry:
+                break
+
+            else:
+                entry_list = entry.rsplit(maxsplit=2)
+                output_line = dict(zip(header_list, entry_list))
+                raw_output.append(output_line)

     if raw:
         return raw_output
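Judging by the 'sockets listed.' sentinel and the description string above, this hunk belongs to the systemctl list-sockets parser. Inside the loop, rsplit(maxsplit=2) plus dict(zip(...)) maps the last two whitespace-separated fields to their own keys while leaving the socket path intact. A standalone illustration with a made-up output line:

    header_list = 'listen unit activates'.split()

    # illustrative line in the style of 'systemctl -a list-sockets' output
    entry = '/run/dbus/system_bus_socket      dbus.socket      dbus.service'

    entry_list = entry.rsplit(maxsplit=2)
    print(dict(zip(header_list, entry_list)))
    # {'listen': '/run/dbus/system_bus_socket', 'unit': 'dbus.socket', 'activates': 'dbus.service'}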
@@ -31,7 +31,7 @@ import jc.utils


 class info():
-    version = '1.1'
+    version = '1.2'
     description = 'systemctl list-unit-files command parser'
     author = 'Kelly Brazil'
     author_email = 'kellyjonbrazil@gmail.com'
@@ -87,25 +87,28 @@ def parse(data, raw=False, quiet=False):
     linedata = data.splitlines()
     # Clear any blank lines
     linedata = list(filter(None, linedata))
-    # clean up non-ascii characters, if any
-    cleandata = []
-    for entry in linedata:
-        cleandata.append(entry.encode('ascii', errors='ignore').decode())
-
-    header_text = cleandata[0]
-    header_text = header_text.lower().replace('unit file', 'unit_file')
-    header_list = header_text.split()
-
-    raw_output = []
-
-    for entry in cleandata[1:]:
-        if 'unit files listed.' in entry:
-            break
+    if linedata:
+        cleandata = []
+        # clean up non-ascii characters, if any
+        for entry in linedata:
+            cleandata.append(entry.encode('ascii', errors='ignore').decode())

-        else:
-            entry_list = entry.split(maxsplit=4)
-            output_line = dict(zip(header_list, entry_list))
-            raw_output.append(output_line)
+        header_text = cleandata[0]
+        header_text = header_text.lower().replace('unit file', 'unit_file')
+        header_list = header_text.split()
+
+        raw_output = []
+
+        for entry in cleandata[1:]:
+            if 'unit files listed.' in entry:
+                break
+
+            else:
+                entry_list = entry.split(maxsplit=4)
+                output_line = dict(zip(header_list, entry_list))
+                raw_output.append(output_line)

     if raw:
         return raw_output
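This hunk, the systemctl list-unit-files parser per its description, applies the same guard, but note the extra header fixup: 'unit file' is collapsed to 'unit_file' so the two-word column becomes a single JSON-friendly key before split() builds the header list. Roughly:

    header_text = 'UNIT FILE                      STATE'
    header_text = header_text.lower().replace('unit file', 'unit_file')
    header_list = header_text.split()
    print(header_list)                 # ['unit_file', 'state']

    # illustrative line from 'systemctl list-unit-files' output
    entry = 'ssh.service                    enabled'
    print(dict(zip(header_list, entry.split(maxsplit=4))))
    # {'unit_file': 'ssh.service', 'state': 'enabled'}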
@@ -34,7 +34,7 @@ import jc.utils


 class info():
-    version = '1.0'
+    version = '1.1'
     description = 'uptime command parser'
     author = 'Kelly Brazil'
     author_email = 'kellyjonbrazil@gmail.com'
@@ -107,10 +107,9 @@ def parse(data, raw=False, quiet=False):
         jc.utils.compatibility(__name__, info.compatible)

     raw_output = {}

     cleandata = data.splitlines()

-    if cleandata:
+    if list(filter(None, cleandata)):
         parsed_line = cleandata[0].split()

         # allow space for odd times
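Here the guard tightens from "if cleandata:" to "if list(filter(None, cleandata)):". The difference matters for whitespace-only input: splitlines() on such input returns a non-empty list of empty strings, so the bare truthiness check would still pass and cleandata[0].split() would yield nothing usable, while the filtered check treats it as no data. A small comparison (illustrative input, not a jc fixture):

    data = '\n\n'                      # whitespace-only input
    cleandata = data.splitlines()      # ['', ''] - a truthy list of empty strings

    print(bool(cleandata))                       # True  - bare check would proceed
    print(bool(list(filter(None, cleandata))))   # False - filtered check sees no data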
@@ -83,7 +83,7 @@ import jc.utils


 class info():
-    version = '1.1'
+    version = '1.2'
     description = 'w command parser'
     author = 'Kelly Brazil'
     author_email = 'kellyjonbrazil@gmail.com'
@@ -149,36 +149,39 @@ def parse(data, raw=False, quiet=False):
         jc.utils.compatibility(__name__, info.compatible)

     cleandata = data.splitlines()[1:]
-    header_text = cleandata[0].lower()
-    # fixup for 'from' column that can be blank
-    from_col = header_text.find('from')
-    # clean up 'login@' header
-    # even though @ in a key is valid json, it can make things difficult
-    header_text = header_text.replace('login@', 'login_at')
-    headers = [h for h in ' '.join(header_text.strip().split()).split() if h]
-
-    # parse lines
-    raw_output = []
-    if cleandata:
-        for entry in cleandata[1:]:
-            output_line = {}
-
-            # normalize data by inserting Null for missing data
-            temp_line = entry.split(maxsplit=len(headers) - 1)
+    if list(filter(None, cleandata)):
+        header_text = cleandata[0].lower()
+        # fixup for 'from' column that can be blank
+        from_col = header_text.find('from')
+        # clean up 'login@' header
+        # even though @ in a key is valid json, it can make things difficult
+        header_text = header_text.replace('login@', 'login_at')
+        headers = [h for h in ' '.join(header_text.strip().split()).split() if h]

-            # fix from column, always at column 2
-            if 'from' in headers:
-                if entry[from_col] in string.whitespace:
-                    temp_line.insert(2, '-')
+        # parse lines
+        raw_output = []
+        if cleandata:
+            for entry in cleandata[1:]:
+                output_line = {}

-            output_line = dict(zip(headers, temp_line))
-            raw_output.append(output_line)
+                # normalize data by inserting Null for missing data
+                temp_line = entry.split(maxsplit=len(headers) - 1)

-    # strip whitespace from beginning and end of all string values
-    for row in raw_output:
-        for item in row:
-            if isinstance(row[item], str):
-                row[item] = row[item].strip()
+                # fix from column, always at column 2
+                if 'from' in headers:
+                    if entry[from_col] in string.whitespace:
+                        temp_line.insert(2, '-')

+                output_line = dict(zip(headers, temp_line))
+                raw_output.append(output_line)

+        # strip whitespace from beginning and end of all string values
+        for row in raw_output:
+            for item in row:
+                if isinstance(row[item], str):
+                    row[item] = row[item].strip()

     if raw:
         return raw_output
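The w parser hunk wraps its whole body in the same filtered check and keeps the existing fixup for the FROM column, which can be blank in w output. Because each row is split with maxsplit=len(headers) - 1, a missing FROM value would shift every later field one position left, so the parser checks the character at the header's FROM offset and inserts a '-' placeholder. A compressed illustration with made-up w output:

    import string

    header_text = 'user     tty      from             login@   idle   jcpu   pcpu what'
    from_col = header_text.find('from')
    headers = header_text.replace('login@', 'login_at').split()

    # FROM is blank in this entry, so the character under the 'from' column is a space
    entry = 'kbrazil  tty1                      09:05    5:25m  0.04s  0.04s -bash'
    temp_line = entry.split(maxsplit=len(headers) - 1)

    if 'from' in headers and entry[from_col] in string.whitespace:
        temp_line.insert(2, '-')

    print(dict(zip(headers, temp_line)))
    # {'user': 'kbrazil', 'tty': 'tty1', 'from': '-', 'login_at': '09:05',
    #  'idle': '5:25m', 'jcpu': '0.04s', 'pcpu': '0.04s', 'what': '-bash'}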
@@ -59,7 +59,7 @@ import xmltodict


 class info():
-    version = '1.0'
+    version = '1.1'
     description = 'XML file parser'
     author = 'Kelly Brazil'
     author_email = 'kellyjonbrazil@gmail.com'
@@ -111,7 +111,9 @@ def parse(data, raw=False, quiet=False):
     if not quiet:
         jc.utils.compatibility(__name__, info.compatible)

-    if data:
+    raw_output = []
+
+    if list(filter(None, data.splitlines())):
         raw_output = xmltodict.parse(data)

     if raw:
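The XML parser gets the same treatment: raw_output now defaults to an empty list and xmltodict.parse() is only called when the input contains at least one non-blank line, since xmltodict cannot parse an empty document. A hedged sketch of the guarded call (xmltodict is the library the parser already imports; parse_xml is just an illustrative wrapper, not the jc API):

    import xmltodict

    def parse_xml(data):
        raw_output = []
        # only hand the data to xmltodict when there is something non-blank to parse
        if list(filter(None, data.splitlines())):
            raw_output = xmltodict.parse(data)
        return raw_output

    print(parse_xml(''))                  # []
    print(parse_xml('<a><b>1</b></a>'))   # nested dict-like mapping: {'a': {'b': '1'}}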
@@ -23,6 +23,12 @@ class MyTests(unittest.TestCase):
         with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/ubuntu-18.04/systemctl-ls.json'), 'r', encoding='utf-8') as f:
             self.ubuntu_18_4_systemctl_ls_json = json.loads(f.read())

+    def test_systemctl_ls_nodata(self):
+        """
+        Test 'systemctl -a list-sockets' with no data
+        """
+        self.assertEqual(jc.parsers.systemctl_ls.parse('', quiet=True), [])
+
     def test_systemctl_ls_centos_7_7(self):
         """
         Test 'systemctl -a list-sockets' on Centos 7.7
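The remaining hunks all follow this template: each test module gains a ..._nodata case asserting that an empty string returns the parser's empty result type, [] for list-style parsers and {} for dict-style ones such as timedatectl, uname, and uptime. The assertion boils down to a call you can also try interactively, assuming jc is importable:

    import jc.parsers.systemctl_ls
    import jc.parsers.uptime

    # list-style parsers return an empty list on no data...
    print(jc.parsers.systemctl_ls.parse('', quiet=True))   # []
    # ...while dict-style parsers return an empty dict
    print(jc.parsers.uptime.parse('', quiet=True))          # {}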
@@ -23,6 +23,12 @@ class MyTests(unittest.TestCase):
         with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/ubuntu-18.04/systemctl-luf.json'), 'r', encoding='utf-8') as f:
             self.ubuntu_18_4_systemctl_luf_json = json.loads(f.read())

+    def test_systemctl_luf_nodata(self):
+        """
+        Test 'systemctl -a list-sockets' with no data
+        """
+        self.assertEqual(jc.parsers.systemctl_luf.parse('', quiet=True), [])
+
     def test_systemctl_luf_centos_7_7(self):
         """
         Test 'systemctl -a list-sockets' on Centos 7.7
@@ -23,6 +23,12 @@ class MyTests(unittest.TestCase):
         with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/ubuntu-18.04/timedatectl.json'), 'r', encoding='utf-8') as f:
             self.ubuntu_18_4_timedatectl_json = json.loads(f.read())

+    def test_timedatectl_nodata(self):
+        """
+        Test 'timedatectl' with no data
+        """
+        self.assertEqual(jc.parsers.timedatectl.parse('', quiet=True), {})
+
     def test_timedatectl_centos_7_7(self):
         """
         Test 'timedatectl' on Centos 7.7
@@ -35,6 +35,12 @@ class MyTests(unittest.TestCase):
         with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/osx-10.14.6/uname-a.json'), 'r', encoding='utf-8') as f:
             self.osx_10_14_6_uname_a_json = json.loads(f.read())

+    def test_uname_nodata(self):
+        """
+        Test 'uname -a' with no data
+        """
+        self.assertEqual(jc.parsers.uname.parse('', quiet=True), {})
+
     def test_uname_centos_7_7(self):
         """
         Test 'uname -a' on Centos 7.7
@@ -35,6 +35,12 @@ class MyTests(unittest.TestCase):
         with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/osx-10.14.6/uptime.json'), 'r', encoding='utf-8') as f:
             self.osx_10_14_6_uptime_json = json.loads(f.read())

+    def test_uptime_nodata(self):
+        """
+        Test 'uptime' with no data
+        """
+        self.assertEqual(jc.parsers.uptime.parse('', quiet=True), {})
+
     def test_uptime_centos_7_7(self):
         """
         Test 'uptime' on Centos 7.7
@@ -41,6 +41,12 @@ class MyTests(unittest.TestCase):
         with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/nixos/w.json'), 'r', encoding='utf-8') as f:
             self.nixos_w_json = json.loads(f.read())

+    def test_w_nodata(self):
+        """
+        Test 'w' with no data
+        """
+        self.assertEqual(jc.parsers.w.parse('', quiet=True), [])
+
     def test_w_centos_7_7(self):
         """
         Test 'w' on Centos 7.7
@@ -47,6 +47,12 @@ class MyTests(unittest.TestCase):
         with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/osx-10.14.6/who-a.json'), 'r', encoding='utf-8') as f:
             self.osx_10_14_6_who_a_json = json.loads(f.read())

+    def test_who_nodata(self):
+        """
+        Test 'who' with no data
+        """
+        self.assertEqual(jc.parsers.who.parse('', quiet=True), [])
+
     def test_who_centos_7_7(self):
         """
         Test 'who' on Centos 7.7
@@ -23,6 +23,12 @@ class MyTests(unittest.TestCase):
         with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/generic/xml-foodmenu.json'), 'r', encoding='utf-8') as f:
             self.generic_xml_foodmenu_json = json.loads(f.read())

+    def test_xml_nodata(self):
+        """
+        Test xml parser with no data
+        """
+        self.assertEqual(jc.parsers.xml.parse('', quiet=True), [])
+
     def test_xml_cd_catalog(self):
         """
         Test the cd catalog xml file
@@ -23,6 +23,12 @@ class MyTests(unittest.TestCase):
         with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/generic/yaml-istio-sidecar.json'), 'r', encoding='utf-8') as f:
             self.generic_yaml_istio_sidecar_json = json.loads(f.read())

+    def test_yaml_nodata(self):
+        """
+        Test the YAML parser with no data
+        """
+        self.assertEqual(jc.parsers.yaml.parse('', quiet=True), [])
+
     def test_yaml_istio_sc(self):
         """
         Test the Istio SC yaml file