1
0
mirror of https://github.com/kellyjonbrazil/jc.git synced 2026-04-03 17:44:07 +02:00

Compare commits

...

83 Commits

Author SHA1 Message Date
Kelly Brazil
c3eaf59836 Merge pull request #5 from kellyjonbrazil/dev
Dev v1.0.1
2019-10-25 19:43:24 -07:00
Kelly Brazil
c9849ce0db changelog update 2019-10-25 19:41:35 -07:00
Kelly Brazil
d3c89a3092 check for enough info to parse 2019-10-25 19:27:02 -07:00
Kelly Brazil
a3d43f27f7 fix odd uptime text parsing 2019-10-25 18:25:33 -07:00
Kelly Brazil
f4d9c1b699 fix uptime for minutes and hours long uptime 2019-10-25 17:16:02 -07:00
Kelly Brazil
de647bba4a documentation update 2019-10-25 16:14:32 -07:00
Kelly Brazil
d791307031 documentation update 2019-10-25 16:12:45 -07:00
Kelly Brazil
1a4fc204e2 Documentation update 2019-10-25 16:09:31 -07:00
Kelly Brazil
0328e14c7c handle ctrl-c gracefully 2019-10-25 16:05:34 -07:00
Kelly Brazil
1acc4d6c29 fix uptime parsing for short uptimes 2019-10-25 15:52:53 -07:00
Kelly Brazil
27245590ce remove integers 2019-10-25 15:40:53 -07:00
Kelly Brazil
7ca2a4bdb9 remove integer values 2019-10-25 15:39:48 -07:00
Kelly Brazil
5f1ec67348 lower() headers 2019-10-25 14:58:15 -07:00
Kelly Brazil
7e44c4278a formatting 2019-10-25 10:55:38 -07:00
Kelly Brazil
eda726c4a3 documentation update 2019-10-25 10:55:26 -07:00
Kelly Brazil
5f8e70d730 convert headers to lowercase 2019-10-25 10:55:09 -07:00
Kelly Brazil
25b90546c6 change 'Use%' to 'Use_percent' 2019-10-25 10:39:05 -07:00
Kelly Brazil
75c0841538 changelog update 2019-10-25 10:32:54 -07:00
Kelly Brazil
5b532b9b71 minor cleanup 2019-10-25 10:31:03 -07:00
Kelly Brazil
8c7b3193d1 documentation change 2019-10-25 10:28:19 -07:00
Kelly Brazil
0897c96ef3 formatting 2019-10-25 10:22:23 -07:00
Kelly Brazil
57d0ab2ed7 change LOGIN@ to LOGIN_AT 2019-10-25 10:22:10 -07:00
Kelly Brazil
a07d9a0e4b change SIZE/OFF key to SIZE_OFF 2019-10-25 10:04:29 -07:00
Kelly Brazil
b3996cb4df change MAJ:MIN key to MAJ_MIN 2019-10-25 09:54:42 -07:00
Kelly Brazil
4fa88c1ba3 clear out non-ascii chars from data 2019-10-25 09:53:44 -07:00
Kelly Brazil
c8c5564b29 change buff/cache key to buff_cache 2019-10-25 09:46:03 -07:00
Kelly Brazil
6d047486d9 doc fix 2019-10-24 17:53:56 -07:00
Kelly Brazil
42bdc05814 changelog fix 2019-10-24 17:41:51 -07:00
Kelly Brazil
85bfb68886 history parser fixes 2019-10-24 17:33:42 -07:00
Kelly Brazil
08ec21556b formatting 2019-10-24 17:12:27 -07:00
Kelly Brazil
320929bf25 documentation update 2019-10-24 17:11:17 -07:00
Kelly Brazil
41cd489c34 add history and uptime parsers 2019-10-24 17:09:32 -07:00
Kelly Brazil
f101d881a1 add w parser 2019-10-24 16:06:55 -07:00
Kelly Brazil
fa7466022b fix env parser 2019-10-24 15:54:31 -07:00
Kelly Brazil
ea0cf0acf2 documentation update 2019-10-24 09:48:35 -07:00
Kelly Brazil
e7921b65f5 Merge pull request #4 from kellyjonbrazil/dev
Dev v0.9.1
2019-10-23 18:41:55 -07:00
Kelly Brazil
2cc1b1bd54 version bump 2019-10-23 18:39:24 -07:00
Kelly Brazil
58ae976db0 documentation update 2019-10-23 18:30:55 -07:00
Kelly Brazil
66772392ae add lsmod parser 2019-10-23 18:04:54 -07:00
Kelly Brazil
29c47c03a6 documentation update 2019-10-23 17:37:25 -07:00
Kelly Brazil
91eb9a4d13 use None instead of -- 2019-10-23 17:27:23 -07:00
Kelly Brazil
a1a3de32ec add lsof parser 2019-10-23 17:22:25 -07:00
Kelly Brazil
9c47fd05bf doco fix 2019-10-23 14:11:13 -07:00
Kelly Brazil
649c0aa7c1 add documentation 2019-10-23 14:10:10 -07:00
Kelly Brazil
3db758764e add jobs parser 2019-10-23 14:05:47 -07:00
Kelly Brazil
802f1510eb tighten if statements 2019-10-23 10:27:05 -07:00
Kelly Brazil
56901788de stop blocking when no pipe and enhance help text 2019-10-23 09:51:29 -07:00
Kelly Brazil
679ae6d5dc version bump 2019-10-23 09:20:11 -07:00
Kelly Brazil
b15c8c352a simplify state variables 2019-10-23 08:46:54 -07:00
Kelly Brazil
393e8bc560 Merge pull request #3 from kellyjonbrazil/dev
Dev v0.8.1
2019-10-22 17:26:40 -07:00
Kelly Brazil
976fd7d9bd readme update 2019-10-22 17:24:56 -07:00
Kelly Brazil
d8337870ca update documentation 2019-10-22 17:21:00 -07:00
Kelly Brazil
39a8aec77f v0.8.1 build 2019-10-22 17:15:02 -07:00
Kelly Brazil
306d539b6b readme update 2019-10-22 16:50:01 -07:00
Kelly Brazil
f3087b8a8e update readme and formatting 2019-10-22 16:40:27 -07:00
Kelly Brazil
414c2ecef8 fix iptables parser 2019-10-22 16:32:55 -07:00
Kelly Brazil
776ef2d1be add iptables parser 2019-10-22 15:42:29 -07:00
Kelly Brazil
9ac5746996 add uname parser 2019-10-22 13:28:15 -07:00
Kelly Brazil
a3e55d97c0 add mount parser 2019-10-22 12:54:41 -07:00
Kelly Brazil
b15227e7ba add lsblk parser 2019-10-22 11:55:11 -07:00
Kelly Brazil
ec3d1f84ce fix free parser 2019-10-22 11:26:58 -07:00
Kelly Brazil
753d5fd9fe readme update 2019-10-22 11:17:21 -07:00
Kelly Brazil
73a0d70c92 readme update 2019-10-22 11:15:44 -07:00
Kelly Brazil
c2c189f3e6 readme update 2019-10-22 11:14:19 -07:00
Kelly Brazil
36bc55a310 fix df 2019-10-22 11:11:41 -07:00
Kelly Brazil
a023001cd3 add df, env, and free parsers 2019-10-22 11:10:11 -07:00
Kelly Brazil
e3750b4962 documentation enhancements 2019-10-22 07:40:42 -07:00
Kelly Brazil
b5ea08e55b fix transport protocol 2019-10-21 18:22:51 -07:00
Kelly Brazil
8e71b8e352 fix jq example 2019-10-21 18:11:51 -07:00
Kelly Brazil
4c8610c54f fixed build 2019-10-21 17:59:32 -07:00
Kelly Brazil
c8f886dc8f fix example 2019-10-21 17:56:53 -07:00
Kelly Brazil
4cfc2d22b3 update changelog 2019-10-21 17:38:40 -07:00
Kelly Brazil
59238c8540 Merge pull request #2 from kellyjonbrazil/dev
Dev v0.6.2
2019-10-21 17:36:33 -07:00
Kelly Brazil
30080c0165 reorder parsers 2019-10-21 17:26:00 -07:00
Kelly Brazil
fab80bb3b4 readme update 2019-10-21 17:20:12 -07:00
Kelly Brazil
a9f2df8054 move parsed_line var lower 2019-10-21 14:27:26 -07:00
Kelly Brazil
1d110be6cb update doco 2019-10-21 14:13:31 -07:00
Kelly Brazil
be81b5e1ed readme update 2019-10-21 13:47:22 -07:00
Kelly Brazil
5f88f7d8a0 netstat cleanup 2019-10-21 13:41:53 -07:00
Kelly Brazil
e57c7cc8ef change output from dict to list 2019-10-21 13:23:29 -07:00
Kelly Brazil
b216627c10 flatten netstat output 2019-10-21 13:19:00 -07:00
Kelly Brazil
6e925eab13 clean up arg parsing 2019-10-21 13:07:30 -07:00
Kelly Brazil
d54d906c57 update readme 2019-10-18 19:03:11 -07:00
23 changed files with 2620 additions and 647 deletions

1174
README.md

File diff suppressed because it is too large Load Diff

View File

@@ -1,5 +1,38 @@
jc changelog
20191025 v1.0.1
- Add w parser
- Add uptime parser
- Add history parser
- Fix uptime parser
- Flatten env parser output
- Remove problematic characters from key names in: df, free, history, lsblk, lsof, and w
- Where possible, lowercase all keys (except cases like env where the key is the variable name)
- Remove integer values
- Handle CTRL-C gracefully
20191023 v0.9.1
- Add jobs parser
- Add lsof parser
- Add lsmod parser
- No blocking if no piped data
- Better help text
- Clean up iptables parser code
20191022 v0.8.1
- Add env parser
- Add df parser
- Add free parser
- Add lsblk parser
- Add mount parser
- Add uname parser
- Add iptables parser
20191021 v0.6.4
- Flatten netstat parser output
- Clean up argument parsing
- Fix command help
20191018 v0.5.5
- Fix netstat -p parsing for Ubuntu
- Add ps parser

View File

@@ -5,55 +5,67 @@
This module serializes standard unix command line output to structured JSON
output.
Example:
CLI Example:
$ ls -al | jc | jq .
$ ls -l /bin | jc --ls -p
[
{
"filename": ".",
"suffix": Null,
"bytes": 224,
"date_updated": "Oct 1 12:09",
"owner_user": "joeuser",
"owner_group": "staff",
"flags": "drwxr-xr-x+",
"link_to": Null,
"links": 47
},
{
"filename": "..",
"suffix": Null,
"bytes": 224,
"date_updated": "Oct 1 12:09",
"owner_user": "admin",
"owner_group": "root",
"flags": "drwxr-xr-x",
"link_to": Null,
"links": 7
},
{
"filename": "testfile.txt",
"suffix": "txt",
"bytes": 14686,
"date_updated": "Oct 1 12:09",
"owner_user": "joeuser",
"owner_group": "staff",
"flags": "-rwxr-xr-x@",
"link_to": Null,
"links": 1
},
{
"filename": "ncat",
"suffix": Null,
"bytes": 14686,
"date_updated": "Oct 1 12:09",
"owner_user": "joeuser",
"owner_group": "staff",
"flags": "lrwxr-xr-x",
"link_to": "../Cellar/nmap/7.70/bin/ncat",
"links": 1
}
{
"filename": "bash",
"flags": "-r-xr-xr-x",
"links": 1,
"owner": "root",
"group": "wheel",
"bytes": 618416,
"date": "May 3 22:26"
},
{
"filename": "cat",
"flags": "-rwxr-xr-x",
"links": 1,
"owner": "root",
"group": "wheel",
"bytes": 23648,
"date": "May 3 22:26"
},
{
"filename": "chmod",
"flags": "-rwxr-xr-x",
"links": 1,
"owner": "root",
"group": "wheel",
"bytes": 30016,
"date": "May 3 22:26"
},
...
]
Module Example:
>>> import jc.parsers.ls
>>>
>>> data='''-rwxr-xr-x 1 root wheel 23648 May 3 22:26 cat
... -rwxr-xr-x 1 root wheel 30016 May 3 22:26 chmod
... -rwxr-xr-x 1 root wheel 29024 May 3 22:26 cp
... -rwxr-xr-x 1 root wheel 375824 May 3 22:26 csh
... -rwxr-xr-x 1 root wheel 28608 May 3 22:26 date
... -rwxr-xr-x 1 root wheel 32000 May 3 22:26 dd
... -rwxr-xr-x 1 root wheel 23392 May 3 22:26 df
... -rwxr-xr-x 1 root wheel 18128 May 3 22:26 echo'''
>>>
>>> jc.parsers.ls.parse(data)
[{'filename': 'cat', 'flags': '-rwxr-xr-x', 'links': 1, 'owner': 'root', 'group': 'wheel',
'bytes': 23648, 'date': 'May 3 22:26'}, {'filename': 'chmod', 'flags': '-rwxr-xr-x', 'links': 1,
'owner': 'root', 'group': 'wheel', 'bytes': 30016, 'date': 'May 3 22:26'}, {'filename': 'cp',
'flags': '-rwxr-xr-x', 'links': 1, 'owner': 'root', 'group': 'wheel', 'bytes': 29024,
'date': 'May 3 22:26'}, {'filename': 'csh', 'flags': '-rwxr-xr-x', 'links': 1, 'owner': 'root',
'group': 'wheel', 'bytes': 375824, 'date': 'May 3 22:26'}, {'filename': 'date',
'flags': '-rwxr-xr-x', 'links': 1, 'owner': 'root', 'group': 'wheel', 'bytes': 28608,
'date': 'May 3 22:26'}, {'filename': 'dd', 'flags': '-rwxr-xr-x', 'links': 1, 'owner': 'root',
'group': 'wheel', 'bytes': 32000, 'date': 'May 3 22:26'}, {'filename': 'df', 'flags': '-rwxr-xr-x',
'links': 1, 'owner': 'root', 'group': 'wheel', 'bytes': 23392, 'date': 'May 3 22:26'},
{'filename': 'echo', 'flags': '-rwxr-xr-x', 'links': 1, 'owner': 'root', 'group': 'wheel',
'bytes': 18128, 'date': 'May 3 22:26'}]
"""
name = 'jc'

124
jc/jc.py
View File

@@ -5,42 +5,134 @@ Main input module
"""
import sys
import signal
import json
import jc.parsers.df
import jc.parsers.env
import jc.parsers.free
import jc.parsers.history
import jc.parsers.ifconfig
import jc.parsers.iptables
import jc.parsers.jobs
import jc.parsers.ls
import jc.parsers.lsblk
import jc.parsers.lsmod
import jc.parsers.lsof
import jc.parsers.mount
import jc.parsers.netstat
import jc.parsers.ps
import jc.parsers.route
import jc.parsers.uname
import jc.parsers.uptime
import jc.parsers.w
def helptext():
print('Usage: jc PARSER [OPTIONS]\n', file=sys.stderr)
print('Parsers:', file=sys.stderr)
print(' --df df parser', file=sys.stderr)
print(' --env env parser', file=sys.stderr)
print(' --free free parser', file=sys.stderr)
print(' --history history parser', file=sys.stderr)
print('  --ifconfig    ifconfig parser', file=sys.stderr)
print(' --iptables iptables parser', file=sys.stderr)
print(' --jobs jobs parser', file=sys.stderr)
print(' --ls ls parser', file=sys.stderr)
print(' --lsblk lsblk parser', file=sys.stderr)
print(' --lsmod lsmod parser', file=sys.stderr)
print(' --lsof lsof parser', file=sys.stderr)
print(' --mount mount parser', file=sys.stderr)
print(' --netstat netstat parser', file=sys.stderr)
print(' --ps ps parser', file=sys.stderr)
print(' --route route parser', file=sys.stderr)
print(' --uname uname parser', file=sys.stderr)
print(' --uptime uptime parser', file=sys.stderr)
print(' --w w parser\n', file=sys.stderr)
print('Options:', file=sys.stderr)
print(' -p pretty print output\n', file=sys.stderr)
print('Example:', file=sys.stderr)
print(' ls -al | jc --ls -p\n', file=sys.stderr)
def ctrlc(signum, frame):
exit()
def main():
pretty = False
data = sys.stdin.read()
signal.signal(signal.SIGINT, ctrlc)
if len(sys.argv) < 2:
print('Error: jc')
print(' Must specify parser. (e.g. --ls, --netstat, --ifconfig, etc.)')
print(' Use -p to pretty print')
print('Example: ls -al | jc --ls -p\n')
if sys.stdin.isatty():
print('jc: missing piped data\n', file=sys.stderr)
helptext()
exit()
arg = sys.argv[1]
data = sys.stdin.read()
pretty = False
if len(sys.argv) > 2:
if sys.argv[2] == '-p':
pretty = True
# options
if '-p' in sys.argv:
pretty = True
if arg == '--ifconfig':
# parsers
if '--df' in sys.argv:
result = jc.parsers.df.parse(data)
elif '--env' in sys.argv:
result = jc.parsers.env.parse(data)
elif '--free' in sys.argv:
result = jc.parsers.free.parse(data)
elif '--history' in sys.argv:
result = jc.parsers.history.parse(data)
elif '--ifconfig' in sys.argv:
result = jc.parsers.ifconfig.parse(data)
elif arg == '--ls':
elif '--iptables' in sys.argv:
result = jc.parsers.iptables.parse(data)
elif '--jobs' in sys.argv:
result = jc.parsers.jobs.parse(data)
elif '--ls' in sys.argv:
result = jc.parsers.ls.parse(data)
elif arg == '--netstat':
elif '--lsblk' in sys.argv:
result = jc.parsers.lsblk.parse(data)
elif '--lsmod' in sys.argv:
result = jc.parsers.lsmod.parse(data)
elif '--lsof' in sys.argv:
result = jc.parsers.lsof.parse(data)
elif '--mount' in sys.argv:
result = jc.parsers.mount.parse(data)
elif '--netstat' in sys.argv:
result = jc.parsers.netstat.parse(data)
elif arg == '--ps':
elif '--ps' in sys.argv:
result = jc.parsers.ps.parse(data)
elif arg == '--route':
elif '--route' in sys.argv:
result = jc.parsers.route.parse(data)
elif '--uname' in sys.argv:
result = jc.parsers.uname.parse(data)
elif '--uptime' in sys.argv:
result = jc.parsers.uptime.parse(data)
elif '--w' in sys.argv:
result = jc.parsers.w.parse(data)
else:
print('jc: missing or incorrect arguments\n', file=sys.stderr)
helptext()
exit()
# output resulting dictionary as json
if pretty:
print(json.dumps(result, indent=2))

60
jc/parsers/df.py Normal file
View File

@@ -0,0 +1,60 @@
"""jc - JSON CLI output utility df Parser
Usage:
specify --df as the first argument if the piped input is coming from df
Example:
$ df | jc --df -p
[
{
"filesystem": "udev",
"1k-blocks": "977500",
"used": "0",
"available": "977500",
"use_percent": "0%",
"mounted": "/dev"
},
{
"filesystem": "tmpfs",
"1k-blocks": "201732",
"used": "1204",
"available": "200528",
"use_percent": "1%",
"mounted": "/run"
},
{
"filesystem": "/dev/sda2",
"1k-blocks": "20508240",
"used": "5748312",
"available": "13695124",
"use_percent": "30%",
"mounted": "/"
},
{
"filesystem": "tmpfs",
"1k-blocks": "1008648",
"used": "0",
"available": "1008648",
"use_percent": "0%",
"mounted": "/dev/shm"
}
...
]
"""
def parse(data):
    """
    Parse `df` command output into a list of dictionaries.

    Parameters:
        data: (str) text output piped from the df command

    Returns:
        list of dicts, one per filesystem row. All values are strings.
        The 'use%' header is renamed 'use_percent'. Returns an empty
        list when there is no input to parse.
    """
    # code adapted from Conor Heine at:
    # https://gist.github.com/cahna/43a1a3ff4d075bcd71f9d7120037a501
    cleandata = data.splitlines()

    # guard: with no input, cleandata[0] below would raise IndexError
    if not cleandata:
        return []

    headers = [h for h in ' '.join(cleandata[0].lower().strip().split()).split() if h]

    # clean up 'use%' header
    # even though % in a key is valid json, it can make things difficult
    headers = ['use_percent' if x == 'use%' else x for x in headers]

    raw_data = map(lambda s: s.strip().split(None, len(headers) - 1), cleandata[1:])
    return [dict(zip(headers, r)) for r in raw_data]

37
jc/parsers/env.py Normal file
View File

@@ -0,0 +1,37 @@
"""jc - JSON CLI output utility env Parser
Usage:
specify --env as the first argument if the piped input is coming from env
Example:
$ env | jc --env -p
{
"TERM": "xterm-256color",
"SHELL": "/bin/bash",
"USER": "root",
"PATH": "/usr/local/bin:/usr/bin:/bin:/usr/sbin:/sbin",
"PWD": "/root",
"LANG": "en_US.UTF-8",
"HOME": "/root",
"LOGNAME": "root",
"_": "/usr/bin/env"
}
"""
def parse(data):
    """
    Parse `env` command output into a dictionary.

    Each non-blank line is split on the first '=': the left side becomes
    the key (the variable name, case preserved) and the remainder the
    value. Returns an empty dict when there is no input.
    """
    result = {}

    # skip blank lines; split each remaining line on the first '=' only,
    # so values containing '=' are kept intact
    for line in filter(None, data.splitlines()):
        pair = line.split('=', maxsplit=1)
        result[pair[0]] = pair[1]

    return result

48
jc/parsers/free.py Normal file
View File

@@ -0,0 +1,48 @@
"""jc - JSON CLI output utility free Parser
Usage:
specify --free as the first argument if the piped input is coming from free
Example:
$ free | jc --free -p
[
{
"type": "Mem",
"total": "2017300",
"used": "213104",
"free": "1148452",
"shared": "1176",
"buff_cache": "655744",
"available": "1622204"
},
{
"type": "Swap",
"total": "2097148",
"used": "0",
"free": "2097148"
}
]
"""
def parse(data):
    """
    Parse `free` command output into a list of dictionaries.

    Parameters:
        data: (str) text output piped from the free command

    Returns:
        list of dicts, one per memory row (e.g. Mem, Swap). All values
        are strings. The 'buff/cache' header is renamed 'buff_cache'
        and the row label is stored under 'type' with its trailing ':'
        removed. Returns an empty list when there is no input to parse.
    """
    # code adapted from Conor Heine at:
    # https://gist.github.com/cahna/43a1a3ff4d075bcd71f9d7120037a501
    cleandata = data.splitlines()

    # guard: with no input, cleandata[0] below would raise IndexError
    if not cleandata:
        return []

    headers = [h for h in ' '.join(cleandata[0].lower().strip().split()).split() if h]
    # the header row has no label column, so prepend one for the row type
    headers.insert(0, "type")

    # clean up 'buff/cache' header
    # even though forward slash in a key is valid json, it can make things difficult
    headers = ['buff_cache' if x == 'buff/cache' else x for x in headers]

    raw_data = map(lambda s: s.strip().split(None, len(headers) - 1), cleandata[1:])
    output = [dict(zip(headers, r)) for r in raw_data]

    # strip the trailing colon from row labels like 'Mem:' and 'Swap:'
    for entry in output:
        entry['type'] = entry['type'].rstrip(':')

    return output

38
jc/parsers/history.py Normal file
View File

@@ -0,0 +1,38 @@
"""jc - JSON CLI output utility history Parser
Usage:
specify --history as the first argument if the piped input is coming from history
Example:
$ history | jc --history -p
{
"n118": "sleep 100",
"n119": "ls /bin",
"n120": "echo \"hello\"",
"n121": "docker images",
...
}
"""
def parse(data):
    """
    Parse `history` command output into a dictionary.

    Keys are the history entry numbers prefixed with the letter 'n'
    (so the resulting JSON is easier to work with); values are the
    command strings. Non-ascii characters are stripped from the input.
    Lines that do not split into a number and a command are skipped.
    """
    result = {}

    # strip any non-ascii chars, then work line by line, ignoring blanks
    ascii_text = data.encode('ascii', errors='ignore').decode()
    for line in filter(None, ascii_text.splitlines()):
        fields = line.split(maxsplit=1)
        # malformed lines (e.g. stray output from prior commands) are skipped
        if len(fields) == 2:
            result['n' + fields[0]] = fields[1]

    return result

View File

@@ -8,7 +8,86 @@ Usage:
Example:
$ ifconfig | jc --ifconfig -p
[
{
"name": "docker0",
"flags": "4099",
"state": "UP,BROADCAST,MULTICAST",
"mtu": "1500",
"ipv4_addr": "172.17.0.1",
"ipv4_mask": "255.255.0.0",
"ipv4_bcast": "0.0.0.0",
"mac_addr": "02:42:53:18:31:cc",
"type": "Ethernet",
"rx_packets": "0",
"rx_errors": "0",
"rx_dropped": "0",
"rx_overruns": "0",
"rx_frame": "0",
"tx_packets": "0",
"tx_errors": "0",
"tx_dropped": "0",
"tx_overruns": "0",
"tx_carrier": "0",
"tx_collisions": "0",
"ipv6_addr": null,
"ipv6_mask": null,
"ipv6_scope": null,
"metric": null
},
{
"name": "ens33",
"flags": "4163",
"state": "UP,BROADCAST,RUNNING,MULTICAST",
"mtu": "1500",
"ipv4_addr": "192.168.71.135",
"ipv4_mask": "255.255.255.0",
"ipv4_bcast": "192.168.71.255",
"ipv6_addr": "fe80::c1cb:715d:bc3e:b8a0",
"ipv6_mask": "64",
"ipv6_scope": "link",
"mac_addr": "00:0c:29:3b:58:0e",
"type": "Ethernet",
"rx_packets": "26348",
"rx_errors": "0",
"rx_dropped": "0",
"rx_overruns": "0",
"rx_frame": "0",
"tx_packets": "5308",
"tx_errors": "0",
"tx_dropped": "0",
"tx_overruns": "0",
"tx_carrier": "0",
"tx_collisions": "0",
"metric": null
},
{
"name": "lo",
"flags": "73",
"state": "UP,LOOPBACK,RUNNING",
"mtu": "65536",
"ipv4_addr": "127.0.0.1",
"ipv4_mask": "255.0.0.0",
"ipv4_bcast": null,
"ipv6_addr": "::1",
"ipv6_mask": "128",
"ipv6_scope": "host",
"mac_addr": null,
"type": "Local Loopback",
"rx_packets": "64",
"rx_errors": "0",
"rx_dropped": "0",
"rx_overruns": "0",
"rx_frame": "0",
"tx_packets": "64",
"tx_errors": "0",
"tx_dropped": "0",
"tx_overruns": "0",
"tx_carrier": "0",
"tx_collisions": "0",
"metric": null
}
]
"""
from collections import namedtuple
from ifconfigparser import IfconfigParser

364
jc/parsers/iptables.py Normal file
View File

@@ -0,0 +1,364 @@
"""jc - JSON CLI output utility iptables Parser
Usage:
Specify --iptables as the first argument if the piped input is coming from iptables
Supports -vLn for all tables
Examples:
$ sudo iptables -L -t nat | jc --iptables -p
[
{
"chain": "PREROUTING",
"rules": [
{
"target": "PREROUTING_direct",
"prot": "all",
"opt": "--",
"source": "anywhere",
"destination": "anywhere"
},
{
"target": "PREROUTING_ZONES_SOURCE",
"prot": "all",
"opt": "--",
"source": "anywhere",
"destination": "anywhere"
},
{
"target": "PREROUTING_ZONES",
"prot": "all",
"opt": "--",
"source": "anywhere",
"destination": "anywhere"
},
{
"target": "DOCKER",
"prot": "all",
"opt": "--",
"source": "anywhere",
"destination": "anywhere",
"options": "ADDRTYPE match dst-type LOCAL"
}
]
},
{
"chain": "INPUT",
"rules": []
},
{
"chain": "OUTPUT",
"rules": [
{
"target": "OUTPUT_direct",
"prot": "all",
"opt": "--",
"source": "anywhere",
"destination": "anywhere"
},
{
"target": "DOCKER",
"prot": "all",
"opt": "--",
"source": "anywhere",
"destination": "!loopback/8",
"options": "ADDRTYPE match dst-type LOCAL"
}
]
},
...
]
$ sudo iptables -vnL -t filter | jc --iptables -p
[
{
"chain": "INPUT",
"rules": [
{
"pkts": "1571",
"bytes": "3394K",
"target": "ACCEPT",
"prot": "all",
"opt": "--",
"in": "*",
"out": "*",
"source": "0.0.0.0/0",
"destination": "0.0.0.0/0",
"options": "ctstate RELATED,ESTABLISHED"
},
{
"pkts": "0",
"bytes": "0",
"target": "ACCEPT",
"prot": "all",
"opt": "--",
"in": "lo",
"out": "*",
"source": "0.0.0.0/0",
"destination": "0.0.0.0/0"
},
{
"pkts": "711",
"bytes": "60126",
"target": "INPUT_direct",
"prot": "all",
"opt": "--",
"in": "*",
"out": "*",
"source": "0.0.0.0/0",
"destination": "0.0.0.0/0"
},
{
"pkts": "711",
"bytes": "60126",
"target": "INPUT_ZONES_SOURCE",
"prot": "all",
"opt": "--",
"in": "*",
"out": "*",
"source": "0.0.0.0/0",
"destination": "0.0.0.0/0"
},
{
"pkts": "711",
"bytes": "60126",
"target": "INPUT_ZONES",
"prot": "all",
"opt": "--",
"in": "*",
"out": "*",
"source": "0.0.0.0/0",
"destination": "0.0.0.0/0"
},
{
"pkts": "0",
"bytes": "0",
"target": "DROP",
"prot": "all",
"opt": "--",
"in": "*",
"out": "*",
"source": "0.0.0.0/0",
"destination": "0.0.0.0/0",
"options": "ctstate INVALID"
},
{
"pkts": "710",
"bytes": "60078",
"target": "REJECT",
"prot": "all",
"opt": "--",
"in": "*",
"out": "*",
"source": "0.0.0.0/0",
"destination": "0.0.0.0/0",
"options": "reject-with icmp-host-prohibited"
}
]
},
{
"chain": "FORWARD",
"rules": [
{
"pkts": "0",
"bytes": "0",
"target": "DOCKER-ISOLATION",
"prot": "all",
"opt": "--",
"in": "*",
"out": "*",
"source": "0.0.0.0/0",
"destination": "0.0.0.0/0"
},
{
"pkts": "0",
"bytes": "0",
"target": "DOCKER",
"prot": "all",
"opt": "--",
"in": "*",
"out": "docker0",
"source": "0.0.0.0/0",
"destination": "0.0.0.0/0"
},
{
"pkts": "0",
"bytes": "0",
"target": "ACCEPT",
"prot": "all",
"opt": "--",
"in": "*",
"out": "docker0",
"source": "0.0.0.0/0",
"destination": "0.0.0.0/0",
"options": "ctstate RELATED,ESTABLISHED"
},
{
"pkts": "0",
"bytes": "0",
"target": "ACCEPT",
"prot": "all",
"opt": "--",
"in": "docker0",
"out": "!docker0",
"source": "0.0.0.0/0",
"destination": "0.0.0.0/0"
},
{
"pkts": "0",
"bytes": "0",
"target": "ACCEPT",
"prot": "all",
"opt": "--",
"in": "docker0",
"out": "docker0",
"source": "0.0.0.0/0",
"destination": "0.0.0.0/0"
},
{
"pkts": "0",
"bytes": "0",
"target": "ACCEPT",
"prot": "all",
"opt": "--",
"in": "*",
"out": "*",
"source": "0.0.0.0/0",
"destination": "0.0.0.0/0",
"options": "ctstate RELATED,ESTABLISHED"
},
{
"pkts": "0",
"bytes": "0",
"target": "ACCEPT",
"prot": "all",
"opt": "--",
"in": "lo",
"out": "*",
"source": "0.0.0.0/0",
"destination": "0.0.0.0/0"
},
{
"pkts": "0",
"bytes": "0",
"target": "FORWARD_direct",
"prot": "all",
"opt": "--",
"in": "*",
"out": "*",
"source": "0.0.0.0/0",
"destination": "0.0.0.0/0"
},
{
"pkts": "0",
"bytes": "0",
"target": "FORWARD_IN_ZONES_SOURCE",
"prot": "all",
"opt": "--",
"in": "*",
"out": "*",
"source": "0.0.0.0/0",
"destination": "0.0.0.0/0"
},
{
"pkts": "0",
"bytes": "0",
"target": "FORWARD_IN_ZONES",
"prot": "all",
"opt": "--",
"in": "*",
"out": "*",
"source": "0.0.0.0/0",
"destination": "0.0.0.0/0"
},
{
"pkts": "0",
"bytes": "0",
"target": "FORWARD_OUT_ZONES_SOURCE",
"prot": "all",
"opt": "--",
"in": "*",
"out": "*",
"source": "0.0.0.0/0",
"destination": "0.0.0.0/0"
},
{
"pkts": "0",
"bytes": "0",
"target": "FORWARD_OUT_ZONES",
"prot": "all",
"opt": "--",
"in": "*",
"out": "*",
"source": "0.0.0.0/0",
"destination": "0.0.0.0/0"
},
{
"pkts": "0",
"bytes": "0",
"target": "DROP",
"prot": "all",
"opt": "--",
"in": "*",
"out": "*",
"source": "0.0.0.0/0",
"destination": "0.0.0.0/0",
"options": "ctstate INVALID"
},
{
"pkts": "0",
"bytes": "0",
"target": "REJECT",
"prot": "all",
"opt": "--",
"in": "*",
"out": "*",
"source": "0.0.0.0/0",
"destination": "0.0.0.0/0",
"options": "reject-with icmp-host-prohibited"
}
]
},
...
]
"""
def parse(data):
    """
    Parse `iptables` output into a list of chain dictionaries.

    Parameters:
        data: (str) text output piped from iptables (-L, with or
              without -v and -n)

    Returns:
        list of dicts of the form {'chain': <name>, 'rules': [<dict>, ...]}.
        Rule values are strings; any trailing text beyond the header
        columns is collected under an extra 'options' key.

    Bug fix: the previous version only appended a chain to the output
    when the NEXT 'Chain' line was encountered, so the final chain was
    always dropped. The chain dict is now appended as soon as it is
    created and populated in place afterwards.
    """
    output = []
    chain = {}
    headers = []

    for line in data.splitlines():
        if line.find('Chain') == 0:
            # start a new chain and append it immediately; rules are
            # added to it in place as subsequent lines are parsed
            chain = {}
            headers = []

            parsed_line = line.split()
            chain['chain'] = parsed_line[1]
            chain['rules'] = []

            output.append(chain)
            continue

        elif line.find('target') == 0 or line.find('pkts') == 1:
            # header row: column names, plus a catch-all 'options' column
            headers = [h for h in ' '.join(line.lower().strip().split()).split() if h]
            headers.append("options")
            continue

        else:
            # rule row: blank lines produce an empty dict and are skipped
            rule = line.split(maxsplit=len(headers) - 1)
            temp_rule = dict(zip(headers, rule))
            if temp_rule:
                chain['rules'].append(temp_rule)

    # drop any empty placeholder entries
    return list(filter(None, output))

100
jc/parsers/jobs.py Normal file
View File

@@ -0,0 +1,100 @@
"""jc - JSON CLI output utility jobs Parser
Usage:
specify --jobs as the first argument if the piped input is coming from jobs
Also supports the -l option
Example:
$ jobs -l | jc --jobs -p
[
{
"job_number": "1",
"pid": "19510",
"status": "Running",
"command": "sleep 1000 &"
},
{
"job_number": "2",
"pid": "19511",
"status": "Running",
"command": "sleep 1001 &"
},
{
"job_number": "3",
"pid": "19512",
"history": "previous",
"status": "Running",
"command": "sleep 1002 &"
},
{
"job_number": "4",
"pid": "19513",
"history": "current",
"status": "Running",
"command": "sleep 1003 &"
}
]
"""
import string
def parse(data):
    """
    Parse `jobs` command output (with or without -l) into a list of dicts.

    Each entry has 'job_number', optional 'pid' (when -l was used),
    optional 'history' ('current' for '+', 'previous' for '-'),
    'status', and 'command'. All values are strings.
    """
    output = []

    # work only with non-blank lines
    for entry in filter(None, data.splitlines()):
        fields = entry.split(maxsplit=2)
        pid = ''
        job_history = ''

        # -l form: second field is all digits-leading, i.e. the PID.
        # Pull it out and re-split the remainder into status + command.
        if fields[1][0] in string.digits:
            pid = fields.pop(1)
            rest = fields.pop(1)
            fields = [fields[0]] + rest.split(maxsplit=1)

        # '+' marks the current job, '-' the previous job
        first = fields[0]
        if first.find('+') != -1:
            job_history = 'current'
            first = first.rstrip('+')
        if first.find('-') != -1:
            job_history = 'previous'
            first = first.rstrip('-')

        # strip the surrounding brackets from the job number
        row = {'job_number': first.lstrip('[').rstrip(']')}
        if pid:
            row['pid'] = pid
        if job_history:
            row['history'] = job_history
        row['status'] = fields[1]
        row['command'] = fields[2]

        output.append(row)

    return output

View File

@@ -5,81 +5,96 @@ Usage:
ls options supported:
- None
- l
- a
- lah
Examples:
$ ls -a /usr/bin | jc --ls -p
$ ls /usr/bin | jc --ls -p
[
{
"filename": "."
"filename": "apropos"
},
{
"filename": ".."
"filename": "arch"
},
{
"filename": "2to3-"
"filename": "awk"
},
{
"filename": "2to3-2.7"
},
{
"filename": "AssetCacheLocatorUtil"
"filename": "base64"
},
...
]
$ ls -al /usr/bin | jc --ls -p
$ ls -l /usr/bin | jc --ls -p
[
{
"filename": ".",
"flags": "drwxr-xr-x",
"links": 970,
"filename": "apropos",
"link_to": "whatis",
"flags": "lrwxrwxrwx.",
"links": "1",
"owner": "root",
"group": "wheel",
"bytes": 31040,
"date": "Aug 27 21:20"
"group": "root",
"size": "6",
"date": "Aug 15 10:53"
},
{
"filename": "..",
"flags": "drwxr-xr-x@",
"links": 9,
"filename": "arch",
"flags": "-rwxr-xr-x.",
"links": "1",
"owner": "root",
"group": "wheel",
"bytes": 288,
"date": "May 3 22:14"
"group": "root",
"size": "33080",
"date": "Aug 19 23:25"
},
{
"filename": "2to3-",
"flags": "-rwxr-xr-x",
"links": 4,
"filename": "awk",
"link_to": "gawk",
"flags": "lrwxrwxrwx.",
"links": "1",
"owner": "root",
"group": "wheel",
"bytes": 925,
"date": "Feb 22 2019"
"group": "root",
"size": "4",
"date": "Aug 15 10:53"
},
{
"filename": "2to3-2.7",
"link_to": "../../System/Library/Frameworks/Python.framework/Versions/2.7/bin/2to3-2.7",
"flags": "lrwxr-xr-x",
"links": 1,
"filename": "base64",
"flags": "-rwxr-xr-x.",
"links": "1",
"owner": "root",
"group": "wheel",
"bytes": 74,
"date": "May 4 02:12"
"group": "root",
"size": "37360",
"date": "Aug 19 23:25"
},
{
"filename": "basename",
"flags": "-rwxr-xr-x.",
"links": "1",
"owner": "root",
"group": "root",
"size": "29032",
"date": "Aug 19 23:25"
},
{
"filename": "bash",
"flags": "-rwxr-xr-x.",
"links": "1",
"owner": "root",
"group": "root",
"size": "964600",
"date": "Aug 8 05:06"
},
...
]
$ $ ls -l /usr/bin | jc --ls | jq .[] | jq 'select(.bytes > 50000000)'
$ ls -l /usr/bin | jc --ls | jq '.[] | select(.size|tonumber > 50000000)'
{
"filename": "emacs",
"flags": "-r-xr-xr-x",
"links": 1,
"owner": "root",
"group": "wheel",
"bytes": 117164432,
"size": "117164432",
"date": "May 3 22:26"
}
"""
@@ -117,10 +132,10 @@ def parse(data):
output_line['link_to'] = filename_field[1]
output_line['flags'] = parsed_line[0]
output_line['links'] = int(parsed_line[1])
output_line['links'] = parsed_line[1]
output_line['owner'] = parsed_line[2]
output_line['group'] = parsed_line[3]
output_line['bytes'] = int(parsed_line[4])
output_line['size'] = parsed_line[4]
output_line['date'] = ' '.join(parsed_line[5:8])
output.append(output_line)
else:

83
jc/parsers/lsblk.py Normal file
View File

@@ -0,0 +1,83 @@
"""jc - JSON CLI output utility lsblk Parser
Usage:
specify --lsblk as the first argument if the piped input is coming from lsblk
Example:
$ lsblk | jc --lsblk -p
[
{
"name": "sda",
"maj_min": "8:0",
"rm": "0",
"size": "20G",
"ro": "0",
"type": "disk"
},
{
"name": "sda1",
"maj_min": "8:1",
"rm": "0",
"size": "1G",
"ro": "0",
"type": "part",
"mountpoint": "/boot"
},
{
"name": "sda2",
"maj_min": "8:2",
"rm": "0",
"size": "19G",
"ro": "0",
"type": "part"
},
{
"name": "centos-root",
"maj_min": "253:0",
"rm": "0",
"size": "17G",
"ro": "0",
"type": "lvm",
"mountpoint": "/"
},
{
"name": "centos-swap",
"maj_min": "253:1",
"rm": "0",
"size": "2G",
"ro": "0",
"type": "lvm",
"mountpoint": "[SWAP]"
},
{
"name": "sr0",
"maj_min": "11:0",
"rm": "1",
"size": "1024M",
"ro": "0",
"type": "rom"
}
]
"""
def parse(data):
    """
    Parse `lsblk` command output into a list of dictionaries.

    Parameters:
        data: (str) text output piped from the lsblk command

    Returns:
        list of dicts, one per device row. All values are strings.
        The 'maj:min' header is renamed 'maj_min' and non-ascii
        tree-drawing characters are stripped from device names.
        Returns an empty list when there is no input to parse.
    """
    # code adapted from Conor Heine at:
    # https://gist.github.com/cahna/43a1a3ff4d075bcd71f9d7120037a501
    cleandata = data.splitlines()

    # guard: with no input, cleandata[0] below would raise IndexError
    if not cleandata:
        return []

    headers = [h for h in ' '.join(cleandata[0].lower().strip().split()).split() if h]

    # clean up 'maj:min' header
    # even though colon in a key is valid json, it can make things difficult
    headers = ['maj_min' if x == 'maj:min' else x for x in headers]

    raw_data = map(lambda s: s.strip().split(None, len(headers) - 1), cleandata[1:])
    output = [dict(zip(headers, r)) for r in raw_data]

    # strip non-ascii chars (tree-drawing characters) from device names
    for entry in output:
        entry['name'] = entry['name'].encode('ascii', errors='ignore').decode()

    return output

72
jc/parsers/lsmod.py Normal file
View File

@@ -0,0 +1,72 @@
"""jc - JSON CLI output utility lsmod Parser
Usage:
specify --lsmod as the first argument if the piped input is coming from lsmod
Example:
$ lsmod | jc --lsmod -p
[
...
{
"module": "nf_conntrack",
"size": "139224",
"used": "7",
"by": [
"nf_nat",
"nf_nat_ipv4",
"nf_nat_ipv6",
"xt_conntrack",
"nf_nat_masquerade_ipv4",
"nf_conntrack_ipv4",
"nf_conntrack_ipv6"
]
},
{
"module": "ip_set",
"size": "45799",
"used": "0"
},
{
"module": "nfnetlink",
"size": "14519",
"used": "1",
"by": [
"ip_set"
]
},
{
"module": "ebtable_filter",
"size": "12827",
"used": "1"
},
{
"module": "ebtables",
"size": "35009",
"used": "2",
"by": [
"ebtable_nat",
"ebtable_filter"
]
},
...
]
"""
def parse(data):
    """Parse `lsmod` command output into a list of dictionaries.

    data: string output piped from `lsmod`

    Returns a list of dicts, one per module row, with lowercase header
    names as keys. The optional 'by' field is split on commas into a
    list of module names. Returns an empty list for empty input instead
    of raising IndexError.
    """
    # code adapted from Conor Heine at:
    # https://gist.github.com/cahna/43a1a3ff4d075bcd71f9d7120037a501
    cleandata = data.splitlines()

    # guard against empty input so the header lookup below cannot IndexError
    if not cleandata:
        return []

    headers = cleandata[0].lower().split()

    # limit splits so the comma-separated 'by' column stays one field
    raw_data = map(lambda s: s.strip().split(None, len(headers) - 1), cleandata[1:])
    output = [dict(zip(headers, r)) for r in raw_data]

    for mod in output:
        # 'by' is only present when other modules use this one
        if 'by' in mod:
            mod['by'] = mod['by'].split(',')

    return output

125
jc/parsers/lsof.py Normal file
View File

@@ -0,0 +1,125 @@
"""jc - JSON CLI output utility lsof Parser
Usage:
specify --lsof as the first argument if the piped input is coming from lsof
Example:
$ sudo lsof | jc --lsof -p | more
[
{
"command": "systemd",
"pid": "1",
"tid": null,
"user": "root",
"fd": "cwd",
"type": "DIR",
"device": "8,2",
"size_off": "4096",
"node": "2",
"name": "/"
},
{
"command": "systemd",
"pid": "1",
"tid": null,
"user": "root",
"fd": "rtd",
"type": "DIR",
"device": "8,2",
"size_off": "4096",
"node": "2",
"name": "/"
},
{
"command": "systemd",
"pid": "1",
"tid": null,
"user": "root",
"fd": "txt",
"type": "REG",
"device": "8,2",
"size_off": "1595792",
"node": "668802",
"name": "/lib/systemd/systemd"
},
{
"command": "systemd",
"pid": "1",
"tid": null,
"user": "root",
"fd": "mem",
"type": "REG",
"device": "8,2",
"size_off": "1700792",
"node": "656167",
"name": "/lib/x86_64-linux-gnu/libm-2.27.so"
},
{
"command": "systemd",
"pid": "1",
"tid": null,
"user": "root",
"fd": "mem",
"type": "REG",
"device": "8,2",
"size_off": "121016",
"node": "655394",
"name": "/lib/x86_64-linux-gnu/libudev.so.1.6.9"
},
...
]
"""
import string
def parse(data):
    """Parse `lsof` command output into a list of dictionaries.

    Uses the column position of each header's last character to detect
    fields that are blank on a data row and inserts None placeholders,
    so every row zips cleanly against the header names.
    """
    output = []
    linedata = data.splitlines()

    # Clear any blank lines
    cleandata = list(filter(None, linedata))

    if cleandata:
        # find column value of last character of each header
        header_text = cleandata.pop(0).lower()

        # clean up 'size/off' header
        # even though forward slash in a key is valid json, it can make things difficult
        header_row = header_text.replace('size/off', 'size_off')
        headers = header_row.split()

        header_spec = []
        for i, h in enumerate(headers):
            # header tuple is (index, header_name, col)
            # col is the 1-based column just past the header's last character
            header_spec.append((i, h, header_row.find(h) + len(h)))

        # parse lines
        for entry in cleandata:
            output_line = {}

            # normalize data by inserting Null for missing data
            # limit splits so the trailing NAME field can keep its spaces
            temp_line = entry.split(maxsplit=len(headers) - 1)

            for spec in header_spec:
                index = spec[0]
                header_name = spec[1]
                col = spec[2] - 1  # subtract one since column starts at 0 instead of 1

                # 'command' and 'name' may themselves contain spaces, so they
                # cannot be checked positionally; skip them here
                if header_name == 'command' or header_name == 'name':
                    continue

                # whitespace at the header's end column means the field is
                # empty on this row — hold its place with None
                # NOTE(review): assumes each data row is at least `col`
                # characters wide (presumably true for lsof's padded output);
                # a shorter row would raise IndexError — TODO confirm
                if entry[col] in string.whitespace:
                    temp_line.insert(index, None)

            # rejoin everything from the 10th field onward as the NAME value
            name = ' '.join(temp_line[9:])
            fixed_line = temp_line[0:9]
            fixed_line.append(name)

            output_line = dict(zip(headers, fixed_line))
            output.append(output_line)

    return output

75
jc/parsers/mount.py Normal file
View File

@@ -0,0 +1,75 @@
"""jc - JSON CLI output utility mount Parser
Usage:
specify --mount as the first argument if the piped input is coming from mount
Example:
$ mount | jc --mount -p
[
{
"filesystem": "sysfs",
"mount_point": "/sys",
"type": "sysfs",
"access": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime"
]
},
{
"filesystem": "proc",
"mount_point": "/proc",
"type": "proc",
"access": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime"
]
},
{
"filesystem": "udev",
"mount_point": "/dev",
"type": "devtmpfs",
"access": [
"rw",
"nosuid",
"relatime",
"size=977500k",
"nr_inodes=244375",
"mode=755"
]
},
...
]
"""
def parse(data):
    """Parse `mount` command output into a list of dictionaries.

    data: string output piped from `mount`

    Each line of the form 'fs on /path type fstype (opt1,opt2,...)'
    becomes a dict with keys: filesystem, mount_point, type, access
    (access is the option list split on commas).
    """
    output = []

    # drop blank lines before parsing
    for line in [entry for entry in data.splitlines() if entry]:
        fields = line.split()
        mount_options = fields[5].lstrip('(').rstrip(')').split(',')
        output.append({
            'filesystem': fields[0],
            'mount_point': fields[2],
            'type': fields[4],
            'access': mount_options,
        })

    return output

View File

@@ -11,154 +11,121 @@ Limitations:
Examples:
$ netstat -p | jc --netstat -p
{
"client": {
"tcp": {
"ipv4": [
{
"local_address": "localhost.localdo",
"local_port": "34480",
"foreign_address": "lb-192-30-255-113",
"foreign_port": "https",
"state": "ESTABLISHED",
"pid": 53550,
"program_name": "git-remote-ht",
"receive_q": 0,
"send_q": 0
},
{
"local_address": "localhost.localdo",
"local_port": "34478",
"foreign_address": "lb-192-30-255-113",
"foreign_port": "https",
"state": "ESTABLISHED",
"pid": 53550,
"program_name": "git-remote-ht",
"receive_q": 0,
"send_q": 0
}
]
}
[
{
"transport_protocol": "tcp",
"network_protocol": "ipv4",
"local_address": "localhost.localdo",
"local_port": "34480",
"foreign_address": "lb-192-30-255-113",
"foreign_port": "https",
"state": "ESTABLISHED",
"pid": 53550,
"program_name": "git-remote-ht",
"receive_q": "0",
"send_q": "0"
},
{
"transport_protocol": "tcp",
"network_protocol": "ipv4",
"local_address": "localhost.localdo",
"local_port": "34478",
"foreign_address": "lb-192-30-255-113",
"foreign_port": "https",
"state": "ESTABLISHED",
"pid": 53550,
"program_name": "git-remote-ht",
"receive_q": "0",
"send_q": "0"
}
}
]
$ netstat -lp | jc --netstat -p
{
"server": {
"tcp": {
"ipv4": [
{
"local_address": "localhost",
"local_port": "smtp",
"foreign_address": "0.0.0.0",
"foreign_port": "*",
"state": "LISTEN",
"pid": 1594,
"program_name": "master",
"receive_q": 0,
"send_q": 0
},
{
"local_address": "0.0.0.0",
"local_port": "ssh",
"foreign_address": "0.0.0.0",
"foreign_port": "*",
"state": "LISTEN",
"pid": 21918,
"program_name": "sshd",
"receive_q": 0,
"send_q": 0
}
],
"ipv6": [
{
"local_address": "localhost",
"local_port": "smtp",
"foreign_address": "[::]",
"foreign_port": "*",
"state": "LISTEN",
"pid": 1594,
"program_name": "master",
"receive_q": 0,
"send_q": 0
},
{
"local_address": "[::]",
"local_port": "ssh",
"foreign_address": "[::]",
"foreign_port": "*",
"state": "LISTEN",
"pid": 21918,
"program_name": "sshd",
"receive_q": 0,
"send_q": 0
}
]
},
"udp": {
"ipv4": [
{
"local_address": "0.0.0.0",
"local_port": "bootpc",
"foreign_address": "0.0.0.0",
"foreign_port": "*",
"pid": 13903,
"program_name": "dhclient",
"receive_q": 0,
"send_q": 0
},
{
"local_address": "localhost",
"local_port": "323",
"foreign_address": "0.0.0.0",
"foreign_port": "*",
"pid": 30926,
"program_name": "chronyd",
"receive_q": 0,
"send_q": 0
}
],
"ipv6": [
{
"local_address": "localhost",
"local_port": "323",
"foreign_address": "[::]",
"foreign_port": "*",
"pid": 30926,
"program_name": "chronyd",
"receive_q": 0,
"send_q": 0
}
]
}
}
}
$ sudo netstat -lpn | jc --netstat -p
[
{
"transport_protocol": "tcp",
"network_protocol": "ipv4",
"local_address": "127.0.0.1",
"local_port": "25",
"foreign_address": "0.0.0.0",
"foreign_port": "*",
"state": "LISTEN",
"pid": "1584",
"program_name": "master",
"receive_q": "0",
"send_q": "0"
},
{
"transport_protocol": "tcp",
"network_protocol": "ipv4",
"local_address": "0.0.0.0",
"local_port": "22",
"foreign_address": "0.0.0.0",
"foreign_port": "*",
"state": "LISTEN",
"pid": "1213",
"program_name": "sshd",
"receive_q": "0",
"send_q": "0"
},
{
"transport_protocol": "tcp",
"network_protocol": "ipv6",
"local_address": "::1",
"local_port": "25",
"foreign_address": "::",
"foreign_port": "*",
"state": "LISTEN",
"pid": "1584",
"program_name": "master",
"receive_q": "0",
"send_q": "0"
},
{
"transport_protocol": "udp",
"network_protocol": "ipv4",
"local_address": "0.0.0.0",
"local_port": "68",
"foreign_address": "0.0.0.0",
"foreign_port": "*",
"pid": "19177",
"program_name": "dhclient",
"receive_q": "0",
"send_q": "0"
},
...
]
"""
import string
output = {}
class state():
section = ''
session = ''
network = ''
client_tcp_ip4 = []
client_tcp_ip6 = []
client_udp_ip4 = []
client_udp_ip6 = []
server_tcp_ip4 = []
server_tcp_ip6 = []
server_udp_ip4 = []
server_udp_ip6 = []
output = []
def parse_line(entry):
parsed_line = entry.split()
output_line = {}
if entry.find('tcp') == 0:
output_line['transport_protocol'] = 'tcp'
if entry.find('p6') == 2:
output_line['network_protocol'] = 'ipv6'
else:
output_line['network_protocol'] = 'ipv4'
elif entry.find('udp') == 0:
output_line['transport_protocol'] = 'udp'
if entry.find('p6') == 2:
output_line['network_protocol'] = 'ipv6'
else:
output_line['network_protocol'] = 'ipv4'
else:
return
parsed_line = entry.split()
output_line['local_address'] = parsed_line[3].rsplit(':', 1)[0]
output_line['local_port'] = parsed_line[3].rsplit(':', 1)[-1]
output_line['foreign_address'] = parsed_line[4].rsplit(':', 1)[0]
@@ -170,15 +137,15 @@ def parse_line(entry):
output_line['state'] = parsed_line[5]
if len(parsed_line) > 6 and parsed_line[6][0] in string.digits:
output_line['pid'] = int(parsed_line[6].split('/')[0])
output_line['pid'] = parsed_line[6].split('/')[0]
output_line['program_name'] = parsed_line[6].split('/')[1]
else:
if parsed_line[5][0] in string.digits:
output_line['pid'] = int(parsed_line[5].split('/')[0])
output_line['pid'] = parsed_line[5].split('/')[0]
output_line['program_name'] = parsed_line[5].split('/')[1]
output_line['receive_q'] = int(parsed_line[1])
output_line['send_q'] = int(parsed_line[2])
output_line['receive_q'] = parsed_line[1]
output_line['send_q'] = parsed_line[2]
return output_line
@@ -189,11 +156,9 @@ def parse(data):
for line in cleandata:
if line.find('Active Internet connections (w/o servers)') == 0:
state.section = 'client'
continue
if line.find('Active Internet connections (only servers)') == 0:
state.section = 'server'
continue
if line.find('Proto') == 0:
@@ -202,119 +167,7 @@ def parse(data):
if line.find('Active UNIX') == 0:
break
if state.section == 'client':
if line.find('tcp') == 0:
state.session = 'tcp'
if line.find('p6') == 2:
state.network = 'ipv6'
else:
state.network = 'ipv4'
elif line.find('udp') == 0:
state.session = 'udp'
if line.find('p6') == 2:
state.network = 'ipv6'
else:
state.network = 'ipv4'
elif state.section == 'server':
if line.find('tcp') == 0:
state.session = 'tcp'
if line.find('p6') == 2:
state.network = 'ipv6'
else:
state.network = 'ipv4'
elif line.find('udp') == 0:
state.session = 'udp'
if line.find('p6') == 2:
state.network = 'ipv6'
else:
state.network = 'ipv4'
output.append(parse_line(line))
# client section
if state.section == 'client' and state.session == 'tcp' and state.network == 'ipv4':
state.client_tcp_ip4.append(parse_line(line))
if state.section == 'client' and state.session == 'tcp' and state.network == 'ipv6':
state.client_tcp_ip6.append(parse_line(line))
if state.section == 'client' and state.session == 'udp' and state.network == 'ipv4':
state.client_udp_ip4.append(parse_line(line))
if state.section == 'client' and state.session == 'udp' and state.network == 'ipv6':
state.client_udp_ip6.append(parse_line(line))
# server section
if state.section == 'server' and state.session == 'tcp' and state.network == 'ipv4':
state.server_tcp_ip4.append(parse_line(line))
if state.section == 'server' and state.session == 'tcp' and state.network == 'ipv6':
state.server_tcp_ip6.append(parse_line(line))
if state.section == 'server' and state.session == 'udp' and state.network == 'ipv4':
state.server_udp_ip4.append(parse_line(line))
if state.section == 'server' and state.session == 'udp' and state.network == 'ipv6':
state.server_udp_ip6.append(parse_line(line))
state.session = ''
state.network = ''
# build dictionary
# client section
if state.client_tcp_ip4:
if 'client' not in output:
output['client'] = {}
if 'tcp' not in output['client']:
output['client']['tcp'] = {}
output['client']['tcp']['ipv4'] = state.client_tcp_ip4
if state.client_tcp_ip6:
if 'client' not in output:
output['client'] = {}
if 'tcp' not in output['client']:
output['client']['tcp'] = {}
output['client']['tcp']['ipv6'] = state.client_tcp_ip6
if state.client_udp_ip4:
if 'client' not in output:
output['client'] = {}
if 'udp' not in output['client']:
output['client']['udp'] = {}
output['client']['udp']['ipv4'] = state.client_udp_ip4
if state.client_udp_ip6:
if 'client' not in output:
output['client'] = {}
if 'udp' not in output['client']:
output['client']['udp'] = {}
output['client']['udp']['ipv6'] = state.client_udp_ip6
# server section
if state.server_tcp_ip4:
if 'server' not in output:
output['server'] = {}
if 'tcp' not in output['server']:
output['server']['tcp'] = {}
output['server']['tcp']['ipv4'] = state.server_tcp_ip4
if state.server_tcp_ip6:
if 'server' not in output:
output['server'] = {}
if 'tcp' not in output['server']:
output['server']['tcp'] = {}
output['server']['tcp']['ipv6'] = state.server_tcp_ip6
if state.server_udp_ip4:
if 'server' not in output:
output['server'] = {}
if 'udp' not in output['server']:
output['server']['udp'] = {}
output['server']['udp']['ipv4'] = state.server_udp_ip4
if state.server_udp_ip6:
if 'server' not in output:
output['server'] = {}
if 'udp' not in output['server']:
output['server']['udp'] = {}
output['server']['udp']['ipv6'] = state.server_udp_ip6
return output
clean_output = list(filter(None, output))
return clean_output

View File

@@ -11,45 +11,56 @@ Example:
$ ps -ef | jc --ps -p
[
...
{
"UID": "root",
"PID": "1",
"PPID": "0",
"C": "0",
"STIME": "13:58",
"TTY": "?",
"TIME": "00:00:05",
"CMD": "/lib/systemd/systemd --system --deserialize 35"
"uid": "root",
"pid": "545",
"ppid": "1",
"c": "0",
"stime": "Oct21",
"tty": "?",
"time": "00:00:03",
"cmd": "/usr/lib/systemd/systemd-journald"
},
{
"UID": "root",
"PID": "2",
"PPID": "0",
"C": "0",
"STIME": "13:58",
"TTY": "?",
"TIME": "00:00:00",
"CMD": "[kthreadd]"
"uid": "root",
"pid": "566",
"ppid": "1",
"c": "0",
"stime": "Oct21",
"tty": "?",
"time": "00:00:00",
"cmd": "/usr/sbin/lvmetad -f"
},
{
"UID": "root",
"PID": "4",
"PPID": "2",
"C": "0",
"STIME": "13:58",
"TTY": "?",
"TIME": "00:00:00",
"CMD": "[kworker/0:0H]"
"uid": "root",
"pid": "580",
"ppid": "1",
"c": "0",
"stime": "Oct21",
"tty": "?",
"time": "00:00:00",
"cmd": "/usr/lib/systemd/systemd-udevd"
},
{
"UID": "root",
"PID": "6",
"PPID": "2",
"C": "0",
"STIME": "13:58",
"TTY": "?",
"TIME": "00:00:00",
"CMD": "[mm_percpu_wq]"
"uid": "root",
"pid": "659",
"ppid": "2",
"c": "0",
"stime": "Oct21",
"tty": "?",
"time": "00:00:00",
"cmd": "[kworker/u257:0]"
},
{
"uid": "root",
"pid": "666",
"ppid": "2",
"c": "0",
"stime": "Oct21",
"tty": "?",
"time": "00:00:00",
"cmd": "[hci0]"
},
...
]
@@ -62,6 +73,12 @@ def parse(data):
# https://gist.github.com/cahna/43a1a3ff4d075bcd71f9d7120037a501
cleandata = data.splitlines()
headers = [h for h in ' '.join(cleandata[0].strip().split()).split() if h]
headers = [h for h in ' '.join(cleandata[0].lower().strip().split()).split() if h]
# clean up '%cpu' and '%mem' headers
# even though % in a key is valid json, it can make things difficult
headers = ['cpu_percent' if x == '%cpu' else x for x in headers]
headers = ['mem_percent' if x == '%mem' else x for x in headers]
raw_data = map(lambda s: s.strip().split(None, len(headers) - 1), cleandata[1:])
return [dict(zip(headers, r)) for r in raw_data]

View File

@@ -6,47 +6,37 @@ Usage:
Example:
$ route -n | jc --route -p
$ route | jc --route -p
[
{
"Destination": "0.0.0.0",
"Gateway": "192.168.71.2",
"Genmask": "0.0.0.0",
"Flags": "UG",
"Metric": "100",
"Ref": "0",
"Use": "0",
"Iface": "ens33"
"destination": "default",
"gateway": "gateway",
"genmask": "0.0.0.0",
"flags": "UG",
"metric": "100",
"ref": "0",
"use": "0",
"iface": "ens33"
},
{
"Destination": "172.17.0.0",
"Gateway": "0.0.0.0",
"Genmask": "255.255.0.0",
"Flags": "U",
"Metric": "0",
"Ref": "0",
"Use": "0",
"Iface": "docker0"
"destination": "172.17.0.0",
"gateway": "0.0.0.0",
"genmask": "255.255.0.0",
"flags": "U",
"metric": "0",
"ref": "0",
"use": "0",
"iface": "docker0"
},
{
"Destination": "192.168.71.0",
"Gateway": "0.0.0.0",
"Genmask": "255.255.255.0",
"Flags": "U",
"Metric": "0",
"Ref": "0",
"Use": "0",
"Iface": "ens33"
},
{
"Destination": "192.168.71.2",
"Gateway": "0.0.0.0",
"Genmask": "255.255.255.255",
"Flags": "UH",
"Metric": "100",
"Ref": "0",
"Use": "0",
"Iface": "ens33"
"destination": "192.168.71.0",
"gateway": "0.0.0.0",
"genmask": "255.255.255.0",
"flags": "U",
"metric": "100",
"ref": "0",
"use": "0",
"iface": "ens33"
}
]
"""
@@ -58,6 +48,6 @@ def parse(data):
# https://gist.github.com/cahna/43a1a3ff4d075bcd71f9d7120037a501
cleandata = data.splitlines()[1:]
headers = [h for h in ' '.join(cleandata[0].strip().split()).split() if h]
headers = [h for h in ' '.join(cleandata[0].lower().strip().split()).split() if h]
raw_data = map(lambda s: s.strip().split(None, len(headers) - 1), cleandata[1:])
return [dict(zip(headers, r)) for r in raw_data]

44
jc/parsers/uname.py Normal file
View File

@@ -0,0 +1,44 @@
"""jc - JSON CLI output utility uname Parser
Usage:
specify --uname as the first argument if the piped input is coming from uname
Limitations:
must use 'uname -a'
Example:
$ uname -a | jc --uname -p
{
"kernel_name": "Linux",
"node_name": "user-ubuntu",
"kernel_release": "4.15.0-65-generic",
"operating_system": "GNU/Linux",
"hardware_platform": "x86_64",
"processor": "x86_64",
"machine": "x86_64",
"kernel_version": "#74-Ubuntu SMP Tue Sep 17 17:06:04 UTC 2019"
}
"""
def parse(data):
    """Parse `uname -a` command output into a dictionary.

    data: string output piped from `uname -a`

    Returns a dict with keys: kernel_name, node_name, kernel_release,
    operating_system, hardware_platform, processor, machine,
    kernel_version. Returns an empty dict when the input does not
    contain enough fields to parse (the old `> 1` guard still allowed
    2-3 token input through and crashed on the pops below).
    """
    output = {}
    parsed_line = data.split(maxsplit=3)

    # need kernel name, node name, kernel release, plus a remainder
    if len(parsed_line) < 4:
        return output

    output['kernel_name'] = parsed_line.pop(0)
    output['node_name'] = parsed_line.pop(0)
    output['kernel_release'] = parsed_line.pop(0)

    # the remainder ends with: machine, processor, hardware_platform,
    # operating_system; everything before those is the kernel version
    parsed_line = parsed_line[-1].rsplit(maxsplit=4)

    # not enough trailing fields to identify the platform info
    if len(parsed_line) < 5:
        return {}

    output['operating_system'] = parsed_line.pop(-1)
    output['hardware_platform'] = parsed_line.pop(-1)
    output['processor'] = parsed_line.pop(-1)
    output['machine'] = parsed_line.pop(-1)
    output['kernel_version'] = parsed_line.pop(0)

    return output

45
jc/parsers/uptime.py Normal file
View File

@@ -0,0 +1,45 @@
"""jc - JSON CLI output utility uptime Parser
Usage:
specify --uptime as the first argument if the piped input is coming from uptime
Example:
$ uptime | jc --uptime -p
{
"time": "16:52",
"uptime": "3 days, 4:49",
"users": "5",
"load_1m": "1.85",
"load_5m": "1.90",
"load_15m": "1.91"
}
"""
def parse(data):
    """Parse `uptime` command output into a dictionary.

    data: string output piped from `uptime`

    Returns a dict with keys: time, uptime, users, load_1m, load_5m,
    load_15m (all string values). Returns an empty dict for empty input.
    """
    output = {}
    cleandata = data.splitlines()

    if cleandata:
        parsed_line = cleandata[0].split()

        # allow space for odd times
        # pad the token list to a fixed width of 20 so the users and load
        # fields always land at indices 13 and 17-19, no matter how many
        # tokens the variable-length uptime text used
        while len(parsed_line) < 20:
            parsed_line.insert(2, ' ')

        # find first part of time
        # i.e. the index of the first real (non-padding) token after
        # the time and 'up' tokens
        for i, word in enumerate(parsed_line[2:]):
            if word != ' ':
                marker = i + 2
                break

        output['time'] = parsed_line[0]

        # tokens from the marker up to (not including) index 13 form the
        # human-readable uptime, e.g. '3 days, 4:49'
        output['uptime'] = ' '.join(parsed_line[marker:13]).lstrip().rstrip(',')
        output['users'] = parsed_line[13]
        output['load_1m'] = parsed_line[17].rstrip(',')
        output['load_5m'] = parsed_line[18].rstrip(',')
        output['load_15m'] = parsed_line[19]

    return output

47
jc/parsers/w.py Normal file
View File

@@ -0,0 +1,47 @@
"""jc - JSON CLI output utility w Parser
Usage:
specify --w as the first argument if the piped input is coming from w
Example:
$ w | jc --w -p
[
{
"user": "root",
"tty": "ttyS0",
"from": "-",
"login_at": "Mon20",
"idle": "0.00s",
"jcpu": "14.70s",
"pcpu": "0.00s",
"what": "bash"
},
{
"user": "root",
"tty": "pts/0",
"from": "192.168.71.1",
"login_at": "Thu22",
"idle": "22:46m",
"jcpu": "0.05s",
"pcpu": "0.05s",
"what": "-bash"
}
]
"""
def parse(data):
    """Parse `w` command output into a list of dictionaries.

    data: string output piped from `w`

    The first line (the uptime summary) is discarded; the second line
    supplies the header names ('login@' is renamed 'login_at'). Returns
    an empty list for empty or summary-only input instead of raising
    IndexError.
    """
    # code adapted from Conor Heine at:
    # https://gist.github.com/cahna/43a1a3ff4d075bcd71f9d7120037a501
    cleandata = data.splitlines()[1:]

    # guard against empty or summary-only input so the header lookup
    # below cannot IndexError
    if not cleandata:
        return []

    headers = cleandata[0].lower().split()

    # clean up 'login@' header
    # even though @ in a key is valid json, it can make things difficult
    headers = ['login_at' if x == 'login@' else x for x in headers]

    # limit splits so the trailing WHAT field can keep its spaces
    raw_data = map(lambda s: s.strip().split(None, len(headers) - 1), cleandata[1:])
    return [dict(zip(headers, r)) for r in raw_data]

View File

@@ -5,7 +5,7 @@ with open('README.md', 'r') as f:
setuptools.setup(
name='jc',
version='0.5.5',
version='1.0.1',
author='Kelly Brazil',
author_email='kellyjonbrazil@gmail.com',
description='This tool serializes the output of popular command line tools to structured JSON output.',