mirror of
https://github.com/kellyjonbrazil/jc.git
synced 2026-04-03 17:44:07 +02:00
Compare commits
250 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
9c1b8bacf9 | ||
|
|
4867655eb2 | ||
|
|
47410d1a95 | ||
|
|
5fa49f5e67 | ||
|
|
36c53827fa | ||
|
|
51631aef5b | ||
|
|
a0298ac8a3 | ||
|
|
98c0188821 | ||
|
|
ab1dabe3e4 | ||
|
|
94bdb11fdf | ||
|
|
b6727d68ba | ||
|
|
89bad7fc2b | ||
|
|
c0b8b810a2 | ||
|
|
31eb65acd1 | ||
|
|
513bbeb464 | ||
|
|
3a52fb725a | ||
|
|
5affd44499 | ||
|
|
5dbc6e806c | ||
|
|
59ae31f3f3 | ||
|
|
230e921c2e | ||
|
|
a7c3d88b08 | ||
|
|
9b453bcb84 | ||
|
|
ce43c782f6 | ||
|
|
cb16faaf4d | ||
|
|
3f1d3ff6d8 | ||
|
|
6f67eecd5e | ||
|
|
e75c819190 | ||
|
|
601e68d104 | ||
|
|
8285ecfd1e | ||
|
|
8726de902e | ||
|
|
4133585274 | ||
|
|
ad913b1417 | ||
|
|
7113e5a844 | ||
|
|
a3a8369dc0 | ||
|
|
64016b8ef0 | ||
|
|
1cb49d60c8 | ||
|
|
c858adfd12 | ||
|
|
08d68327c7 | ||
|
|
0d7c6c5664 | ||
|
|
8bfa41dbf4 | ||
|
|
7e2fa48ed4 | ||
|
|
340635cad5 | ||
|
|
8f77d1de09 | ||
|
|
7dcf1b25ff | ||
|
|
9b73538106 | ||
|
|
3bf8c8c6db | ||
|
|
04a1ff2ca7 | ||
|
|
64647d230a | ||
|
|
c2a67e1b70 | ||
|
|
edb9a7c76e | ||
|
|
a407f5b678 | ||
|
|
e5b4987acb | ||
|
|
ba8cc18eeb | ||
|
|
d2c7316e00 | ||
|
|
609aa14d24 | ||
|
|
ef1ad4c700 | ||
|
|
a0e2732152 | ||
|
|
9b5d3e3be1 | ||
|
|
2663ef31fb | ||
|
|
a4cdd3378e | ||
|
|
2f805da24d | ||
|
|
79152a946d | ||
|
|
de37bb37d0 | ||
|
|
f783e44e5c | ||
|
|
af82f2c991 | ||
|
|
46774daf9d | ||
|
|
648306b785 | ||
|
|
b7a4f205b8 | ||
|
|
fdb168b43a | ||
|
|
b6f65c93c4 | ||
|
|
3f4838f17a | ||
|
|
eef0dee2aa | ||
|
|
e17388d3b2 | ||
|
|
7e6a1bc719 | ||
|
|
37738a2ea2 | ||
|
|
c5834a57db | ||
|
|
91b9373f38 | ||
|
|
ce0bb5b816 | ||
|
|
f330ff0eff | ||
|
|
4b02700414 | ||
|
|
ee30180376 | ||
|
|
338e0ff15c | ||
|
|
3ac75305df | ||
|
|
3bdcf44afb | ||
|
|
e3f4ffede5 | ||
|
|
f0c8725d43 | ||
|
|
5473bc4eb6 | ||
|
|
b9bd9422bf | ||
|
|
cb5729a070 | ||
|
|
f0b1ab4233 | ||
|
|
b15386e849 | ||
|
|
d2a2c8da35 | ||
|
|
7251548cbb | ||
|
|
146e29f7cb | ||
|
|
363fd3eab4 | ||
|
|
4083dd4260 | ||
|
|
b2b74547ba | ||
|
|
dddb0baabf | ||
|
|
84b4f67ef9 | ||
|
|
3a089138b8 | ||
|
|
3ff0305c8e | ||
|
|
761edc3c6c | ||
|
|
3351c81f64 | ||
|
|
3dfc6f67d7 | ||
|
|
1546ec3bd1 | ||
|
|
2a953011f7 | ||
|
|
d2f755de9d | ||
|
|
f363334639 | ||
|
|
750197e485 | ||
|
|
36b349e4ed | ||
|
|
b5f1e94fe2 | ||
|
|
6a504fb0e1 | ||
|
|
e02bad2240 | ||
|
|
1d4043a3b6 | ||
|
|
039e034829 | ||
|
|
d828de4f4f | ||
|
|
098000bb10 | ||
|
|
b41165eff5 | ||
|
|
6ad7891b2b | ||
|
|
7213831559 | ||
|
|
10eedd82e4 | ||
|
|
a55493da0f | ||
|
|
89973f4162 | ||
|
|
4802222ad5 | ||
|
|
bcd28f06f8 | ||
|
|
e17a47a7fa | ||
|
|
cad2e16c7a | ||
|
|
5da5d278da | ||
|
|
81b6776e57 | ||
|
|
5ecb6bd58b | ||
|
|
21b56096c5 | ||
|
|
8c78f95973 | ||
|
|
94a88bb566 | ||
|
|
579124475b | ||
|
|
5da83e0200 | ||
|
|
a90a76d004 | ||
|
|
bdfa959123 | ||
|
|
4380594275 | ||
|
|
88bf252c0d | ||
|
|
a5efd8adce | ||
|
|
2ee392eeff | ||
|
|
9c1d893e16 | ||
|
|
88dcb90c83 | ||
|
|
a3bcabc89c | ||
|
|
dafbf9fdcf | ||
|
|
680cb2b2ca | ||
|
|
54818a06e0 | ||
|
|
88f4c5b5a9 | ||
|
|
2bb7409887 | ||
|
|
c780aac3ab | ||
|
|
5010aaec28 | ||
|
|
066e93cb07 | ||
|
|
0bd2faa7f7 | ||
|
|
e2f926453b | ||
|
|
b953b79f9c | ||
|
|
7f99677806 | ||
|
|
721b546659 | ||
|
|
2de5e41269 | ||
|
|
dfe0f6e99b | ||
|
|
8873b1bc69 | ||
|
|
9ff9470700 | ||
|
|
2c58fca530 | ||
|
|
9e5cd90da7 | ||
|
|
7ee0d49424 | ||
|
|
a9058ee21e | ||
|
|
fcf0aac87d | ||
|
|
daec5f0681 | ||
|
|
5b2491d5ae | ||
|
|
d9b41ac73b | ||
|
|
7168ffddf8 | ||
|
|
a855344bec | ||
|
|
d8b3b59fae | ||
|
|
4b7d7840d3 | ||
|
|
58a094a9b4 | ||
|
|
65adbb4189 | ||
|
|
f7350959c9 | ||
|
|
8934a7d832 | ||
|
|
669a424fd6 | ||
|
|
591a65c2bd | ||
|
|
a78fb89078 | ||
|
|
8979dab2a5 | ||
|
|
0a891f0add | ||
|
|
c220e35b14 | ||
|
|
f26c5818bd | ||
|
|
e712cd3fc4 | ||
|
|
0309c9ac67 | ||
|
|
9a9eb4120a | ||
|
|
d1927456b0 | ||
|
|
a3d9213a1e | ||
|
|
3365c03a1e | ||
|
|
4f6c87389b | ||
|
|
41a2a9adac | ||
|
|
74dae2905b | ||
|
|
d1f64214de | ||
|
|
d3e1aa20a8 | ||
|
|
72cae95777 | ||
|
|
219bc8130f | ||
|
|
e8c1a554c0 | ||
|
|
087a60bc2a | ||
|
|
9c9823c3b8 | ||
|
|
cf8d13030b | ||
|
|
1eff69c187 | ||
|
|
b10fb77d71 | ||
|
|
87cee8b230 | ||
|
|
83ab10d628 | ||
|
|
d58a6e1d1d | ||
|
|
cb46ca5c27 | ||
|
|
5528d979f0 | ||
|
|
ee94a038a6 | ||
|
|
1d658f7a9f | ||
|
|
392cb44f9b | ||
|
|
579bef079c | ||
|
|
0691cfcab3 | ||
|
|
db29c7c186 | ||
|
|
fb1e036375 | ||
|
|
c3eaf59836 | ||
|
|
c9849ce0db | ||
|
|
d3c89a3092 | ||
|
|
a3d43f27f7 | ||
|
|
f4d9c1b699 | ||
|
|
de647bba4a | ||
|
|
d791307031 | ||
|
|
1a4fc204e2 | ||
|
|
0328e14c7c | ||
|
|
1acc4d6c29 | ||
|
|
27245590ce | ||
|
|
7ca2a4bdb9 | ||
|
|
5f1ec67348 | ||
|
|
7e44c4278a | ||
|
|
eda726c4a3 | ||
|
|
5f8e70d730 | ||
|
|
25b90546c6 | ||
|
|
75c0841538 | ||
|
|
5b532b9b71 | ||
|
|
8c7b3193d1 | ||
|
|
0897c96ef3 | ||
|
|
57d0ab2ed7 | ||
|
|
a07d9a0e4b | ||
|
|
b3996cb4df | ||
|
|
4fa88c1ba3 | ||
|
|
c8c5564b29 | ||
|
|
6d047486d9 | ||
|
|
42bdc05814 | ||
|
|
85bfb68886 | ||
|
|
08ec21556b | ||
|
|
320929bf25 | ||
|
|
41cd489c34 | ||
|
|
f101d881a1 | ||
|
|
fa7466022b | ||
|
|
ea0cf0acf2 |
@@ -1,5 +1,39 @@
|
||||
jc changelog
|
||||
|
||||
20191117 v1.5.1
|
||||
- Add ss parser
|
||||
- Add stat parser
|
||||
- Add /etc/hosts parser
|
||||
- Add /etc/fstab parser
|
||||
- Add systemctl parser (includes list-jobs, list-sockets, and list-unit-files)
|
||||
- Add -r and raw=True options. By default, jc will now convert numbers and booleans, if possible, and add other semantic information, while the raw output will keep all values as text and provide a more literal JSON output
|
||||
- Add -q and quiet=True options to suppress warnings to stderr
|
||||
- Add -d option to debug parsing issues
|
||||
- Add compatibility warnings to stderr
|
||||
- Add documentation
|
||||
- Updated iptables parser to allow --line-numbers option
|
||||
- Updated lsblk parser to allow parsing of added columns
|
||||
- Updated mount parser: changed 'access' field name to 'options'
|
||||
- Updated netstat parser to allow parsing of unix sockets and raw network connections
|
||||
- Updated w parser to fix unaligned data where blanks are possible
|
||||
- Clean up code and reorganize package
|
||||
|
||||
20191031 v1.1.1
|
||||
- Add arp parser
|
||||
- Add dig parser
|
||||
- Add unit tests
|
||||
- Fix netstat parser (remove side effects due to a global variable)
|
||||
|
||||
20191025 v1.0.1
|
||||
- Add w parser
|
||||
- Add uptime parser
|
||||
- Add history parser
|
||||
- Flatten env parser output
|
||||
- Remove problematic characters from key names in: df, free, history, lsblk, lsof, and w
|
||||
- Where possible, lowercase all keys (except cases like env where the key is the variable name)
|
||||
- Remove integer values
|
||||
- Handle CTRL-C gracefully
|
||||
|
||||
20191023 v0.9.1
|
||||
- Add jobs parser
|
||||
- Add lsof parser
|
||||
|
||||
34
docgen.sh
Executable file
34
docgen.sh
Executable file
@@ -0,0 +1,34 @@
|
||||
#!/bin/bash
|
||||
# Generate docs.md
|
||||
|
||||
cd jc
|
||||
pydocmd simple jc+ > ../docs/readme.md
|
||||
pydocmd simple utils+ > ../docs/utils.md
|
||||
pydocmd simple jc.parsers.arp+ > ../docs/parsers/arp.md
|
||||
pydocmd simple jc.parsers.df+ > ../docs/parsers/df.md
|
||||
pydocmd simple jc.parsers.dig+ > ../docs/parsers/dig.md
|
||||
pydocmd simple jc.parsers.env+ > ../docs/parsers/env.md
|
||||
pydocmd simple jc.parsers.free+ > ../docs/parsers/free.md
|
||||
pydocmd simple jc.parsers.fstab+ > ../docs/parsers/fstab.md
|
||||
pydocmd simple jc.parsers.history+ > ../docs/parsers/history.md
|
||||
pydocmd simple jc.parsers.hosts+ > ../docs/parsers/hosts.md
|
||||
pydocmd simple jc.parsers.ifconfig+ > ../docs/parsers/ifconfig.md
|
||||
pydocmd simple jc.parsers.iptables+ > ../docs/parsers/iptables.md
|
||||
pydocmd simple jc.parsers.jobs+ > ../docs/parsers/jobs.md
|
||||
pydocmd simple jc.parsers.ls+ > ../docs/parsers/ls.md
|
||||
pydocmd simple jc.parsers.lsblk+ > ../docs/parsers/lsblk.md
|
||||
pydocmd simple jc.parsers.lsmod+ > ../docs/parsers/lsmod.md
|
||||
pydocmd simple jc.parsers.lsof+ > ../docs/parsers/lsof.md
|
||||
pydocmd simple jc.parsers.mount+ > ../docs/parsers/mount.md
|
||||
pydocmd simple jc.parsers.netstat+ > ../docs/parsers/netstat.md
|
||||
pydocmd simple jc.parsers.ps+ > ../docs/parsers/ps.md
|
||||
pydocmd simple jc.parsers.route+ > ../docs/parsers/route.md
|
||||
pydocmd simple jc.parsers.ss+ > ../docs/parsers/ss.md
|
||||
pydocmd simple jc.parsers.stat+ > ../docs/parsers/stat.md
|
||||
pydocmd simple jc.parsers.systemctl+ > ../docs/parsers/systemctl.md
|
||||
pydocmd simple jc.parsers.systemctl_lj+ > ../docs/parsers/systemctl_lj.md
|
||||
pydocmd simple jc.parsers.systemctl_ls+ > ../docs/parsers/systemctl_ls.md
|
||||
pydocmd simple jc.parsers.systemctl_luf+ > ../docs/parsers/systemctl_luf.md
|
||||
pydocmd simple jc.parsers.uname+ > ../docs/parsers/uname.md
|
||||
pydocmd simple jc.parsers.uptime+ > ../docs/parsers/uptime.md
|
||||
pydocmd simple jc.parsers.w+ > ../docs/parsers/w.md
|
||||
123
docs/parsers/arp.md
Normal file
123
docs/parsers/arp.md
Normal file
@@ -0,0 +1,123 @@
|
||||
# jc.parsers.arp
|
||||
jc - JSON CLI output utility arp Parser
|
||||
|
||||
Usage:
|
||||
specify --arp as the first argument if the piped input is coming from arp
|
||||
|
||||
Examples:
|
||||
|
||||
$ arp | jc --arp -p
|
||||
[
|
||||
{
|
||||
"address": "192.168.71.254",
|
||||
"hwtype": "ether",
|
||||
"hwaddress": "00:50:56:f0:98:26",
|
||||
"flags_mask": "C",
|
||||
"iface": "ens33"
|
||||
},
|
||||
{
|
||||
"address": "gateway",
|
||||
"hwtype": "ether",
|
||||
"hwaddress": "00:50:56:f7:4a:fc",
|
||||
"flags_mask": "C",
|
||||
"iface": "ens33"
|
||||
}
|
||||
]
|
||||
|
||||
$ arp | jc --arp -p -r
|
||||
[
|
||||
{
|
||||
"address": "gateway",
|
||||
"hwtype": "ether",
|
||||
"hwaddress": "00:50:56:f7:4a:fc",
|
||||
"flags_mask": "C",
|
||||
"iface": "ens33"
|
||||
},
|
||||
{
|
||||
"address": "192.168.71.254",
|
||||
"hwtype": "ether",
|
||||
"hwaddress": "00:50:56:fe:7a:b4",
|
||||
"flags_mask": "C",
|
||||
"iface": "ens33"
|
||||
}
|
||||
]
|
||||
|
||||
$ arp -a | jc --arp -p
|
||||
[
|
||||
{
|
||||
"name": null,
|
||||
"address": "192.168.71.254",
|
||||
"hwtype": "ether",
|
||||
"hwaddress": "00:50:56:f0:98:26",
|
||||
"iface": "ens33"
|
||||
},
|
||||
{
|
||||
"name": "gateway",
|
||||
"address": "192.168.71.2",
|
||||
"hwtype": "ether",
|
||||
"hwaddress": "00:50:56:f7:4a:fc",
|
||||
"iface": "ens33"
|
||||
}
|
||||
]
|
||||
|
||||
$ arp -a | jc --arp -p -r
|
||||
[
|
||||
{
|
||||
"name": "?",
|
||||
"address": "192.168.71.254",
|
||||
"hwtype": "ether",
|
||||
"hwaddress": "00:50:56:fe:7a:b4",
|
||||
"iface": "ens33"
|
||||
},
|
||||
{
|
||||
"name": "_gateway",
|
||||
"address": "192.168.71.2",
|
||||
"hwtype": "ether",
|
||||
"hwaddress": "00:50:56:f7:4a:fc",
|
||||
"iface": "ens33"
|
||||
}
|
||||
]
|
||||
|
||||
## process
|
||||
```python
|
||||
process(proc_data)
|
||||
```
|
||||
|
||||
Final processing to conform to the schema.
|
||||
|
||||
Parameters:
|
||||
|
||||
proc_data: (dictionary) raw structured data to process
|
||||
|
||||
Returns:
|
||||
|
||||
dictionary structured data with the following schema:
|
||||
|
||||
[
|
||||
{
|
||||
"name": string,
|
||||
"address": string,
|
||||
"hwtype": string,
|
||||
"hwaddress": string,
|
||||
"flags_mask": string,
|
||||
"iface": string
|
||||
}
|
||||
]
|
||||
|
||||
## parse
|
||||
```python
|
||||
parse(data, raw=False, quiet=False)
|
||||
```
|
||||
|
||||
Main text parsing function
|
||||
|
||||
Parameters:
|
||||
|
||||
data: (string) text data to parse
|
||||
raw: (boolean) output preprocessed JSON if True
|
||||
quiet: (boolean) suppress warning messages if True
|
||||
|
||||
Returns:
|
||||
|
||||
dictionary raw or processed structured data
|
||||
|
||||
110
docs/parsers/df.md
Normal file
110
docs/parsers/df.md
Normal file
@@ -0,0 +1,110 @@
|
||||
# jc.parsers.df
|
||||
jc - JSON CLI output utility df Parser
|
||||
|
||||
Usage:
|
||||
specify --df as the first argument if the piped input is coming from df
|
||||
|
||||
Examples:
|
||||
|
||||
$ df | jc --df -p
|
||||
[
|
||||
{
|
||||
"filesystem": "devtmpfs",
|
||||
"1k-blocks": 1918820,
|
||||
"used": 0,
|
||||
"available": 1918820,
|
||||
"use_percent": 0,
|
||||
"mounted_on": "/dev"
|
||||
},
|
||||
{
|
||||
"filesystem": "tmpfs",
|
||||
"1k-blocks": 1930668,
|
||||
"used": 0,
|
||||
"available": 1930668,
|
||||
"use_percent": 0,
|
||||
"mounted_on": "/dev/shm"
|
||||
},
|
||||
{
|
||||
"filesystem": "tmpfs",
|
||||
"1k-blocks": 1930668,
|
||||
"used": 11800,
|
||||
"available": 1918868,
|
||||
"use_percent": 1,
|
||||
"mounted_on": "/run"
|
||||
},
|
||||
...
|
||||
]
|
||||
|
||||
$ df | jc --df -p -r
|
||||
[
|
||||
{
|
||||
"filesystem": "devtmpfs",
|
||||
"1k-blocks": "1918820",
|
||||
"used": "0",
|
||||
"available": "1918820",
|
||||
"use_percent": "0%",
|
||||
"mounted_on": "/dev"
|
||||
},
|
||||
{
|
||||
"filesystem": "tmpfs",
|
||||
"1k-blocks": "1930668",
|
||||
"used": "0",
|
||||
"available": "1930668",
|
||||
"use_percent": "0%",
|
||||
"mounted_on": "/dev/shm"
|
||||
},
|
||||
{
|
||||
"filesystem": "tmpfs",
|
||||
"1k-blocks": "1930668",
|
||||
"used": "11800",
|
||||
"available": "1918868",
|
||||
"use_percent": "1%",
|
||||
"mounted_on": "/run"
|
||||
},
|
||||
...
|
||||
]
|
||||
|
||||
## process
|
||||
```python
|
||||
process(proc_data)
|
||||
```
|
||||
|
||||
Final processing to conform to the schema.
|
||||
|
||||
Parameters:
|
||||
|
||||
proc_data: (dictionary) raw structured data to process
|
||||
|
||||
Returns:
|
||||
|
||||
dictionary structured data with the following schema:
|
||||
|
||||
[
|
||||
{
|
||||
"filesystem": string,
|
||||
"size": string,
|
||||
"1k-blocks": integer,
|
||||
"used": integer,
|
||||
"available": integer,
|
||||
"use_percent": integer,
|
||||
"mounted_on": string
|
||||
}
|
||||
]
|
||||
|
||||
## parse
|
||||
```python
|
||||
parse(data, raw=False, quiet=False)
|
||||
```
|
||||
|
||||
Main text parsing function
|
||||
|
||||
Parameters:
|
||||
|
||||
data: (string) text data to parse
|
||||
raw: (boolean) output preprocessed JSON if True
|
||||
quiet: (boolean) suppress warning messages if True
|
||||
|
||||
Returns:
|
||||
|
||||
dictionary raw or processed structured data
|
||||
|
||||
392
docs/parsers/dig.md
Normal file
392
docs/parsers/dig.md
Normal file
@@ -0,0 +1,392 @@
|
||||
# jc.parsers.dig
|
||||
jc - JSON CLI output utility dig Parser
|
||||
|
||||
Usage:
|
||||
Specify --dig as the first argument if the piped input is coming from dig
|
||||
|
||||
Examples:
|
||||
|
||||
$ dig cnn.com www.cnn.com @205.251.194.64 | jc --dig -p
|
||||
[
|
||||
{
|
||||
"id": 34128,
|
||||
"opcode": "QUERY",
|
||||
"status": "NOERROR",
|
||||
"flags": [
|
||||
"qr",
|
||||
"rd",
|
||||
"ra"
|
||||
],
|
||||
"query_num": 1,
|
||||
"answer_num": 4,
|
||||
"authority_num": 0,
|
||||
"additional_num": 1,
|
||||
"question": {
|
||||
"name": "cnn.com.",
|
||||
"class": "IN",
|
||||
"type": "A"
|
||||
},
|
||||
"answer": [
|
||||
{
|
||||
"name": "cnn.com.",
|
||||
"class": "IN",
|
||||
"type": "A",
|
||||
"ttl": 60,
|
||||
"data": "151.101.65.67"
|
||||
},
|
||||
{
|
||||
"name": "cnn.com.",
|
||||
"class": "IN",
|
||||
"type": "A",
|
||||
"ttl": 60,
|
||||
"data": "151.101.193.67"
|
||||
},
|
||||
{
|
||||
"name": "cnn.com.",
|
||||
"class": "IN",
|
||||
"type": "A",
|
||||
"ttl": 60,
|
||||
"data": "151.101.1.67"
|
||||
},
|
||||
{
|
||||
"name": "cnn.com.",
|
||||
"class": "IN",
|
||||
"type": "A",
|
||||
"ttl": 60,
|
||||
"data": "151.101.129.67"
|
||||
}
|
||||
],
|
||||
"query_time": 37,
|
||||
"server": "2600",
|
||||
"when": "Tue Nov 12 07:14:42 PST 2019",
|
||||
"rcvd": 100
|
||||
},
|
||||
{
|
||||
"id": 15273,
|
||||
"opcode": "QUERY",
|
||||
"status": "NOERROR",
|
||||
"flags": [
|
||||
"qr",
|
||||
"aa",
|
||||
"rd"
|
||||
],
|
||||
"query_num": 1,
|
||||
"answer_num": 1,
|
||||
"authority_num": 4,
|
||||
"additional_num": 1,
|
||||
"question": {
|
||||
"name": "www.cnn.com.",
|
||||
"class": "IN",
|
||||
"type": "A"
|
||||
},
|
||||
"answer": [
|
||||
{
|
||||
"name": "www.cnn.com.",
|
||||
"class": "IN",
|
||||
"type": "CNAME",
|
||||
"ttl": 300,
|
||||
"data": "turner-tls.map.fastly.net."
|
||||
}
|
||||
],
|
||||
"authority": [
|
||||
{
|
||||
"name": "cnn.com.",
|
||||
"class": "IN",
|
||||
"type": "NS",
|
||||
"ttl": 3600,
|
||||
"data": "ns-1086.awsdns-07.org."
|
||||
},
|
||||
{
|
||||
"name": "cnn.com.",
|
||||
"class": "IN",
|
||||
"type": "NS",
|
||||
"ttl": 3600,
|
||||
"data": "ns-1630.awsdns-11.co.uk."
|
||||
},
|
||||
{
|
||||
"name": "cnn.com.",
|
||||
"class": "IN",
|
||||
"type": "NS",
|
||||
"ttl": 3600,
|
||||
"data": "ns-47.awsdns-05.com."
|
||||
},
|
||||
{
|
||||
"name": "cnn.com.",
|
||||
"class": "IN",
|
||||
"type": "NS",
|
||||
"ttl": 3600,
|
||||
"data": "ns-576.awsdns-08.net."
|
||||
}
|
||||
],
|
||||
"query_time": 23,
|
||||
"server": "205.251.194.64#53(205.251.194.64)",
|
||||
"when": "Tue Nov 12 07:14:42 PST 2019",
|
||||
"rcvd": 212
|
||||
}
|
||||
]
|
||||
|
||||
$ dig cnn.com www.cnn.com @205.251.194.64 | jc --dig -p -r
|
||||
[
|
||||
{
|
||||
"id": "23843",
|
||||
"opcode": "QUERY",
|
||||
"status": "NOERROR",
|
||||
"flags": [
|
||||
"qr",
|
||||
"rd",
|
||||
"ra"
|
||||
],
|
||||
"query_num": "1",
|
||||
"answer_num": "4",
|
||||
"authority_num": "0",
|
||||
"additional_num": "1",
|
||||
"question": {
|
||||
"name": "cnn.com.",
|
||||
"class": "IN",
|
||||
"type": "A"
|
||||
},
|
||||
"answer": [
|
||||
{
|
||||
"name": "cnn.com.",
|
||||
"class": "IN",
|
||||
"type": "A",
|
||||
"ttl": "30",
|
||||
"data": "151.101.193.67"
|
||||
},
|
||||
{
|
||||
"name": "cnn.com.",
|
||||
"class": "IN",
|
||||
"type": "A",
|
||||
"ttl": "30",
|
||||
"data": "151.101.1.67"
|
||||
},
|
||||
{
|
||||
"name": "cnn.com.",
|
||||
"class": "IN",
|
||||
"type": "A",
|
||||
"ttl": "30",
|
||||
"data": "151.101.65.67"
|
||||
},
|
||||
{
|
||||
"name": "cnn.com.",
|
||||
"class": "IN",
|
||||
"type": "A",
|
||||
"ttl": "30",
|
||||
"data": "151.101.129.67"
|
||||
}
|
||||
],
|
||||
"query_time": "24 msec",
|
||||
"server": "192.168.1.254#53(192.168.1.254)",
|
||||
"when": "Tue Nov 12 07:16:19 PST 2019",
|
||||
"rcvd": "100"
|
||||
},
|
||||
{
|
||||
"id": "8266",
|
||||
"opcode": "QUERY",
|
||||
"status": "NOERROR",
|
||||
"flags": [
|
||||
"qr",
|
||||
"aa",
|
||||
"rd"
|
||||
],
|
||||
"query_num": "1",
|
||||
"answer_num": "1",
|
||||
"authority_num": "4",
|
||||
"additional_num": "1",
|
||||
"question": {
|
||||
"name": "www.cnn.com.",
|
||||
"class": "IN",
|
||||
"type": "A"
|
||||
},
|
||||
"answer": [
|
||||
{
|
||||
"name": "www.cnn.com.",
|
||||
"class": "IN",
|
||||
"type": "CNAME",
|
||||
"ttl": "300",
|
||||
"data": "turner-tls.map.fastly.net."
|
||||
}
|
||||
],
|
||||
"authority": [
|
||||
{
|
||||
"name": "cnn.com.",
|
||||
"class": "IN",
|
||||
"type": "NS",
|
||||
"ttl": "3600",
|
||||
"data": "ns-1086.awsdns-07.org."
|
||||
},
|
||||
{
|
||||
"name": "cnn.com.",
|
||||
"class": "IN",
|
||||
"type": "NS",
|
||||
"ttl": "3600",
|
||||
"data": "ns-1630.awsdns-11.co.uk."
|
||||
},
|
||||
{
|
||||
"name": "cnn.com.",
|
||||
"class": "IN",
|
||||
"type": "NS",
|
||||
"ttl": "3600",
|
||||
"data": "ns-47.awsdns-05.com."
|
||||
},
|
||||
{
|
||||
"name": "cnn.com.",
|
||||
"class": "IN",
|
||||
"type": "NS",
|
||||
"ttl": "3600",
|
||||
"data": "ns-576.awsdns-08.net."
|
||||
}
|
||||
],
|
||||
"query_time": "26 msec",
|
||||
"server": "205.251.194.64#53(205.251.194.64)",
|
||||
"when": "Tue Nov 12 07:16:19 PST 2019",
|
||||
"rcvd": "212"
|
||||
}
|
||||
]
|
||||
|
||||
$ dig -x 1.1.1.1 | jc --dig -p
|
||||
[
|
||||
{
|
||||
"id": 34898,
|
||||
"opcode": "QUERY",
|
||||
"status": "NOERROR",
|
||||
"flags": [
|
||||
"qr",
|
||||
"rd",
|
||||
"ra"
|
||||
],
|
||||
"query_num": 1,
|
||||
"answer_num": 1,
|
||||
"authority_num": 0,
|
||||
"additional_num": 1,
|
||||
"question": {
|
||||
"name": "1.1.1.1.in-addr.arpa.",
|
||||
"class": "IN",
|
||||
"type": "PTR"
|
||||
},
|
||||
"answer": [
|
||||
{
|
||||
"name": "1.1.1.1.in-addr.arpa.",
|
||||
"class": "IN",
|
||||
"type": "PTR",
|
||||
"ttl": 952,
|
||||
"data": "one.one.one.one."
|
||||
}
|
||||
],
|
||||
"query_time": 103,
|
||||
"server": "2600",
|
||||
"when": "Tue Nov 12 07:15:33 PST 2019",
|
||||
"rcvd": 78
|
||||
}
|
||||
]
|
||||
|
||||
$ dig -x 1.1.1.1 | jc --dig -p -r
|
||||
[
|
||||
{
|
||||
"id": "50986",
|
||||
"opcode": "QUERY",
|
||||
"status": "NOERROR",
|
||||
"flags": [
|
||||
"qr",
|
||||
"rd",
|
||||
"ra"
|
||||
],
|
||||
"query_num": "1",
|
||||
"answer_num": "1",
|
||||
"authority_num": "0",
|
||||
"additional_num": "1",
|
||||
"question": {
|
||||
"name": "1.1.1.1.in-addr.arpa.",
|
||||
"class": "IN",
|
||||
"type": "PTR"
|
||||
},
|
||||
"answer": [
|
||||
{
|
||||
"name": "1.1.1.1.in-addr.arpa.",
|
||||
"class": "IN",
|
||||
"type": "PTR",
|
||||
"ttl": "1800",
|
||||
"data": "one.one.one.one."
|
||||
}
|
||||
],
|
||||
"query_time": "38 msec",
|
||||
"server": "2600",
|
||||
"when": "Tue Nov 12 07:17:19 PST 2019",
|
||||
"rcvd": "78"
|
||||
}
|
||||
]
|
||||
|
||||
## process
|
||||
```python
|
||||
process(proc_data)
|
||||
```
|
||||
|
||||
Final processing to conform to the schema.
|
||||
|
||||
Parameters:
|
||||
|
||||
proc_data: (dictionary) raw structured data to process
|
||||
|
||||
Returns:
|
||||
|
||||
dictionary structured data with the following schema:
|
||||
|
||||
[
|
||||
{
|
||||
"id": integer,
|
||||
"opcode": string,
|
||||
"status": string,
|
||||
"flags": [
|
||||
string
|
||||
],
|
||||
"query_num": integer,
|
||||
"answer_num": integer,
|
||||
"authority_num": integer,
|
||||
"additional_num": integer,
|
||||
"question": {
|
||||
"name": string,
|
||||
"class": string,
|
||||
"type": string
|
||||
},
|
||||
"answer": [
|
||||
{
|
||||
"name": string,
|
||||
"class": string,
|
||||
"type": string,
|
||||
"ttl": integer,
|
||||
"data": string
|
||||
}
|
||||
],
|
||||
"authority": [
|
||||
{
|
||||
"name": string,
|
||||
"class": string,
|
||||
"type": string,
|
||||
"ttl": integer,
|
||||
"data": string
|
||||
}
|
||||
],
|
||||
"query_time": integer, # in msec
|
||||
"server": string,
|
||||
"when": string,
|
||||
"rcvd": integer
|
||||
}
|
||||
]
|
||||
|
||||
## parse
|
||||
```python
|
||||
parse(data, raw=False, quiet=False)
|
||||
```
|
||||
|
||||
Main text parsing function
|
||||
|
||||
Parameters:
|
||||
|
||||
data: (string) text data to parse
|
||||
raw: (boolean) output preprocessed JSON if True
|
||||
quiet: (boolean) suppress warning messages if True
|
||||
|
||||
Returns:
|
||||
|
||||
dictionary raw or processed structured data
|
||||
|
||||
85
docs/parsers/env.md
Normal file
85
docs/parsers/env.md
Normal file
@@ -0,0 +1,85 @@
|
||||
# jc.parsers.env
|
||||
jc - JSON CLI output utility env Parser
|
||||
|
||||
Usage:
|
||||
specify --env as the first argument if the piped input is coming from env
|
||||
|
||||
Examples:
|
||||
|
||||
$ env | jc --env -p
|
||||
[
|
||||
{
|
||||
"name": "XDG_SESSION_ID",
|
||||
"value": "1"
|
||||
},
|
||||
{
|
||||
"name": "HOSTNAME",
|
||||
"value": "localhost.localdomain"
|
||||
},
|
||||
{
|
||||
"name": "TERM",
|
||||
"value": "vt220"
|
||||
},
|
||||
{
|
||||
"name": "SHELL",
|
||||
"value": "/bin/bash"
|
||||
},
|
||||
{
|
||||
"name": "HISTSIZE",
|
||||
"value": "1000"
|
||||
},
|
||||
...
|
||||
]
|
||||
|
||||
$ env | jc --env -p -r
|
||||
{
|
||||
"TERM": "xterm-256color",
|
||||
"SHELL": "/bin/bash",
|
||||
"USER": "root",
|
||||
"PATH": "/usr/local/bin:/usr/bin:/bin:/usr/sbin:/sbin",
|
||||
"PWD": "/root",
|
||||
"LANG": "en_US.UTF-8",
|
||||
"HOME": "/root",
|
||||
"LOGNAME": "root",
|
||||
"_": "/usr/bin/env"
|
||||
}
|
||||
|
||||
## process
|
||||
```python
|
||||
process(proc_data)
|
||||
```
|
||||
|
||||
Final processing to conform to the schema.
|
||||
|
||||
Parameters:
|
||||
|
||||
proc_data: (dictionary) raw structured data to process
|
||||
|
||||
Returns:
|
||||
|
||||
dictionary structured data with the following schema:
|
||||
|
||||
[
|
||||
{
|
||||
"name": string,
|
||||
"value": string
|
||||
}
|
||||
]
|
||||
|
||||
## parse
|
||||
```python
|
||||
parse(data, raw=False, quiet=False)
|
||||
```
|
||||
|
||||
Main text parsing function
|
||||
|
||||
Parameters:
|
||||
|
||||
data: (string) text data to parse
|
||||
raw: (boolean) output preprocessed JSON if True
|
||||
quiet: (boolean) suppress warning messages if True
|
||||
|
||||
Returns:
|
||||
|
||||
dictionary raw or processed structured data
|
||||
|
||||
90
docs/parsers/free.md
Normal file
90
docs/parsers/free.md
Normal file
@@ -0,0 +1,90 @@
|
||||
# jc.parsers.free
|
||||
jc - JSON CLI output utility free Parser
|
||||
|
||||
Usage:
|
||||
specify --free as the first argument if the piped input is coming from free
|
||||
|
||||
Examples:
|
||||
|
||||
$ free | jc --free -p
|
||||
[
|
||||
{
|
||||
"type": "Mem",
|
||||
"total": 3861340,
|
||||
"used": 220508,
|
||||
"free": 3381972,
|
||||
"shared": 11800,
|
||||
"buff_cache": 258860,
|
||||
"available": 3397784
|
||||
},
|
||||
{
|
||||
"type": "Swap",
|
||||
"total": 2097148,
|
||||
"used": 0,
|
||||
"free": 2097148
|
||||
}
|
||||
]
|
||||
|
||||
$ free | jc --free -p -r
|
||||
[
|
||||
{
|
||||
"type": "Mem",
|
||||
"total": "2017300",
|
||||
"used": "213104",
|
||||
"free": "1148452",
|
||||
"shared": "1176",
|
||||
"buff_cache": "655744",
|
||||
"available": "1622204"
|
||||
},
|
||||
{
|
||||
"type": "Swap",
|
||||
"total": "2097148",
|
||||
"used": "0",
|
||||
"free": "2097148"
|
||||
}
|
||||
]
|
||||
|
||||
## process
|
||||
```python
|
||||
process(proc_data)
|
||||
```
|
||||
|
||||
Final processing to conform to the schema.
|
||||
|
||||
Parameters:
|
||||
|
||||
proc_data: (dictionary) raw structured data to process
|
||||
|
||||
Returns:
|
||||
|
||||
dictionary structured data with the following schema:
|
||||
|
||||
[
|
||||
{
|
||||
"type": string,
|
||||
"total": integer,
|
||||
"used": integer,
|
||||
"free": integer,
|
||||
"shared": integer,
|
||||
"buff_cache": integer,
|
||||
"available": integer
|
||||
}
|
||||
]
|
||||
|
||||
## parse
|
||||
```python
|
||||
parse(data, raw=False, quiet=False)
|
||||
```
|
||||
|
||||
Main text parsing function
|
||||
|
||||
Parameters:
|
||||
|
||||
data: (string) text data to parse
|
||||
raw: (boolean) output preprocessed JSON if True
|
||||
quiet: (boolean) suppress warning messages if True
|
||||
|
||||
Returns:
|
||||
|
||||
dictionary raw or processed structured data
|
||||
|
||||
107
docs/parsers/fstab.md
Normal file
107
docs/parsers/fstab.md
Normal file
@@ -0,0 +1,107 @@
|
||||
# jc.parsers.fstab
|
||||
jc - JSON CLI output utility fstab Parser
|
||||
|
||||
Usage:
|
||||
specify --fstab as the first argument if the piped input is coming from a fstab file
|
||||
|
||||
Examples:
|
||||
|
||||
$ cat /etc/fstab | jc --fstab -p
|
||||
[
|
||||
{
|
||||
"fs_spec": "/dev/mapper/centos-root",
|
||||
"fs_file": "/",
|
||||
"fs_vfstype": "xfs",
|
||||
"fs_mntops": "defaults",
|
||||
"fs_freq": 0,
|
||||
"fs_passno": 0
|
||||
},
|
||||
{
|
||||
"fs_spec": "UUID=05d927bb-5875-49e3-ada1-7f46cb31c932",
|
||||
"fs_file": "/boot",
|
||||
"fs_vfstype": "xfs",
|
||||
"fs_mntops": "defaults",
|
||||
"fs_freq": 0,
|
||||
"fs_passno": 0
|
||||
},
|
||||
{
|
||||
"fs_spec": "/dev/mapper/centos-swap",
|
||||
"fs_file": "swap",
|
||||
"fs_vfstype": "swap",
|
||||
"fs_mntops": "defaults",
|
||||
"fs_freq": 0,
|
||||
"fs_passno": 0
|
||||
}
|
||||
]
|
||||
|
||||
$ cat /etc/fstab | jc --fstab -p -r
|
||||
[
|
||||
{
|
||||
"fs_spec": "/dev/mapper/centos-root",
|
||||
"fs_file": "/",
|
||||
"fs_vfstype": "xfs",
|
||||
"fs_mntops": "defaults",
|
||||
"fs_freq": "0",
|
||||
"fs_passno": "0"
|
||||
},
|
||||
{
|
||||
"fs_spec": "UUID=05d927bb-5875-49e3-ada1-7f46cb31c932",
|
||||
"fs_file": "/boot",
|
||||
"fs_vfstype": "xfs",
|
||||
"fs_mntops": "defaults",
|
||||
"fs_freq": "0",
|
||||
"fs_passno": "0"
|
||||
},
|
||||
{
|
||||
"fs_spec": "/dev/mapper/centos-swap",
|
||||
"fs_file": "swap",
|
||||
"fs_vfstype": "swap",
|
||||
"fs_mntops": "defaults",
|
||||
"fs_freq": "0",
|
||||
"fs_passno": "0"
|
||||
}
|
||||
]
|
||||
|
||||
## process
|
||||
```python
|
||||
process(proc_data)
|
||||
```
|
||||
|
||||
Final processing to conform to the schema.
|
||||
|
||||
Parameters:
|
||||
|
||||
proc_data: (dictionary) raw structured data to process
|
||||
|
||||
Returns:
|
||||
|
||||
dictionary structured data with the following schema:
|
||||
|
||||
[
|
||||
{
|
||||
"fs_spec": string,
|
||||
"fs_file": string,
|
||||
"fs_vfstype": string,
|
||||
"fs_mntops": string,
|
||||
"fs_freq": integer,
|
||||
"fs_passno": integer
|
||||
}
|
||||
]
|
||||
|
||||
## parse
|
||||
```python
|
||||
parse(data, raw=False, quiet=False)
|
||||
```
|
||||
|
||||
Main text parsing function
|
||||
|
||||
Parameters:
|
||||
|
||||
data: (string) text data to parse
|
||||
raw: (boolean) output preprocessed JSON if True
|
||||
quiet: (boolean) suppress warning messages if True
|
||||
|
||||
Returns:
|
||||
|
||||
dictionary raw or processed structured data
|
||||
|
||||
77
docs/parsers/history.md
Normal file
77
docs/parsers/history.md
Normal file
@@ -0,0 +1,77 @@
|
||||
# jc.parsers.history
|
||||
jc - JSON CLI output utility history Parser
|
||||
|
||||
Usage:
|
||||
specify --history as the first argument if the piped input is coming from history
|
||||
|
||||
Examples:
|
||||
|
||||
$ history | jc --history -p
|
||||
[
|
||||
{
|
||||
"line": "118",
|
||||
"command": "sleep 100"
|
||||
},
|
||||
{
|
||||
"line": "119",
|
||||
"command": "ls /bin"
|
||||
},
|
||||
{
|
||||
"line": "120",
|
||||
"command": "echo "hello""
|
||||
},
|
||||
{
|
||||
"line": "121",
|
||||
"command": "docker images"
|
||||
},
|
||||
...
|
||||
]
|
||||
|
||||
$ history | jc --history -p -r
|
||||
{
|
||||
"118": "sleep 100",
|
||||
"119": "ls /bin",
|
||||
"120": "echo "hello"",
|
||||
"121": "docker images",
|
||||
...
|
||||
}
|
||||
|
||||
## process
|
||||
```python
|
||||
process(proc_data)
|
||||
```
|
||||
|
||||
Final processing to conform to the schema.
|
||||
|
||||
Parameters:
|
||||
|
||||
proc_data: (dictionary) raw structured data to process
|
||||
|
||||
Returns:
|
||||
|
||||
dictionary structured data with the following schema:
|
||||
|
||||
[
|
||||
{
|
||||
"line": string,
|
||||
"command": string
|
||||
}
|
||||
]
|
||||
|
||||
## parse
|
||||
```python
|
||||
parse(data, raw=False, quiet=False)
|
||||
```
|
||||
|
||||
Main text parsing function
|
||||
|
||||
Parameters:
|
||||
|
||||
data: (string) text data to parse
|
||||
raw: (boolean) output preprocessed JSON if True
|
||||
quiet: (boolean) suppress warning messages if True
|
||||
|
||||
Returns:
|
||||
|
||||
dictionary raw or processed structured data
|
||||
|
||||
96
docs/parsers/hosts.md
Normal file
96
docs/parsers/hosts.md
Normal file
@@ -0,0 +1,96 @@
|
||||
# jc.parsers.hosts
|
||||
jc - JSON CLI output utility hosts Parser
|
||||
|
||||
Usage:
|
||||
specify --hosts as the first argument if the piped input is coming from a hosts file
|
||||
|
||||
Examples:
|
||||
|
||||
$ cat /etc/hosts | jc --hosts -p
|
||||
[
|
||||
{
|
||||
"ip": "127.0.0.1",
|
||||
"hostname": [
|
||||
"localhost"
|
||||
]
|
||||
},
|
||||
{
|
||||
"ip": "127.0.1.1",
|
||||
"hostname": [
|
||||
"root-ubuntu"
|
||||
]
|
||||
},
|
||||
{
|
||||
"ip": "::1",
|
||||
"hostname": [
|
||||
"ip6-localhost",
|
||||
"ip6-loopback"
|
||||
]
|
||||
},
|
||||
{
|
||||
"ip": "fe00::0",
|
||||
"hostname": [
|
||||
"ip6-localnet"
|
||||
]
|
||||
},
|
||||
{
|
||||
"ip": "ff00::0",
|
||||
"hostname": [
|
||||
"ip6-mcastprefix"
|
||||
]
|
||||
},
|
||||
{
|
||||
"ip": "ff02::1",
|
||||
"hostname": [
|
||||
"ip6-allnodes"
|
||||
]
|
||||
},
|
||||
{
|
||||
"ip": "ff02::2",
|
||||
"hostname": [
|
||||
"ip6-allrouters"
|
||||
]
|
||||
}
|
||||
]
|
||||
|
||||
## process
|
||||
```python
|
||||
process(proc_data)
|
||||
```
|
||||
|
||||
Final processing to conform to the schema.
|
||||
|
||||
Parameters:
|
||||
|
||||
proc_data: (dictionary) raw structured data to process
|
||||
|
||||
Returns:
|
||||
|
||||
dictionary structured data with the following schema:
|
||||
|
||||
[
|
||||
{
|
||||
"ip": string,
|
||||
"hostname": [
|
||||
string
|
||||
]
|
||||
}
|
||||
]
|
||||
|
||||
## parse
|
||||
```python
|
||||
parse(data, raw=False, quiet=False)
|
||||
```
|
||||
|
||||
Main text parsing function
|
||||
|
||||
Parameters:
|
||||
|
||||
data: (string) text data to parse
|
||||
raw: (boolean) output preprocessed JSON if True
|
||||
quiet: (boolean) suppress warning messages if True
|
||||
|
||||
Returns:
|
||||
|
||||
dictionary raw or processed structured data
|
||||
|
||||
183
docs/parsers/ifconfig.md
Normal file
183
docs/parsers/ifconfig.md
Normal file
@@ -0,0 +1,183 @@
|
||||
# jc.parsers.ifconfig
|
||||
jc - JSON CLI output utility ifconfig Parser
|
||||
|
||||
Usage:
|
||||
specify --ifconfig as the first argument if the piped input is coming from ifconfig
|
||||
|
||||
no ifconfig options are supported.
|
||||
|
||||
Examples:
|
||||
|
||||
$ ifconfig | jc --ifconfig -p
|
||||
[
|
||||
{
|
||||
"name": "ens33",
|
||||
"flags": 4163,
|
||||
"state": "UP,BROADCAST,RUNNING,MULTICAST",
|
||||
"mtu": 1500,
|
||||
"ipv4_addr": "192.168.71.138",
|
||||
"ipv4_mask": "255.255.255.0",
|
||||
"ipv4_bcast": "192.168.71.255",
|
||||
"ipv6_addr": "fe80::c1cb:715d:bc3e:b8a0",
|
||||
"ipv6_mask": 64,
|
||||
"ipv6_scope": "link",
|
||||
"mac_addr": "00:0c:29:3b:58:0e",
|
||||
"type": "Ethernet",
|
||||
"rx_packets": 6374,
|
||||
"rx_errors": 0,
|
||||
"rx_dropped": 0,
|
||||
"rx_overruns": 0,
|
||||
"rx_frame": 0,
|
||||
"tx_packets": 3707,
|
||||
"tx_errors": 0,
|
||||
"tx_dropped": 0,
|
||||
"tx_overruns": 0,
|
||||
"tx_carrier": 0,
|
||||
"tx_collisions": 0,
|
||||
"metric": null
|
||||
},
|
||||
{
|
||||
"name": "lo",
|
||||
"flags": 73,
|
||||
"state": "UP,LOOPBACK,RUNNING",
|
||||
"mtu": 65536,
|
||||
"ipv4_addr": "127.0.0.1",
|
||||
"ipv4_mask": "255.0.0.0",
|
||||
"ipv4_bcast": null,
|
||||
"ipv6_addr": "::1",
|
||||
"ipv6_mask": 128,
|
||||
"ipv6_scope": "host",
|
||||
"mac_addr": null,
|
||||
"type": "Local Loopback",
|
||||
"rx_packets": 81,
|
||||
"rx_errors": 0,
|
||||
"rx_dropped": 0,
|
||||
"rx_overruns": 0,
|
||||
"rx_frame": 0,
|
||||
"tx_packets": 81,
|
||||
"tx_errors": 0,
|
||||
"tx_dropped": 0,
|
||||
"tx_overruns": 0,
|
||||
"tx_carrier": 0,
|
||||
"tx_collisions": 0,
|
||||
"metric": null
|
||||
}
|
||||
]
|
||||
|
||||
$ ifconfig | jc --ifconfig -p -r
|
||||
[
|
||||
{
|
||||
"name": "ens33",
|
||||
"flags": "4163",
|
||||
"state": "UP,BROADCAST,RUNNING,MULTICAST",
|
||||
"mtu": "1500",
|
||||
"ipv4_addr": "192.168.71.135",
|
||||
"ipv4_mask": "255.255.255.0",
|
||||
"ipv4_bcast": "192.168.71.255",
|
||||
"ipv6_addr": "fe80::c1cb:715d:bc3e:b8a0",
|
||||
"ipv6_mask": "64",
|
||||
"ipv6_scope": "link",
|
||||
"mac_addr": "00:0c:29:3b:58:0e",
|
||||
"type": "Ethernet",
|
||||
"rx_packets": "26348",
|
||||
"rx_errors": "0",
|
||||
"rx_dropped": "0",
|
||||
"rx_overruns": "0",
|
||||
"rx_frame": "0",
|
||||
"tx_packets": "5308",
|
||||
"tx_errors": "0",
|
||||
"tx_dropped": "0",
|
||||
"tx_overruns": "0",
|
||||
"tx_carrier": "0",
|
||||
"tx_collisions": "0",
|
||||
"metric": null
|
||||
},
|
||||
{
|
||||
"name": "lo",
|
||||
"flags": "73",
|
||||
"state": "UP,LOOPBACK,RUNNING",
|
||||
"mtu": "65536",
|
||||
"ipv4_addr": "127.0.0.1",
|
||||
"ipv4_mask": "255.0.0.0",
|
||||
"ipv4_bcast": null,
|
||||
"ipv6_addr": "::1",
|
||||
"ipv6_mask": "128",
|
||||
"ipv6_scope": "host",
|
||||
"mac_addr": null,
|
||||
"type": "Local Loopback",
|
||||
"rx_packets": "64",
|
||||
"rx_errors": "0",
|
||||
"rx_dropped": "0",
|
||||
"rx_overruns": "0",
|
||||
"rx_frame": "0",
|
||||
"tx_packets": "64",
|
||||
"tx_errors": "0",
|
||||
"tx_dropped": "0",
|
||||
"tx_overruns": "0",
|
||||
"tx_carrier": "0",
|
||||
"tx_collisions": "0",
|
||||
"metric": null
|
||||
}
|
||||
]
|
||||
|
||||
## process
|
||||
```python
|
||||
process(proc_data)
|
||||
```
|
||||
|
||||
Final processing to conform to the schema.
|
||||
|
||||
Parameters:
|
||||
|
||||
proc_data: (dictionary) raw structured data to process
|
||||
|
||||
Returns:
|
||||
|
||||
dictionary structured data with the following schema:
|
||||
|
||||
[
|
||||
{
|
||||
"name": string,
|
||||
"flags": integer,
|
||||
"state": string,
|
||||
"mtu": integer,
|
||||
"ipv4_addr": string,
|
||||
"ipv4_mask": string,
|
||||
"ipv4_bcast": string,
|
||||
"ipv6_addr": string,
|
||||
"ipv6_mask": integer,
|
||||
"ipv6_scope": string,
|
||||
"mac_addr": string,
|
||||
"type": string,
|
||||
"rx_packets": integer,
|
||||
"rx_errors": integer,
|
||||
"rx_dropped": integer,
|
||||
"rx_overruns": integer,
|
||||
"rx_frame": integer,
|
||||
"tx_packets": integer,
|
||||
"tx_errors": integer,
|
||||
"tx_dropped": integer,
|
||||
"tx_overruns": integer,
|
||||
"tx_carrier": integer,
|
||||
"tx_collisions": integer,
|
||||
"metric": integer
|
||||
}
|
||||
]
|
||||
|
||||
## parse
|
||||
```python
|
||||
parse(data, raw=False, quiet=False)
|
||||
```
|
||||
|
||||
Main text parsing function
|
||||
|
||||
Parameters:
|
||||
|
||||
data: (string) text data to parse
|
||||
raw: (boolean) output preprocessed JSON if True
|
||||
quiet: (boolean) suppress warning messages if True
|
||||
|
||||
Returns:
|
||||
|
||||
dictionary raw or processed structured data
|
||||
|
||||
181
docs/parsers/iptables.md
Normal file
181
docs/parsers/iptables.md
Normal file
@@ -0,0 +1,181 @@
|
||||
# jc.parsers.iptables
|
||||
jc - JSON CLI output utility iptables Parser
|
||||
|
||||
Usage:
|
||||
Specify --iptables as the first argument if the piped input is coming from iptables
|
||||
|
||||
Supports -vLn and --line-numbers for all tables
|
||||
|
||||
Examples:
|
||||
|
||||
$ sudo iptables --line-numbers -v -L -t nat | jc --iptables -p
|
||||
[
|
||||
{
|
||||
"chain": "PREROUTING",
|
||||
"rules": [
|
||||
{
|
||||
"num": 1,
|
||||
"pkts": 2183,
|
||||
"bytes": 186000,
|
||||
"target": "PREROUTING_direct",
|
||||
"prot": "all",
|
||||
"opt": null,
|
||||
"in": "any",
|
||||
"out": "any",
|
||||
"source": "anywhere",
|
||||
"destination": "anywhere"
|
||||
},
|
||||
{
|
||||
"num": 2,
|
||||
"pkts": 2183,
|
||||
"bytes": 186000,
|
||||
"target": "PREROUTING_ZONES_SOURCE",
|
||||
"prot": "all",
|
||||
"opt": null,
|
||||
"in": "any",
|
||||
"out": "any",
|
||||
"source": "anywhere",
|
||||
"destination": "anywhere"
|
||||
},
|
||||
{
|
||||
"num": 3,
|
||||
"pkts": 2183,
|
||||
"bytes": 186000,
|
||||
"target": "PREROUTING_ZONES",
|
||||
"prot": "all",
|
||||
"opt": null,
|
||||
"in": "any",
|
||||
"out": "any",
|
||||
"source": "anywhere",
|
||||
"destination": "anywhere"
|
||||
},
|
||||
{
|
||||
"num": 4,
|
||||
"pkts": 0,
|
||||
"bytes": 0,
|
||||
"target": "DOCKER",
|
||||
"prot": "all",
|
||||
"opt": null,
|
||||
"in": "any",
|
||||
"out": "any",
|
||||
"source": "anywhere",
|
||||
"destination": "anywhere",
|
||||
"options": "ADDRTYPE match dst-type LOCAL"
|
||||
}
|
||||
]
|
||||
},
|
||||
...
|
||||
]
|
||||
|
||||
$ sudo iptables --line-numbers -v -L -t nat | jc --iptables -p -r
|
||||
[
|
||||
{
|
||||
"chain": "PREROUTING",
|
||||
"rules": [
|
||||
{
|
||||
"num": "1",
|
||||
"pkts": "2183",
|
||||
"bytes": "186K",
|
||||
"target": "PREROUTING_direct",
|
||||
"prot": "all",
|
||||
"opt": "--",
|
||||
"in": "any",
|
||||
"out": "any",
|
||||
"source": "anywhere",
|
||||
"destination": "anywhere"
|
||||
},
|
||||
{
|
||||
"num": "2",
|
||||
"pkts": "2183",
|
||||
"bytes": "186K",
|
||||
"target": "PREROUTING_ZONES_SOURCE",
|
||||
"prot": "all",
|
||||
"opt": "--",
|
||||
"in": "any",
|
||||
"out": "any",
|
||||
"source": "anywhere",
|
||||
"destination": "anywhere"
|
||||
},
|
||||
{
|
||||
"num": "3",
|
||||
"pkts": "2183",
|
||||
"bytes": "186K",
|
||||
"target": "PREROUTING_ZONES",
|
||||
"prot": "all",
|
||||
"opt": "--",
|
||||
"in": "any",
|
||||
"out": "any",
|
||||
"source": "anywhere",
|
||||
"destination": "anywhere"
|
||||
},
|
||||
{
|
||||
"num": "4",
|
||||
"pkts": "0",
|
||||
"bytes": "0",
|
||||
"target": "DOCKER",
|
||||
"prot": "all",
|
||||
"opt": "--",
|
||||
"in": "any",
|
||||
"out": "any",
|
||||
"source": "anywhere",
|
||||
"destination": "anywhere",
|
||||
"options": "ADDRTYPE match dst-type LOCAL"
|
||||
}
|
||||
]
|
||||
},
|
||||
...
|
||||
]
|
||||
|
||||
## process
|
||||
```python
|
||||
process(proc_data)
|
||||
```
|
||||
|
||||
Final processing to conform to the schema.
|
||||
|
||||
Parameters:
|
||||
|
||||
proc_data: (dictionary) raw structured data to process
|
||||
|
||||
Returns:
|
||||
|
||||
dictionary structured data with the following schema:
|
||||
|
||||
[
|
||||
{
|
||||
"chain": string,
|
||||
"rules": [
|
||||
{
|
||||
"num" integer,
|
||||
"pkts": integer,
|
||||
"bytes": integer, # converted based on suffix
|
||||
"target": string,
|
||||
"prot": string,
|
||||
"opt": string, # "--" = Null
|
||||
"in": string,
|
||||
"out": string,
|
||||
"source": string,
|
||||
"destination": string,
|
||||
"options": string
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
|
||||
## parse
|
||||
```python
|
||||
parse(data, raw=False, quiet=False)
|
||||
```
|
||||
|
||||
Main text parsing function
|
||||
|
||||
Parameters:
|
||||
|
||||
data: (string) text data to parse
|
||||
raw: (boolean) output preprocessed JSON if True
|
||||
quiet: (boolean) suppress warning messages if True
|
||||
|
||||
Returns:
|
||||
|
||||
dictionary raw or processed structured data
|
||||
|
||||
111
docs/parsers/jobs.md
Normal file
111
docs/parsers/jobs.md
Normal file
@@ -0,0 +1,111 @@
|
||||
# jc.parsers.jobs
|
||||
jc - JSON CLI output utility jobs Parser
|
||||
|
||||
Usage:
|
||||
specify --jobs as the first argument if the piped input is coming from jobs
|
||||
|
||||
Also supports the -l option
|
||||
|
||||
Example:
|
||||
|
||||
$ jobs -l | jc --jobs -p
|
||||
[
|
||||
{
|
||||
"job_number": 1,
|
||||
"pid": 5283,
|
||||
"status": "Running",
|
||||
"command": "sleep 10000 &"
|
||||
},
|
||||
{
|
||||
"job_number": 2,
|
||||
"pid": 5284,
|
||||
"status": "Running",
|
||||
"command": "sleep 10100 &"
|
||||
},
|
||||
{
|
||||
"job_number": 3,
|
||||
"pid": 5285,
|
||||
"history": "previous",
|
||||
"status": "Running",
|
||||
"command": "sleep 10001 &"
|
||||
},
|
||||
{
|
||||
"job_number": 4,
|
||||
"pid": 5286,
|
||||
"history": "current",
|
||||
"status": "Running",
|
||||
"command": "sleep 10112 &"
|
||||
}
|
||||
]
|
||||
|
||||
$ jobs -l | jc --jobs -p -r
|
||||
[
|
||||
{
|
||||
"job_number": "1",
|
||||
"pid": "19510",
|
||||
"status": "Running",
|
||||
"command": "sleep 1000 &"
|
||||
},
|
||||
{
|
||||
"job_number": "2",
|
||||
"pid": "19511",
|
||||
"status": "Running",
|
||||
"command": "sleep 1001 &"
|
||||
},
|
||||
{
|
||||
"job_number": "3",
|
||||
"pid": "19512",
|
||||
"history": "previous",
|
||||
"status": "Running",
|
||||
"command": "sleep 1002 &"
|
||||
},
|
||||
{
|
||||
"job_number": "4",
|
||||
"pid": "19513",
|
||||
"history": "current",
|
||||
"status": "Running",
|
||||
"command": "sleep 1003 &"
|
||||
}
|
||||
]
|
||||
|
||||
## process
|
||||
```python
|
||||
process(proc_data)
|
||||
```
|
||||
|
||||
Final processing to conform to the schema.
|
||||
|
||||
Parameters:
|
||||
|
||||
proc_data: (dictionary) raw structured data to process
|
||||
|
||||
Returns:
|
||||
|
||||
dictionary structured data with the following schema:
|
||||
[
|
||||
{
|
||||
"job_number": integer,
|
||||
"pid": integer,
|
||||
"history": string,
|
||||
"status": string,
|
||||
"command": string
|
||||
}
|
||||
]
|
||||
|
||||
## parse
|
||||
```python
|
||||
parse(data, raw=False, quiet=False)
|
||||
```
|
||||
|
||||
Main text parsing function
|
||||
|
||||
Parameters:
|
||||
|
||||
data: (string) text data to parse
|
||||
raw: (boolean) output preprocessed JSON if True
|
||||
quiet: (boolean) suppress warning messages if True
|
||||
|
||||
Returns:
|
||||
|
||||
dictionary raw or processed structured data
|
||||
|
||||
181
docs/parsers/ls.md
Normal file
181
docs/parsers/ls.md
Normal file
@@ -0,0 +1,181 @@
|
||||
# jc.parsers.ls
|
||||
jc - JSON CLI output utility ls Parser
|
||||
|
||||
Usage:
|
||||
specify --ls as the first argument if the piped input is coming from ls
|
||||
|
||||
ls options supported:
|
||||
- None
|
||||
- la
|
||||
- h file sizes will be available in text form with -r but larger file sizes
|
||||
with human readable suffixes will be converted to Null in default view
|
||||
since the parser attempts to convert this field to an integer.
|
||||
|
||||
Examples:
|
||||
|
||||
$ ls /usr/bin | jc --ls -p
|
||||
[
|
||||
{
|
||||
"filename": "apropos"
|
||||
},
|
||||
{
|
||||
"filename": "arch"
|
||||
},
|
||||
{
|
||||
"filename": "awk"
|
||||
},
|
||||
{
|
||||
"filename": "base64"
|
||||
},
|
||||
...
|
||||
]
|
||||
|
||||
$ ls -l /usr/bin | jc --ls -p
|
||||
[
|
||||
{
|
||||
"filename": "apropos",
|
||||
"link_to": "whatis",
|
||||
"flags": "lrwxrwxrwx.",
|
||||
"links": 1,
|
||||
"owner": "root",
|
||||
"group": "root",
|
||||
"size": 6,
|
||||
"date": "Aug 15 10:53"
|
||||
},
|
||||
{
|
||||
"filename": "ar",
|
||||
"flags": "-rwxr-xr-x.",
|
||||
"links": 1,
|
||||
"owner": "root",
|
||||
"group": "root",
|
||||
"size": 62744,
|
||||
"date": "Aug 8 16:14"
|
||||
},
|
||||
{
|
||||
"filename": "arch",
|
||||
"flags": "-rwxr-xr-x.",
|
||||
"links": 1,
|
||||
"owner": "root",
|
||||
"group": "root",
|
||||
"size": 33080,
|
||||
"date": "Aug 19 23:25"
|
||||
},
|
||||
...
|
||||
]
|
||||
|
||||
$ ls -l /usr/bin | jc --ls -p -r
|
||||
[
|
||||
{
|
||||
"filename": "apropos",
|
||||
"link_to": "whatis",
|
||||
"flags": "lrwxrwxrwx.",
|
||||
"links": "1",
|
||||
"owner": "root",
|
||||
"group": "root",
|
||||
"size": "6",
|
||||
"date": "Aug 15 10:53"
|
||||
},
|
||||
{
|
||||
"filename": "arch",
|
||||
"flags": "-rwxr-xr-x.",
|
||||
"links": "1",
|
||||
"owner": "root",
|
||||
"group": "root",
|
||||
"size": "33080",
|
||||
"date": "Aug 19 23:25"
|
||||
},
|
||||
{
|
||||
"filename": "awk",
|
||||
"link_to": "gawk",
|
||||
"flags": "lrwxrwxrwx.",
|
||||
"links": "1",
|
||||
"owner": "root",
|
||||
"group": "root",
|
||||
"size": "4",
|
||||
"date": "Aug 15 10:53"
|
||||
},
|
||||
{
|
||||
"filename": "base64",
|
||||
"flags": "-rwxr-xr-x.",
|
||||
"links": "1",
|
||||
"owner": "root",
|
||||
"group": "root",
|
||||
"size": "37360",
|
||||
"date": "Aug 19 23:25"
|
||||
},
|
||||
{
|
||||
"filename": "basename",
|
||||
"flags": "-rwxr-xr-x.",
|
||||
"links": "1",
|
||||
"owner": "root",
|
||||
"group": "root",
|
||||
"size": "29032",
|
||||
"date": "Aug 19 23:25"
|
||||
},
|
||||
{
|
||||
"filename": "bash",
|
||||
"flags": "-rwxr-xr-x.",
|
||||
"links": "1",
|
||||
"owner": "root",
|
||||
"group": "root",
|
||||
"size": "964600",
|
||||
"date": "Aug 8 05:06"
|
||||
},
|
||||
...
|
||||
]
|
||||
|
||||
$ ls -l /usr/bin | jc --ls | jq '.[] | select(.size > 50000000)'
|
||||
{
|
||||
"filename": "emacs",
|
||||
"flags": "-r-xr-xr-x",
|
||||
"links": 1,
|
||||
"owner": "root",
|
||||
"group": "wheel",
|
||||
"size": 117164432,
|
||||
"date": "May 3 2019"
|
||||
}
|
||||
|
||||
## process
|
||||
```python
|
||||
process(proc_data)
|
||||
```
|
||||
|
||||
Final processing to conform to the schema.
|
||||
|
||||
Parameters:
|
||||
|
||||
proc_data: (dictionary) raw structured data to process
|
||||
|
||||
Returns:
|
||||
|
||||
dictionary structured data with the following schema:
|
||||
|
||||
[
|
||||
{
|
||||
"filename": string,
|
||||
"flags": string,
|
||||
"links": integer,
|
||||
"owner": string,
|
||||
"group": string,
|
||||
"size": integer,
|
||||
"date": string
|
||||
}
|
||||
]
|
||||
|
||||
## parse
|
||||
```python
|
||||
parse(data, raw=False, quiet=False)
|
||||
```
|
||||
|
||||
Main text parsing function
|
||||
|
||||
Parameters:
|
||||
|
||||
data: (string) text data to parse
|
||||
raw: (boolean) output preprocessed JSON if True
|
||||
quiet: (boolean) suppress warning messages if True
|
||||
|
||||
Returns:
|
||||
|
||||
dictionary raw or processed structured data
|
||||
|
||||
286
docs/parsers/lsblk.md
Normal file
286
docs/parsers/lsblk.md
Normal file
@@ -0,0 +1,286 @@
|
||||
# jc.parsers.lsblk
|
||||
jc - JSON CLI output utility lsblk Parser
|
||||
|
||||
Usage:
|
||||
specify --lsblk as the first argument if the piped input is coming from lsblk
|
||||
|
||||
Examples:
|
||||
|
||||
$ lsblk | jc --lsblk -p
|
||||
[
|
||||
{
|
||||
"name": "sda",
|
||||
"maj_min": "8:0",
|
||||
"rm": false,
|
||||
"size": "20G",
|
||||
"ro": false,
|
||||
"type": "disk",
|
||||
"mountpoint": null
|
||||
},
|
||||
{
|
||||
"name": "sda1",
|
||||
"maj_min": "8:1",
|
||||
"rm": false,
|
||||
"size": "1G",
|
||||
"ro": false,
|
||||
"type": "part",
|
||||
"mountpoint": "/boot"
|
||||
},
|
||||
...
|
||||
]
|
||||
|
||||
$ lsblk -o +KNAME,FSTYPE,LABEL,UUID,PARTLABEL,PARTUUID,RA,MODEL,SERIAL,STATE,OWNER,GROUP,MODE,ALIGNMENT,MIN-IO,OPT-IO,PHY-SEC,LOG-SEC,ROTA,SCHED,RQ-SIZE,DISC-ALN,DISC-GRAN,DISC-MAX,DISC-ZERO,WSAME,WWN,RAND,PKNAME,HCTL,TRAN,REV,VENDOR | jc --lsblk -p
|
||||
[
|
||||
{
|
||||
"name": "sda",
|
||||
"maj_min": "8:0",
|
||||
"rm": false,
|
||||
"size": "20G",
|
||||
"ro": false,
|
||||
"type": "disk",
|
||||
"mountpoint": null,
|
||||
"kname": "sda",
|
||||
"fstype": null,
|
||||
"label": null,
|
||||
"uuid": null,
|
||||
"partlabel": null,
|
||||
"partuuid": null,
|
||||
"ra": 4096,
|
||||
"model": "VMware Virtual S",
|
||||
"serial": null,
|
||||
"state": "running",
|
||||
"owner": "root",
|
||||
"group": "disk",
|
||||
"mode": "brw-rw----",
|
||||
"alignment": 0,
|
||||
"min_io": 512,
|
||||
"opt_io": 0,
|
||||
"phy_sec": 512,
|
||||
"log_sec": 512,
|
||||
"rota": true,
|
||||
"sched": "deadline",
|
||||
"rq_size": 128,
|
||||
"disc_aln": 0,
|
||||
"disc_gran": "0B",
|
||||
"disc_max": "0B",
|
||||
"disc_zero": false,
|
||||
"wsame": "32M",
|
||||
"wwn": null,
|
||||
"rand": true,
|
||||
"pkname": null,
|
||||
"hctl": "0:0:0:0",
|
||||
"tran": "spi",
|
||||
"rev": "1.0",
|
||||
"vendor": "VMware,"
|
||||
},
|
||||
{
|
||||
"name": "sda1",
|
||||
"maj_min": "8:1",
|
||||
"rm": false,
|
||||
"size": "1G",
|
||||
"ro": false,
|
||||
"type": "part",
|
||||
"mountpoint": "/boot",
|
||||
"kname": "sda1",
|
||||
"fstype": "xfs",
|
||||
"label": null,
|
||||
"uuid": "05d927bb-5875-49e3-ada1-7f46cb31c932",
|
||||
"partlabel": null,
|
||||
"partuuid": null,
|
||||
"ra": 4096,
|
||||
"model": null,
|
||||
"serial": null,
|
||||
"state": null,
|
||||
"owner": "root",
|
||||
"group": "disk",
|
||||
"mode": "brw-rw----",
|
||||
"alignment": 0,
|
||||
"min_io": 512,
|
||||
"opt_io": 0,
|
||||
"phy_sec": 512,
|
||||
"log_sec": 512,
|
||||
"rota": true,
|
||||
"sched": "deadline",
|
||||
"rq_size": 128,
|
||||
"disc_aln": 0,
|
||||
"disc_gran": "0B",
|
||||
"disc_max": "0B",
|
||||
"disc_zero": false,
|
||||
"wsame": "32M",
|
||||
"wwn": null,
|
||||
"rand": true,
|
||||
"pkname": "sda",
|
||||
"hctl": null,
|
||||
"tran": null,
|
||||
"rev": null,
|
||||
"vendor": null
|
||||
},
|
||||
...
|
||||
]
|
||||
|
||||
$ lsblk -o +KNAME,FSTYPE,LABEL,UUID,PARTLABEL,PARTUUID,RA,MODEL,SERIAL,STATE,OWNER,GROUP,MODE,ALIGNMENT,MIN-IO,OPT-IO,PHY-SEC,LOG-SEC,ROTA,SCHED,RQ-SIZE,DISC-ALN,DISC-GRAN,DISC-MAX,DISC-ZERO,WSAME,WWN,RAND,PKNAME,HCTL,TRAN,REV,VENDOR | jc --lsblk -p -r
|
||||
[
|
||||
{
|
||||
"name": "sda",
|
||||
"maj_min": "8:0",
|
||||
"rm": "0",
|
||||
"size": "20G",
|
||||
"ro": "0",
|
||||
"type": "disk",
|
||||
"mountpoint": null,
|
||||
"kname": "sda",
|
||||
"fstype": null,
|
||||
"label": null,
|
||||
"uuid": null,
|
||||
"partlabel": null,
|
||||
"partuuid": null,
|
||||
"ra": "4096",
|
||||
"model": "VMware Virtual S",
|
||||
"serial": null,
|
||||
"state": "running",
|
||||
"owner": "root",
|
||||
"group": "disk",
|
||||
"mode": "brw-rw----",
|
||||
"alignment": "0",
|
||||
"min_io": "512",
|
||||
"opt_io": "0",
|
||||
"phy_sec": "512",
|
||||
"log_sec": "512",
|
||||
"rota": "1",
|
||||
"sched": "deadline",
|
||||
"rq_size": "128",
|
||||
"disc_aln": "0",
|
||||
"disc_gran": "0B",
|
||||
"disc_max": "0B",
|
||||
"disc_zero": "0",
|
||||
"wsame": "32M",
|
||||
"wwn": null,
|
||||
"rand": "1",
|
||||
"pkname": null,
|
||||
"hctl": "0:0:0:0",
|
||||
"tran": "spi",
|
||||
"rev": "1.0",
|
||||
"vendor": "VMware,"
|
||||
},
|
||||
{
|
||||
"name": "sda1",
|
||||
"maj_min": "8:1",
|
||||
"rm": "0",
|
||||
"size": "1G",
|
||||
"ro": "0",
|
||||
"type": "part",
|
||||
"mountpoint": "/boot",
|
||||
"kname": "sda1",
|
||||
"fstype": "xfs",
|
||||
"label": null,
|
||||
"uuid": "05d927bb-5875-49e3-ada1-7f46cb31c932",
|
||||
"partlabel": null,
|
||||
"partuuid": null,
|
||||
"ra": "4096",
|
||||
"model": null,
|
||||
"serial": null,
|
||||
"state": null,
|
||||
"owner": "root",
|
||||
"group": "disk",
|
||||
"mode": "brw-rw----",
|
||||
"alignment": "0",
|
||||
"min_io": "512",
|
||||
"opt_io": "0",
|
||||
"phy_sec": "512",
|
||||
"log_sec": "512",
|
||||
"rota": "1",
|
||||
"sched": "deadline",
|
||||
"rq_size": "128",
|
||||
"disc_aln": "0",
|
||||
"disc_gran": "0B",
|
||||
"disc_max": "0B",
|
||||
"disc_zero": "0",
|
||||
"wsame": "32M",
|
||||
"wwn": null,
|
||||
"rand": "1",
|
||||
"pkname": "sda",
|
||||
"hctl": null,
|
||||
"tran": null,
|
||||
"rev": null,
|
||||
"vendor": null
|
||||
},
|
||||
...
|
||||
]
|
||||
|
||||
## process
|
||||
```python
|
||||
process(proc_data)
|
||||
```
|
||||
|
||||
Final processing to conform to the schema.
|
||||
|
||||
Parameters:
|
||||
|
||||
proc_data: (dictionary) raw structured data to process
|
||||
|
||||
Returns:
|
||||
|
||||
dictionary structured data with the following schema:
|
||||
|
||||
[
|
||||
{
|
||||
"name": string,
|
||||
"maj_min": string,
|
||||
"rm": boolean,
|
||||
"size": string,
|
||||
"ro": boolean,
|
||||
"type": string,
|
||||
"mountpoint": string,
|
||||
"kname": string,
|
||||
"fstype": string,
|
||||
"label": string,
|
||||
"uuid": string,
|
||||
"partlabel": string,
|
||||
"partuuid": string,
|
||||
"ra": integer,
|
||||
"model": string,
|
||||
"serial": string,
|
||||
"state": string,
|
||||
"owner": string,
|
||||
"group": string,
|
||||
"mode": string,
|
||||
"alignment": integer,
|
||||
"min_io": integer,
|
||||
"opt_io": integer,
|
||||
"phy_sec": integer,
|
||||
"log_sec": integer,
|
||||
"rota": boolean,
|
||||
"sched": string,
|
||||
"rq_size": integer,
|
||||
"disc_aln": integer,
|
||||
"disc_gran": string,
|
||||
"disc_max": string,
|
||||
"disc_zero": boolean,
|
||||
"wsame": string,
|
||||
"wwn": string,
|
||||
"rand": boolean,
|
||||
"pkname": string,
|
||||
"hctl": string,
|
||||
"tran": string,
|
||||
"rev": string,
|
||||
"vendor": string
|
||||
}
|
||||
]
|
||||
|
||||
## parse
|
||||
```python
|
||||
parse(data, raw=False, quiet=False)
|
||||
```
|
||||
|
||||
Main text parsing function
|
||||
|
||||
Parameters:
|
||||
|
||||
data: (string) text data to parse
|
||||
raw: (boolean) output preprocessed JSON if True
|
||||
quiet: (boolean) suppress warning messages if True
|
||||
|
||||
Returns:
|
||||
|
||||
dictionary raw or processed structured data
|
||||
|
||||
143
docs/parsers/lsmod.md
Normal file
143
docs/parsers/lsmod.md
Normal file
@@ -0,0 +1,143 @@
|
||||
# jc.parsers.lsmod
|
||||
jc - JSON CLI output utility lsmod Parser
|
||||
|
||||
Usage:
|
||||
specify --lsmod as the first argument if the piped input is coming from lsmod
|
||||
|
||||
Examples:
|
||||
|
||||
$ lsmod | jc --lsmod -p
|
||||
[
|
||||
...
|
||||
{
|
||||
"module": "nf_nat",
|
||||
"size": 26583,
|
||||
"used": 3,
|
||||
"by": [
|
||||
"nf_nat_ipv4",
|
||||
"nf_nat_ipv6",
|
||||
"nf_nat_masquerade_ipv4"
|
||||
]
|
||||
},
|
||||
{
|
||||
"module": "iptable_mangle",
|
||||
"size": 12695,
|
||||
"used": 1
|
||||
},
|
||||
{
|
||||
"module": "iptable_security",
|
||||
"size": 12705,
|
||||
"used": 1
|
||||
},
|
||||
{
|
||||
"module": "iptable_raw",
|
||||
"size": 12678,
|
||||
"used": 1
|
||||
},
|
||||
{
|
||||
"module": "nf_conntrack",
|
||||
"size": 139224,
|
||||
"used": 7,
|
||||
"by": [
|
||||
"nf_nat",
|
||||
"nf_nat_ipv4",
|
||||
"nf_nat_ipv6",
|
||||
"xt_conntrack",
|
||||
"nf_nat_masquerade_ipv4",
|
||||
"nf_conntrack_ipv4",
|
||||
"nf_conntrack_ipv6"
|
||||
]
|
||||
},
|
||||
...
|
||||
]
|
||||
|
||||
$ lsmod | jc --lsmod -p -r
|
||||
[
|
||||
...
|
||||
{
|
||||
"module": "nf_conntrack",
|
||||
"size": "139224",
|
||||
"used": "7",
|
||||
"by": [
|
||||
"nf_nat",
|
||||
"nf_nat_ipv4",
|
||||
"nf_nat_ipv6",
|
||||
"xt_conntrack",
|
||||
"nf_nat_masquerade_ipv4",
|
||||
"nf_conntrack_ipv4",
|
||||
"nf_conntrack_ipv6"
|
||||
]
|
||||
},
|
||||
{
|
||||
"module": "ip_set",
|
||||
"size": "45799",
|
||||
"used": "0"
|
||||
},
|
||||
{
|
||||
"module": "nfnetlink",
|
||||
"size": "14519",
|
||||
"used": "1",
|
||||
"by": [
|
||||
"ip_set"
|
||||
]
|
||||
},
|
||||
{
|
||||
"module": "ebtable_filter",
|
||||
"size": "12827",
|
||||
"used": "1"
|
||||
},
|
||||
{
|
||||
"module": "ebtables",
|
||||
"size": "35009",
|
||||
"used": "2",
|
||||
"by": [
|
||||
"ebtable_nat",
|
||||
"ebtable_filter"
|
||||
]
|
||||
},
|
||||
...
|
||||
]
|
||||
|
||||
## process
|
||||
```python
|
||||
process(proc_data)
|
||||
```
|
||||
|
||||
Final processing to conform to the schema.
|
||||
|
||||
Parameters:
|
||||
|
||||
proc_data: (dictionary) raw structured data to process
|
||||
|
||||
Returns:
|
||||
|
||||
dictionary structured data with the following schema:
|
||||
|
||||
[
|
||||
{
|
||||
"module": string,
|
||||
"size": integer,
|
||||
"used": integer,
|
||||
"by": [
|
||||
string
|
||||
]
|
||||
}
|
||||
]
|
||||
|
||||
## parse
|
||||
```python
|
||||
parse(data, raw=False, quiet=False)
|
||||
```
|
||||
|
||||
Main text parsing function
|
||||
|
||||
Parameters:
|
||||
|
||||
data: (string) text data to parse
|
||||
raw: (boolean) output preprocessed JSON if True
|
||||
quiet: (boolean) suppress warning messages if True
|
||||
|
||||
Returns:
|
||||
|
||||
dictionary raw or processed structured data
|
||||
|
||||
137
docs/parsers/lsof.md
Normal file
137
docs/parsers/lsof.md
Normal file
@@ -0,0 +1,137 @@
|
||||
# jc.parsers.lsof
|
||||
jc - JSON CLI output utility lsof Parser
|
||||
|
||||
Usage:
|
||||
specify --lsof as the first argument if the piped input is coming from lsof
|
||||
|
||||
Examples:
|
||||
|
||||
$ sudo lsof | jc --lsof -p
|
||||
[
|
||||
{
|
||||
"command": "systemd",
|
||||
"pid": 1,
|
||||
"tid": null,
|
||||
"user": "root",
|
||||
"fd": "cwd",
|
||||
"type": "DIR",
|
||||
"device": "253,0",
|
||||
"size_off": 224,
|
||||
"node": 64,
|
||||
"name": "/"
|
||||
},
|
||||
{
|
||||
"command": "systemd",
|
||||
"pid": 1,
|
||||
"tid": null,
|
||||
"user": "root",
|
||||
"fd": "rtd",
|
||||
"type": "DIR",
|
||||
"device": "253,0",
|
||||
"size_off": 224,
|
||||
"node": 64,
|
||||
"name": "/"
|
||||
},
|
||||
{
|
||||
"command": "systemd",
|
||||
"pid": 1,
|
||||
"tid": null,
|
||||
"user": "root",
|
||||
"fd": "txt",
|
||||
"type": "REG",
|
||||
"device": "253,0",
|
||||
"size_off": 1624520,
|
||||
"node": 50360451,
|
||||
"name": "/usr/lib/systemd/systemd"
|
||||
},
|
||||
...
|
||||
]
|
||||
|
||||
$ sudo lsof | jc --lsof -p -r
|
||||
[
|
||||
{
|
||||
"command": "systemd",
|
||||
"pid": "1",
|
||||
"tid": null,
|
||||
"user": "root",
|
||||
"fd": "cwd",
|
||||
"type": "DIR",
|
||||
"device": "8,2",
|
||||
"size_off": "4096",
|
||||
"node": "2",
|
||||
"name": "/"
|
||||
},
|
||||
{
|
||||
"command": "systemd",
|
||||
"pid": "1",
|
||||
"tid": null,
|
||||
"user": "root",
|
||||
"fd": "rtd",
|
||||
"type": "DIR",
|
||||
"device": "8,2",
|
||||
"size_off": "4096",
|
||||
"node": "2",
|
||||
"name": "/"
|
||||
},
|
||||
{
|
||||
"command": "systemd",
|
||||
"pid": "1",
|
||||
"tid": null,
|
||||
"user": "root",
|
||||
"fd": "txt",
|
||||
"type": "REG",
|
||||
"device": "8,2",
|
||||
"size_off": "1595792",
|
||||
"node": "668802",
|
||||
"name": "/lib/systemd/systemd"
|
||||
},
|
||||
...
|
||||
]
|
||||
|
||||
## process
|
||||
```python
|
||||
process(proc_data)
|
||||
```
|
||||
|
||||
Final processing to conform to the schema.
|
||||
|
||||
Parameters:
|
||||
|
||||
proc_data: (dictionary) raw structured data to process
|
||||
|
||||
Returns:
|
||||
|
||||
dictionary structured data with the following schema:
|
||||
|
||||
[
|
||||
{
|
||||
"command": string,
|
||||
"pid": integer,
|
||||
"tid": integer,
|
||||
"user": string,
|
||||
"fd": string,
|
||||
"type": string,
|
||||
"device": string,
|
||||
"size_off": integer,
|
||||
"node": integer,
|
||||
"name": string
|
||||
}
|
||||
]
|
||||
|
||||
## parse
|
||||
```python
|
||||
parse(data, raw=False, quiet=False)
|
||||
```
|
||||
|
||||
Main text parsing function
|
||||
|
||||
Parameters:
|
||||
|
||||
data: (string) text data to parse
|
||||
raw: (boolean) output preprocessed JSON if True
|
||||
quiet: (boolean) suppress warning messages if True
|
||||
|
||||
Returns:
|
||||
|
||||
dictionary raw or processed structured data
|
||||
|
||||
93
docs/parsers/mount.md
Normal file
93
docs/parsers/mount.md
Normal file
@@ -0,0 +1,93 @@
|
||||
# jc.parsers.mount
|
||||
jc - JSON CLI output utility mount Parser
|
||||
|
||||
Usage:
|
||||
specify --mount as the first argument if the piped input is coming from mount
|
||||
|
||||
Example:
|
||||
|
||||
$ mount | jc --mount -p
|
||||
[
|
||||
{
|
||||
"filesystem": "sysfs",
|
||||
"mount_point": "/sys",
|
||||
"type": "sysfs",
|
||||
"access": [
|
||||
"rw",
|
||||
"nosuid",
|
||||
"nodev",
|
||||
"noexec",
|
||||
"relatime"
|
||||
]
|
||||
},
|
||||
{
|
||||
"filesystem": "proc",
|
||||
"mount_point": "/proc",
|
||||
"type": "proc",
|
||||
"access": [
|
||||
"rw",
|
||||
"nosuid",
|
||||
"nodev",
|
||||
"noexec",
|
||||
"relatime"
|
||||
]
|
||||
},
|
||||
{
|
||||
"filesystem": "udev",
|
||||
"mount_point": "/dev",
|
||||
"type": "devtmpfs",
|
||||
"access": [
|
||||
"rw",
|
||||
"nosuid",
|
||||
"relatime",
|
||||
"size=977500k",
|
||||
"nr_inodes=244375",
|
||||
"mode=755"
|
||||
]
|
||||
},
|
||||
...
|
||||
]
|
||||
|
||||
## process
|
||||
```python
|
||||
process(proc_data)
|
||||
```
|
||||
|
||||
Final processing to conform to the schema.
|
||||
|
||||
Parameters:
|
||||
|
||||
proc_data: (dictionary) raw structured data to process
|
||||
|
||||
Returns:
|
||||
|
||||
dictionary structured data with the following schema:
|
||||
|
||||
[
|
||||
{
|
||||
"filesystem": string,
|
||||
"mount_point": string,
|
||||
"type": string,
|
||||
"access": [
|
||||
string
|
||||
]
|
||||
}
|
||||
]
|
||||
|
||||
## parse
|
||||
```python
|
||||
parse(data, raw=False, quiet=False)
|
||||
```
|
||||
|
||||
Main text parsing function
|
||||
|
||||
Parameters:
|
||||
|
||||
data: (string) text data to parse
|
||||
raw: (boolean) output preprocessed JSON if True
|
||||
quiet: (boolean) suppress warning messages if True
|
||||
|
||||
Returns:
|
||||
|
||||
dictionary raw or processed structured data
|
||||
|
||||
365
docs/parsers/netstat.md
Normal file
365
docs/parsers/netstat.md
Normal file
@@ -0,0 +1,365 @@
|
||||
# jc.parsers.netstat
|
||||
jc - JSON CLI output utility netstat Parser
|
||||
|
||||
Usage:
|
||||
Specify --netstat as the first argument if the piped input is coming from netstat
|
||||
|
||||
Examples:
|
||||
|
||||
$ sudo netstat -apee | jc --netstat -p
|
||||
[
|
||||
{
|
||||
"proto": "tcp",
|
||||
"recv_q": 0,
|
||||
"send_q": 0,
|
||||
"local_address": "localhost",
|
||||
"foreign_address": "0.0.0.0",
|
||||
"state": "LISTEN",
|
||||
"user": "systemd-resolve",
|
||||
"inode": 26958,
|
||||
"program_name": "systemd-resolve",
|
||||
"kind": "network",
|
||||
"pid": 887,
|
||||
"local_port": "domain",
|
||||
"foreign_port": "*",
|
||||
"transport_protocol": "tcp",
|
||||
"network_protocol": "ipv4"
|
||||
},
|
||||
{
|
||||
"proto": "tcp",
|
||||
"recv_q": 0,
|
||||
"send_q": 0,
|
||||
"local_address": "0.0.0.0",
|
||||
"foreign_address": "0.0.0.0",
|
||||
"state": "LISTEN",
|
||||
"user": "root",
|
||||
"inode": 30499,
|
||||
"program_name": "sshd",
|
||||
"kind": "network",
|
||||
"pid": 1186,
|
||||
"local_port": "ssh",
|
||||
"foreign_port": "*",
|
||||
"transport_protocol": "tcp",
|
||||
"network_protocol": "ipv4"
|
||||
},
|
||||
{
|
||||
"proto": "tcp",
|
||||
"recv_q": 0,
|
||||
"send_q": 0,
|
||||
"local_address": "localhost",
|
||||
"foreign_address": "localhost",
|
||||
"state": "ESTABLISHED",
|
||||
"user": "root",
|
||||
"inode": 46829,
|
||||
"program_name": "sshd: root",
|
||||
"kind": "network",
|
||||
"pid": 2242,
|
||||
"local_port": "ssh",
|
||||
"foreign_port": "52186",
|
||||
"transport_protocol": "tcp",
|
||||
"network_protocol": "ipv4",
|
||||
"foreign_port_num": 52186
|
||||
},
|
||||
{
|
||||
"proto": "tcp",
|
||||
"recv_q": 0,
|
||||
"send_q": 0,
|
||||
"local_address": "localhost",
|
||||
"foreign_address": "localhost",
|
||||
"state": "ESTABLISHED",
|
||||
"user": "root",
|
||||
"inode": 46828,
|
||||
"program_name": "ssh",
|
||||
"kind": "network",
|
||||
"pid": 2241,
|
||||
"local_port": "52186",
|
||||
"foreign_port": "ssh",
|
||||
"transport_protocol": "tcp",
|
||||
"network_protocol": "ipv4",
|
||||
"local_port_num": 52186
|
||||
},
|
||||
{
|
||||
"proto": "tcp6",
|
||||
"recv_q": 0,
|
||||
"send_q": 0,
|
||||
"local_address": "[::]",
|
||||
"foreign_address": "[::]",
|
||||
"state": "LISTEN",
|
||||
"user": "root",
|
||||
"inode": 30510,
|
||||
"program_name": "sshd",
|
||||
"kind": "network",
|
||||
"pid": 1186,
|
||||
"local_port": "ssh",
|
||||
"foreign_port": "*",
|
||||
"transport_protocol": "tcp",
|
||||
"network_protocol": "ipv6"
|
||||
},
|
||||
{
|
||||
"proto": "udp",
|
||||
"recv_q": 0,
|
||||
"send_q": 0,
|
||||
"local_address": "localhost",
|
||||
"foreign_address": "0.0.0.0",
|
||||
"state": null,
|
||||
"user": "systemd-resolve",
|
||||
"inode": 26957,
|
||||
"program_name": "systemd-resolve",
|
||||
"kind": "network",
|
||||
"pid": 887,
|
||||
"local_port": "domain",
|
||||
"foreign_port": "*",
|
||||
"transport_protocol": "udp",
|
||||
"network_protocol": "ipv4"
|
||||
},
|
||||
{
|
||||
"proto": "raw6",
|
||||
"recv_q": 0,
|
||||
"send_q": 0,
|
||||
"local_address": "[::]",
|
||||
"foreign_address": "[::]",
|
||||
"state": "7",
|
||||
"user": "systemd-network",
|
||||
"inode": 27001,
|
||||
"program_name": "systemd-network",
|
||||
"kind": "network",
|
||||
"pid": 867,
|
||||
"local_port": "ipv6-icmp",
|
||||
"foreign_port": "*",
|
||||
"transport_protocol": null,
|
||||
"network_protocol": "ipv6"
|
||||
},
|
||||
{
|
||||
"proto": "unix",
|
||||
"refcnt": 2,
|
||||
"flags": null,
|
||||
"type": "DGRAM",
|
||||
"state": null,
|
||||
"inode": 33322,
|
||||
"program_name": "systemd",
|
||||
"path": "/run/user/1000/systemd/notify",
|
||||
"kind": "socket",
|
||||
"pid": 1607
|
||||
},
|
||||
{
|
||||
"proto": "unix",
|
||||
"refcnt": 2,
|
||||
"flags": "ACC",
|
||||
"type": "SEQPACKET",
|
||||
"state": "LISTENING",
|
||||
"inode": 20835,
|
||||
"program_name": "init",
|
||||
"path": "/run/udev/control",
|
||||
"kind": "socket",
|
||||
"pid": 1
|
||||
},
|
||||
...
|
||||
]
|
||||
|
||||
$ sudo netstat -apee | jc --netstat -p -r
|
||||
[
|
||||
{
|
||||
"proto": "tcp",
|
||||
"recv_q": "0",
|
||||
"send_q": "0",
|
||||
"local_address": "localhost",
|
||||
"foreign_address": "0.0.0.0",
|
||||
"state": "LISTEN",
|
||||
"user": "systemd-resolve",
|
||||
"inode": "26958",
|
||||
"program_name": "systemd-resolve",
|
||||
"kind": "network",
|
||||
"pid": "887",
|
||||
"local_port": "domain",
|
||||
"foreign_port": "*",
|
||||
"transport_protocol": "tcp",
|
||||
"network_protocol": "ipv4"
|
||||
},
|
||||
{
|
||||
"proto": "tcp",
|
||||
"recv_q": "0",
|
||||
"send_q": "0",
|
||||
"local_address": "0.0.0.0",
|
||||
"foreign_address": "0.0.0.0",
|
||||
"state": "LISTEN",
|
||||
"user": "root",
|
||||
"inode": "30499",
|
||||
"program_name": "sshd",
|
||||
"kind": "network",
|
||||
"pid": "1186",
|
||||
"local_port": "ssh",
|
||||
"foreign_port": "*",
|
||||
"transport_protocol": "tcp",
|
||||
"network_protocol": "ipv4"
|
||||
},
|
||||
{
|
||||
"proto": "tcp",
|
||||
"recv_q": "0",
|
||||
"send_q": "0",
|
||||
"local_address": "localhost",
|
||||
"foreign_address": "localhost",
|
||||
"state": "ESTABLISHED",
|
||||
"user": "root",
|
||||
"inode": "46829",
|
||||
"program_name": "sshd: root",
|
||||
"kind": "network",
|
||||
"pid": "2242",
|
||||
"local_port": "ssh",
|
||||
"foreign_port": "52186",
|
||||
"transport_protocol": "tcp",
|
||||
"network_protocol": "ipv4"
|
||||
},
|
||||
{
|
||||
"proto": "tcp",
|
||||
"recv_q": "0",
|
||||
"send_q": "0",
|
||||
"local_address": "localhost",
|
||||
"foreign_address": "localhost",
|
||||
"state": "ESTABLISHED",
|
||||
"user": "root",
|
||||
"inode": "46828",
|
||||
"program_name": "ssh",
|
||||
"kind": "network",
|
||||
"pid": "2241",
|
||||
"local_port": "52186",
|
||||
"foreign_port": "ssh",
|
||||
"transport_protocol": "tcp",
|
||||
"network_protocol": "ipv4"
|
||||
},
|
||||
{
|
||||
"proto": "tcp6",
|
||||
"recv_q": "0",
|
||||
"send_q": "0",
|
||||
"local_address": "[::]",
|
||||
"foreign_address": "[::]",
|
||||
"state": "LISTEN",
|
||||
"user": "root",
|
||||
"inode": "30510",
|
||||
"program_name": "sshd",
|
||||
"kind": "network",
|
||||
"pid": "1186",
|
||||
"local_port": "ssh",
|
||||
"foreign_port": "*",
|
||||
"transport_protocol": "tcp",
|
||||
"network_protocol": "ipv6"
|
||||
},
|
||||
{
|
||||
"proto": "udp",
|
||||
"recv_q": "0",
|
||||
"send_q": "0",
|
||||
"local_address": "localhost",
|
||||
"foreign_address": "0.0.0.0",
|
||||
"state": null,
|
||||
"user": "systemd-resolve",
|
||||
"inode": "26957",
|
||||
"program_name": "systemd-resolve",
|
||||
"kind": "network",
|
||||
"pid": "887",
|
||||
"local_port": "domain",
|
||||
"foreign_port": "*",
|
||||
"transport_protocol": "udp",
|
||||
"network_protocol": "ipv4"
|
||||
},
|
||||
{
|
||||
"proto": "raw6",
|
||||
"recv_q": "0",
|
||||
"send_q": "0",
|
||||
"local_address": "[::]",
|
||||
"foreign_address": "[::]",
|
||||
"state": "7",
|
||||
"user": "systemd-network",
|
||||
"inode": "27001",
|
||||
"program_name": "systemd-network",
|
||||
"kind": "network",
|
||||
"pid": "867",
|
||||
"local_port": "ipv6-icmp",
|
||||
"foreign_port": "*",
|
||||
"transport_protocol": null,
|
||||
"network_protocol": "ipv6"
|
||||
},
|
||||
{
|
||||
"proto": "unix",
|
||||
"refcnt": "2",
|
||||
"flags": null,
|
||||
"type": "DGRAM",
|
||||
"state": null,
|
||||
"inode": "33322",
|
||||
"program_name": "systemd",
|
||||
"path": "/run/user/1000/systemd/notify",
|
||||
"kind": "socket",
|
||||
"pid": " 1607"
|
||||
},
|
||||
{
|
||||
"proto": "unix",
|
||||
"refcnt": "2",
|
||||
"flags": "ACC",
|
||||
"type": "SEQPACKET",
|
||||
"state": "LISTENING",
|
||||
"inode": "20835",
|
||||
"program_name": "init",
|
||||
"path": "/run/udev/control",
|
||||
"kind": "socket",
|
||||
"pid": " 1"
|
||||
},
|
||||
...
|
||||
]
|
||||
|
||||
## process
|
||||
```python
|
||||
process(proc_data)
|
||||
```
|
||||
|
||||
Final processing to conform to the schema.
|
||||
|
||||
Parameters:
|
||||
|
||||
proc_data: (dictionary) raw structured data to process
|
||||
|
||||
Returns:
|
||||
|
||||
dictionary structured data with the following schema:
|
||||
|
||||
[
|
||||
{
|
||||
"proto": string,
|
||||
"recv_q": integer,
|
||||
"send_q": integer,
|
||||
"transport_protocol": string,
|
||||
"network_protocol": string,
|
||||
"local_address": string,
|
||||
"local_port": string,
|
||||
"local_port_num": integer,
|
||||
"foreign_address": string,
|
||||
"foreign_port": string,
|
||||
"foreign_port_num": integer,
|
||||
"state": string,
|
||||
"program_name": string,
|
||||
"pid": integer,
|
||||
"user": string,
|
||||
"security_context": string,
|
||||
"refcnt": integer,
|
||||
"flags": string,
|
||||
"type": string,
|
||||
"inode": integer,
|
||||
"path": string,
|
||||
"kind": string
|
||||
}
|
||||
]
|
||||
|
||||
## parse
|
||||
```python
|
||||
parse(data, raw=False, quiet=False)
|
||||
```
|
||||
|
||||
Main text parsing function
|
||||
|
||||
Parameters:
|
||||
|
||||
data: (string) text data to parse
|
||||
raw: (boolean) output preprocessed JSON if True
|
||||
quiet: (boolean) suppress warning messages if True
|
||||
|
||||
Returns:
|
||||
|
||||
dictionary raw or processed structured data
|
||||
|
||||
223
docs/parsers/ps.md
Normal file
223
docs/parsers/ps.md
Normal file
@@ -0,0 +1,223 @@
|
||||
# jc.parsers.ps
|
||||
jc - JSON CLI output utility ps Parser
|
||||
|
||||
Usage:
|
||||
specify --ps as the first argument if the piped input is coming from ps
|
||||
|
||||
ps options supported:
|
||||
- ef
|
||||
- axu
|
||||
|
||||
Examples:
|
||||
|
||||
$ ps -ef | jc --ps -p
|
||||
[
|
||||
{
|
||||
"uid": "root",
|
||||
"pid": 1,
|
||||
"ppid": 0,
|
||||
"c": 0,
|
||||
"stime": "Nov01",
|
||||
"tty": null,
|
||||
"time": "00:00:11",
|
||||
"cmd": "/usr/lib/systemd/systemd --switched-root --system --deserialize 22"
|
||||
},
|
||||
{
|
||||
"uid": "root",
|
||||
"pid": 2,
|
||||
"ppid": 0,
|
||||
"c": 0,
|
||||
"stime": "Nov01",
|
||||
"tty": null,
|
||||
"time": "00:00:00",
|
||||
"cmd": "[kthreadd]"
|
||||
},
|
||||
{
|
||||
"uid": "root",
|
||||
"pid": 4,
|
||||
"ppid": 2,
|
||||
"c": 0,
|
||||
"stime": "Nov01",
|
||||
"tty": null,
|
||||
"time": "00:00:00",
|
||||
"cmd": "[kworker/0:0H]"
|
||||
},
|
||||
...
|
||||
]
|
||||
|
||||
$ ps -ef | jc --ps -p -r
|
||||
[
|
||||
{
|
||||
"uid": "root",
|
||||
"pid": "1",
|
||||
"ppid": "0",
|
||||
"c": "0",
|
||||
"stime": "Nov01",
|
||||
"tty": "?",
|
||||
"time": "00:00:11",
|
||||
"cmd": "/usr/lib/systemd/systemd --switched-root --system --deserialize 22"
|
||||
},
|
||||
{
|
||||
"uid": "root",
|
||||
"pid": "2",
|
||||
"ppid": "0",
|
||||
"c": "0",
|
||||
"stime": "Nov01",
|
||||
"tty": "?",
|
||||
"time": "00:00:00",
|
||||
"cmd": "[kthreadd]"
|
||||
},
|
||||
{
|
||||
"uid": "root",
|
||||
"pid": "4",
|
||||
"ppid": "2",
|
||||
"c": "0",
|
||||
"stime": "Nov01",
|
||||
"tty": "?",
|
||||
"time": "00:00:00",
|
||||
"cmd": "[kworker/0:0H]"
|
||||
},
|
||||
...
|
||||
]
|
||||
|
||||
$ ps axu | jc --ps -p
|
||||
[
|
||||
{
|
||||
"user": "root",
|
||||
"pid": 1,
|
||||
"cpu_percent": 0.0,
|
||||
"mem_percent": 0.1,
|
||||
"vsz": 128072,
|
||||
"rss": 6784,
|
||||
"tty": null,
|
||||
"stat": "Ss",
|
||||
"start": "Nov09",
|
||||
"time": "0:08",
|
||||
"command": "/usr/lib/systemd/systemd --switched-root --system --deserialize 22"
|
||||
},
|
||||
{
|
||||
"user": "root",
|
||||
"pid": 2,
|
||||
"cpu_percent": 0.0,
|
||||
"mem_percent": 0.0,
|
||||
"vsz": 0,
|
||||
"rss": 0,
|
||||
"tty": null,
|
||||
"stat": "S",
|
||||
"start": "Nov09",
|
||||
"time": "0:00",
|
||||
"command": "[kthreadd]"
|
||||
},
|
||||
{
|
||||
"user": "root",
|
||||
"pid": 4,
|
||||
"cpu_percent": 0.0,
|
||||
"mem_percent": 0.0,
|
||||
"vsz": 0,
|
||||
"rss": 0,
|
||||
"tty": null,
|
||||
"stat": "S<",
|
||||
"start": "Nov09",
|
||||
"time": "0:00",
|
||||
"command": "[kworker/0:0H]"
|
||||
},
|
||||
...
|
||||
]
|
||||
|
||||
$ ps axu | jc --ps -p -r
|
||||
[
|
||||
{
|
||||
"user": "root",
|
||||
"pid": "1",
|
||||
"cpu_percent": "0.0",
|
||||
"mem_percent": "0.1",
|
||||
"vsz": "128072",
|
||||
"rss": "6784",
|
||||
"tty": "?",
|
||||
"stat": "Ss",
|
||||
"start": "Nov09",
|
||||
"time": "0:08",
|
||||
"command": "/usr/lib/systemd/systemd --switched-root --system --deserialize 22"
|
||||
},
|
||||
{
|
||||
"user": "root",
|
||||
"pid": "2",
|
||||
"cpu_percent": "0.0",
|
||||
"mem_percent": "0.0",
|
||||
"vsz": "0",
|
||||
"rss": "0",
|
||||
"tty": "?",
|
||||
"stat": "S",
|
||||
"start": "Nov09",
|
||||
"time": "0:00",
|
||||
"command": "[kthreadd]"
|
||||
},
|
||||
{
|
||||
"user": "root",
|
||||
"pid": "4",
|
||||
"cpu_percent": "0.0",
|
||||
"mem_percent": "0.0",
|
||||
"vsz": "0",
|
||||
"rss": "0",
|
||||
"tty": "?",
|
||||
"stat": "S<",
|
||||
"start": "Nov09",
|
||||
"time": "0:00",
|
||||
"command": "[kworker/0:0H]"
|
||||
},
|
||||
...
|
||||
]
|
||||
|
||||
## process
|
||||
```python
|
||||
process(proc_data)
|
||||
```
|
||||
|
||||
Final processing to conform to the schema.
|
||||
|
||||
Parameters:
|
||||
|
||||
proc_data: (dictionary) raw structured data to process
|
||||
|
||||
Returns:
|
||||
|
||||
dictionary structured data with the following schema:
|
||||
|
||||
[
|
||||
{
|
||||
"uid": string,
|
||||
"pid": integer,
|
||||
"ppid": integer,
|
||||
"c": integer,
|
||||
"stime": string,
|
||||
"tty": string, # ? = Null
|
||||
"time": string,
|
||||
"cmd": string,
|
||||
"user": string,
|
||||
"cpu_percent": float,
|
||||
"mem_percent": float,
|
||||
"vsz": integer,
|
||||
"rss": integer,
|
||||
"stat": string,
|
||||
"start": string,
|
||||
"command": string
|
||||
}
|
||||
]
|
||||
|
||||
## parse
|
||||
```python
|
||||
parse(data, raw=False, quiet=False)
|
||||
```
|
||||
|
||||
Main text parsing function
|
||||
|
||||
Parameters:
|
||||
|
||||
data: (string) text data to parse
|
||||
raw: (boolean) output preprocessed JSON if True
|
||||
quiet: (boolean) suppress warning messages if True
|
||||
|
||||
Returns:
|
||||
|
||||
dictionary raw or processed structured data
|
||||
|
||||
142
docs/parsers/route.md
Normal file
142
docs/parsers/route.md
Normal file
@@ -0,0 +1,142 @@
|
||||
# jc.parsers.route
|
||||
jc - JSON CLI output utility route Parser
|
||||
|
||||
Usage:
|
||||
specify --route as the first argument if the piped input is coming from route
|
||||
|
||||
Examples:
|
||||
|
||||
$ route -ee | jc --route -p
|
||||
[
|
||||
{
|
||||
"destination": "default",
|
||||
"gateway": "gateway",
|
||||
"genmask": "0.0.0.0",
|
||||
"flags": "UG",
|
||||
"metric": 100,
|
||||
"ref": 0,
|
||||
"use": 0,
|
||||
"iface": "ens33",
|
||||
"mss": 0,
|
||||
"window": 0,
|
||||
"irtt": 0
|
||||
},
|
||||
{
|
||||
"destination": "172.17.0.0",
|
||||
"gateway": "0.0.0.0",
|
||||
"genmask": "255.255.0.0",
|
||||
"flags": "U",
|
||||
"metric": 0,
|
||||
"ref": 0,
|
||||
"use": 0,
|
||||
"iface": "docker",
|
||||
"mss": 0,
|
||||
"window": 0,
|
||||
"irtt": 0
|
||||
},
|
||||
{
|
||||
"destination": "192.168.71.0",
|
||||
"gateway": "0.0.0.0",
|
||||
"genmask": "255.255.255.0",
|
||||
"flags": "U",
|
||||
"metric": 100,
|
||||
"ref": 0,
|
||||
"use": 0,
|
||||
"iface": "ens33",
|
||||
"mss": 0,
|
||||
"window": 0,
|
||||
"irtt": 0
|
||||
}
|
||||
]
|
||||
|
||||
$ route -ee | jc --route -p -r
|
||||
[
|
||||
{
|
||||
"destination": "default",
|
||||
"gateway": "gateway",
|
||||
"genmask": "0.0.0.0",
|
||||
"flags": "UG",
|
||||
"metric": "100",
|
||||
"ref": "0",
|
||||
"use": "0",
|
||||
"iface": "ens33",
|
||||
"mss": "0",
|
||||
"window": "0",
|
||||
"irtt": "0"
|
||||
},
|
||||
{
|
||||
"destination": "172.17.0.0",
|
||||
"gateway": "0.0.0.0",
|
||||
"genmask": "255.255.0.0",
|
||||
"flags": "U",
|
||||
"metric": "0",
|
||||
"ref": "0",
|
||||
"use": "0",
|
||||
"iface": "docker",
|
||||
"mss": "0",
|
||||
"window": "0",
|
||||
"irtt": "0"
|
||||
},
|
||||
{
|
||||
"destination": "192.168.71.0",
|
||||
"gateway": "0.0.0.0",
|
||||
"genmask": "255.255.255.0",
|
||||
"flags": "U",
|
||||
"metric": "100",
|
||||
"ref": "0",
|
||||
"use": "0",
|
||||
"iface": "ens33",
|
||||
"mss": "0",
|
||||
"window": "0",
|
||||
"irtt": "0"
|
||||
}
|
||||
]
|
||||
|
||||
## process
|
||||
```python
|
||||
process(proc_data)
|
||||
```
|
||||
|
||||
Final processing to conform to the schema.
|
||||
|
||||
Parameters:
|
||||
|
||||
proc_data: (dictionary) raw structured data to process
|
||||
|
||||
Returns:
|
||||
|
||||
dictionary structured data with the following schema:
|
||||
|
||||
[
|
||||
{
|
||||
"destination": string,
|
||||
"gateway": string,
|
||||
"genmask": string,
|
||||
"flags": string,
|
||||
"metric": integer,
|
||||
"ref": integer,
|
||||
"use": integer,
|
||||
"mss": integer,
|
||||
"window": integer,
|
||||
"irtt": integer,
|
||||
"iface": string
|
||||
}
|
||||
]
|
||||
|
||||
## parse
|
||||
```python
|
||||
parse(data, raw=False, quiet=False)
|
||||
```
|
||||
|
||||
Main text parsing function
|
||||
|
||||
Parameters:
|
||||
|
||||
data: (string) text data to parse
|
||||
raw: (boolean) output preprocessed JSON if True
|
||||
quiet: (boolean) suppress warning messages if True
|
||||
|
||||
Returns:
|
||||
|
||||
dictionary raw or processed structured data
|
||||
|
||||
297
docs/parsers/ss.md
Normal file
297
docs/parsers/ss.md
Normal file
@@ -0,0 +1,297 @@
|
||||
# jc.parsers.ss
|
||||
jc - JSON CLI output utility ss Parser
|
||||
|
||||
Usage:
|
||||
specify --ss as the first argument if the piped input is coming from ss
|
||||
|
||||
Limitations:
|
||||
Extended information options like -e and -p are not supported and may cause parsing irregularities
|
||||
|
||||
Examples:
|
||||
|
||||
$ sudo ss -a | jc --ss -p
|
||||
[
|
||||
{
|
||||
"netid": "nl",
|
||||
"state": "UNCONN",
|
||||
"recv_q": 0,
|
||||
"send_q": 0,
|
||||
"peer_address": "*",
|
||||
"channel": "rtnl:kernel"
|
||||
},
|
||||
{
|
||||
"netid": "nl",
|
||||
"state": "UNCONN",
|
||||
"recv_q": 0,
|
||||
"send_q": 0,
|
||||
"peer_address": "*",
|
||||
"pid": 893,
|
||||
"channel": "rtnl:systemd-resolve"
|
||||
},
|
||||
...
|
||||
{
|
||||
"netid": "p_raw",
|
||||
"state": "UNCONN",
|
||||
"recv_q": 0,
|
||||
"send_q": 0,
|
||||
"peer_address": "*",
|
||||
"link_layer": "LLDP",
|
||||
"interface": "ens33"
|
||||
},
|
||||
{
|
||||
"netid": "u_dgr",
|
||||
"state": "UNCONN",
|
||||
"recv_q": 0,
|
||||
"send_q": 0,
|
||||
"local_port": "93066",
|
||||
"peer_address": "*",
|
||||
"peer_port": "0",
|
||||
"path": "/run/user/1000/systemd/notify"
|
||||
},
|
||||
{
|
||||
"netid": "u_seq",
|
||||
"state": "LISTEN",
|
||||
"recv_q": 0,
|
||||
"send_q": 128,
|
||||
"local_port": "20699",
|
||||
"peer_address": "*",
|
||||
"peer_port": "0",
|
||||
"path": "/run/udev/control"
|
||||
},
|
||||
...
|
||||
{
|
||||
"netid": "icmp6",
|
||||
"state": "UNCONN",
|
||||
"recv_q": 0,
|
||||
"send_q": 0,
|
||||
"local_address": "*",
|
||||
"local_port": "ipv6-icmp",
|
||||
"peer_address": "*",
|
||||
"peer_port": "*",
|
||||
"interface": "ens33"
|
||||
},
|
||||
{
|
||||
"netid": "udp",
|
||||
"state": "UNCONN",
|
||||
"recv_q": 0,
|
||||
"send_q": 0,
|
||||
"local_address": "127.0.0.53",
|
||||
"local_port": "domain",
|
||||
"peer_address": "0.0.0.0",
|
||||
"peer_port": "*",
|
||||
"interface": "lo"
|
||||
},
|
||||
{
|
||||
"netid": "tcp",
|
||||
"state": "LISTEN",
|
||||
"recv_q": 0,
|
||||
"send_q": 128,
|
||||
"local_address": "127.0.0.53",
|
||||
"local_port": "domain",
|
||||
"peer_address": "0.0.0.0",
|
||||
"peer_port": "*",
|
||||
"interface": "lo"
|
||||
},
|
||||
{
|
||||
"netid": "tcp",
|
||||
"state": "LISTEN",
|
||||
"recv_q": 0,
|
||||
"send_q": 128,
|
||||
"local_address": "0.0.0.0",
|
||||
"local_port": "ssh",
|
||||
"peer_address": "0.0.0.0",
|
||||
"peer_port": "*"
|
||||
},
|
||||
{
|
||||
"netid": "tcp",
|
||||
"state": "LISTEN",
|
||||
"recv_q": 0,
|
||||
"send_q": 128,
|
||||
"local_address": "[::]",
|
||||
"local_port": "ssh",
|
||||
"peer_address": "[::]",
|
||||
"peer_port": "*"
|
||||
},
|
||||
{
|
||||
"netid": "v_str",
|
||||
"state": "ESTAB",
|
||||
"recv_q": 0,
|
||||
"send_q": 0,
|
||||
"local_address": "999900439",
|
||||
"local_port": "1023",
|
||||
"peer_address": "0",
|
||||
"peer_port": "976",
|
||||
"local_port_num": 1023,
|
||||
"peer_port_num": 976
|
||||
}
|
||||
]
|
||||
|
||||
$ sudo ss -a | jc --ss -p -r
|
||||
[
|
||||
{
|
||||
"netid": "nl",
|
||||
"state": "UNCONN",
|
||||
"recv_q": "0",
|
||||
"send_q": "0",
|
||||
"peer_address": "*",
|
||||
"channel": "rtnl:kernel"
|
||||
},
|
||||
{
|
||||
"netid": "nl",
|
||||
"state": "UNCONN",
|
||||
"recv_q": "0",
|
||||
"send_q": "0",
|
||||
"peer_address": "*",
|
||||
"pid": "893",
|
||||
"channel": "rtnl:systemd-resolve"
|
||||
},
|
||||
...
|
||||
{
|
||||
"netid": "p_raw",
|
||||
"state": "UNCONN",
|
||||
"recv_q": "0",
|
||||
"send_q": "0",
|
||||
"peer_address": "*",
|
||||
"link_layer": "LLDP",
|
||||
"interface": "ens33"
|
||||
},
|
||||
{
|
||||
"netid": "u_dgr",
|
||||
"state": "UNCONN",
|
||||
"recv_q": "0",
|
||||
"send_q": "0",
|
||||
"local_port": "93066",
|
||||
"peer_address": "*",
|
||||
"peer_port": "0",
|
||||
"path": "/run/user/1000/systemd/notify"
|
||||
},
|
||||
{
|
||||
"netid": "u_seq",
|
||||
"state": "LISTEN",
|
||||
"recv_q": "0",
|
||||
"send_q": "128",
|
||||
"local_port": "20699",
|
||||
"peer_address": "*",
|
||||
"peer_port": "0",
|
||||
"path": "/run/udev/control"
|
||||
},
|
||||
...
|
||||
{
|
||||
"netid": "icmp6",
|
||||
"state": "UNCONN",
|
||||
"recv_q": "0",
|
||||
"send_q": "0",
|
||||
"local_address": "*",
|
||||
"local_port": "ipv6-icmp",
|
||||
"peer_address": "*",
|
||||
"peer_port": "*",
|
||||
"interface": "ens33"
|
||||
},
|
||||
{
|
||||
"netid": "udp",
|
||||
"state": "UNCONN",
|
||||
"recv_q": "0",
|
||||
"send_q": "0",
|
||||
"local_address": "127.0.0.53",
|
||||
"local_port": "domain",
|
||||
"peer_address": "0.0.0.0",
|
||||
"peer_port": "*",
|
||||
"interface": "lo"
|
||||
},
|
||||
{
|
||||
"netid": "tcp",
|
||||
"state": "LISTEN",
|
||||
"recv_q": "0",
|
||||
"send_q": "128",
|
||||
"local_address": "127.0.0.53",
|
||||
"local_port": "domain",
|
||||
"peer_address": "0.0.0.0",
|
||||
"peer_port": "*",
|
||||
"interface": "lo"
|
||||
},
|
||||
{
|
||||
"netid": "tcp",
|
||||
"state": "LISTEN",
|
||||
"recv_q": "0",
|
||||
"send_q": "128",
|
||||
"local_address": "0.0.0.0",
|
||||
"local_port": "ssh",
|
||||
"peer_address": "0.0.0.0",
|
||||
"peer_port": "*"
|
||||
},
|
||||
{
|
||||
"netid": "tcp",
|
||||
"state": "LISTEN",
|
||||
"recv_q": "0",
|
||||
"send_q": "128",
|
||||
"local_address": "[::]",
|
||||
"local_port": "ssh",
|
||||
"peer_address": "[::]",
|
||||
"peer_port": "*"
|
||||
},
|
||||
{
|
||||
"netid": "v_str",
|
||||
"state": "ESTAB",
|
||||
"recv_q": "0",
|
||||
"send_q": "0",
|
||||
"local_address": "999900439",
|
||||
"local_port": "1023",
|
||||
"peer_address": "0",
|
||||
"peer_port": "976"
|
||||
}
|
||||
]
|
||||
|
||||
## process
|
||||
```python
|
||||
process(proc_data)
|
||||
```
|
||||
|
||||
Final processing to conform to the schema.
|
||||
|
||||
Parameters:
|
||||
|
||||
proc_data: (dictionary) raw structured data to process
|
||||
|
||||
Returns:
|
||||
|
||||
dictionary structured data with the following schema:
|
||||
|
||||
[
|
||||
{
|
||||
"netid": string,
|
||||
"state": string,
|
||||
"recv_q": integer,
|
||||
"send_q": integer,
|
||||
"local_address": string,
|
||||
"local_port": string,
|
||||
"local_port_num": integer,
|
||||
"peer_address": string,
|
||||
"peer_port": string,
|
||||
"peer_port_num": integer,
|
||||
"interface": string,
|
||||
"link_layer": string,
|
||||
"channel": string,
|
||||
"path": string,
|
||||
"pid": integer
|
||||
}
|
||||
]
|
||||
|
||||
Information from https://www.cyberciti.biz/files/ss.html used to define field names
|
||||
|
||||
## parse
|
||||
```python
|
||||
parse(data, raw=False, quiet=False)
|
||||
```
|
||||
|
||||
Main text parsing function
|
||||
|
||||
Parameters:
|
||||
|
||||
data: (string) text data to parse
|
||||
raw: (boolean) output preprocessed JSON if True
|
||||
quiet: (boolean) suppress warning messages if True
|
||||
|
||||
Returns:
|
||||
|
||||
dictionary raw or processed structured data
|
||||
|
||||
154
docs/parsers/stat.md
Normal file
154
docs/parsers/stat.md
Normal file
@@ -0,0 +1,154 @@
|
||||
# jc.parsers.stat
|
||||
jc - JSON CLI output utility stat Parser
|
||||
|
||||
Usage:
|
||||
specify --stat as the first argument if the piped input is coming from stat
|
||||
|
||||
Examples:
|
||||
|
||||
$ stat /bin/* | jc --stat -p
|
||||
[
|
||||
{
|
||||
"file": "/bin/bash",
|
||||
"size": 1113504,
|
||||
"blocks": 2176,
|
||||
"io_blocks": 4096,
|
||||
"type": "regular file",
|
||||
"device": "802h/2050d",
|
||||
"inode": 131099,
|
||||
"links": 1,
|
||||
"access": "0755",
|
||||
"flags": "-rwxr-xr-x",
|
||||
"uid": 0,
|
||||
"user": "root",
|
||||
"gid": 0,
|
||||
"group": "root",
|
||||
"access_time": "2019-11-14 08:18:03.509681766 +0000",
|
||||
"modify_time": "2019-06-06 22:28:15.000000000 +0000",
|
||||
"change_time": "2019-08-12 17:21:29.521945390 +0000",
|
||||
"birth_time": null
|
||||
},
|
||||
{
|
||||
"file": "/bin/btrfs",
|
||||
"size": 716464,
|
||||
"blocks": 1400,
|
||||
"io_blocks": 4096,
|
||||
"type": "regular file",
|
||||
"device": "802h/2050d",
|
||||
"inode": 131100,
|
||||
"links": 1,
|
||||
"access": "0755",
|
||||
"flags": "-rwxr-xr-x",
|
||||
"uid": 0,
|
||||
"user": "root",
|
||||
"gid": 0,
|
||||
"group": "root",
|
||||
"access_time": "2019-11-14 08:18:28.990834276 +0000",
|
||||
"modify_time": "2018-03-12 23:04:27.000000000 +0000",
|
||||
"change_time": "2019-08-12 17:21:29.545944399 +0000",
|
||||
"birth_time": null
|
||||
},
|
||||
...
|
||||
]
|
||||
|
||||
$ stat /bin/* | jc --stat -p -r
|
||||
[
|
||||
{
|
||||
"file": "/bin/bash",
|
||||
"size": "1113504",
|
||||
"blocks": "2176",
|
||||
"io_blocks": "4096",
|
||||
"type": "regular file",
|
||||
"device": "802h/2050d",
|
||||
"inode": "131099",
|
||||
"links": "1",
|
||||
"access": "0755",
|
||||
"flags": "-rwxr-xr-x",
|
||||
"uid": "0",
|
||||
"user": "root",
|
||||
"gid": "0",
|
||||
"group": "root",
|
||||
"access_time": "2019-11-14 08:18:03.509681766 +0000",
|
||||
"modify_time": "2019-06-06 22:28:15.000000000 +0000",
|
||||
"change_time": "2019-08-12 17:21:29.521945390 +0000",
|
||||
"birth_time": null
|
||||
},
|
||||
{
|
||||
"file": "/bin/btrfs",
|
||||
"size": "716464",
|
||||
"blocks": "1400",
|
||||
"io_blocks": "4096",
|
||||
"type": "regular file",
|
||||
"device": "802h/2050d",
|
||||
"inode": "131100",
|
||||
"links": "1",
|
||||
"access": "0755",
|
||||
"flags": "-rwxr-xr-x",
|
||||
"uid": "0",
|
||||
"user": "root",
|
||||
"gid": "0",
|
||||
"group": "root",
|
||||
"access_time": "2019-11-14 08:18:28.990834276 +0000",
|
||||
"modify_time": "2018-03-12 23:04:27.000000000 +0000",
|
||||
"change_time": "2019-08-12 17:21:29.545944399 +0000",
|
||||
"birth_time": null
|
||||
},
|
||||
...
|
||||
]
|
||||
|
||||
## process
|
||||
```python
|
||||
process(proc_data)
|
||||
```
|
||||
|
||||
Final processing to conform to the schema.
|
||||
|
||||
Parameters:
|
||||
|
||||
proc_data: (dictionary) raw structured data to process
|
||||
|
||||
Returns:
|
||||
|
||||
dictionary structured data with the following schema:
|
||||
|
||||
[
|
||||
{
|
||||
"file": string,
|
||||
"link_to": string,
|
||||
"size": integer,
|
||||
"blocks": integer,
|
||||
"io_blocks": integer,
|
||||
"type": string,
|
||||
"device": string,
|
||||
"inode": integer,
|
||||
"links": integer,
|
||||
"access": string,
|
||||
"flags": string,
|
||||
"uid": integer,
|
||||
"user": string,
|
||||
"gid": integer,
|
||||
"group": string,
|
||||
"access_time": string, # - = null
|
||||
"modify_time": string, # - = null
|
||||
"change_time": string, # - = null
|
||||
"birth_time": string # - = null
|
||||
}
|
||||
]
|
||||
|
||||
## parse
|
||||
```python
|
||||
parse(data, raw=False, quiet=False)
|
||||
```
|
||||
|
||||
Main text parsing function
|
||||
|
||||
Parameters:
|
||||
|
||||
data: (string) text data to parse
|
||||
raw: (boolean) output preprocessed JSON if True
|
||||
quiet: (boolean) suppress warning messages if True
|
||||
|
||||
Returns:
|
||||
|
||||
dictionary raw or processed structured data
|
||||
|
||||
76
docs/parsers/systemctl.md
Normal file
76
docs/parsers/systemctl.md
Normal file
@@ -0,0 +1,76 @@
|
||||
# jc.parsers.systemctl
|
||||
jc - JSON CLI output utility systemctl Parser
|
||||
|
||||
Usage:
|
||||
specify --systemctl as the first argument if the piped input is coming from systemctl
|
||||
|
||||
Examples:
|
||||
|
||||
$ systemctl -a | jc --systemctl -p
|
||||
[
|
||||
{
|
||||
"unit": "proc-sys-fs-binfmt_misc.automount",
|
||||
"load": "loaded",
|
||||
"active": "active",
|
||||
"sub": "waiting",
|
||||
"description": "Arbitrary Executable File Formats File System Automount Point"
|
||||
},
|
||||
{
|
||||
"unit": "dev-block-8:2.device",
|
||||
"load": "loaded",
|
||||
"active": "active",
|
||||
"sub": "plugged",
|
||||
"description": "LVM PV 3klkIj-w1qk-DkJi-0XBJ-y3o7-i2Ac-vHqWBM on /dev/sda2 2"
|
||||
},
|
||||
{
|
||||
"unit": "dev-cdrom.device",
|
||||
"load": "loaded",
|
||||
"active": "active",
|
||||
"sub": "plugged",
|
||||
"description": "VMware_Virtual_IDE_CDROM_Drive"
|
||||
},
|
||||
...
|
||||
]
|
||||
|
||||
## process
|
||||
```python
|
||||
process(proc_data)
|
||||
```
|
||||
|
||||
Final processing to conform to the schema.
|
||||
|
||||
Parameters:
|
||||
|
||||
proc_data: (dictionary) raw structured data to process
|
||||
|
||||
Returns:
|
||||
|
||||
dictionary structured data with the following schema:
|
||||
|
||||
[
|
||||
{
|
||||
"unit": string,
|
||||
"load": string,
|
||||
"active": string,
|
||||
"sub": string,
|
||||
"description": string
|
||||
}
|
||||
]
|
||||
|
||||
## parse
|
||||
```python
|
||||
parse(data, raw=False, quiet=False)
|
||||
```
|
||||
|
||||
Main text parsing function
|
||||
|
||||
Parameters:
|
||||
|
||||
data: (string) text data to parse
|
||||
raw: (boolean) output preprocessed JSON if True
|
||||
quiet: (boolean) suppress warning messages if True
|
||||
|
||||
Returns:
|
||||
|
||||
dictionary raw or processed structured data
|
||||
|
||||
94
docs/parsers/systemctl_lj.md
Normal file
94
docs/parsers/systemctl_lj.md
Normal file
@@ -0,0 +1,94 @@
|
||||
# jc.parsers.systemctl_lj
|
||||
jc - JSON CLI output utility systemctl-lj Parser
|
||||
|
||||
Usage:
|
||||
specify --systemctl-lj as the first argument if the piped input is coming from systemctl list-jobs
|
||||
|
||||
Examples:
|
||||
|
||||
$ systemctl list-jobs | jc --systemctl-lj -p
|
||||
[
|
||||
{
|
||||
"job": 3543,
|
||||
"unit": "nginxAfterGlusterfs.service",
|
||||
"type": "start",
|
||||
"state": "waiting"
|
||||
},
|
||||
{
|
||||
"job": 3545,
|
||||
"unit": "glusterReadyForLocalhostMount.service",
|
||||
"type": "start",
|
||||
"state": "running"
|
||||
},
|
||||
{
|
||||
"job": 3506,
|
||||
"unit": "nginx.service",
|
||||
"type": "start",
|
||||
"state": "waiting"
|
||||
}
|
||||
]
|
||||
|
||||
$ systemctl list-jobs | jc --systemctl-lj -p -r
|
||||
[
|
||||
{
|
||||
"job": "3543",
|
||||
"unit": "nginxAfterGlusterfs.service",
|
||||
"type": "start",
|
||||
"state": "waiting"
|
||||
},
|
||||
{
|
||||
"job": "3545",
|
||||
"unit": "glusterReadyForLocalhostMount.service",
|
||||
"type": "start",
|
||||
"state": "running"
|
||||
},
|
||||
{
|
||||
"job": "3506",
|
||||
"unit": "nginx.service",
|
||||
"type": "start",
|
||||
"state": "waiting"
|
||||
}
|
||||
]
|
||||
|
||||
|
||||
## process
|
||||
```python
|
||||
process(proc_data)
|
||||
```
|
||||
|
||||
Final processing to conform to the schema.
|
||||
|
||||
Parameters:
|
||||
|
||||
proc_data: (dictionary) raw structured data to process
|
||||
|
||||
Returns:
|
||||
|
||||
dictionary structured data with the following schema:
|
||||
|
||||
[
|
||||
{
|
||||
"job": integer,
|
||||
"unit": string,
|
||||
"type": string,
|
||||
"state": string
|
||||
}
|
||||
]
|
||||
|
||||
## parse
|
||||
```python
|
||||
parse(data, raw=False, quiet=False)
|
||||
```
|
||||
|
||||
Main text parsing function
|
||||
|
||||
Parameters:
|
||||
|
||||
data: (string) text data to parse
|
||||
raw: (boolean) output preprocessed JSON if True
|
||||
quiet: (boolean) suppress warning messages if True
|
||||
|
||||
Returns:
|
||||
|
||||
dictionary raw or processed structured data
|
||||
|
||||
68
docs/parsers/systemctl_ls.md
Normal file
68
docs/parsers/systemctl_ls.md
Normal file
@@ -0,0 +1,68 @@
|
||||
# jc.parsers.systemctl_ls
|
||||
jc - JSON CLI output utility systemctl-ls Parser
|
||||
|
||||
Usage:
|
||||
specify --systemctl-ls as the first argument if the piped input is coming from systemctl list-sockets
|
||||
|
||||
Examples:
|
||||
|
||||
$ systemctl list-sockets | jc --systemctl-ls -p
|
||||
[
|
||||
{
|
||||
"listen": "/dev/log",
|
||||
"unit": "systemd-journald.socket",
|
||||
"activates": "systemd-journald.service"
|
||||
},
|
||||
{
|
||||
"listen": "/run/dbus/system_bus_socket",
|
||||
"unit": "dbus.socket",
|
||||
"activates": "dbus.service"
|
||||
},
|
||||
{
|
||||
"listen": "/run/dmeventd-client",
|
||||
"unit": "dm-event.socket",
|
||||
"activates": "dm-event.service"
|
||||
},
|
||||
...
|
||||
]
|
||||
|
||||
## process
|
||||
```python
|
||||
process(proc_data)
|
||||
```
|
||||
|
||||
Final processing to conform to the schema.
|
||||
|
||||
Parameters:
|
||||
|
||||
proc_data: (dictionary) raw structured data to process
|
||||
|
||||
Returns:
|
||||
|
||||
dictionary structured data with the following schema:
|
||||
|
||||
[
|
||||
{
|
||||
"listen": string,
|
||||
"unit": string,
|
||||
"activates": string
|
||||
}
|
||||
]
|
||||
|
||||
## parse
|
||||
```python
|
||||
parse(data, raw=False, quiet=False)
|
||||
```
|
||||
|
||||
Main text parsing function
|
||||
|
||||
Parameters:
|
||||
|
||||
data: (string) text data to parse
|
||||
raw: (boolean) output preprocessed JSON if True
|
||||
quiet: (boolean) suppress warning messages if True
|
||||
|
||||
Returns:
|
||||
|
||||
dictionary raw or processed structured data
|
||||
|
||||
64
docs/parsers/systemctl_luf.md
Normal file
64
docs/parsers/systemctl_luf.md
Normal file
@@ -0,0 +1,64 @@
|
||||
# jc.parsers.systemctl_luf
|
||||
jc - JSON CLI output utility systemctl-luf Parser
|
||||
|
||||
Usage:
|
||||
specify --systemctl-luf as the first argument if the piped input is coming from systemctl list-unit-files
|
||||
|
||||
Examples:
|
||||
|
||||
$ systemctl list-unit-files | jc --systemctl-luf -p
|
||||
[
|
||||
{
|
||||
"unit_file": "proc-sys-fs-binfmt_misc.automount",
|
||||
"state": "static"
|
||||
},
|
||||
{
|
||||
"unit_file": "dev-hugepages.mount",
|
||||
"state": "static"
|
||||
},
|
||||
{
|
||||
"unit_file": "dev-mqueue.mount",
|
||||
"state": "static"
|
||||
},
|
||||
...
|
||||
]
|
||||
|
||||
## process
|
||||
```python
|
||||
process(proc_data)
|
||||
```
|
||||
|
||||
Final processing to conform to the schema.
|
||||
|
||||
Parameters:
|
||||
|
||||
proc_data: (dictionary) raw structured data to process
|
||||
|
||||
Returns:
|
||||
|
||||
dictionary structured data with the following schema:
|
||||
|
||||
[
|
||||
{
|
||||
"unit_file": string,
|
||||
"state": string
|
||||
}
|
||||
]
|
||||
|
||||
## parse
|
||||
```python
|
||||
parse(data, raw=False, quiet=False)
|
||||
```
|
||||
|
||||
Main text parsing function
|
||||
|
||||
Parameters:
|
||||
|
||||
data: (string) text data to parse
|
||||
raw: (boolean) output preprocessed JSON if True
|
||||
quiet: (boolean) suppress warning messages if True
|
||||
|
||||
Returns:
|
||||
|
||||
dictionary raw or processed structured data
|
||||
|
||||
66
docs/parsers/uname.md
Normal file
66
docs/parsers/uname.md
Normal file
@@ -0,0 +1,66 @@
|
||||
# jc.parsers.uname
|
||||
jc - JSON CLI output utility uname Parser
|
||||
|
||||
Usage:
|
||||
specify --uname as the first argument if the piped input is coming from uname
|
||||
|
||||
Limitations:
|
||||
must use 'uname -a'
|
||||
|
||||
Example:
|
||||
|
||||
$ uname -a | jc --uname -p
|
||||
{
|
||||
"kernel_name": "Linux",
|
||||
"node_name": "user-ubuntu",
|
||||
"kernel_release": "4.15.0-65-generic",
|
||||
"operating_system": "GNU/Linux",
|
||||
"hardware_platform": "x86_64",
|
||||
"processor": "x86_64",
|
||||
"machine": "x86_64",
|
||||
"kernel_version": "#74-Ubuntu SMP Tue Sep 17 17:06:04 UTC 2019"
|
||||
}
|
||||
|
||||
## process
|
||||
```python
|
||||
process(proc_data)
|
||||
```
|
||||
|
||||
Final processing to conform to the schema.
|
||||
|
||||
Parameters:
|
||||
|
||||
proc_data: (dictionary) raw structured data to process
|
||||
|
||||
Returns:
|
||||
|
||||
dictionary structured data with the following schema:
|
||||
|
||||
{
|
||||
"kernel_name": string,
|
||||
"node_name": string,
|
||||
"kernel_release": string,
|
||||
"operating_system": string,
|
||||
"hardware_platform": string,
|
||||
"processor": string,
|
||||
"machine": string,
|
||||
"kernel_version": string
|
||||
}
|
||||
|
||||
## parse
|
||||
```python
|
||||
parse(data, raw=False, quiet=False)
|
||||
```
|
||||
|
||||
Main text parsing function
|
||||
|
||||
Parameters:
|
||||
|
||||
data: (string) text data to parse
|
||||
raw: (boolean) output preprocessed JSON if True
|
||||
quiet: (boolean) suppress warning messages if True
|
||||
|
||||
Returns:
|
||||
|
||||
dictionary raw or processed structured data
|
||||
|
||||
69
docs/parsers/uptime.md
Normal file
69
docs/parsers/uptime.md
Normal file
@@ -0,0 +1,69 @@
|
||||
# jc.parsers.uptime
|
||||
jc - JSON CLI output utility uptime Parser
|
||||
|
||||
Usage:
|
||||
specify --uptime as the first argument if the piped input is coming from uptime
|
||||
|
||||
Example:
|
||||
|
||||
$ uptime | jc --uptime -p
|
||||
{
|
||||
"time": "11:30:44",
|
||||
"uptime": "1 day, 21:17",
|
||||
"users": 1,
|
||||
"load_1m": 0.01,
|
||||
"load_5m": 0.04,
|
||||
"load_15m": 0.05
|
||||
}
|
||||
|
||||
$ uptime | jc --uptime -p -r
|
||||
{
|
||||
"time": "11:31:09",
|
||||
"uptime": "1 day, 21:17",
|
||||
"users": "1",
|
||||
"load_1m": "0.00",
|
||||
"load_5m": "0.04",
|
||||
"load_15m": "0.05"
|
||||
}
|
||||
|
||||
## process
|
||||
```python
|
||||
process(proc_data)
|
||||
```
|
||||
|
||||
Final processing to conform to the schema.
|
||||
|
||||
Parameters:
|
||||
|
||||
proc_data: (dictionary) raw structured data to process
|
||||
|
||||
Returns:
|
||||
|
||||
dictionary structured data with the following schema:
|
||||
|
||||
{
|
||||
"time": string,
|
||||
"uptime": string,
|
||||
"users": integer,
|
||||
"load_1m": float,
|
||||
"load_5m": float,
|
||||
"load_15m": float
|
||||
}
|
||||
|
||||
## parse
|
||||
```python
|
||||
parse(data, raw=False, quiet=False)
|
||||
```
|
||||
|
||||
Main text parsing function
|
||||
|
||||
Parameters:
|
||||
|
||||
data: (string) text data to parse
|
||||
raw: (boolean) output preprocessed JSON if True
|
||||
quiet: (boolean) suppress warning messages if True
|
||||
|
||||
Returns:
|
||||
|
||||
dictionary raw or processed structured data
|
||||
|
||||
121
docs/parsers/w.md
Normal file
121
docs/parsers/w.md
Normal file
@@ -0,0 +1,121 @@
|
||||
# jc.parsers.w
|
||||
jc - JSON CLI output utility w Parser
|
||||
|
||||
Usage:
|
||||
specify --w as the first argument if the piped input is coming from w
|
||||
|
||||
Examples:
|
||||
|
||||
$ w | jc --w -p
|
||||
[
|
||||
{
|
||||
"user": "root",
|
||||
"tty": "tty1",
|
||||
"from": null,
|
||||
"login_at": "07:49",
|
||||
"idle": "1:15m",
|
||||
"jcpu": "0.00s",
|
||||
"pcpu": "0.00s",
|
||||
"what": "-bash"
|
||||
},
|
||||
{
|
||||
"user": "root",
|
||||
"tty": "ttyS0",
|
||||
"from": null,
|
||||
"login_at": "06:24",
|
||||
"idle": "0.00s",
|
||||
"jcpu": "0.43s",
|
||||
"pcpu": "0.00s",
|
||||
"what": "w"
|
||||
},
|
||||
{
|
||||
"user": "root",
|
||||
"tty": "pts/0",
|
||||
"from": "192.168.71.1",
|
||||
"login_at": "06:29",
|
||||
"idle": "2:35m",
|
||||
"jcpu": "0.00s",
|
||||
"pcpu": "0.00s",
|
||||
"what": "-bash"
|
||||
}
|
||||
]
|
||||
|
||||
$ w | jc --w -p -r
|
||||
[
|
||||
{
|
||||
"user": "kbrazil",
|
||||
"tty": "tty1",
|
||||
"from": "-",
|
||||
"login_at": "07:49",
|
||||
"idle": "1:16m",
|
||||
"jcpu": "0.00s",
|
||||
"pcpu": "0.00s",
|
||||
"what": "-bash"
|
||||
},
|
||||
{
|
||||
"user": "kbrazil",
|
||||
"tty": "ttyS0",
|
||||
"from": "-",
|
||||
"login_at": "06:24",
|
||||
"idle": "2.00s",
|
||||
"jcpu": "0.46s",
|
||||
"pcpu": "0.00s",
|
||||
"what": "w"
|
||||
},
|
||||
{
|
||||
"user": "kbrazil",
|
||||
"tty": "pts/0",
|
||||
"from": "192.168.71.1",
|
||||
"login_at": "06:29",
|
||||
"idle": "2:36m",
|
||||
"jcpu": "0.00s",
|
||||
"pcpu": "0.00s",
|
||||
"what": "-bash"
|
||||
}
|
||||
]
|
||||
|
||||
## process
|
||||
```python
|
||||
process(proc_data)
|
||||
```
|
||||
|
||||
Final processing to conform to the schema.
|
||||
|
||||
Parameters:
|
||||
|
||||
proc_data: (dictionary) raw structured data to process
|
||||
|
||||
Returns:
|
||||
|
||||
dictionary structured data with the following schema:
|
||||
|
||||
[
|
||||
{
|
||||
"user": string, # '-'' = null
|
||||
"tty": string, # '-'' = null
|
||||
"from": string, # '-'' = null
|
||||
"login_at": string, # '-'' = null
|
||||
"idle": string, # '-'' = null
|
||||
"jcpu": string,
|
||||
"pcpu": string,
|
||||
"what": string # '-'' = null
|
||||
}
|
||||
]
|
||||
|
||||
## parse
|
||||
```python
|
||||
parse(data, raw=False, quiet=False)
|
||||
```
|
||||
|
||||
Main text parsing function
|
||||
|
||||
Parameters:
|
||||
|
||||
data: (string) text data to parse
|
||||
raw: (boolean) output preprocessed JSON if True
|
||||
quiet: (boolean) suppress warning messages if True
|
||||
|
||||
Returns:
|
||||
|
||||
dictionary raw or processed structured data
|
||||
|
||||
68
docs/readme.md
Normal file
68
docs/readme.md
Normal file
@@ -0,0 +1,68 @@
|
||||
# jc
|
||||
JC - JSON CLI output utility
|
||||
|
||||
* kellyjonbrazil@gmail.com
|
||||
|
||||
This package serializes the output of many standard unix command line tools to JSON format.
|
||||
|
||||
CLI Example:
|
||||
|
||||
$ ls -l /usr/bin | jc --ls -p
|
||||
[
|
||||
{
|
||||
"filename": "apropos",
|
||||
"link_to": "whatis",
|
||||
"flags": "lrwxrwxrwx.",
|
||||
"links": 1,
|
||||
"owner": "root",
|
||||
"group": "root",
|
||||
"size": 6,
|
||||
"date": "Aug 15 10:53"
|
||||
},
|
||||
{
|
||||
"filename": "ar",
|
||||
"flags": "-rwxr-xr-x.",
|
||||
"links": 1,
|
||||
"owner": "root",
|
||||
"group": "root",
|
||||
"size": 62744,
|
||||
"date": "Aug 8 16:14"
|
||||
},
|
||||
{
|
||||
"filename": "arch",
|
||||
"flags": "-rwxr-xr-x.",
|
||||
"links": 1,
|
||||
"owner": "root",
|
||||
"group": "root",
|
||||
"size": 33080,
|
||||
"date": "Aug 19 23:25"
|
||||
},
|
||||
...
|
||||
]
|
||||
|
||||
Module Example:
|
||||
|
||||
>>> import jc.parsers.ls
|
||||
>>>
|
||||
>>> data='''-rwxr-xr-x 1 root wheel 23648 May 3 22:26 cat
|
||||
... -rwxr-xr-x 1 root wheel 30016 May 3 22:26 chmod
|
||||
... -rwxr-xr-x 1 root wheel 29024 May 3 22:26 cp
|
||||
... -rwxr-xr-x 1 root wheel 375824 May 3 22:26 csh
|
||||
... -rwxr-xr-x 1 root wheel 28608 May 3 22:26 date
|
||||
... -rwxr-xr-x 1 root wheel 32000 May 3 22:26 dd
|
||||
... -rwxr-xr-x 1 root wheel 23392 May 3 22:26 df
|
||||
... -rwxr-xr-x 1 root wheel 18128 May 3 22:26 echo'''
|
||||
>>>
|
||||
>>> jc.parsers.ls.parse(data)
|
||||
[{'filename': 'cat', 'flags': '-rwxr-xr-x', 'links': 1, 'owner': 'root', 'group': 'wheel', 'size': 23648,
|
||||
'date': 'May 3 22:26'}, {'filename': 'chmod', 'flags': '-rwxr-xr-x', 'links': 1, 'owner': 'root',
|
||||
'group': 'wheel', 'size': 30016, 'date': 'May 3 22:26'}, {'filename': 'cp', 'flags': '-rwxr-xr-x',
|
||||
'links': 1, 'owner': 'root', 'group': 'wheel', 'size': 29024, 'date': 'May 3 22:26'}, {'filename': 'csh',
|
||||
'flags': '-rwxr-xr-x', 'links': 1, 'owner': 'root', 'group': 'wheel', 'size': 375824, 'date': 'May 3
|
||||
22:26'}, {'filename': 'date', 'flags': '-rwxr-xr-x', 'links': 1, 'owner': 'root', 'group': 'wheel',
|
||||
'size': 28608, 'date': 'May 3 22:26'}, {'filename': 'dd', 'flags': '-rwxr-xr-x', 'links': 1, 'owner':
|
||||
'root', 'group': 'wheel', 'size': 32000, 'date': 'May 3 22:26'}, {'filename': 'df', 'flags':
|
||||
'-rwxr-xr-x', 'links': 1, 'owner': 'root', 'group': 'wheel', 'size': 23392, 'date': 'May 3 22:26'},
|
||||
{'filename': 'echo', 'flags': '-rwxr-xr-x', 'links': 1, 'owner': 'root', 'group': 'wheel', 'size': 18128,
|
||||
'date': 'May 3 22:26'}]
|
||||
|
||||
50
docs/utils.md
Normal file
50
docs/utils.md
Normal file
@@ -0,0 +1,50 @@
|
||||
# utils
|
||||
jc - JSON CLI output utility utils
|
||||
## warning_message
|
||||
```python
|
||||
warning_message(message)
|
||||
```
|
||||
|
||||
Prints a warning message for non-fatal issues
|
||||
|
||||
Parameters:
|
||||
|
||||
message: (string) text of message
|
||||
|
||||
Returns:
|
||||
|
||||
no return, just prints output to STDERR
|
||||
|
||||
## error_message
|
||||
```python
|
||||
error_message(message)
|
||||
```
|
||||
|
||||
Prints an error message for fatal issues
|
||||
|
||||
Parameters:
|
||||
|
||||
message: (string) text of message
|
||||
|
||||
Returns:
|
||||
|
||||
no return, just prints output to STDERR
|
||||
|
||||
## compatibility
|
||||
```python
|
||||
compatibility(mod_name, compatible)
|
||||
```
|
||||
Checks for the parser's compatibility with the running OS platform.
|
||||
|
||||
Parameters:
|
||||
|
||||
mod_name: (string) __name__ of the calling module
|
||||
|
||||
compatible: (list) sys.platform name(s) compatible with the parser
|
||||
compatible options:
|
||||
linux, darwin, cygwin, win32, aix, freebsd
|
||||
|
||||
Returns:
|
||||
|
||||
no return, just prints output to STDERR
|
||||
|
||||
2
install.sh
Executable file
2
install.sh
Executable file
@@ -0,0 +1,2 @@
|
||||
#!/bin/bash
|
||||
pip3 install --upgrade --user -e .
|
||||
114
jc/__init__.py
114
jc/__init__.py
@@ -2,70 +2,68 @@
|
||||
|
||||
* kellyjonbrazil@gmail.com
|
||||
|
||||
This module serializes standard unix command line output to structured JSON
|
||||
output.
|
||||
This package serializes the output of many standard unix command line tools to JSON format.
|
||||
|
||||
CLI Example:
|
||||
|
||||
$ ls -l /bin | jc --ls -p
|
||||
[
|
||||
{
|
||||
"filename": "bash",
|
||||
"flags": "-r-xr-xr-x",
|
||||
"links": 1,
|
||||
"owner": "root",
|
||||
"group": "wheel",
|
||||
"bytes": 618416,
|
||||
"date": "May 3 22:26"
|
||||
},
|
||||
{
|
||||
"filename": "cat",
|
||||
"flags": "-rwxr-xr-x",
|
||||
"links": 1,
|
||||
"owner": "root",
|
||||
"group": "wheel",
|
||||
"bytes": 23648,
|
||||
"date": "May 3 22:26"
|
||||
},
|
||||
{
|
||||
"filename": "chmod",
|
||||
"flags": "-rwxr-xr-x",
|
||||
"links": 1,
|
||||
"owner": "root",
|
||||
"group": "wheel",
|
||||
"bytes": 30016,
|
||||
"date": "May 3 22:26"
|
||||
},
|
||||
...
|
||||
]
|
||||
|
||||
$ ls -l /usr/bin | jc --ls -p
|
||||
[
|
||||
{
|
||||
"filename": "apropos",
|
||||
"link_to": "whatis",
|
||||
"flags": "lrwxrwxrwx.",
|
||||
"links": 1,
|
||||
"owner": "root",
|
||||
"group": "root",
|
||||
"size": 6,
|
||||
"date": "Aug 15 10:53"
|
||||
},
|
||||
{
|
||||
"filename": "ar",
|
||||
"flags": "-rwxr-xr-x.",
|
||||
"links": 1,
|
||||
"owner": "root",
|
||||
"group": "root",
|
||||
"size": 62744,
|
||||
"date": "Aug 8 16:14"
|
||||
},
|
||||
{
|
||||
"filename": "arch",
|
||||
"flags": "-rwxr-xr-x.",
|
||||
"links": 1,
|
||||
"owner": "root",
|
||||
"group": "root",
|
||||
"size": 33080,
|
||||
"date": "Aug 19 23:25"
|
||||
},
|
||||
...
|
||||
]
|
||||
|
||||
Module Example:
|
||||
|
||||
>>> import jc.parsers.ls
|
||||
>>>
|
||||
>>> data='''-rwxr-xr-x 1 root wheel 23648 May 3 22:26 cat
|
||||
... -rwxr-xr-x 1 root wheel 30016 May 3 22:26 chmod
|
||||
... -rwxr-xr-x 1 root wheel 29024 May 3 22:26 cp
|
||||
... -rwxr-xr-x 1 root wheel 375824 May 3 22:26 csh
|
||||
... -rwxr-xr-x 1 root wheel 28608 May 3 22:26 date
|
||||
... -rwxr-xr-x 1 root wheel 32000 May 3 22:26 dd
|
||||
... -rwxr-xr-x 1 root wheel 23392 May 3 22:26 df
|
||||
... -rwxr-xr-x 1 root wheel 18128 May 3 22:26 echo'''
|
||||
>>>
|
||||
>>> jc.parsers.ls.parse(data)
|
||||
[{'filename': 'cat', 'flags': '-rwxr-xr-x', 'links': 1, 'owner': 'root', 'group': 'wheel',
|
||||
'bytes': 23648, 'date': 'May 3 22:26'}, {'filename': 'chmod', 'flags': '-rwxr-xr-x', 'links': 1,
|
||||
'owner': 'root', 'group': 'wheel', 'bytes': 30016, 'date': 'May 3 22:26'}, {'filename': 'cp',
|
||||
'flags': '-rwxr-xr-x', 'links': 1, 'owner': 'root', 'group': 'wheel', 'bytes': 29024,
|
||||
'date': 'May 3 22:26'}, {'filename': 'csh', 'flags': '-rwxr-xr-x', 'links': 1, 'owner': 'root',
|
||||
'group': 'wheel', 'bytes': 375824, 'date': 'May 3 22:26'}, {'filename': 'date',
|
||||
'flags': '-rwxr-xr-x', 'links': 1, 'owner': 'root', 'group': 'wheel', 'bytes': 28608,
|
||||
'date': 'May 3 22:26'}, {'filename': 'dd', 'flags': '-rwxr-xr-x', 'links': 1, 'owner': 'root',
|
||||
'group': 'wheel', 'bytes': 32000, 'date': 'May 3 22:26'}, {'filename': 'df', 'flags': '-rwxr-xr-x',
|
||||
'links': 1, 'owner': 'root', 'group': 'wheel', 'bytes': 23392, 'date': 'May 3 22:26'},
|
||||
{'filename': 'echo', 'flags': '-rwxr-xr-x', 'links': 1, 'owner': 'root', 'group': 'wheel',
|
||||
'bytes': 18128, 'date': 'May 3 22:26'}]
|
||||
>>> import jc.parsers.ls
|
||||
>>>
|
||||
>>> data='''-rwxr-xr-x 1 root wheel 23648 May 3 22:26 cat
|
||||
... -rwxr-xr-x 1 root wheel 30016 May 3 22:26 chmod
|
||||
... -rwxr-xr-x 1 root wheel 29024 May 3 22:26 cp
|
||||
... -rwxr-xr-x 1 root wheel 375824 May 3 22:26 csh
|
||||
... -rwxr-xr-x 1 root wheel 28608 May 3 22:26 date
|
||||
... -rwxr-xr-x 1 root wheel 32000 May 3 22:26 dd
|
||||
... -rwxr-xr-x 1 root wheel 23392 May 3 22:26 df
|
||||
... -rwxr-xr-x 1 root wheel 18128 May 3 22:26 echo'''
|
||||
>>>
|
||||
>>> jc.parsers.ls.parse(data)
|
||||
[{'filename': 'cat', 'flags': '-rwxr-xr-x', 'links': 1, 'owner': 'root', 'group': 'wheel', 'size': 23648,
|
||||
'date': 'May 3 22:26'}, {'filename': 'chmod', 'flags': '-rwxr-xr-x', 'links': 1, 'owner': 'root',
|
||||
'group': 'wheel', 'size': 30016, 'date': 'May 3 22:26'}, {'filename': 'cp', 'flags': '-rwxr-xr-x',
|
||||
'links': 1, 'owner': 'root', 'group': 'wheel', 'size': 29024, 'date': 'May 3 22:26'}, {'filename': 'csh',
|
||||
'flags': '-rwxr-xr-x', 'links': 1, 'owner': 'root', 'group': 'wheel', 'size': 375824, 'date': 'May 3
|
||||
22:26'}, {'filename': 'date', 'flags': '-rwxr-xr-x', 'links': 1, 'owner': 'root', 'group': 'wheel',
|
||||
'size': 28608, 'date': 'May 3 22:26'}, {'filename': 'dd', 'flags': '-rwxr-xr-x', 'links': 1, 'owner':
|
||||
'root', 'group': 'wheel', 'size': 32000, 'date': 'May 3 22:26'}, {'filename': 'df', 'flags':
|
||||
'-rwxr-xr-x', 'links': 1, 'owner': 'root', 'group': 'wheel', 'size': 23392, 'date': 'May 3 22:26'},
|
||||
{'filename': 'echo', 'flags': '-rwxr-xr-x', 'links': 1, 'owner': 'root', 'group': 'wheel', 'size': 18128,
|
||||
'date': 'May 3 22:26'}]
|
||||
"""
|
||||
|
||||
name = 'jc'
|
||||
|
||||
183
jc/cli.py
Normal file
183
jc/cli.py
Normal file
@@ -0,0 +1,183 @@
|
||||
#!/usr/bin/env python3
|
||||
"""jc - JSON CLI output utility
|
||||
|
||||
JC cli module
|
||||
"""
|
||||
import sys
|
||||
import textwrap
|
||||
import signal
|
||||
import json
|
||||
import jc.utils
|
||||
import jc.parsers.arp
|
||||
import jc.parsers.df
|
||||
import jc.parsers.dig
|
||||
import jc.parsers.env
|
||||
import jc.parsers.free
|
||||
import jc.parsers.fstab
|
||||
import jc.parsers.history
|
||||
import jc.parsers.hosts
|
||||
import jc.parsers.ifconfig
|
||||
import jc.parsers.iptables
|
||||
import jc.parsers.jobs
|
||||
import jc.parsers.ls
|
||||
import jc.parsers.lsblk
|
||||
import jc.parsers.lsmod
|
||||
import jc.parsers.lsof
|
||||
import jc.parsers.mount
|
||||
import jc.parsers.netstat
|
||||
import jc.parsers.ps
|
||||
import jc.parsers.route
|
||||
import jc.parsers.ss
|
||||
import jc.parsers.stat
|
||||
import jc.parsers.systemctl
|
||||
import jc.parsers.systemctl_lj
|
||||
import jc.parsers.systemctl_ls
|
||||
import jc.parsers.systemctl_luf
|
||||
import jc.parsers.uname
|
||||
import jc.parsers.uptime
|
||||
import jc.parsers.w
|
||||
|
||||
|
||||
def ctrlc(signum, frame):
|
||||
exit()
|
||||
|
||||
|
||||
def helptext(message):
|
||||
helptext_string = f'''
|
||||
jc: {message}
|
||||
|
||||
Usage: jc PARSER [OPTIONS]
|
||||
|
||||
Parsers:
|
||||
--arp arp parser
|
||||
--df df parser
|
||||
--dig dig parser
|
||||
--env env parser
|
||||
--free free parser
|
||||
--fstab /etc/fstab file parser
|
||||
--history history parser
|
||||
--hosts /etc/hosts file parser
|
||||
--ifconfig iconfig parser
|
||||
--iptables iptables parser
|
||||
--jobs jobs parser
|
||||
--ls ls parser
|
||||
--lsblk lsblk parser
|
||||
--lsmod lsmod parser
|
||||
--lsof lsof parser
|
||||
--mount mount parser
|
||||
--netstat netstat parser
|
||||
--ps ps parser
|
||||
--route route parser
|
||||
--ss ss parser
|
||||
--stat stat parser
|
||||
--systemctl systemctl parser
|
||||
--systemctl-lj systemctl list-jobs parser
|
||||
--systemctl-ls systemctl list-sockets parser
|
||||
--systemctl-luf systemctl list-unit-files parser
|
||||
--uname uname -a parser
|
||||
--uptime uptime parser
|
||||
--w w parser
|
||||
|
||||
Options:
|
||||
-d debug - show trace messages
|
||||
-p pretty print output
|
||||
-q quiet - suppress warnings
|
||||
-r raw JSON output
|
||||
|
||||
Example:
|
||||
ls -al | jc --ls -p
|
||||
'''
|
||||
print(textwrap.dedent(helptext_string), file=sys.stderr)
|
||||
|
||||
|
||||
def main():
|
||||
signal.signal(signal.SIGINT, ctrlc)
|
||||
|
||||
if sys.stdin.isatty():
|
||||
helptext('missing piped data')
|
||||
exit()
|
||||
|
||||
data = sys.stdin.read()
|
||||
debug = False
|
||||
pretty = False
|
||||
quiet = False
|
||||
raw = False
|
||||
|
||||
# options
|
||||
if '-d' in sys.argv:
|
||||
debug = True
|
||||
|
||||
if '-p' in sys.argv:
|
||||
pretty = True
|
||||
|
||||
if '-q' in sys.argv:
|
||||
quiet = True
|
||||
|
||||
if '-r' in sys.argv:
|
||||
raw = True
|
||||
|
||||
# parsers
|
||||
parser_map = {
|
||||
'--arp': jc.parsers.arp.parse,
|
||||
'--df': jc.parsers.df.parse,
|
||||
'--dig': jc.parsers.dig.parse,
|
||||
'--env': jc.parsers.env.parse,
|
||||
'--free': jc.parsers.free.parse,
|
||||
'--fstab': jc.parsers.fstab.parse,
|
||||
'--history': jc.parsers.history.parse,
|
||||
'--hosts': jc.parsers.hosts.parse,
|
||||
'--ifconfig': jc.parsers.ifconfig.parse,
|
||||
'--iptables': jc.parsers.iptables.parse,
|
||||
'--jobs': jc.parsers.jobs.parse,
|
||||
'--ls': jc.parsers.ls.parse,
|
||||
'--lsblk': jc.parsers.lsblk.parse,
|
||||
'--lsmod': jc.parsers.lsmod.parse,
|
||||
'--lsof': jc.parsers.lsof.parse,
|
||||
'--mount': jc.parsers.mount.parse,
|
||||
'--netstat': jc.parsers.netstat.parse,
|
||||
'--ps': jc.parsers.ps.parse,
|
||||
'--route': jc.parsers.route.parse,
|
||||
'--ss': jc.parsers.ss.parse,
|
||||
'--stat': jc.parsers.stat.parse,
|
||||
'--systemctl': jc.parsers.systemctl.parse,
|
||||
'--systemctl-lj': jc.parsers.systemctl_lj.parse,
|
||||
'--systemctl-ls': jc.parsers.systemctl_ls.parse,
|
||||
'--systemctl-luf': jc.parsers.systemctl_luf.parse,
|
||||
'--uname': jc.parsers.uname.parse,
|
||||
'--uptime': jc.parsers.uptime.parse,
|
||||
'--w': jc.parsers.w.parse
|
||||
}
|
||||
|
||||
found = False
|
||||
|
||||
if debug:
|
||||
for arg in sys.argv:
|
||||
if arg in parser_map:
|
||||
result = parser_map[arg](data, raw=raw, quiet=quiet)
|
||||
found = True
|
||||
break
|
||||
else:
|
||||
for arg in sys.argv:
|
||||
if arg in parser_map:
|
||||
try:
|
||||
result = parser_map[arg](data, raw=raw, quiet=quiet)
|
||||
found = True
|
||||
break
|
||||
except:
|
||||
parser_name = arg.lstrip('--')
|
||||
jc.utils.error_message(f'{parser_name} parser could not parse the input data. Did you use the correct parser?\n For details use the -d option.')
|
||||
exit(1)
|
||||
|
||||
if not found:
|
||||
helptext('missing or incorrect arguments')
|
||||
exit()
|
||||
|
||||
# output resulting dictionary as json
|
||||
if pretty:
|
||||
print(json.dumps(result, indent=2))
|
||||
else:
|
||||
print(json.dumps(result))
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
122
jc/jc.py
122
jc/jc.py
@@ -1,122 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
"""jc - JSON CLI output utility
|
||||
|
||||
Main input module
|
||||
"""
|
||||
|
||||
import sys
|
||||
import json
|
||||
import jc.parsers.df
|
||||
import jc.parsers.env
|
||||
import jc.parsers.free
|
||||
import jc.parsers.ifconfig
|
||||
import jc.parsers.iptables
|
||||
import jc.parsers.jobs
|
||||
import jc.parsers.ls
|
||||
import jc.parsers.lsblk
|
||||
import jc.parsers.lsmod
|
||||
import jc.parsers.lsof
|
||||
import jc.parsers.mount
|
||||
import jc.parsers.netstat
|
||||
import jc.parsers.ps
|
||||
import jc.parsers.route
|
||||
import jc.parsers.uname
|
||||
|
||||
|
||||
def helptext():
|
||||
print('Usage: jc [parser] [options]\n', file=sys.stderr)
|
||||
print('Parsers:', file=sys.stderr)
|
||||
print(' --df df parser', file=sys.stderr)
|
||||
print(' --env env parser', file=sys.stderr)
|
||||
print(' --free free parser', file=sys.stderr)
|
||||
print(' --ifconfig iconfig parser', file=sys.stderr)
|
||||
print(' --iptables iptables parser', file=sys.stderr)
|
||||
print(' --jobs jobs parser', file=sys.stderr)
|
||||
print(' --ls ls parser', file=sys.stderr)
|
||||
print(' --lsblk lsblk parser', file=sys.stderr)
|
||||
print(' --lsmod lsmod parser', file=sys.stderr)
|
||||
print(' --lsof lsof parser', file=sys.stderr)
|
||||
print(' --mount mount parser', file=sys.stderr)
|
||||
print(' --netstat netstat parser', file=sys.stderr)
|
||||
print(' --ps ps parser', file=sys.stderr)
|
||||
print(' --route route parser', file=sys.stderr)
|
||||
print(' --uname uname parser\n', file=sys.stderr)
|
||||
print('Options:', file=sys.stderr)
|
||||
print(' -p pretty print output\n', file=sys.stderr)
|
||||
print('Example:', file=sys.stderr)
|
||||
print(' ls -al | jc --ls -p\n', file=sys.stderr)
|
||||
|
||||
|
||||
def main():
|
||||
if sys.stdin.isatty():
|
||||
print('jc: missing piped data\n', file=sys.stderr)
|
||||
helptext()
|
||||
exit()
|
||||
|
||||
data = sys.stdin.read()
|
||||
pretty = False
|
||||
|
||||
# options
|
||||
if '-p' in sys.argv:
|
||||
pretty = True
|
||||
|
||||
# parsers
|
||||
if '--df' in sys.argv:
|
||||
result = jc.parsers.df.parse(data)
|
||||
|
||||
elif '--env' in sys.argv:
|
||||
result = jc.parsers.env.parse(data)
|
||||
|
||||
elif '--free' in sys.argv:
|
||||
result = jc.parsers.free.parse(data)
|
||||
|
||||
elif '--ifconfig' in sys.argv:
|
||||
result = jc.parsers.ifconfig.parse(data)
|
||||
|
||||
elif '--iptables' in sys.argv:
|
||||
result = jc.parsers.iptables.parse(data)
|
||||
|
||||
elif '--jobs' in sys.argv:
|
||||
result = jc.parsers.jobs.parse(data)
|
||||
|
||||
elif '--ls' in sys.argv:
|
||||
result = jc.parsers.ls.parse(data)
|
||||
|
||||
elif '--lsblk' in sys.argv:
|
||||
result = jc.parsers.lsblk.parse(data)
|
||||
|
||||
elif '--lsmod' in sys.argv:
|
||||
result = jc.parsers.lsmod.parse(data)
|
||||
|
||||
elif '--lsof' in sys.argv:
|
||||
result = jc.parsers.lsof.parse(data)
|
||||
|
||||
elif '--mount' in sys.argv:
|
||||
result = jc.parsers.mount.parse(data)
|
||||
|
||||
elif '--netstat' in sys.argv:
|
||||
result = jc.parsers.netstat.parse(data)
|
||||
|
||||
elif '--ps' in sys.argv:
|
||||
result = jc.parsers.ps.parse(data)
|
||||
|
||||
elif '--route' in sys.argv:
|
||||
result = jc.parsers.route.parse(data)
|
||||
|
||||
elif '--uname' in sys.argv:
|
||||
result = jc.parsers.uname.parse(data)
|
||||
|
||||
else:
|
||||
print('jc: missing or incorrect arguments\n', file=sys.stderr)
|
||||
helptext()
|
||||
exit()
|
||||
|
||||
# output resulting dictionary as json
|
||||
if pretty:
|
||||
print(json.dumps(result, indent=2))
|
||||
else:
|
||||
print(json.dumps(result))
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
175
jc/parsers/arp.py
Normal file
175
jc/parsers/arp.py
Normal file
@@ -0,0 +1,175 @@
|
||||
"""jc - JSON CLI output utility arp Parser
|
||||
|
||||
Usage:
|
||||
specify --arp as the first argument if the piped input is coming from arp
|
||||
|
||||
Examples:
|
||||
|
||||
$ arp | jc --arp -p
|
||||
[
|
||||
{
|
||||
"address": "192.168.71.254",
|
||||
"hwtype": "ether",
|
||||
"hwaddress": "00:50:56:f0:98:26",
|
||||
"flags_mask": "C",
|
||||
"iface": "ens33"
|
||||
},
|
||||
{
|
||||
"address": "gateway",
|
||||
"hwtype": "ether",
|
||||
"hwaddress": "00:50:56:f7:4a:fc",
|
||||
"flags_mask": "C",
|
||||
"iface": "ens33"
|
||||
}
|
||||
]
|
||||
|
||||
$ arp | jc --arp -p -r
|
||||
[
|
||||
{
|
||||
"address": "gateway",
|
||||
"hwtype": "ether",
|
||||
"hwaddress": "00:50:56:f7:4a:fc",
|
||||
"flags_mask": "C",
|
||||
"iface": "ens33"
|
||||
},
|
||||
{
|
||||
"address": "192.168.71.254",
|
||||
"hwtype": "ether",
|
||||
"hwaddress": "00:50:56:fe:7a:b4",
|
||||
"flags_mask": "C",
|
||||
"iface": "ens33"
|
||||
}
|
||||
]
|
||||
|
||||
$ arp -a | jc --arp -p
|
||||
[
|
||||
{
|
||||
"name": null,
|
||||
"address": "192.168.71.254",
|
||||
"hwtype": "ether",
|
||||
"hwaddress": "00:50:56:f0:98:26",
|
||||
"iface": "ens33"
|
||||
},
|
||||
{
|
||||
"name": "gateway",
|
||||
"address": "192.168.71.2",
|
||||
"hwtype": "ether",
|
||||
"hwaddress": "00:50:56:f7:4a:fc",
|
||||
"iface": "ens33"
|
||||
}
|
||||
]
|
||||
|
||||
$ arp -a | jc --arp -p -r
|
||||
[
|
||||
{
|
||||
"name": "?",
|
||||
"address": "192.168.71.254",
|
||||
"hwtype": "ether",
|
||||
"hwaddress": "00:50:56:fe:7a:b4",
|
||||
"iface": "ens33"
|
||||
},
|
||||
{
|
||||
"name": "_gateway",
|
||||
"address": "192.168.71.2",
|
||||
"hwtype": "ether",
|
||||
"hwaddress": "00:50:56:f7:4a:fc",
|
||||
"iface": "ens33"
|
||||
}
|
||||
]
|
||||
"""
|
||||
import jc.utils
|
||||
|
||||
|
||||
def process(proc_data):
|
||||
"""
|
||||
Final processing to conform to the schema.
|
||||
|
||||
Parameters:
|
||||
|
||||
proc_data: (dictionary) raw structured data to process
|
||||
|
||||
Returns:
|
||||
|
||||
dictionary structured data with the following schema:
|
||||
|
||||
[
|
||||
{
|
||||
"name": string,
|
||||
"address": string,
|
||||
"hwtype": string,
|
||||
"hwaddress": string,
|
||||
"flags_mask": string,
|
||||
"iface": string
|
||||
}
|
||||
]
|
||||
"""
|
||||
|
||||
# in BSD style, change name to null if it is a question mark
|
||||
for entry in proc_data:
|
||||
if 'name' in entry and entry['name'] == '?':
|
||||
entry['name'] = None
|
||||
|
||||
return proc_data
|
||||
|
||||
|
||||
def parse(data, raw=False, quiet=False):
|
||||
"""
|
||||
Main text parsing function
|
||||
|
||||
Parameters:
|
||||
|
||||
data: (string) text data to parse
|
||||
raw: (boolean) output preprocessed JSON if True
|
||||
quiet: (boolean) suppress warning messages if True
|
||||
|
||||
Returns:
|
||||
|
||||
dictionary raw or processed structured data
|
||||
"""
|
||||
|
||||
# compatible options: linux, darwin, cygwin, win32, aix, freebsd
|
||||
compatible = ['linux', 'aix', 'freebsd']
|
||||
|
||||
if not quiet:
|
||||
jc.utils.compatibility(__name__, compatible)
|
||||
|
||||
# code adapted from Conor Heine at:
|
||||
# https://gist.github.com/cahna/43a1a3ff4d075bcd71f9d7120037a501
|
||||
|
||||
cleandata = data.splitlines()
|
||||
|
||||
# remove final Entries row if -v was used
|
||||
if cleandata[-1].find("Entries:") == 0:
|
||||
cleandata.pop(-1)
|
||||
|
||||
# detect if linux or bsd style was used
|
||||
if cleandata[0].find('Address') == 0:
|
||||
|
||||
# fix header row to change Flags Mask to flags_mask
|
||||
cleandata[0] = cleandata[0].replace('Flags Mask', 'flags_mask')
|
||||
|
||||
headers = [h for h in ' '.join(cleandata[0].lower().strip().split()).split() if h]
|
||||
raw_data = map(lambda s: s.strip().split(None, len(headers) - 1), cleandata[1:])
|
||||
raw_output = [dict(zip(headers, r)) for r in raw_data]
|
||||
|
||||
if raw:
|
||||
return raw_output
|
||||
else:
|
||||
return process(raw_output)
|
||||
|
||||
else:
|
||||
raw_output = []
|
||||
for line in cleandata:
|
||||
line = line.split()
|
||||
output_line = {}
|
||||
output_line['name'] = line[0]
|
||||
output_line['address'] = line[1].lstrip('(').rstrip(')')
|
||||
output_line['hwtype'] = line[4].lstrip('[').rstrip(']')
|
||||
output_line['hwaddress'] = line[3]
|
||||
output_line['iface'] = line[6]
|
||||
raw_output.append(output_line)
|
||||
|
||||
if raw:
|
||||
return raw_output
|
||||
else:
|
||||
return process(raw_output)
|
||||
182
jc/parsers/df.py
182
jc/parsers/df.py
@@ -3,53 +3,151 @@
|
||||
Usage:
|
||||
specify --df as the first argument if the piped input is coming from df
|
||||
|
||||
Example:
|
||||
Examples:
|
||||
|
||||
$ df | jc --df -p
|
||||
[
|
||||
{
|
||||
"Filesystem": "udev",
|
||||
"1K-blocks": "977500",
|
||||
"Used": "0",
|
||||
"Available": "977500",
|
||||
"Use%": "0%",
|
||||
"Mounted": "/dev"
|
||||
},
|
||||
{
|
||||
"Filesystem": "tmpfs",
|
||||
"1K-blocks": "201732",
|
||||
"Used": "1180",
|
||||
"Available": "200552",
|
||||
"Use%": "1%",
|
||||
"Mounted": "/run"
|
||||
},
|
||||
{
|
||||
"Filesystem": "/dev/sda2",
|
||||
"1K-blocks": "20508240",
|
||||
"Used": "5747284",
|
||||
"Available": "13696152",
|
||||
"Use%": "30%",
|
||||
"Mounted": "/"
|
||||
},
|
||||
{
|
||||
"Filesystem": "tmpfs",
|
||||
"1K-blocks": "1008648",
|
||||
"Used": "0",
|
||||
"Available": "1008648",
|
||||
"Use%": "0%",
|
||||
"Mounted": "/dev/shm"
|
||||
},
|
||||
...
|
||||
]
|
||||
$ df | jc --df -p
|
||||
[
|
||||
{
|
||||
"filesystem": "devtmpfs",
|
||||
"1k-blocks": 1918820,
|
||||
"used": 0,
|
||||
"available": 1918820,
|
||||
"use_percent": 0,
|
||||
"mounted_on": "/dev"
|
||||
},
|
||||
{
|
||||
"filesystem": "tmpfs",
|
||||
"1k-blocks": 1930668,
|
||||
"used": 0,
|
||||
"available": 1930668,
|
||||
"use_percent": 0,
|
||||
"mounted_on": "/dev/shm"
|
||||
},
|
||||
{
|
||||
"filesystem": "tmpfs",
|
||||
"1k-blocks": 1930668,
|
||||
"used": 11800,
|
||||
"available": 1918868,
|
||||
"use_percent": 1,
|
||||
"mounted_on": "/run"
|
||||
},
|
||||
...
|
||||
]
|
||||
|
||||
$ df | jc --df -p -r
|
||||
[
|
||||
{
|
||||
"filesystem": "devtmpfs",
|
||||
"1k-blocks": "1918820",
|
||||
"used": "0",
|
||||
"available": "1918820",
|
||||
"use_percent": "0%",
|
||||
"mounted_on": "/dev"
|
||||
},
|
||||
{
|
||||
"filesystem": "tmpfs",
|
||||
"1k-blocks": "1930668",
|
||||
"used": "0",
|
||||
"available": "1930668",
|
||||
"use_percent": "0%",
|
||||
"mounted_on": "/dev/shm"
|
||||
},
|
||||
{
|
||||
"filesystem": "tmpfs",
|
||||
"1k-blocks": "1930668",
|
||||
"used": "11800",
|
||||
"available": "1918868",
|
||||
"use_percent": "1%",
|
||||
"mounted_on": "/run"
|
||||
},
|
||||
...
|
||||
]
|
||||
"""
|
||||
import jc.utils
|
||||
|
||||
|
||||
def parse(data):
|
||||
def process(proc_data):
    """
    Final processing to conform to the schema.

    Parameters:

        proc_data:   (dictionary) raw structured data to process

    Returns:

        dictionary structured data with the following schema:

        [
          {
            "filesystem":   string,
            "size":         string,
            "1k-blocks":    integer,
            "used":         integer,
            "available":    integer,
            "use_percent":  integer,
            "mounted_on":   string
          }
        ]
    """
    def _to_int(value):
        # convert to int; unparsable values become None
        try:
            return int(value)
        except ValueError:
            return None

    for entry in proc_data:
        # any '*-blocks' column (1k-blocks, 512-blocks, ...) holds a count
        for key in entry:
            if '-blocks' in str(key):
                entry[key] = _to_int(entry[key])

        # 'use_percent' arrives as e.g. '30%'; drop the percent sign
        if 'use_percent' in entry:
            entry['use_percent'] = entry['use_percent'].rstrip('%')

        # change used, available, and use_percent to int
        for key in ('used', 'available', 'use_percent'):
            if key in entry:
                entry[key] = _to_int(entry[key])

    return proc_data
|
||||
|
||||
|
||||
def parse(data, raw=False, quiet=False):
    """
    Main text parsing function

    Parameters:

        data:    (string)  text data to parse
        raw:     (boolean) output preprocessed JSON if True
        quiet:   (boolean) suppress warning messages if True

    Returns:

        dictionary raw or processed structured data
    """

    # compatible options: linux, darwin, cygwin, win32, aix, freebsd
    compatible = ['linux']

    if not quiet:
        jc.utils.compatibility(__name__, compatible)

    # code adapted from Conor Heine at:
    # https://gist.github.com/cahna/43a1a3ff4d075bcd71f9d7120037a501

    cleandata = data.splitlines()

    # normalize the header row into valid, consistent key names:
    # 'Avail ' -> 'available ' (BSD), 'Use%' -> 'use_percent',
    # 'Mounted on' -> 'mounted_on'
    fix_headers = cleandata[0].lower().replace('avail ', 'available ')
    fix_headers = fix_headers.replace('use%', 'use_percent')
    fix_headers = fix_headers.replace('mounted on', 'mounted_on')
    headers = [h for h in ' '.join(fix_headers.strip().split()).split() if h]

    # split each data row into at most len(headers) fields so a
    # mount point containing spaces stays in one field
    raw_data = map(lambda s: s.strip().split(None, len(headers) - 1), cleandata[1:])
    raw_output = [dict(zip(headers, r)) for r in raw_data]

    if raw:
        return raw_output
    else:
        return process(raw_output)
|
||||
|
||||
598
jc/parsers/dig.py
Normal file
598
jc/parsers/dig.py
Normal file
@@ -0,0 +1,598 @@
|
||||
"""jc - JSON CLI output utility dig Parser
|
||||
|
||||
Usage:
|
||||
Specify --dig as the first argument if the piped input is coming from dig
|
||||
|
||||
Examples:
|
||||
|
||||
$ dig cnn.com www.cnn.com @205.251.194.64 | jc --dig -p
|
||||
[
|
||||
{
|
||||
"id": 34128,
|
||||
"opcode": "QUERY",
|
||||
"status": "NOERROR",
|
||||
"flags": [
|
||||
"qr",
|
||||
"rd",
|
||||
"ra"
|
||||
],
|
||||
"query_num": 1,
|
||||
"answer_num": 4,
|
||||
"authority_num": 0,
|
||||
"additional_num": 1,
|
||||
"question": {
|
||||
"name": "cnn.com.",
|
||||
"class": "IN",
|
||||
"type": "A"
|
||||
},
|
||||
"answer": [
|
||||
{
|
||||
"name": "cnn.com.",
|
||||
"class": "IN",
|
||||
"type": "A",
|
||||
"ttl": 60,
|
||||
"data": "151.101.65.67"
|
||||
},
|
||||
{
|
||||
"name": "cnn.com.",
|
||||
"class": "IN",
|
||||
"type": "A",
|
||||
"ttl": 60,
|
||||
"data": "151.101.193.67"
|
||||
},
|
||||
{
|
||||
"name": "cnn.com.",
|
||||
"class": "IN",
|
||||
"type": "A",
|
||||
"ttl": 60,
|
||||
"data": "151.101.1.67"
|
||||
},
|
||||
{
|
||||
"name": "cnn.com.",
|
||||
"class": "IN",
|
||||
"type": "A",
|
||||
"ttl": 60,
|
||||
"data": "151.101.129.67"
|
||||
}
|
||||
],
|
||||
"query_time": 37,
|
||||
"server": "2600",
|
||||
"when": "Tue Nov 12 07:14:42 PST 2019",
|
||||
"rcvd": 100
|
||||
},
|
||||
{
|
||||
"id": 15273,
|
||||
"opcode": "QUERY",
|
||||
"status": "NOERROR",
|
||||
"flags": [
|
||||
"qr",
|
||||
"aa",
|
||||
"rd"
|
||||
],
|
||||
"query_num": 1,
|
||||
"answer_num": 1,
|
||||
"authority_num": 4,
|
||||
"additional_num": 1,
|
||||
"question": {
|
||||
"name": "www.cnn.com.",
|
||||
"class": "IN",
|
||||
"type": "A"
|
||||
},
|
||||
"answer": [
|
||||
{
|
||||
"name": "www.cnn.com.",
|
||||
"class": "IN",
|
||||
"type": "CNAME",
|
||||
"ttl": 300,
|
||||
"data": "turner-tls.map.fastly.net."
|
||||
}
|
||||
],
|
||||
"authority": [
|
||||
{
|
||||
"name": "cnn.com.",
|
||||
"class": "IN",
|
||||
"type": "NS",
|
||||
"ttl": 3600,
|
||||
"data": "ns-1086.awsdns-07.org."
|
||||
},
|
||||
{
|
||||
"name": "cnn.com.",
|
||||
"class": "IN",
|
||||
"type": "NS",
|
||||
"ttl": 3600,
|
||||
"data": "ns-1630.awsdns-11.co.uk."
|
||||
},
|
||||
{
|
||||
"name": "cnn.com.",
|
||||
"class": "IN",
|
||||
"type": "NS",
|
||||
"ttl": 3600,
|
||||
"data": "ns-47.awsdns-05.com."
|
||||
},
|
||||
{
|
||||
"name": "cnn.com.",
|
||||
"class": "IN",
|
||||
"type": "NS",
|
||||
"ttl": 3600,
|
||||
"data": "ns-576.awsdns-08.net."
|
||||
}
|
||||
],
|
||||
"query_time": 23,
|
||||
"server": "205.251.194.64#53(205.251.194.64)",
|
||||
"when": "Tue Nov 12 07:14:42 PST 2019",
|
||||
"rcvd": 212
|
||||
}
|
||||
]
|
||||
|
||||
$ dig cnn.com www.cnn.com @205.251.194.64 | jc --dig -p -r
|
||||
[
|
||||
{
|
||||
"id": "23843",
|
||||
"opcode": "QUERY",
|
||||
"status": "NOERROR",
|
||||
"flags": [
|
||||
"qr",
|
||||
"rd",
|
||||
"ra"
|
||||
],
|
||||
"query_num": "1",
|
||||
"answer_num": "4",
|
||||
"authority_num": "0",
|
||||
"additional_num": "1",
|
||||
"question": {
|
||||
"name": "cnn.com.",
|
||||
"class": "IN",
|
||||
"type": "A"
|
||||
},
|
||||
"answer": [
|
||||
{
|
||||
"name": "cnn.com.",
|
||||
"class": "IN",
|
||||
"type": "A",
|
||||
"ttl": "30",
|
||||
"data": "151.101.193.67"
|
||||
},
|
||||
{
|
||||
"name": "cnn.com.",
|
||||
"class": "IN",
|
||||
"type": "A",
|
||||
"ttl": "30",
|
||||
"data": "151.101.1.67"
|
||||
},
|
||||
{
|
||||
"name": "cnn.com.",
|
||||
"class": "IN",
|
||||
"type": "A",
|
||||
"ttl": "30",
|
||||
"data": "151.101.65.67"
|
||||
},
|
||||
{
|
||||
"name": "cnn.com.",
|
||||
"class": "IN",
|
||||
"type": "A",
|
||||
"ttl": "30",
|
||||
"data": "151.101.129.67"
|
||||
}
|
||||
],
|
||||
"query_time": "24 msec",
|
||||
"server": "192.168.1.254#53(192.168.1.254)",
|
||||
"when": "Tue Nov 12 07:16:19 PST 2019",
|
||||
"rcvd": "100"
|
||||
},
|
||||
{
|
||||
"id": "8266",
|
||||
"opcode": "QUERY",
|
||||
"status": "NOERROR",
|
||||
"flags": [
|
||||
"qr",
|
||||
"aa",
|
||||
"rd"
|
||||
],
|
||||
"query_num": "1",
|
||||
"answer_num": "1",
|
||||
"authority_num": "4",
|
||||
"additional_num": "1",
|
||||
"question": {
|
||||
"name": "www.cnn.com.",
|
||||
"class": "IN",
|
||||
"type": "A"
|
||||
},
|
||||
"answer": [
|
||||
{
|
||||
"name": "www.cnn.com.",
|
||||
"class": "IN",
|
||||
"type": "CNAME",
|
||||
"ttl": "300",
|
||||
"data": "turner-tls.map.fastly.net."
|
||||
}
|
||||
],
|
||||
"authority": [
|
||||
{
|
||||
"name": "cnn.com.",
|
||||
"class": "IN",
|
||||
"type": "NS",
|
||||
"ttl": "3600",
|
||||
"data": "ns-1086.awsdns-07.org."
|
||||
},
|
||||
{
|
||||
"name": "cnn.com.",
|
||||
"class": "IN",
|
||||
"type": "NS",
|
||||
"ttl": "3600",
|
||||
"data": "ns-1630.awsdns-11.co.uk."
|
||||
},
|
||||
{
|
||||
"name": "cnn.com.",
|
||||
"class": "IN",
|
||||
"type": "NS",
|
||||
"ttl": "3600",
|
||||
"data": "ns-47.awsdns-05.com."
|
||||
},
|
||||
{
|
||||
"name": "cnn.com.",
|
||||
"class": "IN",
|
||||
"type": "NS",
|
||||
"ttl": "3600",
|
||||
"data": "ns-576.awsdns-08.net."
|
||||
}
|
||||
],
|
||||
"query_time": "26 msec",
|
||||
"server": "205.251.194.64#53(205.251.194.64)",
|
||||
"when": "Tue Nov 12 07:16:19 PST 2019",
|
||||
"rcvd": "212"
|
||||
}
|
||||
]
|
||||
|
||||
$ dig -x 1.1.1.1 | jc --dig -p
|
||||
[
|
||||
{
|
||||
"id": 34898,
|
||||
"opcode": "QUERY",
|
||||
"status": "NOERROR",
|
||||
"flags": [
|
||||
"qr",
|
||||
"rd",
|
||||
"ra"
|
||||
],
|
||||
"query_num": 1,
|
||||
"answer_num": 1,
|
||||
"authority_num": 0,
|
||||
"additional_num": 1,
|
||||
"question": {
|
||||
"name": "1.1.1.1.in-addr.arpa.",
|
||||
"class": "IN",
|
||||
"type": "PTR"
|
||||
},
|
||||
"answer": [
|
||||
{
|
||||
"name": "1.1.1.1.in-addr.arpa.",
|
||||
"class": "IN",
|
||||
"type": "PTR",
|
||||
"ttl": 952,
|
||||
"data": "one.one.one.one."
|
||||
}
|
||||
],
|
||||
"query_time": 103,
|
||||
"server": "2600",
|
||||
"when": "Tue Nov 12 07:15:33 PST 2019",
|
||||
"rcvd": 78
|
||||
}
|
||||
]
|
||||
|
||||
$ dig -x 1.1.1.1 | jc --dig -p -r
|
||||
[
|
||||
{
|
||||
"id": "50986",
|
||||
"opcode": "QUERY",
|
||||
"status": "NOERROR",
|
||||
"flags": [
|
||||
"qr",
|
||||
"rd",
|
||||
"ra"
|
||||
],
|
||||
"query_num": "1",
|
||||
"answer_num": "1",
|
||||
"authority_num": "0",
|
||||
"additional_num": "1",
|
||||
"question": {
|
||||
"name": "1.1.1.1.in-addr.arpa.",
|
||||
"class": "IN",
|
||||
"type": "PTR"
|
||||
},
|
||||
"answer": [
|
||||
{
|
||||
"name": "1.1.1.1.in-addr.arpa.",
|
||||
"class": "IN",
|
||||
"type": "PTR",
|
||||
"ttl": "1800",
|
||||
"data": "one.one.one.one."
|
||||
}
|
||||
],
|
||||
"query_time": "38 msec",
|
||||
"server": "2600",
|
||||
"when": "Tue Nov 12 07:17:19 PST 2019",
|
||||
"rcvd": "78"
|
||||
}
|
||||
]
|
||||
"""
|
||||
import jc.utils
|
||||
|
||||
|
||||
def process(proc_data):
    """
    Final processing to conform to the schema.

    Parameters:

        proc_data:   (dictionary) raw structured data to process

    Returns:

        dictionary structured data with the following schema:

        [
          {
            "id":             integer,
            "opcode":         string,
            "status":         string,
            "flags":          [string],
            "query_num":      integer,
            "answer_num":     integer,
            "authority_num":  integer,
            "additional_num": integer,
            "question": {
              "name":   string,
              "class":  string,
              "type":   string
            },
            "answer": [
              {
                "name":   string,
                "class":  string,
                "type":   string,
                "ttl":    integer,
                "data":   string
              }
            ],
            "authority": [
              {
                "name":   string,
                "class":  string,
                "type":   string,
                "ttl":    integer,
                "data":   string
              }
            ],
            "query_time": integer,   # in msec
            "server":     string,
            "when":       string,
            "rcvd":       integer
          }
        ]
    """
    def _to_int(value):
        # convert to int; unparsable values become None
        try:
            return int(value)
        except ValueError:
            return None

    for entry in proc_data:
        for key in ('id', 'query_num', 'answer_num', 'authority_num',
                    'additional_num', 'rcvd'):
            if key in entry:
                entry[key] = _to_int(entry[key])

        # answer/authority records carry a string ttl
        for record in entry.get('answer', []):
            record['ttl'] = _to_int(record['ttl'])

        for record in entry.get('authority', []):
            record['ttl'] = _to_int(record['ttl'])

        # query_time arrives as e.g. '24 msec'; keep only the number
        if 'query_time' in entry:
            try:
                entry['query_time'] = int(entry['query_time'].split()[0])
            except ValueError:
                entry['query_time'] = None

    return proc_data
|
||||
|
||||
|
||||
def parse_header(header):
    """Extract id, opcode, and status from the dig header line, e.g.:
    ;; ->>HEADER<<- opcode: QUERY, status: NXDOMAIN, id: 6140
    """
    fields = header.split()
    return {'id': fields[7],
            'opcode': fields[3].rstrip(','),
            'status': fields[5].rstrip(',')}
|
||||
|
||||
|
||||
def parse_flags_line(flagsline):
    """Extract the flag list and the section counts from the dig flags line, e.g.:
    ;; flags: qr rd ra; QUERY: 1, ANSWER: 0, AUTHORITY: 0, ADDITIONAL: 1
    """
    sections = flagsline.split(';')
    # sections[0] and sections[1] are empty because the line starts with ';;';
    # sections[2] is ' flags: qr rd ra' and sections[3] holds the counts
    flags = sections[2].split(':')[1].lstrip().split()

    # turn ' QUERY: 1, ANSWER: 0, ...' into a flat token list and
    # pick the values at the odd positions
    counts = sections[3].replace(',', ' ').replace(':', ' ').split()

    return {'flags': flags,
            'query_num': counts[1],
            'answer_num': counts[3],
            'authority_num': counts[5],
            'additional_num': counts[7]}
|
||||
|
||||
|
||||
def parse_question(question):
    """Parse the single question-section record, e.g.:
    ;www.cnn.com.            IN      A
    """
    fields = question.split()
    return {'name': fields[0].lstrip(';'),
            'class': fields[1],
            'type': fields[2]}
|
||||
|
||||
|
||||
def parse_authority(authority):
    """Parse one authority-section record, e.g.:
    cnn.com.  3600  IN  NS  ns-1086.awsdns-07.org.
    """
    fields = authority.split()
    return {'name': fields[0],
            'class': fields[2],
            'type': fields[3],
            'ttl': fields[1],
            'data': fields[4]}
|
||||
|
||||
|
||||
def parse_answer(answer):
    """Parse one answer-section record, e.g.:
    www.cnn.com.  5  IN  CNAME  turner-tls.map.fastly.net.
    """
    fields = answer.split()
    return {'name': fields[0],
            'class': fields[2],
            'type': fields[3],
            'ttl': fields[1],
            'data': fields[4]}
|
||||
|
||||
|
||||
def parse(data, raw=False, quiet=False):
    """
    Main text parsing function

    Parameters:

        data:    (string)  text data to parse
        raw:     (boolean) output preprocessed JSON if True
        quiet:   (boolean) suppress warning messages if True

    Returns:

        dictionary raw or processed structured data
    """

    # compatible options: linux, darwin, cygwin, win32, aix, freebsd
    compatible = ['linux', 'darwin', 'cygwin', 'win32', 'aix', 'freebsd']

    if not quiet:
        jc.utils.compatibility(__name__, compatible)

    raw_output = []
    cleandata = data.splitlines()
    # remove blank lines
    cleandata = list(filter(None, cleandata))

    # section flags: track which record section the current line belongs to
    question = False
    authority = False
    answer = False

    output_entry = {}
    for line in cleandata:

        # a header line starts a new response entry
        if line.find(';; ->>HEADER<<-') == 0:
            output_entry = {}
            output_entry.update(parse_header(line))
            continue

        if line.find(';; flags:') == 0:
            output_entry.update(parse_flags_line(line))
            continue

        if line.find(';; QUESTION SECTION:') == 0:
            question = True
            authority = False
            answer = False
            continue

        # the question section contains exactly one record
        if question:
            output_entry['question'] = parse_question(line)
            question = False
            authority = False
            answer = False
            continue

        if line.find(';; AUTHORITY SECTION:') == 0:
            question = False
            authority = True
            answer = False
            authority_list = []
            continue

        # record lines contain no ';'; comment/section lines always do
        if line.find(';') == -1 and authority:
            authority_list.append(parse_authority(line))
            output_entry.update({'authority': authority_list})
            continue

        if line.find(';; ANSWER SECTION:') == 0:
            question = False
            authority = False
            answer = True
            answer_list = []
            continue

        if line.find(';') == -1 and answer:
            answer_list.append(parse_answer(line))
            output_entry.update({'answer': answer_list})
            continue

        # footer consists of 4 lines
        # footer line 1
        if line.find(';; Query time:') == 0:
            output_entry.update({'query_time': line.split(':')[1].lstrip()})
            continue

        # footer line 2
        # split on the first ':' only so IPv6 server addresses
        # (e.g. 2600:...) are not truncated at their first colon
        if line.find(';; SERVER:') == 0:
            output_entry.update({'server': line.split(':', maxsplit=1)[1].lstrip()})
            continue

        # footer line 3 (timestamp contains ':' characters)
        if line.find(';; WHEN:') == 0:
            output_entry.update({'when': line.split(':', maxsplit=1)[1].lstrip()})
            continue

        # footer line 4 (last line) completes the entry
        if line.find(';; MSG SIZE rcvd:') == 0:
            output_entry.update({'rcvd': line.split(':')[1].lstrip()})

            if output_entry:
                raw_output.append(output_entry)

    raw_output = list(filter(None, raw_output))
    if raw:
        return raw_output
    else:
        return process(raw_output)
|
||||
@@ -3,39 +3,102 @@
|
||||
Usage:
|
||||
specify --env as the first argument if the piped input is coming from env
|
||||
|
||||
Example:
|
||||
$ env | jc --env -p
|
||||
[
|
||||
{
|
||||
"TERM": "xterm-256color"
|
||||
},
|
||||
{
|
||||
"SHELL": "/bin/bash"
|
||||
},
|
||||
{
|
||||
"USER": "root"
|
||||
},
|
||||
{
|
||||
"PATH": "/usr/local/bin:/usr/bin:/bin:/usr/sbin:/sbin"
|
||||
},
|
||||
{
|
||||
"PWD": "/bin"
|
||||
},
|
||||
{
|
||||
"LANG": "en_US.UTF-8"
|
||||
},
|
||||
{
|
||||
"HOME": "/root"
|
||||
},
|
||||
{
|
||||
"_": "/usr/bin/env"
|
||||
}
|
||||
]
|
||||
Examples:
|
||||
|
||||
$ env | jc --env -p
|
||||
[
|
||||
{
|
||||
"name": "XDG_SESSION_ID",
|
||||
"value": "1"
|
||||
},
|
||||
{
|
||||
"name": "HOSTNAME",
|
||||
"value": "localhost.localdomain"
|
||||
},
|
||||
{
|
||||
"name": "TERM",
|
||||
"value": "vt220"
|
||||
},
|
||||
{
|
||||
"name": "SHELL",
|
||||
"value": "/bin/bash"
|
||||
},
|
||||
{
|
||||
"name": "HISTSIZE",
|
||||
"value": "1000"
|
||||
},
|
||||
...
|
||||
]
|
||||
|
||||
$ env | jc --env -p -r
|
||||
{
|
||||
"TERM": "xterm-256color",
|
||||
"SHELL": "/bin/bash",
|
||||
"USER": "root",
|
||||
"PATH": "/usr/local/bin:/usr/bin:/bin:/usr/sbin:/sbin",
|
||||
"PWD": "/root",
|
||||
"LANG": "en_US.UTF-8",
|
||||
"HOME": "/root",
|
||||
"LOGNAME": "root",
|
||||
"_": "/usr/bin/env"
|
||||
}
|
||||
"""
|
||||
import jc.utils
|
||||
|
||||
|
||||
def parse(data):
|
||||
output = []
|
||||
def process(proc_data):
    """
    Final processing to conform to the schema.

    Parameters:

        proc_data:   (dictionary) raw structured data to process

    Returns:

        dictionary structured data with the following schema:

        [
          {
            "name":   string,
            "value":  string
          }
        ]
    """

    # rebuild the flat name->value map as a list of records
    # for added semantic information
    return [{'name': name, 'value': value} for name, value in proc_data.items()]
|
||||
|
||||
|
||||
def parse(data, raw=False, quiet=False):
    """
    Main text parsing function

    Parameters:

        data:    (string)  text data to parse
        raw:     (boolean) output preprocessed JSON if True
        quiet:   (boolean) suppress warning messages if True

    Returns:

        dictionary raw or processed structured data
    """

    # compatible options: linux, darwin, cygwin, win32, aix, freebsd
    compatible = ['linux', 'darwin', 'cygwin', 'win32', 'aix', 'freebsd']

    if not quiet:
        jc.utils.compatibility(__name__, compatible)

    raw_output = {}

    linedata = data.splitlines()

    # Clear any blank lines
    cleandata = list(filter(None, linedata))

    if cleandata:

        for entry in cleandata:
            # split on the first '=' only: values may themselves contain '='
            parsed_line = entry.split('=', maxsplit=1)
            raw_output[parsed_line[0]] = parsed_line[1]

    if raw:
        return raw_output
    else:
        return process(raw_output)
|
||||
|
||||
76
jc/parsers/foo.py
Normal file
76
jc/parsers/foo.py
Normal file
@@ -0,0 +1,76 @@
|
||||
"""jc - JSON CLI output utility foo Parser
|
||||
|
||||
Usage:
|
||||
specify --foo as the first argument if the piped input is coming from foo
|
||||
|
||||
Examples:
|
||||
|
||||
$ foo | jc --foo -p
|
||||
[]
|
||||
|
||||
$ foo | jc --foo -p -r
|
||||
[]
|
||||
"""
|
||||
import jc.utils
|
||||
|
||||
|
||||
def process(proc_data):
    """
    Final processing to conform to the schema.

    Parameters:

        proc_data:   (dictionary) raw structured data to process

    Returns:

        dictionary structured data with the following schema:

        [
          {
            "foo":   string,
            "bar":   boolean,
            "baz":   integer
          }
        ]
    """

    # template parser: no type conversions are performed yet, so the
    # raw structure passes through unchanged
    return proc_data
|
||||
|
||||
|
||||
def parse(data, raw=False, quiet=False):
    """
    Main text parsing function

    Parameters:

        data:    (string)  text data to parse
        raw:     (boolean) output preprocessed JSON if True
        quiet:   (boolean) suppress warning messages if True

    Returns:

        dictionary raw or processed structured data
    """

    # compatible options: linux, darwin, cygwin, win32, aix, freebsd
    compatible = ['linux', 'darwin', 'cygwin', 'win32', 'aix', 'freebsd']

    if not quiet:
        jc.utils.compatibility(__name__, compatible)

    raw_output = []

    # Clear any blank lines
    cleandata = list(filter(None, data.splitlines()))

    if cleandata:
        # template parser: parse the content here
        pass

    return raw_output if raw else process(raw_output)
|
||||
@@ -3,43 +3,126 @@
|
||||
Usage:
|
||||
specify --free as the first argument if the piped input is coming from free
|
||||
|
||||
Example:
|
||||
Examples:
|
||||
|
||||
$ free | jc --free -p
|
||||
[
|
||||
{
|
||||
"type": "Mem",
|
||||
"total": "2017300",
|
||||
"used": "213104",
|
||||
"free": "1148452",
|
||||
"shared": "1176",
|
||||
"buff/cache": "655744",
|
||||
"available": "1622204"
|
||||
},
|
||||
{
|
||||
"type": "Swap",
|
||||
"total": "2097148",
|
||||
"used": "0",
|
||||
"free": "2097148"
|
||||
}
|
||||
]
|
||||
$ free | jc --free -p
|
||||
[
|
||||
{
|
||||
"type": "Mem",
|
||||
"total": 3861340,
|
||||
"used": 220508,
|
||||
"free": 3381972,
|
||||
"shared": 11800,
|
||||
"buff_cache": 258860,
|
||||
"available": 3397784
|
||||
},
|
||||
{
|
||||
"type": "Swap",
|
||||
"total": 2097148,
|
||||
"used": 0,
|
||||
"free": 2097148
|
||||
}
|
||||
]
|
||||
|
||||
$ free | jc --free -p -r
|
||||
[
|
||||
{
|
||||
"type": "Mem",
|
||||
"total": "2017300",
|
||||
"used": "213104",
|
||||
"free": "1148452",
|
||||
"shared": "1176",
|
||||
"buff_cache": "655744",
|
||||
"available": "1622204"
|
||||
},
|
||||
{
|
||||
"type": "Swap",
|
||||
"total": "2097148",
|
||||
"used": "0",
|
||||
"free": "2097148"
|
||||
}
|
||||
]
|
||||
"""
|
||||
import jc.utils
|
||||
|
||||
|
||||
def parse(data):
|
||||
def process(proc_data):
    """
    Final processing to conform to the schema.

    Parameters:

        proc_data:   (dictionary) raw structured data to process

    Returns:

        dictionary structured data with the following schema:

        [
          {
            "type":        string,
            "total":       integer,
            "used":        integer,
            "free":        integer,
            "shared":      integer,
            "buff_cache":  integer,
            "available":   integer
          }
        ]
    """

    for entry in proc_data:
        # every column except 'type' is a KiB count; convert to int
        for key in ('total', 'used', 'free', 'shared', 'buff_cache', 'available'):
            if key in entry:
                try:
                    entry[key] = int(entry[key])
                except ValueError:
                    entry[key] = None

    return proc_data
|
||||
|
||||
|
||||
def parse(data, raw=False, quiet=False):
    """
    Main text parsing function

    Parameters:

        data:    (string)  text data to parse
        raw:     (boolean) output preprocessed JSON if True
        quiet:   (boolean) suppress warning messages if True

    Returns:

        dictionary raw or processed structured data
    """

    # compatible options: linux, darwin, cygwin, win32, aix, freebsd
    compatible = ['linux']

    if not quiet:
        jc.utils.compatibility(__name__, compatible)

    # code adapted from Conor Heine at:
    # https://gist.github.com/cahna/43a1a3ff4d075bcd71f9d7120037a501

    cleandata = data.splitlines()

    headers = [h for h in ' '.join(cleandata[0].lower().strip().split()).split() if h]
    # the first column ('Mem:'/'Swap:') has no header of its own; name it 'type'
    headers.insert(0, 'type')

    # clean up 'buff/cache' header
    # even though forward slash in a key is valid json, it can make things difficult
    headers = ['buff_cache' if x == 'buff/cache' else x for x in headers]

    raw_data = map(lambda s: s.strip().split(None, len(headers) - 1), cleandata[1:])
    raw_output = [dict(zip(headers, r)) for r in raw_data]

    for entry in raw_output:
        # strip the trailing colon from 'Mem:' / 'Swap:'
        entry['type'] = entry['type'].rstrip(':')

    if raw:
        return raw_output
    else:
        return process(raw_output)
|
||||
|
||||
157
jc/parsers/fstab.py
Normal file
157
jc/parsers/fstab.py
Normal file
@@ -0,0 +1,157 @@
|
||||
"""jc - JSON CLI output utility fstab Parser
|
||||
|
||||
Usage:
|
||||
specify --fstab as the first argument if the piped input is coming from a fstab file
|
||||
|
||||
Examples:
|
||||
|
||||
$ cat /etc/fstab | jc --fstab -p
|
||||
[
|
||||
{
|
||||
"fs_spec": "/dev/mapper/centos-root",
|
||||
"fs_file": "/",
|
||||
"fs_vfstype": "xfs",
|
||||
"fs_mntops": "defaults",
|
||||
"fs_freq": 0,
|
||||
"fs_passno": 0
|
||||
},
|
||||
{
|
||||
"fs_spec": "UUID=05d927bb-5875-49e3-ada1-7f46cb31c932",
|
||||
"fs_file": "/boot",
|
||||
"fs_vfstype": "xfs",
|
||||
"fs_mntops": "defaults",
|
||||
"fs_freq": 0,
|
||||
"fs_passno": 0
|
||||
},
|
||||
{
|
||||
"fs_spec": "/dev/mapper/centos-swap",
|
||||
"fs_file": "swap",
|
||||
"fs_vfstype": "swap",
|
||||
"fs_mntops": "defaults",
|
||||
"fs_freq": 0,
|
||||
"fs_passno": 0
|
||||
}
|
||||
]
|
||||
|
||||
$ cat /etc/fstab | jc --fstab -p -r
|
||||
[
|
||||
{
|
||||
"fs_spec": "/dev/mapper/centos-root",
|
||||
"fs_file": "/",
|
||||
"fs_vfstype": "xfs",
|
||||
"fs_mntops": "defaults",
|
||||
"fs_freq": "0",
|
||||
"fs_passno": "0"
|
||||
},
|
||||
{
|
||||
"fs_spec": "UUID=05d927bb-5875-49e3-ada1-7f46cb31c932",
|
||||
"fs_file": "/boot",
|
||||
"fs_vfstype": "xfs",
|
||||
"fs_mntops": "defaults",
|
||||
"fs_freq": "0",
|
||||
"fs_passno": "0"
|
||||
},
|
||||
{
|
||||
"fs_spec": "/dev/mapper/centos-swap",
|
||||
"fs_file": "swap",
|
||||
"fs_vfstype": "swap",
|
||||
"fs_mntops": "defaults",
|
||||
"fs_freq": "0",
|
||||
"fs_passno": "0"
|
||||
}
|
||||
]
|
||||
"""
|
||||
import jc.utils
|
||||
|
||||
|
||||
def process(proc_data):
    """
    Final processing to conform to the schema.

    Parameters:

        proc_data:   (dictionary) raw structured data to process

    Returns:

        dictionary structured data with the following schema:

        [
          {
            "fs_spec":      string,
            "fs_file":      string,
            "fs_vfstype":   string,
            "fs_mntops":    string,
            "fs_freq":      integer,
            "fs_passno":    integer
          }
        ]
    """
    for entry in proc_data:
        # the dump frequency and fsck pass number are numeric fields
        for key in ('fs_freq', 'fs_passno'):
            if key in entry:
                try:
                    entry[key] = int(entry[key])
                except ValueError:
                    entry[key] = None

    return proc_data
|
||||
|
||||
|
||||
def parse(data, raw=False, quiet=False):
    """
    Main text parsing function

    Parameters:

        data:    (string)  text data to parse
        raw:     (boolean) output preprocessed JSON if True
        quiet:   (boolean) suppress warning messages if True

    Returns:

        dictionary raw or processed structured data
    """

    # compatible options: linux, darwin, cygwin, win32, aix, freebsd
    compatible = ['linux']

    if not quiet:
        jc.utils.compatibility(__name__, compatible)

    raw_output = []
    cleandata = data.splitlines()

    # Clear any blank lines
    cleandata = list(filter(None, cleandata))

    if cleandata:
        for line in cleandata:
            # ignore commented lines
            if line.strip().find('#') == 0:
                continue

            line_list = line.split(maxsplit=6)

            output_line = {}
            output_line['fs_spec'] = line_list[0]
            output_line['fs_file'] = line_list[1]
            output_line['fs_vfstype'] = line_list[2]
            output_line['fs_mntops'] = line_list[3]
            # fs_freq and fs_passno are optional in fstab(5) and default to 0
            output_line['fs_freq'] = line_list[4] if len(line_list) > 4 else '0'
            output_line['fs_passno'] = line_list[5] if len(line_list) > 5 else '0'

            raw_output.append(output_line)

    if raw:
        return raw_output
    else:
        return process(raw_output)
|
||||
113
jc/parsers/history.py
Normal file
113
jc/parsers/history.py
Normal file
@@ -0,0 +1,113 @@
|
||||
"""jc - JSON CLI output utility history Parser
|
||||
|
||||
Usage:
|
||||
specify --history as the first argument if the piped input is coming from history
|
||||
|
||||
Examples:
|
||||
|
||||
$ history | jc --history -p
|
||||
[
|
||||
{
|
||||
"line": "118",
|
||||
"command": "sleep 100"
|
||||
},
|
||||
{
|
||||
"line": "119",
|
||||
"command": "ls /bin"
|
||||
},
|
||||
{
|
||||
"line": "120",
|
||||
"command": "echo \"hello\""
|
||||
},
|
||||
{
|
||||
"line": "121",
|
||||
"command": "docker images"
|
||||
},
|
||||
...
|
||||
]
|
||||
|
||||
$ history | jc --history -p -r
|
||||
{
|
||||
"118": "sleep 100",
|
||||
"119": "ls /bin",
|
||||
"120": "echo \"hello\"",
|
||||
"121": "docker images",
|
||||
...
|
||||
}
|
||||
"""
|
||||
import jc
|
||||
|
||||
|
||||
def process(proc_data):
|
||||
"""
|
||||
Final processing to conform to the schema.
|
||||
|
||||
Parameters:
|
||||
|
||||
proc_data: (dictionary) raw structured data to process
|
||||
|
||||
Returns:
|
||||
|
||||
dictionary structured data with the following schema:
|
||||
|
||||
[
|
||||
{
|
||||
"line": string,
|
||||
"command": string
|
||||
}
|
||||
]
|
||||
"""
|
||||
|
||||
# rebuild output for added semantic information
|
||||
processed = []
|
||||
for k, v in proc_data.items():
|
||||
proc_line = {}
|
||||
proc_line['line'] = k
|
||||
proc_line['command'] = v
|
||||
processed.append(proc_line)
|
||||
|
||||
return processed
|
||||
|
||||
|
||||
def parse(data, raw=False, quiet=False):
|
||||
"""
|
||||
Main text parsing function
|
||||
|
||||
Parameters:
|
||||
|
||||
data: (string) text data to parse
|
||||
raw: (boolean) output preprocessed JSON if True
|
||||
quiet: (boolean) suppress warning messages if True
|
||||
|
||||
Returns:
|
||||
|
||||
dictionary raw or processed structured data
|
||||
"""
|
||||
|
||||
# compatible options: linux, darwin, cygwin, win32, aix, freebsd
|
||||
compatible = ['linux', 'darwin', 'cygwin', 'win32', 'aix', 'freebsd']
|
||||
|
||||
if not quiet:
|
||||
jc.utils.compatibility(__name__, compatible)
|
||||
|
||||
raw_output = {}
|
||||
|
||||
# split lines and clear out any non-ascii chars
|
||||
linedata = data.encode('ascii', errors='ignore').decode().splitlines()
|
||||
|
||||
# Clear any blank lines
|
||||
cleandata = list(filter(None, linedata))
|
||||
|
||||
if cleandata:
|
||||
for entry in cleandata:
|
||||
try:
|
||||
parsed_line = entry.split(maxsplit=1)
|
||||
raw_output[parsed_line[0]] = parsed_line[1]
|
||||
except IndexError:
|
||||
# need to catch indexerror in case there is weird input from prior commands
|
||||
pass
|
||||
|
||||
if raw:
|
||||
return raw_output
|
||||
else:
|
||||
return process(raw_output)
|
||||
141
jc/parsers/hosts.py
Normal file
141
jc/parsers/hosts.py
Normal file
@@ -0,0 +1,141 @@
|
||||
"""jc - JSON CLI output utility hosts Parser
|
||||
|
||||
Usage:
|
||||
specify --hosts as the first argument if the piped input is coming from a hosts file
|
||||
|
||||
Examples:
|
||||
|
||||
$ cat /etc/hosts | jc --hosts -p
|
||||
[
|
||||
{
|
||||
"ip": "127.0.0.1",
|
||||
"hostname": [
|
||||
"localhost"
|
||||
]
|
||||
},
|
||||
{
|
||||
"ip": "127.0.1.1",
|
||||
"hostname": [
|
||||
"root-ubuntu"
|
||||
]
|
||||
},
|
||||
{
|
||||
"ip": "::1",
|
||||
"hostname": [
|
||||
"ip6-localhost",
|
||||
"ip6-loopback"
|
||||
]
|
||||
},
|
||||
{
|
||||
"ip": "fe00::0",
|
||||
"hostname": [
|
||||
"ip6-localnet"
|
||||
]
|
||||
},
|
||||
{
|
||||
"ip": "ff00::0",
|
||||
"hostname": [
|
||||
"ip6-mcastprefix"
|
||||
]
|
||||
},
|
||||
{
|
||||
"ip": "ff02::1",
|
||||
"hostname": [
|
||||
"ip6-allnodes"
|
||||
]
|
||||
},
|
||||
{
|
||||
"ip": "ff02::2",
|
||||
"hostname": [
|
||||
"ip6-allrouters"
|
||||
]
|
||||
}
|
||||
]
|
||||
"""
|
||||
import jc.utils
|
||||
|
||||
|
||||
def process(proc_data):
|
||||
"""
|
||||
Final processing to conform to the schema.
|
||||
|
||||
Parameters:
|
||||
|
||||
proc_data: (dictionary) raw structured data to process
|
||||
|
||||
Returns:
|
||||
|
||||
dictionary structured data with the following schema:
|
||||
|
||||
[
|
||||
{
|
||||
"ip": string,
|
||||
"hostname": [
|
||||
string
|
||||
]
|
||||
}
|
||||
]
|
||||
"""
|
||||
|
||||
# no additional processing needed
|
||||
return proc_data
|
||||
|
||||
|
||||
def parse(data, raw=False, quiet=False):
|
||||
"""
|
||||
Main text parsing function
|
||||
|
||||
Parameters:
|
||||
|
||||
data: (string) text data to parse
|
||||
raw: (boolean) output preprocessed JSON if True
|
||||
quiet: (boolean) suppress warning messages if True
|
||||
|
||||
Returns:
|
||||
|
||||
dictionary raw or processed structured data
|
||||
"""
|
||||
|
||||
# compatible options: linux, darwin, cygwin, win32, aix, freebsd
|
||||
compatible = ['linux', 'darwin', 'cygwin', 'win32', 'aix', 'freebsd']
|
||||
|
||||
if not quiet:
|
||||
jc.utils.compatibility(__name__, compatible)
|
||||
|
||||
raw_output = []
|
||||
cleandata = data.splitlines()
|
||||
|
||||
# Clear any blank lines
|
||||
cleandata = list(filter(None, cleandata))
|
||||
|
||||
if cleandata:
|
||||
for line in cleandata:
|
||||
output_line = {}
|
||||
# ignore commented lines
|
||||
if line.strip().find('#') == 0:
|
||||
continue
|
||||
|
||||
line_list = line.split(maxsplit=1)
|
||||
ip = line_list[0]
|
||||
hosts = line_list[1]
|
||||
hosts_list = hosts.split()
|
||||
|
||||
comment_found = False
|
||||
for i, item in enumerate(hosts_list):
|
||||
if item.find('#') != -1:
|
||||
comment_found = True
|
||||
comment_item = i
|
||||
break
|
||||
|
||||
if comment_found:
|
||||
hosts_list = hosts_list[:comment_item]
|
||||
|
||||
output_line['ip'] = ip
|
||||
output_line['hostname'] = hosts_list
|
||||
|
||||
raw_output.append(output_line)
|
||||
|
||||
if raw:
|
||||
return raw_output
|
||||
else:
|
||||
return process(raw_output)
|
||||
@@ -5,17 +5,202 @@ Usage:
|
||||
|
||||
no ifconfig options are supported.
|
||||
|
||||
Example:
|
||||
Examples:
|
||||
|
||||
$ ifconfig | jc --ifconfig -p
|
||||
$ ifconfig | jc --ifconfig -p
|
||||
[
|
||||
{
|
||||
"name": "ens33",
|
||||
"flags": 4163,
|
||||
"state": "UP,BROADCAST,RUNNING,MULTICAST",
|
||||
"mtu": 1500,
|
||||
"ipv4_addr": "192.168.71.138",
|
||||
"ipv4_mask": "255.255.255.0",
|
||||
"ipv4_bcast": "192.168.71.255",
|
||||
"ipv6_addr": "fe80::c1cb:715d:bc3e:b8a0",
|
||||
"ipv6_mask": 64,
|
||||
"ipv6_scope": "link",
|
||||
"mac_addr": "00:0c:29:3b:58:0e",
|
||||
"type": "Ethernet",
|
||||
"rx_packets": 6374,
|
||||
"rx_errors": 0,
|
||||
"rx_dropped": 0,
|
||||
"rx_overruns": 0,
|
||||
"rx_frame": 0,
|
||||
"tx_packets": 3707,
|
||||
"tx_errors": 0,
|
||||
"tx_dropped": 0,
|
||||
"tx_overruns": 0,
|
||||
"tx_carrier": 0,
|
||||
"tx_collisions": 0,
|
||||
"metric": null
|
||||
},
|
||||
{
|
||||
"name": "lo",
|
||||
"flags": 73,
|
||||
"state": "UP,LOOPBACK,RUNNING",
|
||||
"mtu": 65536,
|
||||
"ipv4_addr": "127.0.0.1",
|
||||
"ipv4_mask": "255.0.0.0",
|
||||
"ipv4_bcast": null,
|
||||
"ipv6_addr": "::1",
|
||||
"ipv6_mask": 128,
|
||||
"ipv6_scope": "host",
|
||||
"mac_addr": null,
|
||||
"type": "Local Loopback",
|
||||
"rx_packets": 81,
|
||||
"rx_errors": 0,
|
||||
"rx_dropped": 0,
|
||||
"rx_overruns": 0,
|
||||
"rx_frame": 0,
|
||||
"tx_packets": 81,
|
||||
"tx_errors": 0,
|
||||
"tx_dropped": 0,
|
||||
"tx_overruns": 0,
|
||||
"tx_carrier": 0,
|
||||
"tx_collisions": 0,
|
||||
"metric": null
|
||||
}
|
||||
]
|
||||
|
||||
$ ifconfig | jc --ifconfig -p -r
|
||||
[
|
||||
{
|
||||
"name": "ens33",
|
||||
"flags": "4163",
|
||||
"state": "UP,BROADCAST,RUNNING,MULTICAST",
|
||||
"mtu": "1500",
|
||||
"ipv4_addr": "192.168.71.135",
|
||||
"ipv4_mask": "255.255.255.0",
|
||||
"ipv4_bcast": "192.168.71.255",
|
||||
"ipv6_addr": "fe80::c1cb:715d:bc3e:b8a0",
|
||||
"ipv6_mask": "64",
|
||||
"ipv6_scope": "link",
|
||||
"mac_addr": "00:0c:29:3b:58:0e",
|
||||
"type": "Ethernet",
|
||||
"rx_packets": "26348",
|
||||
"rx_errors": "0",
|
||||
"rx_dropped": "0",
|
||||
"rx_overruns": "0",
|
||||
"rx_frame": "0",
|
||||
"tx_packets": "5308",
|
||||
"tx_errors": "0",
|
||||
"tx_dropped": "0",
|
||||
"tx_overruns": "0",
|
||||
"tx_carrier": "0",
|
||||
"tx_collisions": "0",
|
||||
"metric": null
|
||||
},
|
||||
{
|
||||
"name": "lo",
|
||||
"flags": "73",
|
||||
"state": "UP,LOOPBACK,RUNNING",
|
||||
"mtu": "65536",
|
||||
"ipv4_addr": "127.0.0.1",
|
||||
"ipv4_mask": "255.0.0.0",
|
||||
"ipv4_bcast": null,
|
||||
"ipv6_addr": "::1",
|
||||
"ipv6_mask": "128",
|
||||
"ipv6_scope": "host",
|
||||
"mac_addr": null,
|
||||
"type": "Local Loopback",
|
||||
"rx_packets": "64",
|
||||
"rx_errors": "0",
|
||||
"rx_dropped": "0",
|
||||
"rx_overruns": "0",
|
||||
"rx_frame": "0",
|
||||
"tx_packets": "64",
|
||||
"tx_errors": "0",
|
||||
"tx_dropped": "0",
|
||||
"tx_overruns": "0",
|
||||
"tx_carrier": "0",
|
||||
"tx_collisions": "0",
|
||||
"metric": null
|
||||
}
|
||||
]
|
||||
"""
|
||||
from collections import namedtuple
|
||||
import jc.utils
|
||||
from ifconfigparser import IfconfigParser
|
||||
|
||||
|
||||
def parse(data):
|
||||
output = []
|
||||
def process(proc_data):
|
||||
"""
|
||||
Final processing to conform to the schema.
|
||||
|
||||
Parameters:
|
||||
|
||||
proc_data: (dictionary) raw structured data to process
|
||||
|
||||
Returns:
|
||||
|
||||
dictionary structured data with the following schema:
|
||||
|
||||
[
|
||||
{
|
||||
"name": string,
|
||||
"flags": integer,
|
||||
"state": string,
|
||||
"mtu": integer,
|
||||
"ipv4_addr": string,
|
||||
"ipv4_mask": string,
|
||||
"ipv4_bcast": string,
|
||||
"ipv6_addr": string,
|
||||
"ipv6_mask": integer,
|
||||
"ipv6_scope": string,
|
||||
"mac_addr": string,
|
||||
"type": string,
|
||||
"rx_packets": integer,
|
||||
"rx_errors": integer,
|
||||
"rx_dropped": integer,
|
||||
"rx_overruns": integer,
|
||||
"rx_frame": integer,
|
||||
"tx_packets": integer,
|
||||
"tx_errors": integer,
|
||||
"tx_dropped": integer,
|
||||
"tx_overruns": integer,
|
||||
"tx_carrier": integer,
|
||||
"tx_collisions": integer,
|
||||
"metric": integer
|
||||
}
|
||||
]
|
||||
"""
|
||||
for entry in proc_data:
|
||||
int_list = ['flags', 'mtu', 'ipv6_mask', 'rx_packets', 'rx_errors', 'rx_dropped', 'rx_overruns',
|
||||
'rx_frame', 'tx_packets', 'tx_errors', 'tx_dropped', 'tx_overruns', 'tx_carrier',
|
||||
'tx_collisions', 'metric']
|
||||
for key in int_list:
|
||||
if key in entry:
|
||||
try:
|
||||
key_int = int(entry[key])
|
||||
entry[key] = key_int
|
||||
except (ValueError, TypeError):
|
||||
entry[key] = None
|
||||
|
||||
return proc_data
|
||||
|
||||
|
||||
def parse(data, raw=False, quiet=False):
|
||||
"""
|
||||
Main text parsing function
|
||||
|
||||
Parameters:
|
||||
|
||||
data: (string) text data to parse
|
||||
raw: (boolean) output preprocessed JSON if True
|
||||
quiet: (boolean) suppress warning messages if True
|
||||
|
||||
Returns:
|
||||
|
||||
dictionary raw or processed structured data
|
||||
"""
|
||||
|
||||
# compatible options: linux, darwin, cygwin, win32, aix, freebsd
|
||||
compatible = ['linux', 'aix', 'freebsd']
|
||||
|
||||
if not quiet:
|
||||
jc.utils.compatibility(__name__, compatible)
|
||||
|
||||
raw_output = []
|
||||
|
||||
parsed = IfconfigParser(console_output=data)
|
||||
interfaces = parsed.get_interfaces()
|
||||
@@ -24,6 +209,9 @@ def parse(data):
|
||||
for iface in interfaces:
|
||||
d = interfaces[iface]._asdict()
|
||||
dct = dict(d)
|
||||
output.append(dct)
|
||||
raw_output.append(dct)
|
||||
|
||||
return output
|
||||
if raw:
|
||||
return raw_output
|
||||
else:
|
||||
return process(raw_output)
|
||||
|
||||
@@ -3,330 +3,228 @@
|
||||
Usage:
|
||||
Specify --iptables as the first argument if the piped input is coming from iptables
|
||||
|
||||
Supports -vLn for all tables
|
||||
Supports -vLn and --line-numbers for all tables
|
||||
|
||||
Examples:
|
||||
|
||||
$ sudo iptables -L -t nat | jc --iptables -p
|
||||
[
|
||||
{
|
||||
"chain": "PREROUTING",
|
||||
"rules": [
|
||||
$ sudo iptables --line-numbers -v -L -t nat | jc --iptables -p
|
||||
[
|
||||
{
|
||||
"target": "PREROUTING_direct",
|
||||
"prot": "all",
|
||||
"opt": "--",
|
||||
"source": "anywhere",
|
||||
"destination": "anywhere"
|
||||
"chain": "PREROUTING",
|
||||
"rules": [
|
||||
{
|
||||
"num": 1,
|
||||
"pkts": 2183,
|
||||
"bytes": 186000,
|
||||
"target": "PREROUTING_direct",
|
||||
"prot": "all",
|
||||
"opt": null,
|
||||
"in": "any",
|
||||
"out": "any",
|
||||
"source": "anywhere",
|
||||
"destination": "anywhere"
|
||||
},
|
||||
{
|
||||
"num": 2,
|
||||
"pkts": 2183,
|
||||
"bytes": 186000,
|
||||
"target": "PREROUTING_ZONES_SOURCE",
|
||||
"prot": "all",
|
||||
"opt": null,
|
||||
"in": "any",
|
||||
"out": "any",
|
||||
"source": "anywhere",
|
||||
"destination": "anywhere"
|
||||
},
|
||||
{
|
||||
"num": 3,
|
||||
"pkts": 2183,
|
||||
"bytes": 186000,
|
||||
"target": "PREROUTING_ZONES",
|
||||
"prot": "all",
|
||||
"opt": null,
|
||||
"in": "any",
|
||||
"out": "any",
|
||||
"source": "anywhere",
|
||||
"destination": "anywhere"
|
||||
},
|
||||
{
|
||||
"num": 4,
|
||||
"pkts": 0,
|
||||
"bytes": 0,
|
||||
"target": "DOCKER",
|
||||
"prot": "all",
|
||||
"opt": null,
|
||||
"in": "any",
|
||||
"out": "any",
|
||||
"source": "anywhere",
|
||||
"destination": "anywhere",
|
||||
"options": "ADDRTYPE match dst-type LOCAL"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"target": "PREROUTING_ZONES_SOURCE",
|
||||
"prot": "all",
|
||||
"opt": "--",
|
||||
"source": "anywhere",
|
||||
"destination": "anywhere"
|
||||
},
|
||||
{
|
||||
"target": "PREROUTING_ZONES",
|
||||
"prot": "all",
|
||||
"opt": "--",
|
||||
"source": "anywhere",
|
||||
"destination": "anywhere"
|
||||
},
|
||||
{
|
||||
"target": "DOCKER",
|
||||
"prot": "all",
|
||||
"opt": "--",
|
||||
"source": "anywhere",
|
||||
"destination": "anywhere",
|
||||
"options": "ADDRTYPE match dst-type LOCAL"
|
||||
}
|
||||
...
|
||||
]
|
||||
},
|
||||
{
|
||||
"chain": "INPUT",
|
||||
"rules": []
|
||||
},
|
||||
{
|
||||
"chain": "OUTPUT",
|
||||
"rules": [
|
||||
{
|
||||
"target": "OUTPUT_direct",
|
||||
"prot": "all",
|
||||
"opt": "--",
|
||||
"source": "anywhere",
|
||||
"destination": "anywhere"
|
||||
},
|
||||
{
|
||||
"target": "DOCKER",
|
||||
"prot": "all",
|
||||
"opt": "--",
|
||||
"source": "anywhere",
|
||||
"destination": "!loopback/8",
|
||||
"options": "ADDRTYPE match dst-type LOCAL"
|
||||
}
|
||||
]
|
||||
},
|
||||
...
|
||||
]
|
||||
|
||||
$ sudo iptables -vnL -t filter | jc --iptables -p
|
||||
[
|
||||
{
|
||||
"chain": "INPUT",
|
||||
"rules": [
|
||||
$ sudo iptables --line-numbers -v -L -t nat | jc --iptables -p -r
|
||||
[
|
||||
{
|
||||
"pkts": "1571",
|
||||
"bytes": "3394K",
|
||||
"target": "ACCEPT",
|
||||
"prot": "all",
|
||||
"opt": "--",
|
||||
"in": "*",
|
||||
"out": "*",
|
||||
"source": "0.0.0.0/0",
|
||||
"destination": "0.0.0.0/0",
|
||||
"options": "ctstate RELATED,ESTABLISHED"
|
||||
"chain": "PREROUTING",
|
||||
"rules": [
|
||||
{
|
||||
"num": "1",
|
||||
"pkts": "2183",
|
||||
"bytes": "186K",
|
||||
"target": "PREROUTING_direct",
|
||||
"prot": "all",
|
||||
"opt": "--",
|
||||
"in": "any",
|
||||
"out": "any",
|
||||
"source": "anywhere",
|
||||
"destination": "anywhere"
|
||||
},
|
||||
{
|
||||
"num": "2",
|
||||
"pkts": "2183",
|
||||
"bytes": "186K",
|
||||
"target": "PREROUTING_ZONES_SOURCE",
|
||||
"prot": "all",
|
||||
"opt": "--",
|
||||
"in": "any",
|
||||
"out": "any",
|
||||
"source": "anywhere",
|
||||
"destination": "anywhere"
|
||||
},
|
||||
{
|
||||
"num": "3",
|
||||
"pkts": "2183",
|
||||
"bytes": "186K",
|
||||
"target": "PREROUTING_ZONES",
|
||||
"prot": "all",
|
||||
"opt": "--",
|
||||
"in": "any",
|
||||
"out": "any",
|
||||
"source": "anywhere",
|
||||
"destination": "anywhere"
|
||||
},
|
||||
{
|
||||
"num": "4",
|
||||
"pkts": "0",
|
||||
"bytes": "0",
|
||||
"target": "DOCKER",
|
||||
"prot": "all",
|
||||
"opt": "--",
|
||||
"in": "any",
|
||||
"out": "any",
|
||||
"source": "anywhere",
|
||||
"destination": "anywhere",
|
||||
"options": "ADDRTYPE match dst-type LOCAL"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"pkts": "0",
|
||||
"bytes": "0",
|
||||
"target": "ACCEPT",
|
||||
"prot": "all",
|
||||
"opt": "--",
|
||||
"in": "lo",
|
||||
"out": "*",
|
||||
"source": "0.0.0.0/0",
|
||||
"destination": "0.0.0.0/0"
|
||||
},
|
||||
{
|
||||
"pkts": "711",
|
||||
"bytes": "60126",
|
||||
"target": "INPUT_direct",
|
||||
"prot": "all",
|
||||
"opt": "--",
|
||||
"in": "*",
|
||||
"out": "*",
|
||||
"source": "0.0.0.0/0",
|
||||
"destination": "0.0.0.0/0"
|
||||
},
|
||||
{
|
||||
"pkts": "711",
|
||||
"bytes": "60126",
|
||||
"target": "INPUT_ZONES_SOURCE",
|
||||
"prot": "all",
|
||||
"opt": "--",
|
||||
"in": "*",
|
||||
"out": "*",
|
||||
"source": "0.0.0.0/0",
|
||||
"destination": "0.0.0.0/0"
|
||||
},
|
||||
{
|
||||
"pkts": "711",
|
||||
"bytes": "60126",
|
||||
"target": "INPUT_ZONES",
|
||||
"prot": "all",
|
||||
"opt": "--",
|
||||
"in": "*",
|
||||
"out": "*",
|
||||
"source": "0.0.0.0/0",
|
||||
"destination": "0.0.0.0/0"
|
||||
},
|
||||
{
|
||||
"pkts": "0",
|
||||
"bytes": "0",
|
||||
"target": "DROP",
|
||||
"prot": "all",
|
||||
"opt": "--",
|
||||
"in": "*",
|
||||
"out": "*",
|
||||
"source": "0.0.0.0/0",
|
||||
"destination": "0.0.0.0/0",
|
||||
"options": "ctstate INVALID"
|
||||
},
|
||||
{
|
||||
"pkts": "710",
|
||||
"bytes": "60078",
|
||||
"target": "REJECT",
|
||||
"prot": "all",
|
||||
"opt": "--",
|
||||
"in": "*",
|
||||
"out": "*",
|
||||
"source": "0.0.0.0/0",
|
||||
"destination": "0.0.0.0/0",
|
||||
"options": "reject-with icmp-host-prohibited"
|
||||
}
|
||||
...
|
||||
]
|
||||
},
|
||||
{
|
||||
"chain": "FORWARD",
|
||||
"rules": [
|
||||
{
|
||||
"pkts": "0",
|
||||
"bytes": "0",
|
||||
"target": "DOCKER-ISOLATION",
|
||||
"prot": "all",
|
||||
"opt": "--",
|
||||
"in": "*",
|
||||
"out": "*",
|
||||
"source": "0.0.0.0/0",
|
||||
"destination": "0.0.0.0/0"
|
||||
},
|
||||
{
|
||||
"pkts": "0",
|
||||
"bytes": "0",
|
||||
"target": "DOCKER",
|
||||
"prot": "all",
|
||||
"opt": "--",
|
||||
"in": "*",
|
||||
"out": "docker0",
|
||||
"source": "0.0.0.0/0",
|
||||
"destination": "0.0.0.0/0"
|
||||
},
|
||||
{
|
||||
"pkts": "0",
|
||||
"bytes": "0",
|
||||
"target": "ACCEPT",
|
||||
"prot": "all",
|
||||
"opt": "--",
|
||||
"in": "*",
|
||||
"out": "docker0",
|
||||
"source": "0.0.0.0/0",
|
||||
"destination": "0.0.0.0/0",
|
||||
"options": "ctstate RELATED,ESTABLISHED"
|
||||
},
|
||||
{
|
||||
"pkts": "0",
|
||||
"bytes": "0",
|
||||
"target": "ACCEPT",
|
||||
"prot": "all",
|
||||
"opt": "--",
|
||||
"in": "docker0",
|
||||
"out": "!docker0",
|
||||
"source": "0.0.0.0/0",
|
||||
"destination": "0.0.0.0/0"
|
||||
},
|
||||
{
|
||||
"pkts": "0",
|
||||
"bytes": "0",
|
||||
"target": "ACCEPT",
|
||||
"prot": "all",
|
||||
"opt": "--",
|
||||
"in": "docker0",
|
||||
"out": "docker0",
|
||||
"source": "0.0.0.0/0",
|
||||
"destination": "0.0.0.0/0"
|
||||
},
|
||||
{
|
||||
"pkts": "0",
|
||||
"bytes": "0",
|
||||
"target": "ACCEPT",
|
||||
"prot": "all",
|
||||
"opt": "--",
|
||||
"in": "*",
|
||||
"out": "*",
|
||||
"source": "0.0.0.0/0",
|
||||
"destination": "0.0.0.0/0",
|
||||
"options": "ctstate RELATED,ESTABLISHED"
|
||||
},
|
||||
{
|
||||
"pkts": "0",
|
||||
"bytes": "0",
|
||||
"target": "ACCEPT",
|
||||
"prot": "all",
|
||||
"opt": "--",
|
||||
"in": "lo",
|
||||
"out": "*",
|
||||
"source": "0.0.0.0/0",
|
||||
"destination": "0.0.0.0/0"
|
||||
},
|
||||
{
|
||||
"pkts": "0",
|
||||
"bytes": "0",
|
||||
"target": "FORWARD_direct",
|
||||
"prot": "all",
|
||||
"opt": "--",
|
||||
"in": "*",
|
||||
"out": "*",
|
||||
"source": "0.0.0.0/0",
|
||||
"destination": "0.0.0.0/0"
|
||||
},
|
||||
{
|
||||
"pkts": "0",
|
||||
"bytes": "0",
|
||||
"target": "FORWARD_IN_ZONES_SOURCE",
|
||||
"prot": "all",
|
||||
"opt": "--",
|
||||
"in": "*",
|
||||
"out": "*",
|
||||
"source": "0.0.0.0/0",
|
||||
"destination": "0.0.0.0/0"
|
||||
},
|
||||
{
|
||||
"pkts": "0",
|
||||
"bytes": "0",
|
||||
"target": "FORWARD_IN_ZONES",
|
||||
"prot": "all",
|
||||
"opt": "--",
|
||||
"in": "*",
|
||||
"out": "*",
|
||||
"source": "0.0.0.0/0",
|
||||
"destination": "0.0.0.0/0"
|
||||
},
|
||||
{
|
||||
"pkts": "0",
|
||||
"bytes": "0",
|
||||
"target": "FORWARD_OUT_ZONES_SOURCE",
|
||||
"prot": "all",
|
||||
"opt": "--",
|
||||
"in": "*",
|
||||
"out": "*",
|
||||
"source": "0.0.0.0/0",
|
||||
"destination": "0.0.0.0/0"
|
||||
},
|
||||
{
|
||||
"pkts": "0",
|
||||
"bytes": "0",
|
||||
"target": "FORWARD_OUT_ZONES",
|
||||
"prot": "all",
|
||||
"opt": "--",
|
||||
"in": "*",
|
||||
"out": "*",
|
||||
"source": "0.0.0.0/0",
|
||||
"destination": "0.0.0.0/0"
|
||||
},
|
||||
{
|
||||
"pkts": "0",
|
||||
"bytes": "0",
|
||||
"target": "DROP",
|
||||
"prot": "all",
|
||||
"opt": "--",
|
||||
"in": "*",
|
||||
"out": "*",
|
||||
"source": "0.0.0.0/0",
|
||||
"destination": "0.0.0.0/0",
|
||||
"options": "ctstate INVALID"
|
||||
},
|
||||
{
|
||||
"pkts": "0",
|
||||
"bytes": "0",
|
||||
"target": "REJECT",
|
||||
"prot": "all",
|
||||
"opt": "--",
|
||||
"in": "*",
|
||||
"out": "*",
|
||||
"source": "0.0.0.0/0",
|
||||
"destination": "0.0.0.0/0",
|
||||
"options": "reject-with icmp-host-prohibited"
|
||||
}
|
||||
]
|
||||
},
|
||||
...
|
||||
]
|
||||
"""
|
||||
import jc.utils
|
||||
|
||||
|
||||
def parse(data):
|
||||
output = []
|
||||
def process(proc_data):
|
||||
"""
|
||||
Final processing to conform to the schema.
|
||||
|
||||
Parameters:
|
||||
|
||||
proc_data: (dictionary) raw structured data to process
|
||||
|
||||
Returns:
|
||||
|
||||
dictionary structured data with the following schema:
|
||||
|
||||
[
|
||||
{
|
||||
"chain": string,
|
||||
"rules": [
|
||||
{
|
||||
"num" integer,
|
||||
"pkts": integer,
|
||||
"bytes": integer, # converted based on suffix
|
||||
"target": string,
|
||||
"prot": string,
|
||||
"opt": string, # "--" = Null
|
||||
"in": string,
|
||||
"out": string,
|
||||
"source": string,
|
||||
"destination": string,
|
||||
"options": string
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
"""
|
||||
for entry in proc_data:
|
||||
for rule in entry['rules']:
|
||||
int_list = ['num', 'pkts']
|
||||
for key in int_list:
|
||||
if key in rule:
|
||||
try:
|
||||
key_int = int(rule[key])
|
||||
rule[key] = key_int
|
||||
except (ValueError):
|
||||
rule[key] = None
|
||||
|
||||
if 'bytes' in rule:
|
||||
multiplier = 1
|
||||
if rule['bytes'][-1] == 'K':
|
||||
multiplier = 1000
|
||||
rule['bytes'] = rule['bytes'].rstrip('K')
|
||||
elif rule['bytes'][-1] == 'M':
|
||||
multiplier = 1000000
|
||||
rule['bytes'] = rule['bytes'].rstrip('M')
|
||||
elif rule['bytes'][-1] == 'G':
|
||||
multiplier = 1000000000
|
||||
rule['bytes'] = rule['bytes'].rstrip('G')
|
||||
elif rule['bytes'][-1] == 'T':
|
||||
multiplier = 1000000000000
|
||||
rule['bytes'] = rule['bytes'].rstrip('T')
|
||||
elif rule['bytes'][-1] == 'P':
|
||||
multiplier = 1000000000000000
|
||||
rule['bytes'] = rule['bytes'].rstrip('P')
|
||||
|
||||
try:
|
||||
bytes_int = int(rule['bytes'])
|
||||
rule['bytes'] = bytes_int * multiplier
|
||||
except (ValueError):
|
||||
rule['bytes'] = None
|
||||
|
||||
if 'opt' in rule:
|
||||
if rule['opt'] == '--':
|
||||
rule['opt'] = None
|
||||
|
||||
return proc_data
|
||||
|
||||
|
||||
def parse(data, raw=False, quiet=False):
|
||||
"""
|
||||
Main text parsing function
|
||||
|
||||
Parameters:
|
||||
|
||||
data: (string) text data to parse
|
||||
raw: (boolean) output preprocessed JSON if True
|
||||
quiet: (boolean) suppress warning messages if True
|
||||
|
||||
Returns:
|
||||
|
||||
dictionary raw or processed structured data
|
||||
"""
|
||||
|
||||
# compatible options: linux, darwin, cygwin, win32, aix, freebsd
|
||||
compatible = ['linux']
|
||||
|
||||
if not quiet:
|
||||
jc.utils.compatibility(__name__, compatible)
|
||||
|
||||
raw_output = []
|
||||
chain = {}
|
||||
headers = []
|
||||
|
||||
@@ -335,7 +233,7 @@ def parse(data):
|
||||
for line in cleandata:
|
||||
|
||||
if line.find('Chain') == 0:
|
||||
output.append(chain)
|
||||
raw_output.append(chain)
|
||||
chain = {}
|
||||
headers = []
|
||||
|
||||
@@ -346,9 +244,9 @@ def parse(data):
|
||||
|
||||
continue
|
||||
|
||||
elif line.find('target') == 0 or line.find('pkts') == 1:
|
||||
elif line.find('target') == 0 or line.find('pkts') == 1 or line.find('num') == 0:
|
||||
headers = []
|
||||
headers = [h for h in ' '.join(line.strip().split()).split() if h]
|
||||
headers = [h for h in ' '.join(line.lower().strip().split()).split() if h]
|
||||
headers.append("options")
|
||||
|
||||
continue
|
||||
@@ -359,6 +257,9 @@ def parse(data):
|
||||
if temp_rule:
|
||||
chain['rules'].append(temp_rule)
|
||||
|
||||
output = list(filter(None, output))
|
||||
raw_output = list(filter(None, raw_output))
|
||||
|
||||
return output
|
||||
if raw:
|
||||
return raw_output
|
||||
else:
|
||||
return process(raw_output)
|
||||
|
||||
@@ -7,49 +7,126 @@ Usage:
|
||||
|
||||
Example:
|
||||
|
||||
$ jobs -l | jc --jobs -p
|
||||
[
|
||||
{
|
||||
"job_number": 1,
|
||||
"pid": 14798,
|
||||
"status": "Running",
|
||||
"command": "sleep 10000 &"
|
||||
},
|
||||
{
|
||||
"job_number": 2,
|
||||
"pid": 14799,
|
||||
"status": "Running",
|
||||
"command": "sleep 10001 &"
|
||||
},
|
||||
{
|
||||
"job_number": 3,
|
||||
"pid": 14800,
|
||||
"status": "Running",
|
||||
"command": "sleep 10002 &"
|
||||
},
|
||||
{
|
||||
"job_number": 4,
|
||||
"pid": 14814,
|
||||
"history": "previous",
|
||||
"status": "Running",
|
||||
"command": "sleep 10003 &"
|
||||
},
|
||||
{
|
||||
"job_number": 5,
|
||||
"pid": 14815,
|
||||
"history": "current",
|
||||
"status": "Running",
|
||||
"command": "sleep 10004 &"
|
||||
}
|
||||
]
|
||||
$ jobs -l | jc --jobs -p
|
||||
[
|
||||
{
|
||||
"job_number": 1,
|
||||
"pid": 5283,
|
||||
"status": "Running",
|
||||
"command": "sleep 10000 &"
|
||||
},
|
||||
{
|
||||
"job_number": 2,
|
||||
"pid": 5284,
|
||||
"status": "Running",
|
||||
"command": "sleep 10100 &"
|
||||
},
|
||||
{
|
||||
"job_number": 3,
|
||||
"pid": 5285,
|
||||
"history": "previous",
|
||||
"status": "Running",
|
||||
"command": "sleep 10001 &"
|
||||
},
|
||||
{
|
||||
"job_number": 4,
|
||||
"pid": 5286,
|
||||
"history": "current",
|
||||
"status": "Running",
|
||||
"command": "sleep 10112 &"
|
||||
}
|
||||
]
|
||||
|
||||
$ jobs -l | jc --jobs -p -r
|
||||
[
|
||||
{
|
||||
"job_number": "1",
|
||||
"pid": "19510",
|
||||
"status": "Running",
|
||||
"command": "sleep 1000 &"
|
||||
},
|
||||
{
|
||||
"job_number": "2",
|
||||
"pid": "19511",
|
||||
"status": "Running",
|
||||
"command": "sleep 1001 &"
|
||||
},
|
||||
{
|
||||
"job_number": "3",
|
||||
"pid": "19512",
|
||||
"history": "previous",
|
||||
"status": "Running",
|
||||
"command": "sleep 1002 &"
|
||||
},
|
||||
{
|
||||
"job_number": "4",
|
||||
"pid": "19513",
|
||||
"history": "current",
|
||||
"status": "Running",
|
||||
"command": "sleep 1003 &"
|
||||
}
|
||||
]
|
||||
"""
|
||||
|
||||
|
||||
import string
|
||||
import jc.utils
|
||||
|
||||
|
||||
def parse(data):
|
||||
output = []
|
||||
def process(proc_data):
|
||||
"""
|
||||
Final processing to conform to the schema.
|
||||
|
||||
Parameters:
|
||||
|
||||
proc_data: (dictionary) raw structured data to process
|
||||
|
||||
Returns:
|
||||
|
||||
dictionary structured data with the following schema:
|
||||
[
|
||||
{
|
||||
"job_number": integer,
|
||||
"pid": integer,
|
||||
"history": string,
|
||||
"status": string,
|
||||
"command": string
|
||||
}
|
||||
]
|
||||
"""
|
||||
for entry in proc_data:
|
||||
int_list = ['job_number', 'pid']
|
||||
for key in int_list:
|
||||
if key in entry:
|
||||
try:
|
||||
key_int = int(entry[key])
|
||||
entry[key] = key_int
|
||||
except (ValueError):
|
||||
entry[key] = None
|
||||
|
||||
return proc_data
|
||||
|
||||
|
||||
def parse(data, raw=False, quiet=False):
|
||||
"""
|
||||
Main text parsing function
|
||||
|
||||
Parameters:
|
||||
|
||||
data: (string) text data to parse
|
||||
raw: (boolean) output preprocessed JSON if True
|
||||
quiet: (boolean) suppress warning messages if True
|
||||
|
||||
Returns:
|
||||
|
||||
dictionary raw or processed structured data
|
||||
"""
|
||||
|
||||
# compatible options: linux, darwin, cygwin, win32, aix, freebsd
|
||||
compatible = ['linux', 'darwin', 'cygwin', 'aix', 'freebsd']
|
||||
|
||||
if not quiet:
|
||||
jc.utils.compatibility(__name__, compatible)
|
||||
|
||||
raw_output = []
|
||||
|
||||
linedata = data.splitlines()
|
||||
|
||||
@@ -73,7 +150,7 @@ def parse(data):
|
||||
remainder = parsed_line.pop(1)
|
||||
job_number = parsed_line.pop(0)
|
||||
remainder = remainder.split(maxsplit=1)
|
||||
|
||||
|
||||
# rebuild parsed_line
|
||||
parsed_line = []
|
||||
|
||||
@@ -95,14 +172,17 @@ def parse(data):
|
||||
parsed_line[0] = parsed_line[0].lstrip('[').rstrip(']')
|
||||
|
||||
# create list of dictionaries
|
||||
output_line['job_number'] = int(parsed_line[0])
|
||||
output_line['job_number'] = parsed_line[0]
|
||||
if pid:
|
||||
output_line['pid'] = int(pid)
|
||||
output_line['pid'] = pid
|
||||
if job_history:
|
||||
output_line['history'] = job_history
|
||||
output_line['status'] = parsed_line[1]
|
||||
output_line['command'] = parsed_line[2]
|
||||
|
||||
output.append(output_line)
|
||||
raw_output.append(output_line)
|
||||
|
||||
return output
|
||||
if raw:
|
||||
return raw_output
|
||||
else:
|
||||
return process(raw_output)
|
||||
|
||||
271
jc/parsers/ls.py
271
jc/parsers/ls.py
@@ -5,89 +5,199 @@ Usage:
|
||||
|
||||
ls options supported:
|
||||
- None
|
||||
- l
|
||||
- a
|
||||
- la
|
||||
- h file sizes will be available in text form with -r but larger file sizes
|
||||
with human readable suffixes will be converted to Null in default view
|
||||
since the parser attempts to convert this field to an integer.
|
||||
|
||||
Examples:
|
||||
|
||||
$ ls -a /usr/bin | jc --ls -p
|
||||
[
|
||||
{
|
||||
"filename": "."
|
||||
},
|
||||
{
|
||||
"filename": ".."
|
||||
},
|
||||
{
|
||||
"filename": "2to3-"
|
||||
},
|
||||
{
|
||||
"filename": "2to3-2.7"
|
||||
},
|
||||
{
|
||||
"filename": "AssetCacheLocatorUtil"
|
||||
},
|
||||
...
|
||||
]
|
||||
$ ls /usr/bin | jc --ls -p
|
||||
[
|
||||
{
|
||||
"filename": "apropos"
|
||||
},
|
||||
{
|
||||
"filename": "arch"
|
||||
},
|
||||
{
|
||||
"filename": "awk"
|
||||
},
|
||||
{
|
||||
"filename": "base64"
|
||||
},
|
||||
...
|
||||
]
|
||||
|
||||
$ ls -al /usr/bin | jc --ls -p
|
||||
[
|
||||
{
|
||||
"filename": ".",
|
||||
"flags": "drwxr-xr-x",
|
||||
"links": 970,
|
||||
"owner": "root",
|
||||
"group": "wheel",
|
||||
"bytes": 31040,
|
||||
"date": "Aug 27 21:20"
|
||||
},
|
||||
{
|
||||
"filename": "..",
|
||||
"flags": "drwxr-xr-x@",
|
||||
"links": 9,
|
||||
"owner": "root",
|
||||
"group": "wheel",
|
||||
"bytes": 288,
|
||||
"date": "May 3 22:14"
|
||||
},
|
||||
{
|
||||
"filename": "2to3-",
|
||||
"flags": "-rwxr-xr-x",
|
||||
"links": 4,
|
||||
"owner": "root",
|
||||
"group": "wheel",
|
||||
"bytes": 925,
|
||||
"date": "Feb 22 2019"
|
||||
},
|
||||
{
|
||||
"filename": "2to3-2.7",
|
||||
"link_to": "../../System/Library/Frameworks/Python.framework/Versions/2.7/bin/2to3-2.7",
|
||||
"flags": "lrwxr-xr-x",
|
||||
"links": 1,
|
||||
"owner": "root",
|
||||
"group": "wheel",
|
||||
"bytes": 74,
|
||||
"date": "May 4 02:12"
|
||||
},
|
||||
...
|
||||
]
|
||||
$ ls -l /usr/bin | jc --ls -p
|
||||
[
|
||||
{
|
||||
"filename": "apropos",
|
||||
"link_to": "whatis",
|
||||
"flags": "lrwxrwxrwx.",
|
||||
"links": 1,
|
||||
"owner": "root",
|
||||
"group": "root",
|
||||
"size": 6,
|
||||
"date": "Aug 15 10:53"
|
||||
},
|
||||
{
|
||||
"filename": "ar",
|
||||
"flags": "-rwxr-xr-x.",
|
||||
"links": 1,
|
||||
"owner": "root",
|
||||
"group": "root",
|
||||
"size": 62744,
|
||||
"date": "Aug 8 16:14"
|
||||
},
|
||||
{
|
||||
"filename": "arch",
|
||||
"flags": "-rwxr-xr-x.",
|
||||
"links": 1,
|
||||
"owner": "root",
|
||||
"group": "root",
|
||||
"size": 33080,
|
||||
"date": "Aug 19 23:25"
|
||||
},
|
||||
...
|
||||
]
|
||||
|
||||
$ $ ls -l /usr/bin | jc --ls | jq '.[] | 'select(.bytes > 50000000)'
|
||||
{
|
||||
"filename": "emacs",
|
||||
"flags": "-r-xr-xr-x",
|
||||
"links": 1,
|
||||
"owner": "root",
|
||||
"group": "wheel",
|
||||
"bytes": 117164432,
|
||||
"date": "May 3 22:26"
|
||||
}
|
||||
$ ls -l /usr/bin | jc --ls -p -r
|
||||
[
|
||||
{
|
||||
"filename": "apropos",
|
||||
"link_to": "whatis",
|
||||
"flags": "lrwxrwxrwx.",
|
||||
"links": "1",
|
||||
"owner": "root",
|
||||
"group": "root",
|
||||
"size": "6",
|
||||
"date": "Aug 15 10:53"
|
||||
},
|
||||
{
|
||||
"filename": "arch",
|
||||
"flags": "-rwxr-xr-x.",
|
||||
"links": "1",
|
||||
"owner": "root",
|
||||
"group": "root",
|
||||
"size": "33080",
|
||||
"date": "Aug 19 23:25"
|
||||
},
|
||||
{
|
||||
"filename": "awk",
|
||||
"link_to": "gawk",
|
||||
"flags": "lrwxrwxrwx.",
|
||||
"links": "1",
|
||||
"owner": "root",
|
||||
"group": "root",
|
||||
"size": "4",
|
||||
"date": "Aug 15 10:53"
|
||||
},
|
||||
{
|
||||
"filename": "base64",
|
||||
"flags": "-rwxr-xr-x.",
|
||||
"links": "1",
|
||||
"owner": "root",
|
||||
"group": "root",
|
||||
"size": "37360",
|
||||
"date": "Aug 19 23:25"
|
||||
},
|
||||
{
|
||||
"filename": "basename",
|
||||
"flags": "-rwxr-xr-x.",
|
||||
"links": "1",
|
||||
"owner": "root",
|
||||
"group": "root",
|
||||
"size": "29032",
|
||||
"date": "Aug 19 23:25"
|
||||
},
|
||||
{
|
||||
"filename": "bash",
|
||||
"flags": "-rwxr-xr-x.",
|
||||
"links": "1",
|
||||
"owner": "root",
|
||||
"group": "root",
|
||||
"size": "964600",
|
||||
"date": "Aug 8 05:06"
|
||||
},
|
||||
...
|
||||
]
|
||||
|
||||
$ ls -l /usr/bin | jc --ls | jq '.[] | select(.size > 50000000)'
|
||||
{
|
||||
"filename": "emacs",
|
||||
"flags": "-r-xr-xr-x",
|
||||
"links": 1,
|
||||
"owner": "root",
|
||||
"group": "wheel",
|
||||
"size": 117164432,
|
||||
"date": "May 3 2019"
|
||||
}
|
||||
"""
|
||||
import re
|
||||
import jc.utils
|
||||
|
||||
|
||||
def parse(data):
|
||||
output = []
|
||||
def process(proc_data):
|
||||
"""
|
||||
Final processing to conform to the schema.
|
||||
|
||||
Parameters:
|
||||
|
||||
proc_data: (dictionary) raw structured data to process
|
||||
|
||||
Returns:
|
||||
|
||||
dictionary structured data with the following schema:
|
||||
|
||||
[
|
||||
{
|
||||
"filename": string,
|
||||
"flags": string,
|
||||
"links": integer,
|
||||
"owner": string,
|
||||
"group": string,
|
||||
"size": integer,
|
||||
"date": string
|
||||
}
|
||||
]
|
||||
"""
|
||||
|
||||
for entry in proc_data:
|
||||
int_list = ['links', 'size']
|
||||
for key in int_list:
|
||||
if key in entry:
|
||||
try:
|
||||
key_int = int(entry[key])
|
||||
entry[key] = key_int
|
||||
except (ValueError):
|
||||
entry[key] = None
|
||||
|
||||
return proc_data
|
||||
|
||||
|
||||
def parse(data, raw=False, quiet=False):
|
||||
"""
|
||||
Main text parsing function
|
||||
|
||||
Parameters:
|
||||
|
||||
data: (string) text data to parse
|
||||
raw: (boolean) output preprocessed JSON if True
|
||||
quiet: (boolean) suppress warning messages if True
|
||||
|
||||
Returns:
|
||||
|
||||
dictionary raw or processed structured data
|
||||
"""
|
||||
|
||||
# compatible options: linux, darwin, cygwin, win32, aix, freebsd
|
||||
compatible = ['linux', 'darwin', 'cygwin', 'aix', 'freebsd']
|
||||
|
||||
if not quiet:
|
||||
jc.utils.compatibility(__name__, compatible)
|
||||
|
||||
raw_output = []
|
||||
|
||||
linedata = data.splitlines()
|
||||
|
||||
@@ -117,16 +227,19 @@ def parse(data):
|
||||
output_line['link_to'] = filename_field[1]
|
||||
|
||||
output_line['flags'] = parsed_line[0]
|
||||
output_line['links'] = int(parsed_line[1])
|
||||
output_line['links'] = parsed_line[1]
|
||||
output_line['owner'] = parsed_line[2]
|
||||
output_line['group'] = parsed_line[3]
|
||||
output_line['bytes'] = int(parsed_line[4])
|
||||
output_line['size'] = parsed_line[4]
|
||||
output_line['date'] = ' '.join(parsed_line[5:8])
|
||||
output.append(output_line)
|
||||
raw_output.append(output_line)
|
||||
else:
|
||||
for entry in cleandata:
|
||||
output_line = {}
|
||||
output_line['filename'] = entry
|
||||
output.append(output_line)
|
||||
raw_output.append(output_line)
|
||||
|
||||
return output
|
||||
if raw:
|
||||
return raw_output
|
||||
else:
|
||||
return process(raw_output)
|
||||
|
||||
@@ -3,68 +3,384 @@
|
||||
Usage:
|
||||
specify --lsblk as the first argument if the piped input is coming from lsblk
|
||||
|
||||
Example:
|
||||
Examples:
|
||||
|
||||
$ lsblk | jc --lsblk -p
|
||||
[
|
||||
{
|
||||
"NAME": "loop0",
|
||||
"MAJ:MIN": "7:0",
|
||||
"RM": "0",
|
||||
"SIZE": "54.5M",
|
||||
"RO": "1",
|
||||
"TYPE": "loop",
|
||||
"MOUNTPOINT": "/snap/core18/1223"
|
||||
},
|
||||
{
|
||||
"NAME": "sda",
|
||||
"MAJ:MIN": "8:0",
|
||||
"RM": "0",
|
||||
"SIZE": "20G",
|
||||
"RO": "0",
|
||||
"TYPE": "disk"
|
||||
},
|
||||
{
|
||||
"NAME": "sda1",
|
||||
"MAJ:MIN": "8:1",
|
||||
"RM": "0",
|
||||
"SIZE": "1M",
|
||||
"RO": "0",
|
||||
"TYPE": "part"
|
||||
},
|
||||
{
|
||||
"NAME": "sda2",
|
||||
"MAJ:MIN": "8:2",
|
||||
"RM": "0",
|
||||
"SIZE": "20G",
|
||||
"RO": "0",
|
||||
"TYPE": "part",
|
||||
"MOUNTPOINT": "/"
|
||||
},
|
||||
{
|
||||
"NAME": "sr0",
|
||||
"MAJ:MIN": "11:0",
|
||||
"RM": "1",
|
||||
"SIZE": "64.8M",
|
||||
"RO": "0",
|
||||
"TYPE": "rom"
|
||||
}
|
||||
]
|
||||
$ lsblk | jc --lsblk -p
|
||||
[
|
||||
{
|
||||
"name": "sda",
|
||||
"maj_min": "8:0",
|
||||
"rm": false,
|
||||
"size": "20G",
|
||||
"ro": false,
|
||||
"type": "disk",
|
||||
"mountpoint": null
|
||||
},
|
||||
{
|
||||
"name": "sda1",
|
||||
"maj_min": "8:1",
|
||||
"rm": false,
|
||||
"size": "1G",
|
||||
"ro": false,
|
||||
"type": "part",
|
||||
"mountpoint": "/boot"
|
||||
},
|
||||
...
|
||||
]
|
||||
|
||||
$ lsblk -o +KNAME,FSTYPE,LABEL,UUID,PARTLABEL,PARTUUID,RA,MODEL,SERIAL,STATE,OWNER,GROUP,MODE,ALIGNMENT,MIN-IO,OPT-IO,PHY-SEC,LOG-SEC,ROTA,SCHED,RQ-SIZE,DISC-ALN,DISC-GRAN,DISC-MAX,DISC-ZERO,WSAME,WWN,RAND,PKNAME,HCTL,TRAN,REV,VENDOR | jc --lsblk -p
|
||||
[
|
||||
{
|
||||
"name": "sda",
|
||||
"maj_min": "8:0",
|
||||
"rm": false,
|
||||
"size": "20G",
|
||||
"ro": false,
|
||||
"type": "disk",
|
||||
"mountpoint": null,
|
||||
"kname": "sda",
|
||||
"fstype": null,
|
||||
"label": null,
|
||||
"uuid": null,
|
||||
"partlabel": null,
|
||||
"partuuid": null,
|
||||
"ra": 4096,
|
||||
"model": "VMware Virtual S",
|
||||
"serial": null,
|
||||
"state": "running",
|
||||
"owner": "root",
|
||||
"group": "disk",
|
||||
"mode": "brw-rw----",
|
||||
"alignment": 0,
|
||||
"min_io": 512,
|
||||
"opt_io": 0,
|
||||
"phy_sec": 512,
|
||||
"log_sec": 512,
|
||||
"rota": true,
|
||||
"sched": "deadline",
|
||||
"rq_size": 128,
|
||||
"disc_aln": 0,
|
||||
"disc_gran": "0B",
|
||||
"disc_max": "0B",
|
||||
"disc_zero": false,
|
||||
"wsame": "32M",
|
||||
"wwn": null,
|
||||
"rand": true,
|
||||
"pkname": null,
|
||||
"hctl": "0:0:0:0",
|
||||
"tran": "spi",
|
||||
"rev": "1.0",
|
||||
"vendor": "VMware,"
|
||||
},
|
||||
{
|
||||
"name": "sda1",
|
||||
"maj_min": "8:1",
|
||||
"rm": false,
|
||||
"size": "1G",
|
||||
"ro": false,
|
||||
"type": "part",
|
||||
"mountpoint": "/boot",
|
||||
"kname": "sda1",
|
||||
"fstype": "xfs",
|
||||
"label": null,
|
||||
"uuid": "05d927bb-5875-49e3-ada1-7f46cb31c932",
|
||||
"partlabel": null,
|
||||
"partuuid": null,
|
||||
"ra": 4096,
|
||||
"model": null,
|
||||
"serial": null,
|
||||
"state": null,
|
||||
"owner": "root",
|
||||
"group": "disk",
|
||||
"mode": "brw-rw----",
|
||||
"alignment": 0,
|
||||
"min_io": 512,
|
||||
"opt_io": 0,
|
||||
"phy_sec": 512,
|
||||
"log_sec": 512,
|
||||
"rota": true,
|
||||
"sched": "deadline",
|
||||
"rq_size": 128,
|
||||
"disc_aln": 0,
|
||||
"disc_gran": "0B",
|
||||
"disc_max": "0B",
|
||||
"disc_zero": false,
|
||||
"wsame": "32M",
|
||||
"wwn": null,
|
||||
"rand": true,
|
||||
"pkname": "sda",
|
||||
"hctl": null,
|
||||
"tran": null,
|
||||
"rev": null,
|
||||
"vendor": null
|
||||
},
|
||||
...
|
||||
]
|
||||
|
||||
$ lsblk -o +KNAME,FSTYPE,LABEL,UUID,PARTLABEL,PARTUUID,RA,MODEL,SERIAL,STATE,OWNER,GROUP,MODE,ALIGNMENT,MIN-IO,OPT-IO,PHY-SEC,LOG-SEC,ROTA,SCHED,RQ-SIZE,DISC-ALN,DISC-GRAN,DISC-MAX,DISC-ZERO,WSAME,WWN,RAND,PKNAME,HCTL,TRAN,REV,VENDOR | jc --lsblk -p -r
|
||||
[
|
||||
{
|
||||
"name": "sda",
|
||||
"maj_min": "8:0",
|
||||
"rm": "0",
|
||||
"size": "20G",
|
||||
"ro": "0",
|
||||
"type": "disk",
|
||||
"mountpoint": null,
|
||||
"kname": "sda",
|
||||
"fstype": null,
|
||||
"label": null,
|
||||
"uuid": null,
|
||||
"partlabel": null,
|
||||
"partuuid": null,
|
||||
"ra": "4096",
|
||||
"model": "VMware Virtual S",
|
||||
"serial": null,
|
||||
"state": "running",
|
||||
"owner": "root",
|
||||
"group": "disk",
|
||||
"mode": "brw-rw----",
|
||||
"alignment": "0",
|
||||
"min_io": "512",
|
||||
"opt_io": "0",
|
||||
"phy_sec": "512",
|
||||
"log_sec": "512",
|
||||
"rota": "1",
|
||||
"sched": "deadline",
|
||||
"rq_size": "128",
|
||||
"disc_aln": "0",
|
||||
"disc_gran": "0B",
|
||||
"disc_max": "0B",
|
||||
"disc_zero": "0",
|
||||
"wsame": "32M",
|
||||
"wwn": null,
|
||||
"rand": "1",
|
||||
"pkname": null,
|
||||
"hctl": "0:0:0:0",
|
||||
"tran": "spi",
|
||||
"rev": "1.0",
|
||||
"vendor": "VMware,"
|
||||
},
|
||||
{
|
||||
"name": "sda1",
|
||||
"maj_min": "8:1",
|
||||
"rm": "0",
|
||||
"size": "1G",
|
||||
"ro": "0",
|
||||
"type": "part",
|
||||
"mountpoint": "/boot",
|
||||
"kname": "sda1",
|
||||
"fstype": "xfs",
|
||||
"label": null,
|
||||
"uuid": "05d927bb-5875-49e3-ada1-7f46cb31c932",
|
||||
"partlabel": null,
|
||||
"partuuid": null,
|
||||
"ra": "4096",
|
||||
"model": null,
|
||||
"serial": null,
|
||||
"state": null,
|
||||
"owner": "root",
|
||||
"group": "disk",
|
||||
"mode": "brw-rw----",
|
||||
"alignment": "0",
|
||||
"min_io": "512",
|
||||
"opt_io": "0",
|
||||
"phy_sec": "512",
|
||||
"log_sec": "512",
|
||||
"rota": "1",
|
||||
"sched": "deadline",
|
||||
"rq_size": "128",
|
||||
"disc_aln": "0",
|
||||
"disc_gran": "0B",
|
||||
"disc_max": "0B",
|
||||
"disc_zero": "0",
|
||||
"wsame": "32M",
|
||||
"wwn": null,
|
||||
"rand": "1",
|
||||
"pkname": "sda",
|
||||
"hctl": null,
|
||||
"tran": null,
|
||||
"rev": null,
|
||||
"vendor": null
|
||||
},
|
||||
...
|
||||
]
|
||||
"""
|
||||
import string
|
||||
import jc.utils
|
||||
|
||||
|
||||
def parse(data):
|
||||
def process(proc_data):
|
||||
"""
|
||||
Final processing to conform to the schema.
|
||||
|
||||
# code adapted from Conor Heine at:
|
||||
# https://gist.github.com/cahna/43a1a3ff4d075bcd71f9d7120037a501
|
||||
Parameters:
|
||||
|
||||
proc_data: (dictionary) raw structured data to process
|
||||
|
||||
Returns:
|
||||
|
||||
dictionary structured data with the following schema:
|
||||
|
||||
[
|
||||
{
|
||||
"name": string,
|
||||
"maj_min": string,
|
||||
"rm": boolean,
|
||||
"size": string,
|
||||
"ro": boolean,
|
||||
"type": string,
|
||||
"mountpoint": string,
|
||||
"kname": string,
|
||||
"fstype": string,
|
||||
"label": string,
|
||||
"uuid": string,
|
||||
"partlabel": string,
|
||||
"partuuid": string,
|
||||
"ra": integer,
|
||||
"model": string,
|
||||
"serial": string,
|
||||
"state": string,
|
||||
"owner": string,
|
||||
"group": string,
|
||||
"mode": string,
|
||||
"alignment": integer,
|
||||
"min_io": integer,
|
||||
"opt_io": integer,
|
||||
"phy_sec": integer,
|
||||
"log_sec": integer,
|
||||
"rota": boolean,
|
||||
"sched": string,
|
||||
"rq_size": integer,
|
||||
"disc_aln": integer,
|
||||
"disc_gran": string,
|
||||
"disc_max": string,
|
||||
"disc_zero": boolean,
|
||||
"wsame": string,
|
||||
"wwn": string,
|
||||
"rand": boolean,
|
||||
"pkname": string,
|
||||
"hctl": string,
|
||||
"tran": string,
|
||||
"rev": string,
|
||||
"vendor": string
|
||||
}
|
||||
]
|
||||
"""
|
||||
for entry in proc_data:
|
||||
# boolean changes
|
||||
bool_list = ['rm', 'ro', 'rota', 'disc_zero', 'rand']
|
||||
for key in bool_list:
|
||||
if key in entry:
|
||||
try:
|
||||
key_bool = bool(int(entry[key]))
|
||||
entry[key] = key_bool
|
||||
except (ValueError):
|
||||
entry[key] = None
|
||||
|
||||
# integer changes
|
||||
int_list = ['ra', 'alignment', 'min_io', 'opt_io', 'phy_sec', 'log_sec', 'rq_size', 'disc_aln']
|
||||
for key in int_list:
|
||||
if key in entry:
|
||||
try:
|
||||
key_int = int(entry[key])
|
||||
entry[key] = key_int
|
||||
except (ValueError):
|
||||
entry[key] = None
|
||||
|
||||
return proc_data
|
||||
|
||||
|
||||
def parse(data, raw=False, quiet=False):
|
||||
"""
|
||||
Main text parsing function
|
||||
|
||||
Parameters:
|
||||
|
||||
data: (string) text data to parse
|
||||
raw: (boolean) output preprocessed JSON if True
|
||||
quiet: (boolean) suppress warning messages if True
|
||||
|
||||
Returns:
|
||||
|
||||
dictionary raw or processed structured data
|
||||
"""
|
||||
|
||||
# compatible options: linux, darwin, cygwin, win32, aix, freebsd
|
||||
compatible = ['linux']
|
||||
|
||||
if not quiet:
|
||||
jc.utils.compatibility(__name__, compatible)
|
||||
|
||||
# unicode \u2063 = invisible separator and should not be seen in lsblk output
|
||||
delim = '\u2063'
|
||||
|
||||
raw_output = []
|
||||
linedata = data.splitlines()
|
||||
# Clear any blank lines
|
||||
cleandata = list(filter(None, linedata))
|
||||
cleandata = data.splitlines()
|
||||
headers = [h for h in ' '.join(cleandata[0].strip().split()).split() if h]
|
||||
|
||||
raw_data = map(lambda s: s.strip().split(None, len(headers) - 1), cleandata[1:])
|
||||
output = [dict(zip(headers, r)) for r in raw_data]
|
||||
header_text = cleandata.pop(0).lower()
|
||||
header_text = header_text.replace(':', '_')
|
||||
header_text = header_text.replace('-', '_')
|
||||
header_text = header_text + ' '
|
||||
|
||||
for entry in output:
|
||||
entry['NAME'] = entry['NAME'].encode('ascii', errors='ignore').decode()
|
||||
header_list = header_text.split()
|
||||
|
||||
return output
|
||||
# find each column index and end position
|
||||
header_search = [header_list[0]]
|
||||
for h in header_list[1:]:
|
||||
header_search.append(' ' + h + ' ')
|
||||
|
||||
header_spec_list = []
|
||||
for i, column in enumerate(header_list[0:len(header_list) - 1]):
|
||||
header_spec = {
|
||||
'name': column,
|
||||
'end': header_text.find(header_search[i + 1])
|
||||
}
|
||||
|
||||
header_spec_list.append(header_spec)
|
||||
|
||||
# parse lines
|
||||
if cleandata:
|
||||
for entry in cleandata:
|
||||
output_line = {}
|
||||
|
||||
# insert new separator since data can contain spaces
|
||||
for col in reversed(header_list):
|
||||
# find the right header_spec
|
||||
for h_spec in header_spec_list:
|
||||
if h_spec['name'] == col:
|
||||
h_end = h_spec['end']
|
||||
# check if the location contains whitespace. if not
|
||||
# then move to the left until a space is found
|
||||
while h_end > 0 and entry[h_end] not in string.whitespace:
|
||||
h_end -= 1
|
||||
|
||||
# insert custom delimiter
|
||||
entry = entry[:h_end] + delim + entry[h_end + 1:]
|
||||
|
||||
# create the entry list from the new custom delimiter
|
||||
entry_list = entry.split(delim, maxsplit=len(header_list) - 1)
|
||||
|
||||
# clean up leading and trailing spaces in entry
|
||||
clean_entry_list = []
|
||||
for col in entry_list:
|
||||
clean_entry = col.strip().rstrip()
|
||||
if clean_entry == '':
|
||||
clean_entry = None
|
||||
|
||||
clean_entry_list.append(clean_entry)
|
||||
|
||||
output_line = dict(zip(header_list, clean_entry_list))
|
||||
raw_output.append(output_line)
|
||||
|
||||
# clean up non-ascii characters, if any
|
||||
for entry in raw_output:
|
||||
entry['name'] = entry['name'].encode('ascii', errors='ignore').decode()
|
||||
|
||||
if raw:
|
||||
return raw_output
|
||||
else:
|
||||
return process(raw_output)
|
||||
|
||||
@@ -3,78 +3,175 @@
|
||||
Usage:
|
||||
specify --lsmod as the first argument if the piped input is coming from lsmod
|
||||
|
||||
Example:
|
||||
Examples:
|
||||
|
||||
$ lsmod | jc --lsmod -p
|
||||
[
|
||||
{
|
||||
"Module": "nf_nat_ipv4",
|
||||
"Size": "14115",
|
||||
"Used": "1",
|
||||
"By": [
|
||||
"iptable_nat"
|
||||
$ lsmod | jc --lsmod -p
|
||||
[
|
||||
...
|
||||
{
|
||||
"module": "nf_nat",
|
||||
"size": 26583,
|
||||
"used": 3,
|
||||
"by": [
|
||||
"nf_nat_ipv4",
|
||||
"nf_nat_ipv6",
|
||||
"nf_nat_masquerade_ipv4"
|
||||
]
|
||||
},
|
||||
{
|
||||
"module": "iptable_mangle",
|
||||
"size": 12695,
|
||||
"used": 1
|
||||
},
|
||||
{
|
||||
"module": "iptable_security",
|
||||
"size": 12705,
|
||||
"used": 1
|
||||
},
|
||||
{
|
||||
"module": "iptable_raw",
|
||||
"size": 12678,
|
||||
"used": 1
|
||||
},
|
||||
{
|
||||
"module": "nf_conntrack",
|
||||
"size": 139224,
|
||||
"used": 7,
|
||||
"by": [
|
||||
"nf_nat",
|
||||
"nf_nat_ipv4",
|
||||
"nf_nat_ipv6",
|
||||
"xt_conntrack",
|
||||
"nf_nat_masquerade_ipv4",
|
||||
"nf_conntrack_ipv4",
|
||||
"nf_conntrack_ipv6"
|
||||
]
|
||||
},
|
||||
...
|
||||
]
|
||||
},
|
||||
{
|
||||
"Module": "nf_nat",
|
||||
"Size": "26583",
|
||||
"Used": "3",
|
||||
"By": [
|
||||
"nf_nat_ipv4",
|
||||
"nf_nat_ipv6",
|
||||
"nf_nat_masquerade_ipv4"
|
||||
|
||||
$ lsmod | jc --lsmod -p -r
|
||||
[
|
||||
...
|
||||
{
|
||||
"module": "nf_conntrack",
|
||||
"size": "139224",
|
||||
"used": "7",
|
||||
"by": [
|
||||
"nf_nat",
|
||||
"nf_nat_ipv4",
|
||||
"nf_nat_ipv6",
|
||||
"xt_conntrack",
|
||||
"nf_nat_masquerade_ipv4",
|
||||
"nf_conntrack_ipv4",
|
||||
"nf_conntrack_ipv6"
|
||||
]
|
||||
},
|
||||
{
|
||||
"module": "ip_set",
|
||||
"size": "45799",
|
||||
"used": "0"
|
||||
},
|
||||
{
|
||||
"module": "nfnetlink",
|
||||
"size": "14519",
|
||||
"used": "1",
|
||||
"by": [
|
||||
"ip_set"
|
||||
]
|
||||
},
|
||||
{
|
||||
"module": "ebtable_filter",
|
||||
"size": "12827",
|
||||
"used": "1"
|
||||
},
|
||||
{
|
||||
"module": "ebtables",
|
||||
"size": "35009",
|
||||
"used": "2",
|
||||
"by": [
|
||||
"ebtable_nat",
|
||||
"ebtable_filter"
|
||||
]
|
||||
},
|
||||
...
|
||||
]
|
||||
},
|
||||
{
|
||||
"Module": "iptable_mangle",
|
||||
"Size": "12695",
|
||||
"Used": "1"
|
||||
},
|
||||
{
|
||||
"Module": "iptable_security",
|
||||
"Size": "12705",
|
||||
"Used": "1"
|
||||
},
|
||||
{
|
||||
"Module": "iptable_raw",
|
||||
"Size": "12678",
|
||||
"Used": "1"
|
||||
},
|
||||
{
|
||||
"Module": "nf_conntrack",
|
||||
"Size": "139224",
|
||||
"Used": "7",
|
||||
"By": [
|
||||
"nf_nat",
|
||||
"nf_nat_ipv4",
|
||||
"nf_nat_ipv6",
|
||||
"xt_conntrack",
|
||||
"nf_nat_masquerade_ipv4",
|
||||
"nf_conntrack_ipv4",
|
||||
"nf_conntrack_ipv6"
|
||||
]
|
||||
},
|
||||
...
|
||||
]
|
||||
"""
|
||||
import jc.utils
|
||||
|
||||
|
||||
def parse(data):
|
||||
def process(proc_data):
|
||||
"""
|
||||
Final processing to conform to the schema.
|
||||
|
||||
Parameters:
|
||||
|
||||
proc_data: (dictionary) raw structured data to process
|
||||
|
||||
Returns:
|
||||
|
||||
dictionary structured data with the following schema:
|
||||
|
||||
[
|
||||
{
|
||||
"module": string,
|
||||
"size": integer,
|
||||
"used": integer,
|
||||
"by": [
|
||||
string
|
||||
]
|
||||
}
|
||||
]
|
||||
"""
|
||||
for entry in proc_data:
|
||||
# integer changes
|
||||
int_list = ['size', 'used']
|
||||
for key in int_list:
|
||||
if key in entry:
|
||||
try:
|
||||
key_int = int(entry[key])
|
||||
entry[key] = key_int
|
||||
except (ValueError):
|
||||
entry[key] = None
|
||||
|
||||
return proc_data
|
||||
|
||||
|
||||
def parse(data, raw=False, quiet=False):
|
||||
"""
|
||||
Main text parsing function
|
||||
|
||||
Parameters:
|
||||
|
||||
data: (string) text data to parse
|
||||
raw: (boolean) output preprocessed JSON if True
|
||||
quiet: (boolean) suppress warning messages if True
|
||||
|
||||
Returns:
|
||||
|
||||
dictionary raw or processed structured data
|
||||
"""
|
||||
|
||||
# compatible options: linux, darwin, cygwin, win32, aix, freebsd
|
||||
compatible = ['linux']
|
||||
|
||||
if not quiet:
|
||||
jc.utils.compatibility(__name__, compatible)
|
||||
|
||||
# code adapted from Conor Heine at:
|
||||
# https://gist.github.com/cahna/43a1a3ff4d075bcd71f9d7120037a501
|
||||
|
||||
cleandata = data.splitlines()
|
||||
headers = [h for h in ' '.join(cleandata[0].strip().split()).split() if h]
|
||||
|
||||
headers.pop(-1)
|
||||
headers.append('By')
|
||||
headers = [h for h in ' '.join(cleandata[0].lower().strip().split()).split() if h]
|
||||
|
||||
raw_data = map(lambda s: s.strip().split(None, len(headers) - 1), cleandata[1:])
|
||||
output = [dict(zip(headers, r)) for r in raw_data]
|
||||
raw_output = [dict(zip(headers, r)) for r in raw_data]
|
||||
|
||||
for mod in output:
|
||||
if 'By' in mod:
|
||||
mod['By'] = mod['By'].split(',')
|
||||
for mod in raw_output:
|
||||
if 'by' in mod:
|
||||
mod['by'] = mod['by'].split(',')
|
||||
|
||||
return output
|
||||
if raw:
|
||||
return raw_output
|
||||
else:
|
||||
return process(raw_output)
|
||||
|
||||
@@ -3,80 +3,156 @@
|
||||
Usage:
|
||||
specify --lsof as the first argument if the piped input is coming from lsof
|
||||
|
||||
Limitations:
|
||||
No additional columns are supported
|
||||
Examples:
|
||||
|
||||
Example:
|
||||
$ sudo lsof | jc --lsof -p
|
||||
[
|
||||
{
|
||||
"command": "systemd",
|
||||
"pid": 1,
|
||||
"tid": null,
|
||||
"user": "root",
|
||||
"fd": "cwd",
|
||||
"type": "DIR",
|
||||
"device": "253,0",
|
||||
"size_off": 224,
|
||||
"node": 64,
|
||||
"name": "/"
|
||||
},
|
||||
{
|
||||
"command": "systemd",
|
||||
"pid": 1,
|
||||
"tid": null,
|
||||
"user": "root",
|
||||
"fd": "rtd",
|
||||
"type": "DIR",
|
||||
"device": "253,0",
|
||||
"size_off": 224,
|
||||
"node": 64,
|
||||
"name": "/"
|
||||
},
|
||||
{
|
||||
"command": "systemd",
|
||||
"pid": 1,
|
||||
"tid": null,
|
||||
"user": "root",
|
||||
"fd": "txt",
|
||||
"type": "REG",
|
||||
"device": "253,0",
|
||||
"size_off": 1624520,
|
||||
"node": 50360451,
|
||||
"name": "/usr/lib/systemd/systemd"
|
||||
},
|
||||
...
|
||||
]
|
||||
|
||||
$ sudo lsof | jc --lsof -p
|
||||
[
|
||||
{
|
||||
"COMMAND": "systemd",
|
||||
"PID": "1",
|
||||
"TID": null,
|
||||
"USER": "root",
|
||||
"FD": "cwd",
|
||||
"TYPE": "DIR",
|
||||
"DEVICE": "253,0",
|
||||
"SIZE/OFF": "224",
|
||||
"NODE": "64",
|
||||
"NAME": "/"
|
||||
},
|
||||
{
|
||||
"COMMAND": "systemd",
|
||||
"PID": "1",
|
||||
"TID": null,
|
||||
"USER": "root",
|
||||
"FD": "rtd",
|
||||
"TYPE": "DIR",
|
||||
"DEVICE": "253,0",
|
||||
"SIZE/OFF": "224",
|
||||
"NODE": "64",
|
||||
"NAME": "/"
|
||||
},
|
||||
{
|
||||
"COMMAND": "systemd",
|
||||
"PID": "1",
|
||||
"TID": null,
|
||||
"USER": "root",
|
||||
"FD": "txt",
|
||||
"TYPE": "REG",
|
||||
"DEVICE": "253,0",
|
||||
"SIZE/OFF": "1624520",
|
||||
"NODE": "50360451",
|
||||
"NAME": "/usr/lib/systemd/systemd"
|
||||
},
|
||||
{
|
||||
"COMMAND": "systemd",
|
||||
"PID": "1",
|
||||
"TID": null,
|
||||
"USER": "root",
|
||||
"FD": "mem",
|
||||
"TYPE": "REG",
|
||||
"DEVICE": "253,0",
|
||||
"SIZE/OFF": "20064",
|
||||
"NODE": "8146",
|
||||
"NAME": "/usr/lib64/libuuid.so.1.3.0"
|
||||
},
|
||||
{
|
||||
"COMMAND": "systemd",
|
||||
"PID": "1",
|
||||
"TID": null,
|
||||
"USER": "root",
|
||||
"FD": "mem",
|
||||
"TYPE": "REG",
|
||||
"DEVICE": "253,0",
|
||||
"SIZE/OFF": "265600",
|
||||
"NODE": "8147",
|
||||
"NAME": "/usr/lib64/libblkid.so.1.1.0"
|
||||
},
|
||||
...
|
||||
]
|
||||
$ sudo lsof | jc --lsof -p -r
|
||||
[
|
||||
{
|
||||
"command": "systemd",
|
||||
"pid": "1",
|
||||
"tid": null,
|
||||
"user": "root",
|
||||
"fd": "cwd",
|
||||
"type": "DIR",
|
||||
"device": "8,2",
|
||||
"size_off": "4096",
|
||||
"node": "2",
|
||||
"name": "/"
|
||||
},
|
||||
{
|
||||
"command": "systemd",
|
||||
"pid": "1",
|
||||
"tid": null,
|
||||
"user": "root",
|
||||
"fd": "rtd",
|
||||
"type": "DIR",
|
||||
"device": "8,2",
|
||||
"size_off": "4096",
|
||||
"node": "2",
|
||||
"name": "/"
|
||||
},
|
||||
{
|
||||
"command": "systemd",
|
||||
"pid": "1",
|
||||
"tid": null,
|
||||
"user": "root",
|
||||
"fd": "txt",
|
||||
"type": "REG",
|
||||
"device": "8,2",
|
||||
"size_off": "1595792",
|
||||
"node": "668802",
|
||||
"name": "/lib/systemd/systemd"
|
||||
},
|
||||
...
|
||||
]
|
||||
"""
|
||||
import string
|
||||
import jc.utils
|
||||
|
||||
|
||||
def parse(data):
|
||||
output = []
|
||||
def process(proc_data):
|
||||
"""
|
||||
Final processing to conform to the schema.
|
||||
|
||||
Parameters:
|
||||
|
||||
proc_data: (dictionary) raw structured data to process
|
||||
|
||||
Returns:
|
||||
|
||||
dictionary structured data with the following schema:
|
||||
|
||||
[
|
||||
{
|
||||
"command": string,
|
||||
"pid": integer,
|
||||
"tid": integer,
|
||||
"user": string,
|
||||
"fd": string,
|
||||
"type": string,
|
||||
"device": string,
|
||||
"size_off": integer,
|
||||
"node": integer,
|
||||
"name": string
|
||||
}
|
||||
]
|
||||
"""
|
||||
for entry in proc_data:
|
||||
# integer changes
|
||||
int_list = ['pid', 'tid', 'size_off', 'node']
|
||||
for key in int_list:
|
||||
if key in entry:
|
||||
try:
|
||||
key_int = int(entry[key])
|
||||
entry[key] = key_int
|
||||
except (ValueError, TypeError):
|
||||
entry[key] = None
|
||||
return proc_data
|
||||
|
||||
|
||||
def parse(data, raw=False, quiet=False):
|
||||
"""
|
||||
Main text parsing function
|
||||
|
||||
Parameters:
|
||||
|
||||
data: (string) text data to parse
|
||||
raw: (boolean) output preprocessed JSON if True
|
||||
quiet: (boolean) suppress warning messages if True
|
||||
|
||||
Returns:
|
||||
|
||||
dictionary raw or processed structured data
|
||||
"""
|
||||
|
||||
# compatible options: linux, darwin, cygwin, win32, aix, freebsd
|
||||
compatible = ['linux']
|
||||
|
||||
if not quiet:
|
||||
jc.utils.compatibility(__name__, compatible)
|
||||
|
||||
raw_output = []
|
||||
|
||||
linedata = data.splitlines()
|
||||
|
||||
@@ -86,10 +162,15 @@ def parse(data):
|
||||
if cleandata:
|
||||
|
||||
# find column value of last character of each header
|
||||
header_row = cleandata.pop(0)
|
||||
headers = header_row.split()
|
||||
header_spec = []
|
||||
header_text = cleandata.pop(0).lower()
|
||||
|
||||
# clean up 'size/off' header
|
||||
# even though forward slash in a key is valid json, it can make things difficult
|
||||
header_row = header_text.replace('/', '_')
|
||||
|
||||
headers = header_row.split()
|
||||
|
||||
header_spec = []
|
||||
for i, h in enumerate(headers):
|
||||
# header tuple is (index, header_name, col)
|
||||
header_spec.append((i, h, header_row.find(h) + len(h)))
|
||||
@@ -102,16 +183,24 @@ def parse(data):
|
||||
temp_line = entry.split(maxsplit=len(headers) - 1)
|
||||
|
||||
for spec in header_spec:
|
||||
if spec[1] == 'COMMAND' or spec[1] == 'NAME':
|
||||
|
||||
index = spec[0]
|
||||
header_name = spec[1]
|
||||
col = spec[2] - 1 # subtract one since column starts at 0 instead of 1
|
||||
|
||||
if header_name == 'command' or header_name == 'name':
|
||||
continue
|
||||
if entry[spec[2] - 1] == ' ':
|
||||
temp_line.insert(spec[0], None)
|
||||
if entry[col] in string.whitespace:
|
||||
temp_line.insert(index, None)
|
||||
|
||||
name = ' '.join(temp_line[9:])
|
||||
fixed_line = temp_line[0:9]
|
||||
fixed_line.append(name)
|
||||
|
||||
output_line = dict(zip(headers, fixed_line))
|
||||
output.append(output_line)
|
||||
raw_output.append(output_line)
|
||||
|
||||
return output
|
||||
if raw:
|
||||
return raw_output
|
||||
else:
|
||||
return process(raw_output)
|
||||
|
||||
@@ -5,52 +5,100 @@ Usage:
|
||||
|
||||
Example:
|
||||
|
||||
$ mount | jc --mount -p
|
||||
[
|
||||
{
|
||||
"filesystem": "sysfs",
|
||||
"mount_point": "/sys",
|
||||
"type": "sysfs",
|
||||
"access": [
|
||||
"rw",
|
||||
"nosuid",
|
||||
"nodev",
|
||||
"noexec",
|
||||
"relatime"
|
||||
$ mount | jc --mount -p
|
||||
[
|
||||
{
|
||||
"filesystem": "sysfs",
|
||||
"mount_point": "/sys",
|
||||
"type": "sysfs",
|
||||
"access": [
|
||||
"rw",
|
||||
"nosuid",
|
||||
"nodev",
|
||||
"noexec",
|
||||
"relatime"
|
||||
]
|
||||
},
|
||||
{
|
||||
"filesystem": "proc",
|
||||
"mount_point": "/proc",
|
||||
"type": "proc",
|
||||
"access": [
|
||||
"rw",
|
||||
"nosuid",
|
||||
"nodev",
|
||||
"noexec",
|
||||
"relatime"
|
||||
]
|
||||
},
|
||||
{
|
||||
"filesystem": "udev",
|
||||
"mount_point": "/dev",
|
||||
"type": "devtmpfs",
|
||||
"access": [
|
||||
"rw",
|
||||
"nosuid",
|
||||
"relatime",
|
||||
"size=977500k",
|
||||
"nr_inodes=244375",
|
||||
"mode=755"
|
||||
]
|
||||
},
|
||||
...
|
||||
]
|
||||
},
|
||||
{
|
||||
"filesystem": "proc",
|
||||
"mount_point": "/proc",
|
||||
"type": "proc",
|
||||
"access": [
|
||||
"rw",
|
||||
"nosuid",
|
||||
"nodev",
|
||||
"noexec",
|
||||
"relatime"
|
||||
]
|
||||
},
|
||||
{
|
||||
"filesystem": "udev",
|
||||
"mount_point": "/dev",
|
||||
"type": "devtmpfs",
|
||||
"access": [
|
||||
"rw",
|
||||
"nosuid",
|
||||
"relatime",
|
||||
"size=977500k",
|
||||
"nr_inodes=244375",
|
||||
"mode=755"
|
||||
]
|
||||
},
|
||||
...
|
||||
]
|
||||
"""
|
||||
import jc.utils
|
||||
|
||||
|
||||
def parse(data):
|
||||
output = []
|
||||
def process(proc_data):
|
||||
"""
|
||||
Final processing to conform to the schema.
|
||||
|
||||
Parameters:
|
||||
|
||||
proc_data: (dictionary) raw structured data to process
|
||||
|
||||
Returns:
|
||||
|
||||
dictionary structured data with the following schema:
|
||||
|
||||
[
|
||||
{
|
||||
"filesystem": string,
|
||||
"mount_point": string,
|
||||
"type": string,
|
||||
"access": [
|
||||
string
|
||||
]
|
||||
}
|
||||
]
|
||||
"""
|
||||
# nothing to process
|
||||
return proc_data
|
||||
|
||||
|
||||
def parse(data, raw=False, quiet=False):
|
||||
"""
|
||||
Main text parsing function
|
||||
|
||||
Parameters:
|
||||
|
||||
data: (string) text data to parse
|
||||
raw: (boolean) output preprocessed JSON if True
|
||||
quiet: (boolean) suppress warning messages if True
|
||||
|
||||
Returns:
|
||||
|
||||
dictionary raw or processed structured data
|
||||
"""
|
||||
|
||||
# compatible options: linux, darwin, cygwin, win32, aix, freebsd
|
||||
compatible = ['linux']
|
||||
|
||||
if not quiet:
|
||||
jc.utils.compatibility(__name__, compatible)
|
||||
|
||||
raw_output = []
|
||||
|
||||
linedata = data.splitlines()
|
||||
|
||||
@@ -68,8 +116,11 @@ def parse(data):
|
||||
|
||||
access = parsed_line[5].lstrip('(').rstrip(')').split(',')
|
||||
|
||||
output_line['access'] = access
|
||||
output_line['options'] = access
|
||||
|
||||
output.append(output_line)
|
||||
raw_output.append(output_line)
|
||||
|
||||
return output
|
||||
if raw:
|
||||
return raw_output
|
||||
else:
|
||||
return process(raw_output)
|
||||
|
||||
@@ -3,195 +3,562 @@
|
||||
Usage:
|
||||
Specify --netstat as the first argument if the piped input is coming from netstat
|
||||
|
||||
Supports -lnp netstat options
|
||||
|
||||
Limitations:
|
||||
Only supports TCP and UDP
|
||||
|
||||
Examples:
|
||||
|
||||
$ netstat -p | jc --netstat -p
|
||||
[
|
||||
{
|
||||
"transport_protocol": "tcp",
|
||||
"network_protocol": "ipv4",
|
||||
"local_address": "localhost.localdo",
|
||||
"local_port": "34480",
|
||||
"foreign_address": "lb-192-30-255-113",
|
||||
"foreign_port": "https",
|
||||
"state": "ESTABLISHED",
|
||||
"pid": 53550,
|
||||
"program_name": "git-remote-ht",
|
||||
"receive_q": 0,
|
||||
"send_q": 0
|
||||
},
|
||||
{
|
||||
"transport_protocol": "tcp",
|
||||
"network_protocol": "ipv4",
|
||||
"local_address": "localhost.localdo",
|
||||
"local_port": "34478",
|
||||
"foreign_address": "lb-192-30-255-113",
|
||||
"foreign_port": "https",
|
||||
"state": "ESTABLISHED",
|
||||
"pid": 53550,
|
||||
"program_name": "git-remote-ht",
|
||||
"receive_q": 0,
|
||||
"send_q": 0
|
||||
}
|
||||
]
|
||||
$ sudo netstat -apee | jc --netstat -p
|
||||
[
|
||||
{
|
||||
"proto": "tcp",
|
||||
"recv_q": 0,
|
||||
"send_q": 0,
|
||||
"local_address": "localhost",
|
||||
"foreign_address": "0.0.0.0",
|
||||
"state": "LISTEN",
|
||||
"user": "systemd-resolve",
|
||||
"inode": 26958,
|
||||
"program_name": "systemd-resolve",
|
||||
"kind": "network",
|
||||
"pid": 887,
|
||||
"local_port": "domain",
|
||||
"foreign_port": "*",
|
||||
"transport_protocol": "tcp",
|
||||
"network_protocol": "ipv4"
|
||||
},
|
||||
{
|
||||
"proto": "tcp",
|
||||
"recv_q": 0,
|
||||
"send_q": 0,
|
||||
"local_address": "0.0.0.0",
|
||||
"foreign_address": "0.0.0.0",
|
||||
"state": "LISTEN",
|
||||
"user": "root",
|
||||
"inode": 30499,
|
||||
"program_name": "sshd",
|
||||
"kind": "network",
|
||||
"pid": 1186,
|
||||
"local_port": "ssh",
|
||||
"foreign_port": "*",
|
||||
"transport_protocol": "tcp",
|
||||
"network_protocol": "ipv4"
|
||||
},
|
||||
{
|
||||
"proto": "tcp",
|
||||
"recv_q": 0,
|
||||
"send_q": 0,
|
||||
"local_address": "localhost",
|
||||
"foreign_address": "localhost",
|
||||
"state": "ESTABLISHED",
|
||||
"user": "root",
|
||||
"inode": 46829,
|
||||
"program_name": "sshd: root",
|
||||
"kind": "network",
|
||||
"pid": 2242,
|
||||
"local_port": "ssh",
|
||||
"foreign_port": "52186",
|
||||
"transport_protocol": "tcp",
|
||||
"network_protocol": "ipv4",
|
||||
"foreign_port_num": 52186
|
||||
},
|
||||
{
|
||||
"proto": "tcp",
|
||||
"recv_q": 0,
|
||||
"send_q": 0,
|
||||
"local_address": "localhost",
|
||||
"foreign_address": "localhost",
|
||||
"state": "ESTABLISHED",
|
||||
"user": "root",
|
||||
"inode": 46828,
|
||||
"program_name": "ssh",
|
||||
"kind": "network",
|
||||
"pid": 2241,
|
||||
"local_port": "52186",
|
||||
"foreign_port": "ssh",
|
||||
"transport_protocol": "tcp",
|
||||
"network_protocol": "ipv4",
|
||||
"local_port_num": 52186
|
||||
},
|
||||
{
|
||||
"proto": "tcp6",
|
||||
"recv_q": 0,
|
||||
"send_q": 0,
|
||||
"local_address": "[::]",
|
||||
"foreign_address": "[::]",
|
||||
"state": "LISTEN",
|
||||
"user": "root",
|
||||
"inode": 30510,
|
||||
"program_name": "sshd",
|
||||
"kind": "network",
|
||||
"pid": 1186,
|
||||
"local_port": "ssh",
|
||||
"foreign_port": "*",
|
||||
"transport_protocol": "tcp",
|
||||
"network_protocol": "ipv6"
|
||||
},
|
||||
{
|
||||
"proto": "udp",
|
||||
"recv_q": 0,
|
||||
"send_q": 0,
|
||||
"local_address": "localhost",
|
||||
"foreign_address": "0.0.0.0",
|
||||
"state": null,
|
||||
"user": "systemd-resolve",
|
||||
"inode": 26957,
|
||||
"program_name": "systemd-resolve",
|
||||
"kind": "network",
|
||||
"pid": 887,
|
||||
"local_port": "domain",
|
||||
"foreign_port": "*",
|
||||
"transport_protocol": "udp",
|
||||
"network_protocol": "ipv4"
|
||||
},
|
||||
{
|
||||
"proto": "raw6",
|
||||
"recv_q": 0,
|
||||
"send_q": 0,
|
||||
"local_address": "[::]",
|
||||
"foreign_address": "[::]",
|
||||
"state": "7",
|
||||
"user": "systemd-network",
|
||||
"inode": 27001,
|
||||
"program_name": "systemd-network",
|
||||
"kind": "network",
|
||||
"pid": 867,
|
||||
"local_port": "ipv6-icmp",
|
||||
"foreign_port": "*",
|
||||
"transport_protocol": null,
|
||||
"network_protocol": "ipv6"
|
||||
},
|
||||
{
|
||||
"proto": "unix",
|
||||
"refcnt": 2,
|
||||
"flags": null,
|
||||
"type": "DGRAM",
|
||||
"state": null,
|
||||
"inode": 33322,
|
||||
"program_name": "systemd",
|
||||
"path": "/run/user/1000/systemd/notify",
|
||||
"kind": "socket",
|
||||
"pid": 1607
|
||||
},
|
||||
{
|
||||
"proto": "unix",
|
||||
"refcnt": 2,
|
||||
"flags": "ACC",
|
||||
"type": "SEQPACKET",
|
||||
"state": "LISTENING",
|
||||
"inode": 20835,
|
||||
"program_name": "init",
|
||||
"path": "/run/udev/control",
|
||||
"kind": "socket",
|
||||
"pid": 1
|
||||
},
|
||||
...
|
||||
]
|
||||
|
||||
$ netstat -lpn | jc --netstat -p
|
||||
[
|
||||
{
|
||||
"transport_protocol": "tcp",
|
||||
"network_protocol": "ipv4",
|
||||
"local_address": "127.0.0.1",
|
||||
"local_port": "42351",
|
||||
"foreign_address": "0.0.0.0",
|
||||
"foreign_port": "*",
|
||||
"state": "LISTEN",
|
||||
"pid": 1112,
|
||||
"program_name": "containerd",
|
||||
"receive_q": 0,
|
||||
"send_q": 0
|
||||
},
|
||||
{
|
||||
"transport_protocol": "tcp",
|
||||
"network_protocol": "ipv4",
|
||||
"local_address": "127.0.0.53",
|
||||
"local_port": "53",
|
||||
"foreign_address": "0.0.0.0",
|
||||
"foreign_port": "*",
|
||||
"state": "LISTEN",
|
||||
"pid": 885,
|
||||
"program_name": "systemd-resolve",
|
||||
"receive_q": 0,
|
||||
"send_q": 0
|
||||
},
|
||||
{
|
||||
"transport_protocol": "tcp",
|
||||
"network_protocol": "ipv4",
|
||||
"local_address": "0.0.0.0",
|
||||
"local_port": "22",
|
||||
"foreign_address": "0.0.0.0",
|
||||
"foreign_port": "*",
|
||||
"state": "LISTEN",
|
||||
"pid": 1127,
|
||||
"program_name": "sshd",
|
||||
"receive_q": 0,
|
||||
"send_q": 0
|
||||
},
|
||||
{
|
||||
"transport_protocol": "tcp",
|
||||
"network_protocol": "ipv6",
|
||||
"local_address": "::",
|
||||
"local_port": "22",
|
||||
"foreign_address": "::",
|
||||
"foreign_port": "*",
|
||||
"state": "LISTEN",
|
||||
"pid": 1127,
|
||||
"program_name": "sshd",
|
||||
"receive_q": 0,
|
||||
"send_q": 0
|
||||
},
|
||||
{
|
||||
"transport_protocol": "udp",
|
||||
"network_protocol": "ipv4",
|
||||
"local_address": "127.0.0.53",
|
||||
"local_port": "53",
|
||||
"foreign_address": "0.0.0.0",
|
||||
"foreign_port": "*",
|
||||
"pid": 885,
|
||||
"program_name": "systemd-resolve",
|
||||
"receive_q": 0,
|
||||
"send_q": 0
|
||||
},
|
||||
{
|
||||
"transport_protocol": "udp",
|
||||
"network_protocol": "ipv4",
|
||||
"local_address": "192.168.71.131",
|
||||
"local_port": "68",
|
||||
"foreign_address": "0.0.0.0",
|
||||
"foreign_port": "*",
|
||||
"pid": 867,
|
||||
"program_name": "systemd-network",
|
||||
"receive_q": 0,
|
||||
"send_q": 0
|
||||
}
|
||||
]
|
||||
$ sudo netstat -apee | jc --netstat -p -r
|
||||
[
|
||||
{
|
||||
"proto": "tcp",
|
||||
"recv_q": "0",
|
||||
"send_q": "0",
|
||||
"local_address": "localhost",
|
||||
"foreign_address": "0.0.0.0",
|
||||
"state": "LISTEN",
|
||||
"user": "systemd-resolve",
|
||||
"inode": "26958",
|
||||
"program_name": "systemd-resolve",
|
||||
"kind": "network",
|
||||
"pid": "887",
|
||||
"local_port": "domain",
|
||||
"foreign_port": "*",
|
||||
"transport_protocol": "tcp",
|
||||
"network_protocol": "ipv4"
|
||||
},
|
||||
{
|
||||
"proto": "tcp",
|
||||
"recv_q": "0",
|
||||
"send_q": "0",
|
||||
"local_address": "0.0.0.0",
|
||||
"foreign_address": "0.0.0.0",
|
||||
"state": "LISTEN",
|
||||
"user": "root",
|
||||
"inode": "30499",
|
||||
"program_name": "sshd",
|
||||
"kind": "network",
|
||||
"pid": "1186",
|
||||
"local_port": "ssh",
|
||||
"foreign_port": "*",
|
||||
"transport_protocol": "tcp",
|
||||
"network_protocol": "ipv4"
|
||||
},
|
||||
{
|
||||
"proto": "tcp",
|
||||
"recv_q": "0",
|
||||
"send_q": "0",
|
||||
"local_address": "localhost",
|
||||
"foreign_address": "localhost",
|
||||
"state": "ESTABLISHED",
|
||||
"user": "root",
|
||||
"inode": "46829",
|
||||
"program_name": "sshd: root",
|
||||
"kind": "network",
|
||||
"pid": "2242",
|
||||
"local_port": "ssh",
|
||||
"foreign_port": "52186",
|
||||
"transport_protocol": "tcp",
|
||||
"network_protocol": "ipv4"
|
||||
},
|
||||
{
|
||||
"proto": "tcp",
|
||||
"recv_q": "0",
|
||||
"send_q": "0",
|
||||
"local_address": "localhost",
|
||||
"foreign_address": "localhost",
|
||||
"state": "ESTABLISHED",
|
||||
"user": "root",
|
||||
"inode": "46828",
|
||||
"program_name": "ssh",
|
||||
"kind": "network",
|
||||
"pid": "2241",
|
||||
"local_port": "52186",
|
||||
"foreign_port": "ssh",
|
||||
"transport_protocol": "tcp",
|
||||
"network_protocol": "ipv4"
|
||||
},
|
||||
{
|
||||
"proto": "tcp6",
|
||||
"recv_q": "0",
|
||||
"send_q": "0",
|
||||
"local_address": "[::]",
|
||||
"foreign_address": "[::]",
|
||||
"state": "LISTEN",
|
||||
"user": "root",
|
||||
"inode": "30510",
|
||||
"program_name": "sshd",
|
||||
"kind": "network",
|
||||
"pid": "1186",
|
||||
"local_port": "ssh",
|
||||
"foreign_port": "*",
|
||||
"transport_protocol": "tcp",
|
||||
"network_protocol": "ipv6"
|
||||
},
|
||||
{
|
||||
"proto": "udp",
|
||||
"recv_q": "0",
|
||||
"send_q": "0",
|
||||
"local_address": "localhost",
|
||||
"foreign_address": "0.0.0.0",
|
||||
"state": null,
|
||||
"user": "systemd-resolve",
|
||||
"inode": "26957",
|
||||
"program_name": "systemd-resolve",
|
||||
"kind": "network",
|
||||
"pid": "887",
|
||||
"local_port": "domain",
|
||||
"foreign_port": "*",
|
||||
"transport_protocol": "udp",
|
||||
"network_protocol": "ipv4"
|
||||
},
|
||||
{
|
||||
"proto": "raw6",
|
||||
"recv_q": "0",
|
||||
"send_q": "0",
|
||||
"local_address": "[::]",
|
||||
"foreign_address": "[::]",
|
||||
"state": "7",
|
||||
"user": "systemd-network",
|
||||
"inode": "27001",
|
||||
"program_name": "systemd-network",
|
||||
"kind": "network",
|
||||
"pid": "867",
|
||||
"local_port": "ipv6-icmp",
|
||||
"foreign_port": "*",
|
||||
"transport_protocol": null,
|
||||
"network_protocol": "ipv6"
|
||||
},
|
||||
{
|
||||
"proto": "unix",
|
||||
"refcnt": "2",
|
||||
"flags": null,
|
||||
"type": "DGRAM",
|
||||
"state": null,
|
||||
"inode": "33322",
|
||||
"program_name": "systemd",
|
||||
"path": "/run/user/1000/systemd/notify",
|
||||
"kind": "socket",
|
||||
"pid": " 1607"
|
||||
},
|
||||
{
|
||||
"proto": "unix",
|
||||
"refcnt": "2",
|
||||
"flags": "ACC",
|
||||
"type": "SEQPACKET",
|
||||
"state": "LISTENING",
|
||||
"inode": "20835",
|
||||
"program_name": "init",
|
||||
"path": "/run/udev/control",
|
||||
"kind": "socket",
|
||||
"pid": " 1"
|
||||
},
|
||||
...
|
||||
]
|
||||
"""
|
||||
import string
|
||||
|
||||
output = []
|
||||
import jc.utils
|
||||
|
||||
|
||||
def parse_line(entry):
|
||||
output_line = {}
|
||||
def process(proc_data):
|
||||
"""
|
||||
Final processing to conform to the schema.
|
||||
|
||||
if entry.find('tcp') == 0:
|
||||
output_line['transport_protocol'] = 'tcp'
|
||||
Parameters:
|
||||
|
||||
if entry.find('p6') == 2:
|
||||
output_line['network_protocol'] = 'ipv6'
|
||||
proc_data: (dictionary) raw structured data to process
|
||||
|
||||
else:
|
||||
output_line['network_protocol'] = 'ipv4'
|
||||
Returns:
|
||||
|
||||
elif entry.find('udp') == 0:
|
||||
output_line['transport_protocol'] = 'udp'
|
||||
dictionary structured data with the following schema:
|
||||
|
||||
if entry.find('p6') == 2:
|
||||
output_line['network_protocol'] = 'ipv6'
|
||||
[
|
||||
{
|
||||
"proto": string,
|
||||
"recv_q": integer,
|
||||
"send_q": integer,
|
||||
"transport_protocol" string,
|
||||
"network_protocol": string,
|
||||
"local_address": string,
|
||||
"local_port": string,
|
||||
"local_port_num": integer,
|
||||
"foreign_address": string,
|
||||
"foreign_port": string,
|
||||
"foreign_port_num": integer,
|
||||
"state": string,
|
||||
"program_name": string,
|
||||
"pid": integer,
|
||||
"user": string,
|
||||
"security_context": string,
|
||||
"refcnt": integer,
|
||||
"flags": string,
|
||||
"type": string,
|
||||
"inode": integer,
|
||||
"path": string,
|
||||
"kind": string
|
||||
}
|
||||
]
|
||||
"""
|
||||
for entry in proc_data:
|
||||
# integer changes
|
||||
int_list = ['recv_q', 'send_q', 'pid', 'refcnt', 'inode']
|
||||
for key in int_list:
|
||||
if key in entry:
|
||||
try:
|
||||
key_int = int(entry[key])
|
||||
entry[key] = key_int
|
||||
except (ValueError):
|
||||
entry[key] = None
|
||||
|
||||
else:
|
||||
output_line['network_protocol'] = 'ipv4'
|
||||
else:
|
||||
return
|
||||
if 'local_port' in entry:
|
||||
try:
|
||||
entry['local_port_num'] = int(entry['local_port'])
|
||||
except (ValueError):
|
||||
pass
|
||||
|
||||
parsed_line = entry.split()
|
||||
if 'foreign_port' in entry:
|
||||
try:
|
||||
entry['foreign_port_num'] = int(entry['foreign_port'])
|
||||
except (ValueError):
|
||||
pass
|
||||
|
||||
output_line['local_address'] = parsed_line[3].rsplit(':', 1)[0]
|
||||
output_line['local_port'] = parsed_line[3].rsplit(':', 1)[-1]
|
||||
output_line['foreign_address'] = parsed_line[4].rsplit(':', 1)[0]
|
||||
output_line['foreign_port'] = parsed_line[4].rsplit(':', 1)[-1]
|
||||
return proc_data
|
||||
|
||||
if len(parsed_line) > 5:
|
||||
|
||||
if parsed_line[5][0] not in string.digits and parsed_line[5][0] != '-':
|
||||
output_line['state'] = parsed_line[5]
|
||||
def normalize_headers(header):
|
||||
header = header.lower()
|
||||
header = header.replace('local address', 'local_address')
|
||||
header = header.replace('foreign address', 'foreign_address')
|
||||
header = header.replace('pid/program name', 'program_name')
|
||||
header = header.replace('security context', 'security_context')
|
||||
header = header.replace('i-node', 'inode')
|
||||
header = header.replace('-', '_')
|
||||
|
||||
if len(parsed_line) > 6 and parsed_line[6][0] in string.digits:
|
||||
output_line['pid'] = int(parsed_line[6].split('/')[0])
|
||||
output_line['program_name'] = parsed_line[6].split('/')[1]
|
||||
else:
|
||||
if parsed_line[5][0] in string.digits:
|
||||
output_line['pid'] = int(parsed_line[5].split('/')[0])
|
||||
output_line['program_name'] = parsed_line[5].split('/')[1]
|
||||
return header
|
||||
|
||||
output_line['receive_q'] = int(parsed_line[1])
|
||||
output_line['send_q'] = int(parsed_line[2])
|
||||
|
||||
def parse_network(headers, entry):
|
||||
# Count words in header
|
||||
# if len of line is one less than len of header, then insert None in field 5
|
||||
entry = entry.split(maxsplit=len(headers) - 1)
|
||||
|
||||
if len(entry) == len(headers) - 1:
|
||||
entry.insert(5, None)
|
||||
|
||||
output_line = dict(zip(headers, entry))
|
||||
output_line['kind'] = 'network'
|
||||
|
||||
return output_line
|
||||
|
||||
|
||||
def parse(data):
|
||||
def parse_socket(header_text, headers, entry):
|
||||
output_line = {}
|
||||
# get the column # of first letter of "state"
|
||||
state_col = header_text.find('state')
|
||||
# get the program name column area
|
||||
pn_start = header_text.find('program_name')
|
||||
pn_end = header_text.find('path') - 1
|
||||
|
||||
# remove [ and ] from each line
|
||||
entry = entry.replace('[ ]', '---')
|
||||
entry = entry.replace('[', ' ').replace(']', ' ')
|
||||
|
||||
# find program_name column area and substitute spaces with \u2063 there
|
||||
old_pn = entry[pn_start:pn_end]
|
||||
new_pn = old_pn.replace(' ', '\u2063')
|
||||
entry = entry.replace(old_pn, new_pn)
|
||||
|
||||
entry_list = entry.split(maxsplit=len(headers) - 1)
|
||||
# check column # to see if state column is populated
|
||||
if entry[state_col] in string.whitespace:
|
||||
entry_list.insert(4, None)
|
||||
|
||||
output_line = dict(zip(headers, entry_list))
|
||||
output_line['kind'] = 'socket'
|
||||
|
||||
# fix program_name field to turn \u2063 back to spaces
|
||||
if 'program_name' in output_line:
|
||||
if output_line['program_name']:
|
||||
old_d_pn = output_line['program_name']
|
||||
new_d_pn = old_d_pn.replace('\u2063', ' ')
|
||||
output_line['program_name'] = new_d_pn
|
||||
|
||||
return output_line
|
||||
|
||||
|
||||
def parse_post(raw_data):
|
||||
# clean up trailing whitespace on each item in each entry
|
||||
# flags --- = null
|
||||
# program_name - = null
|
||||
# split pid and program name and ip addresses and ports
|
||||
# create network and transport protocol fields
|
||||
|
||||
for entry in raw_data:
|
||||
for item in entry:
|
||||
try:
|
||||
entry[item] = entry[item].rstrip()
|
||||
except (AttributeError):
|
||||
# skips trying to rstrip Null entries
|
||||
pass
|
||||
|
||||
if 'flags' in entry:
|
||||
if entry['flags'] == '---':
|
||||
entry['flags'] = None
|
||||
|
||||
if 'program_name' in entry:
|
||||
entry['program_name'] = entry['program_name'].strip()
|
||||
if entry['program_name'] == '-':
|
||||
entry['program_name'] = None
|
||||
|
||||
if entry['program_name']:
|
||||
pid = entry['program_name'].split('/', maxsplit=1)[0]
|
||||
name = entry['program_name'].split('/', maxsplit=1)[1]
|
||||
entry['pid'] = pid
|
||||
entry['program_name'] = name
|
||||
|
||||
if 'local_address' in entry:
|
||||
if entry['local_address']:
|
||||
ladd = entry['local_address'].rsplit(':', maxsplit=1)[0]
|
||||
lport = entry['local_address'].rsplit(':', maxsplit=1)[1]
|
||||
entry['local_address'] = ladd
|
||||
entry['local_port'] = lport
|
||||
|
||||
if 'foreign_address' in entry:
|
||||
if entry['foreign_address']:
|
||||
fadd = entry['foreign_address'].rsplit(':', maxsplit=1)[0]
|
||||
fport = entry['foreign_address'].rsplit(':', maxsplit=1)[1]
|
||||
entry['foreign_address'] = fadd
|
||||
entry['foreign_port'] = fport
|
||||
|
||||
if 'proto' in entry and 'kind' in entry:
|
||||
if entry['kind'] == 'network':
|
||||
if entry['proto'].find('tcp') != -1:
|
||||
entry['transport_protocol'] = 'tcp'
|
||||
elif entry['proto'].find('udp') != -1:
|
||||
entry['transport_protocol'] = 'udp'
|
||||
else:
|
||||
entry['transport_protocol'] = None
|
||||
|
||||
if entry['proto'].find('6') != -1:
|
||||
entry['network_protocol'] = 'ipv6'
|
||||
else:
|
||||
entry['network_protocol'] = 'ipv4'
|
||||
|
||||
return raw_data
|
||||
|
||||
|
||||
def parse(data, raw=False, quiet=False):
|
||||
"""
|
||||
Main text parsing function
|
||||
|
||||
Parameters:
|
||||
|
||||
data: (string) text data to parse
|
||||
raw: (boolean) output preprocessed JSON if True
|
||||
quiet: (boolean) suppress warning messages if True
|
||||
|
||||
Returns:
|
||||
|
||||
dictionary raw or processed structured data
|
||||
"""
|
||||
|
||||
# compatible options: linux, darwin, cygwin, win32, aix, freebsd
|
||||
compatible = ['linux']
|
||||
|
||||
if not quiet:
|
||||
jc.utils.compatibility(__name__, compatible)
|
||||
|
||||
cleandata = data.splitlines()
|
||||
cleandata = list(filter(None, cleandata))
|
||||
|
||||
raw_output = []
|
||||
network = False
|
||||
socket = False
|
||||
headers = ''
|
||||
network_list = []
|
||||
socket_list = []
|
||||
|
||||
for line in cleandata:
|
||||
|
||||
if line.find('Active Internet connections (w/o servers)') == 0:
|
||||
continue
|
||||
|
||||
if line.find('Active Internet connections (only servers)') == 0:
|
||||
continue
|
||||
|
||||
if line.find('Proto') == 0:
|
||||
if line.find('Active Internet') == 0:
|
||||
network_list = []
|
||||
network = True
|
||||
socket = False
|
||||
continue
|
||||
|
||||
if line.find('Active UNIX') == 0:
|
||||
break
|
||||
socket_list = []
|
||||
network = False
|
||||
socket = True
|
||||
continue
|
||||
|
||||
output.append(parse_line(line))
|
||||
if line.find('Proto') == 0:
|
||||
header_text = normalize_headers(line)
|
||||
headers = header_text.split()
|
||||
continue
|
||||
|
||||
clean_output = list(filter(None, output))
|
||||
return clean_output
|
||||
if network:
|
||||
network_list.append(parse_network(headers, line))
|
||||
continue
|
||||
|
||||
if socket:
|
||||
socket_list.append(parse_socket(header_text, headers, line))
|
||||
continue
|
||||
|
||||
for item in [network_list, socket_list]:
|
||||
for entry in item:
|
||||
raw_output.append(entry)
|
||||
|
||||
raw_output = parse_post(raw_output)
|
||||
|
||||
if raw:
|
||||
return raw_output
|
||||
else:
|
||||
return process(raw_output)
|
||||
|
||||
301
jc/parsers/ps.py
301
jc/parsers/ps.py
@@ -7,61 +7,266 @@ Usage:
|
||||
- ef
|
||||
- axu
|
||||
|
||||
Example:
|
||||
Examples:
|
||||
|
||||
$ ps -ef | jc --ps -p
|
||||
[
|
||||
{
|
||||
"UID": "root",
|
||||
"PID": "1",
|
||||
"PPID": "0",
|
||||
"C": "0",
|
||||
"STIME": "13:58",
|
||||
"TTY": "?",
|
||||
"TIME": "00:00:05",
|
||||
"CMD": "/lib/systemd/systemd --system --deserialize 35"
|
||||
},
|
||||
{
|
||||
"UID": "root",
|
||||
"PID": "2",
|
||||
"PPID": "0",
|
||||
"C": "0",
|
||||
"STIME": "13:58",
|
||||
"TTY": "?",
|
||||
"TIME": "00:00:00",
|
||||
"CMD": "[kthreadd]"
|
||||
},
|
||||
{
|
||||
"UID": "root",
|
||||
"PID": "4",
|
||||
"PPID": "2",
|
||||
"C": "0",
|
||||
"STIME": "13:58",
|
||||
"TTY": "?",
|
||||
"TIME": "00:00:00",
|
||||
"CMD": "[kworker/0:0H]"
|
||||
},
|
||||
{
|
||||
"UID": "root",
|
||||
"PID": "6",
|
||||
"PPID": "2",
|
||||
"C": "0",
|
||||
"STIME": "13:58",
|
||||
"TTY": "?",
|
||||
"TIME": "00:00:00",
|
||||
"CMD": "[mm_percpu_wq]"
|
||||
},
|
||||
...
|
||||
]
|
||||
$ ps -ef | jc --ps -p
|
||||
[
|
||||
{
|
||||
"uid": "root",
|
||||
"pid": 1,
|
||||
"ppid": 0,
|
||||
"c": 0,
|
||||
"stime": "Nov01",
|
||||
"tty": null,
|
||||
"time": "00:00:11",
|
||||
"cmd": "/usr/lib/systemd/systemd --switched-root --system --deserialize 22"
|
||||
},
|
||||
{
|
||||
"uid": "root",
|
||||
"pid": 2,
|
||||
"ppid": 0,
|
||||
"c": 0,
|
||||
"stime": "Nov01",
|
||||
"tty": null,
|
||||
"time": "00:00:00",
|
||||
"cmd": "[kthreadd]"
|
||||
},
|
||||
{
|
||||
"uid": "root",
|
||||
"pid": 4,
|
||||
"ppid": 2,
|
||||
"c": 0,
|
||||
"stime": "Nov01",
|
||||
"tty": null,
|
||||
"time": "00:00:00",
|
||||
"cmd": "[kworker/0:0H]"
|
||||
},
|
||||
...
|
||||
]
|
||||
|
||||
$ ps -ef | jc --ps -p -r
|
||||
[
|
||||
{
|
||||
"uid": "root",
|
||||
"pid": "1",
|
||||
"ppid": "0",
|
||||
"c": "0",
|
||||
"stime": "Nov01",
|
||||
"tty": "?",
|
||||
"time": "00:00:11",
|
||||
"cmd": "/usr/lib/systemd/systemd --switched-root --system --deserialize 22"
|
||||
},
|
||||
{
|
||||
"uid": "root",
|
||||
"pid": "2",
|
||||
"ppid": "0",
|
||||
"c": "0",
|
||||
"stime": "Nov01",
|
||||
"tty": "?",
|
||||
"time": "00:00:00",
|
||||
"cmd": "[kthreadd]"
|
||||
},
|
||||
{
|
||||
"uid": "root",
|
||||
"pid": "4",
|
||||
"ppid": "2",
|
||||
"c": "0",
|
||||
"stime": "Nov01",
|
||||
"tty": "?",
|
||||
"time": "00:00:00",
|
||||
"cmd": "[kworker/0:0H]"
|
||||
},
|
||||
...
|
||||
]
|
||||
|
||||
$ ps axu | jc --ps -p
|
||||
[
|
||||
{
|
||||
"user": "root",
|
||||
"pid": 1,
|
||||
"cpu_percent": 0.0,
|
||||
"mem_percent": 0.1,
|
||||
"vsz": 128072,
|
||||
"rss": 6784,
|
||||
"tty": null,
|
||||
"stat": "Ss",
|
||||
"start": "Nov09",
|
||||
"time": "0:08",
|
||||
"command": "/usr/lib/systemd/systemd --switched-root --system --deserialize 22"
|
||||
},
|
||||
{
|
||||
"user": "root",
|
||||
"pid": 2,
|
||||
"cpu_percent": 0.0,
|
||||
"mem_percent": 0.0,
|
||||
"vsz": 0,
|
||||
"rss": 0,
|
||||
"tty": null,
|
||||
"stat": "S",
|
||||
"start": "Nov09",
|
||||
"time": "0:00",
|
||||
"command": "[kthreadd]"
|
||||
},
|
||||
{
|
||||
"user": "root",
|
||||
"pid": 4,
|
||||
"cpu_percent": 0.0,
|
||||
"mem_percent": 0.0,
|
||||
"vsz": 0,
|
||||
"rss": 0,
|
||||
"tty": null,
|
||||
"stat": "S<",
|
||||
"start": "Nov09",
|
||||
"time": "0:00",
|
||||
"command": "[kworker/0:0H]"
|
||||
},
|
||||
...
|
||||
]
|
||||
|
||||
$ ps axu | jc --ps -p -r
|
||||
[
|
||||
{
|
||||
"user": "root",
|
||||
"pid": "1",
|
||||
"cpu_percent": "0.0",
|
||||
"mem_percent": "0.1",
|
||||
"vsz": "128072",
|
||||
"rss": "6784",
|
||||
"tty": "?",
|
||||
"stat": "Ss",
|
||||
"start": "Nov09",
|
||||
"time": "0:08",
|
||||
"command": "/usr/lib/systemd/systemd --switched-root --system --deserialize 22"
|
||||
},
|
||||
{
|
||||
"user": "root",
|
||||
"pid": "2",
|
||||
"cpu_percent": "0.0",
|
||||
"mem_percent": "0.0",
|
||||
"vsz": "0",
|
||||
"rss": "0",
|
||||
"tty": "?",
|
||||
"stat": "S",
|
||||
"start": "Nov09",
|
||||
"time": "0:00",
|
||||
"command": "[kthreadd]"
|
||||
},
|
||||
{
|
||||
"user": "root",
|
||||
"pid": "4",
|
||||
"cpu_percent": "0.0",
|
||||
"mem_percent": "0.0",
|
||||
"vsz": "0",
|
||||
"rss": "0",
|
||||
"tty": "?",
|
||||
"stat": "S<",
|
||||
"start": "Nov09",
|
||||
"time": "0:00",
|
||||
"command": "[kworker/0:0H]"
|
||||
},
|
||||
...
|
||||
]
|
||||
"""
|
||||
import jc.utils
|
||||
|
||||
|
||||
def parse(data):
|
||||
def process(proc_data):
|
||||
"""
|
||||
Final processing to conform to the schema.
|
||||
|
||||
Parameters:
|
||||
|
||||
proc_data: (dictionary) raw structured data to process
|
||||
|
||||
Returns:
|
||||
|
||||
dictionary structured data with the following schema:
|
||||
|
||||
[
|
||||
{
|
||||
"uid": string,
|
||||
"pid": integer,
|
||||
"ppid": integer,
|
||||
"c": integer,
|
||||
"stime": string,
|
||||
"tty": string, # ? = Null
|
||||
"time": string,
|
||||
"cmd": string,
|
||||
"user": string,
|
||||
"cpu_percent": float,
|
||||
"mem_percent": float,
|
||||
"vsz": integer,
|
||||
"rss": integer,
|
||||
"stat": string,
|
||||
"start": string,
|
||||
"command": string
|
||||
}
|
||||
]
|
||||
"""
|
||||
for entry in proc_data:
|
||||
# change to int
|
||||
int_list = ['pid', 'ppid', 'c', 'vsz', 'rss']
|
||||
for key in int_list:
|
||||
if key in entry:
|
||||
try:
|
||||
key_int = int(entry[key])
|
||||
entry[key] = key_int
|
||||
except (ValueError):
|
||||
entry[key] = None
|
||||
|
||||
# change to float
|
||||
float_list = ['cpu_percent', 'mem_percent']
|
||||
for key in float_list:
|
||||
if key in entry:
|
||||
try:
|
||||
key_float = float(entry[key])
|
||||
entry[key] = key_float
|
||||
except (ValueError):
|
||||
entry[key] = None
|
||||
|
||||
if 'tty' in entry:
|
||||
if entry['tty'] == '?':
|
||||
entry['tty'] = None
|
||||
|
||||
return proc_data
|
||||
|
||||
|
||||
def parse(data, raw=False, quiet=False):
|
||||
"""
|
||||
Main text parsing function
|
||||
|
||||
Parameters:
|
||||
|
||||
data: (string) text data to parse
|
||||
raw: (boolean) output preprocessed JSON if True
|
||||
quiet: (boolean) suppress warning messages if True
|
||||
|
||||
Returns:
|
||||
|
||||
dictionary raw or processed structured data
|
||||
"""
|
||||
|
||||
# compatible options: linux, darwin, cygwin, win32, aix, freebsd
|
||||
compatible = ['linux', 'darwin', 'cygwin', 'aix', 'freebsd']
|
||||
|
||||
if not quiet:
|
||||
jc.utils.compatibility(__name__, compatible)
|
||||
|
||||
# code adapted from Conor Heine at:
|
||||
# https://gist.github.com/cahna/43a1a3ff4d075bcd71f9d7120037a501
|
||||
|
||||
cleandata = data.splitlines()
|
||||
headers = [h for h in ' '.join(cleandata[0].strip().split()).split() if h]
|
||||
headers = [h for h in ' '.join(cleandata[0].lower().strip().split()).split() if h]
|
||||
|
||||
# clean up '%cpu' and '%mem' headers
|
||||
# even though % in a key is valid json, it can make things difficult
|
||||
headers = ['cpu_percent' if x == '%cpu' else x for x in headers]
|
||||
headers = ['mem_percent' if x == '%mem' else x for x in headers]
|
||||
|
||||
raw_data = map(lambda s: s.strip().split(None, len(headers) - 1), cleandata[1:])
|
||||
return [dict(zip(headers, r)) for r in raw_data]
|
||||
raw_output = [dict(zip(headers, r)) for r in raw_data]
|
||||
|
||||
if raw:
|
||||
return raw_output
|
||||
else:
|
||||
return process(raw_output)
|
||||
|
||||
@@ -3,61 +3,168 @@
|
||||
Usage:
|
||||
specify --route as the first argument if the piped input is coming from route
|
||||
|
||||
Examples:
|
||||
|
||||
Example:
|
||||
$ route -ee | jc --route -p
|
||||
[
|
||||
{
|
||||
"destination": "default",
|
||||
"gateway": "gateway",
|
||||
"genmask": "0.0.0.0",
|
||||
"flags": "UG",
|
||||
"metric": 100,
|
||||
"ref": 0,
|
||||
"use": 0,
|
||||
"iface": "ens33",
|
||||
"mss": 0,
|
||||
"window": 0,
|
||||
"irtt": 0
|
||||
},
|
||||
{
|
||||
"destination": "172.17.0.0",
|
||||
"gateway": "0.0.0.0",
|
||||
"genmask": "255.255.0.0",
|
||||
"flags": "U",
|
||||
"metric": 0,
|
||||
"ref": 0,
|
||||
"use": 0,
|
||||
"iface": "docker",
|
||||
"mss": 0,
|
||||
"window": 0,
|
||||
"irtt": 0
|
||||
},
|
||||
{
|
||||
"destination": "192.168.71.0",
|
||||
"gateway": "0.0.0.0",
|
||||
"genmask": "255.255.255.0",
|
||||
"flags": "U",
|
||||
"metric": 100,
|
||||
"ref": 0,
|
||||
"use": 0,
|
||||
"iface": "ens33",
|
||||
"mss": 0,
|
||||
"window": 0,
|
||||
"irtt": 0
|
||||
}
|
||||
]
|
||||
|
||||
$ route -n | jc --route -p
|
||||
[
|
||||
{
|
||||
"Destination": "0.0.0.0",
|
||||
"Gateway": "192.168.71.2",
|
||||
"Genmask": "0.0.0.0",
|
||||
"Flags": "UG",
|
||||
"Metric": "100",
|
||||
"Ref": "0",
|
||||
"Use": "0",
|
||||
"Iface": "ens33"
|
||||
},
|
||||
{
|
||||
"Destination": "172.17.0.0",
|
||||
"Gateway": "0.0.0.0",
|
||||
"Genmask": "255.255.0.0",
|
||||
"Flags": "U",
|
||||
"Metric": "0",
|
||||
"Ref": "0",
|
||||
"Use": "0",
|
||||
"Iface": "docker0"
|
||||
},
|
||||
{
|
||||
"Destination": "192.168.71.0",
|
||||
"Gateway": "0.0.0.0",
|
||||
"Genmask": "255.255.255.0",
|
||||
"Flags": "U",
|
||||
"Metric": "0",
|
||||
"Ref": "0",
|
||||
"Use": "0",
|
||||
"Iface": "ens33"
|
||||
},
|
||||
{
|
||||
"Destination": "192.168.71.2",
|
||||
"Gateway": "0.0.0.0",
|
||||
"Genmask": "255.255.255.255",
|
||||
"Flags": "UH",
|
||||
"Metric": "100",
|
||||
"Ref": "0",
|
||||
"Use": "0",
|
||||
"Iface": "ens33"
|
||||
}
|
||||
]
|
||||
$ route -ee | jc --route -p -r
|
||||
[
|
||||
{
|
||||
"destination": "default",
|
||||
"gateway": "gateway",
|
||||
"genmask": "0.0.0.0",
|
||||
"flags": "UG",
|
||||
"metric": "100",
|
||||
"ref": "0",
|
||||
"use": "0",
|
||||
"iface": "ens33",
|
||||
"mss": "0",
|
||||
"window": "0",
|
||||
"irtt": "0"
|
||||
},
|
||||
{
|
||||
"destination": "172.17.0.0",
|
||||
"gateway": "0.0.0.0",
|
||||
"genmask": "255.255.0.0",
|
||||
"flags": "U",
|
||||
"metric": "0",
|
||||
"ref": "0",
|
||||
"use": "0",
|
||||
"iface": "docker",
|
||||
"mss": "0",
|
||||
"window": "0",
|
||||
"irtt": "0"
|
||||
},
|
||||
{
|
||||
"destination": "192.168.71.0",
|
||||
"gateway": "0.0.0.0",
|
||||
"genmask": "255.255.255.0",
|
||||
"flags": "U",
|
||||
"metric": "100",
|
||||
"ref": "0",
|
||||
"use": "0",
|
||||
"iface": "ens33",
|
||||
"mss": "0",
|
||||
"window": "0",
|
||||
"irtt": "0"
|
||||
}
|
||||
]
|
||||
"""
|
||||
import jc.utils
|
||||
|
||||
|
||||
def parse(data):
|
||||
def process(proc_data):
|
||||
"""
|
||||
Final processing to conform to the schema.
|
||||
|
||||
Parameters:
|
||||
|
||||
proc_data: (dictionary) raw structured data to process
|
||||
|
||||
Returns:
|
||||
|
||||
dictionary structured data with the following schema:
|
||||
|
||||
[
|
||||
{
|
||||
"destination": string,
|
||||
"gateway": string,
|
||||
"genmask": string,
|
||||
"flags": string,
|
||||
"metric": integer,
|
||||
"ref": integer,
|
||||
"use": integer,
|
||||
"mss": integer,
|
||||
"window": integer,
|
||||
"irtt": integer,
|
||||
"iface": string
|
||||
}
|
||||
]
|
||||
"""
|
||||
for entry in proc_data:
|
||||
int_list = ['metric', 'ref', 'use', 'mss', 'window', 'irtt']
|
||||
for key in int_list:
|
||||
if key in entry:
|
||||
try:
|
||||
key_int = int(entry[key])
|
||||
entry[key] = key_int
|
||||
except (ValueError):
|
||||
entry[key] = None
|
||||
|
||||
return proc_data
|
||||
|
||||
|
||||
def parse(data, raw=False, quiet=False):
|
||||
"""
|
||||
Main text parsing function
|
||||
|
||||
Parameters:
|
||||
|
||||
data: (string) text data to parse
|
||||
raw: (boolean) output preprocessed JSON if True
|
||||
quiet: (boolean) suppress warning messages if True
|
||||
|
||||
Returns:
|
||||
|
||||
dictionary raw or processed structured data
|
||||
"""
|
||||
|
||||
# compatible options: linux, darwin, cygwin, win32, aix, freebsd
|
||||
compatible = ['linux', 'aix', 'freebsd']
|
||||
|
||||
if not quiet:
|
||||
jc.utils.compatibility(__name__, compatible)
|
||||
|
||||
# code adapted from Conor Heine at:
|
||||
# https://gist.github.com/cahna/43a1a3ff4d075bcd71f9d7120037a501
|
||||
|
||||
cleandata = data.splitlines()[1:]
|
||||
headers = [h for h in ' '.join(cleandata[0].strip().split()).split() if h]
|
||||
headers = [h for h in ' '.join(cleandata[0].lower().strip().split()).split() if h]
|
||||
raw_data = map(lambda s: s.strip().split(None, len(headers) - 1), cleandata[1:])
|
||||
return [dict(zip(headers, r)) for r in raw_data]
|
||||
raw_output = [dict(zip(headers, r)) for r in raw_data]
|
||||
|
||||
if raw:
|
||||
return raw_output
|
||||
else:
|
||||
return process(raw_output)
|
||||
|
||||
397
jc/parsers/ss.py
Normal file
397
jc/parsers/ss.py
Normal file
@@ -0,0 +1,397 @@
|
||||
"""jc - JSON CLI output utility ss Parser
|
||||
|
||||
Usage:
|
||||
specify --ss as the first argument if the piped input is coming from ss
|
||||
|
||||
Limitations:
|
||||
Extended information options like -e and -p are not supported and may cause parsing irregularities
|
||||
|
||||
Examples:
|
||||
|
||||
$ sudo ss -a | jc --ss -p
|
||||
[
|
||||
{
|
||||
"netid": "nl",
|
||||
"state": "UNCONN",
|
||||
"recv_q": 0,
|
||||
"send_q": 0,
|
||||
"peer_address": "*",
|
||||
"channel": "rtnl:kernel"
|
||||
},
|
||||
{
|
||||
"netid": "nl",
|
||||
"state": "UNCONN",
|
||||
"recv_q": 0,
|
||||
"send_q": 0,
|
||||
"peer_address": "*",
|
||||
"pid": 893,
|
||||
"channel": "rtnl:systemd-resolve"
|
||||
},
|
||||
...
|
||||
{
|
||||
"netid": "p_raw",
|
||||
"state": "UNCONN",
|
||||
"recv_q": 0,
|
||||
"send_q": 0,
|
||||
"peer_address": "*",
|
||||
"link_layer": "LLDP",
|
||||
"interface": "ens33"
|
||||
},
|
||||
{
|
||||
"netid": "u_dgr",
|
||||
"state": "UNCONN",
|
||||
"recv_q": 0,
|
||||
"send_q": 0,
|
||||
"local_port": "93066",
|
||||
"peer_address": "*",
|
||||
"peer_port": "0",
|
||||
"path": "/run/user/1000/systemd/notify"
|
||||
},
|
||||
{
|
||||
"netid": "u_seq",
|
||||
"state": "LISTEN",
|
||||
"recv_q": 0,
|
||||
"send_q": 128,
|
||||
"local_port": "20699",
|
||||
"peer_address": "*",
|
||||
"peer_port": "0",
|
||||
"path": "/run/udev/control"
|
||||
},
|
||||
...
|
||||
{
|
||||
"netid": "icmp6",
|
||||
"state": "UNCONN",
|
||||
"recv_q": 0,
|
||||
"send_q": 0,
|
||||
"local_address": "*",
|
||||
"local_port": "ipv6-icmp",
|
||||
"peer_address": "*",
|
||||
"peer_port": "*",
|
||||
"interface": "ens33"
|
||||
},
|
||||
{
|
||||
"netid": "udp",
|
||||
"state": "UNCONN",
|
||||
"recv_q": 0,
|
||||
"send_q": 0,
|
||||
"local_address": "127.0.0.53",
|
||||
"local_port": "domain",
|
||||
"peer_address": "0.0.0.0",
|
||||
"peer_port": "*",
|
||||
"interface": "lo"
|
||||
},
|
||||
{
|
||||
"netid": "tcp",
|
||||
"state": "LISTEN",
|
||||
"recv_q": 0,
|
||||
"send_q": 128,
|
||||
"local_address": "127.0.0.53",
|
||||
"local_port": "domain",
|
||||
"peer_address": "0.0.0.0",
|
||||
"peer_port": "*",
|
||||
"interface": "lo"
|
||||
},
|
||||
{
|
||||
"netid": "tcp",
|
||||
"state": "LISTEN",
|
||||
"recv_q": 0,
|
||||
"send_q": 128,
|
||||
"local_address": "0.0.0.0",
|
||||
"local_port": "ssh",
|
||||
"peer_address": "0.0.0.0",
|
||||
"peer_port": "*"
|
||||
},
|
||||
{
|
||||
"netid": "tcp",
|
||||
"state": "LISTEN",
|
||||
"recv_q": 0,
|
||||
"send_q": 128,
|
||||
"local_address": "[::]",
|
||||
"local_port": "ssh",
|
||||
"peer_address": "[::]",
|
||||
"peer_port": "*"
|
||||
},
|
||||
{
|
||||
"netid": "v_str",
|
||||
"state": "ESTAB",
|
||||
"recv_q": 0,
|
||||
"send_q": 0,
|
||||
"local_address": "999900439",
|
||||
"local_port": "1023",
|
||||
"peer_address": "0",
|
||||
"peer_port": "976",
|
||||
"local_port_num": 1023,
|
||||
"peer_port_num": 976
|
||||
}
|
||||
]
|
||||
|
||||
$ sudo ss -a | jc --ss -p -r
|
||||
[
|
||||
{
|
||||
"netid": "nl",
|
||||
"state": "UNCONN",
|
||||
"recv_q": "0",
|
||||
"send_q": "0",
|
||||
"peer_address": "*",
|
||||
"channel": "rtnl:kernel"
|
||||
},
|
||||
{
|
||||
"netid": "nl",
|
||||
"state": "UNCONN",
|
||||
"recv_q": "0",
|
||||
"send_q": "0",
|
||||
"peer_address": "*",
|
||||
"pid": "893",
|
||||
"channel": "rtnl:systemd-resolve"
|
||||
},
|
||||
...
|
||||
{
|
||||
"netid": "p_raw",
|
||||
"state": "UNCONN",
|
||||
"recv_q": "0",
|
||||
"send_q": "0",
|
||||
"peer_address": "*",
|
||||
"link_layer": "LLDP",
|
||||
"interface": "ens33"
|
||||
},
|
||||
{
|
||||
"netid": "u_dgr",
|
||||
"state": "UNCONN",
|
||||
"recv_q": "0",
|
||||
"send_q": "0",
|
||||
"local_port": "93066",
|
||||
"peer_address": "*",
|
||||
"peer_port": "0",
|
||||
"path": "/run/user/1000/systemd/notify"
|
||||
},
|
||||
{
|
||||
"netid": "u_seq",
|
||||
"state": "LISTEN",
|
||||
"recv_q": "0",
|
||||
"send_q": "128",
|
||||
"local_port": "20699",
|
||||
"peer_address": "*",
|
||||
"peer_port": "0",
|
||||
"path": "/run/udev/control"
|
||||
},
|
||||
...
|
||||
{
|
||||
"netid": "icmp6",
|
||||
"state": "UNCONN",
|
||||
"recv_q": "0",
|
||||
"send_q": "0",
|
||||
"local_address": "*",
|
||||
"local_port": "ipv6-icmp",
|
||||
"peer_address": "*",
|
||||
"peer_port": "*",
|
||||
"interface": "ens33"
|
||||
},
|
||||
{
|
||||
"netid": "udp",
|
||||
"state": "UNCONN",
|
||||
"recv_q": "0",
|
||||
"send_q": "0",
|
||||
"local_address": "127.0.0.53",
|
||||
"local_port": "domain",
|
||||
"peer_address": "0.0.0.0",
|
||||
"peer_port": "*",
|
||||
"interface": "lo"
|
||||
},
|
||||
{
|
||||
"netid": "tcp",
|
||||
"state": "LISTEN",
|
||||
"recv_q": "0",
|
||||
"send_q": "128",
|
||||
"local_address": "127.0.0.53",
|
||||
"local_port": "domain",
|
||||
"peer_address": "0.0.0.0",
|
||||
"peer_port": "*",
|
||||
"interface": "lo"
|
||||
},
|
||||
{
|
||||
"netid": "tcp",
|
||||
"state": "LISTEN",
|
||||
"recv_q": "0",
|
||||
"send_q": "128",
|
||||
"local_address": "0.0.0.0",
|
||||
"local_port": "ssh",
|
||||
"peer_address": "0.0.0.0",
|
||||
"peer_port": "*"
|
||||
},
|
||||
{
|
||||
"netid": "tcp",
|
||||
"state": "LISTEN",
|
||||
"recv_q": "0",
|
||||
"send_q": "128",
|
||||
"local_address": "[::]",
|
||||
"local_port": "ssh",
|
||||
"peer_address": "[::]",
|
||||
"peer_port": "*"
|
||||
},
|
||||
{
|
||||
"netid": "v_str",
|
||||
"state": "ESTAB",
|
||||
"recv_q": "0",
|
||||
"send_q": "0",
|
||||
"local_address": "999900439",
|
||||
"local_port": "1023",
|
||||
"peer_address": "0",
|
||||
"peer_port": "976"
|
||||
}
|
||||
]
|
||||
"""
|
||||
import string
|
||||
import jc.utils
|
||||
|
||||
|
||||
def process(proc_data):
    """
    Final processing to conform to the schema.

    Parameters:

        proc_data:   (List of Dictionaries) raw structured data to process

    Returns:

        List of Dictionaries. Structured data with the following schema:

        [
          {
            "netid": string,
            "state": string,
            "recv_q": integer,
            "send_q": integer,
            "local_address": string,
            "local_port": string,
            "local_port_num": integer,
            "peer_address": string,
            "peer_port": string,
            "peer_port_num": integer,
            "interface": string,
            "link_layer" string,
            "channel": string,
            "path": string,
            "pid": integer
          }
        ]

    Information from https://www.cyberciti.biz/files/ss.html used to define field names
    """
    # keys that should always be integers; unparsable values become null
    int_list = ['recv_q', 'send_q', 'pid']

    for entry in proc_data:
        for key in int_list:
            if key in entry:
                try:
                    entry[key] = int(entry[key])
                except ValueError:
                    entry[key] = None

        # ports may be service names (e.g. "ssh"); add the numeric companion
        # field only when the port string is actually a number
        if 'local_port' in entry:
            try:
                entry['local_port_num'] = int(entry['local_port'])
            except ValueError:
                pass

        if 'peer_port' in entry:
            try:
                entry['peer_port_num'] = int(entry['peer_port'])
            except ValueError:
                pass

    return proc_data
|
||||
|
||||
|
||||
def parse(data, raw=False, quiet=False):
    """
    Main text parsing function

    Parameters:

        data:        (string)  text data to parse
        raw:         (boolean) output preprocessed JSON if True
        quiet:       (boolean) suppress warning messages if True

    Returns:

        dictionary   raw or processed structured data
    """

    # compatible options: linux, darwin, cygwin, win32, aix, freebsd
    compatible = ['linux']

    if not quiet:
        jc.utils.compatibility(__name__, compatible)

    # netids whose address column is formatted as address:port
    contains_colon = ['nl', 'p_raw', 'raw', 'udp', 'tcp', 'v_str', 'icmp6']
    raw_output = []

    # drop empty lines
    cleandata = [line for line in data.splitlines() if line]

    if cleandata:
        # normalize the header row into snake_case field names
        header_text = cleandata[0].lower()
        for old, new in (('netidstate', 'netid state'),
                         ('local address:port', 'local_address local_port'),
                         ('peer address:port', 'peer_address peer_port'),
                         ('-', '_')):
            header_text = header_text.replace(old, new)

        header_list = header_text.split()

        for entry in cleandata[1:]:
            if entry[0] not in string.whitespace:
                # fix weird ss bug where first two columns sometimes
                # run together with no space between them
                entry = entry[:5] + ' ' + entry[5:]

            fields = entry.split()

            # split the local address:port column at the last colon
            if fields[0] in contains_colon and ':' in fields[4]:
                l_address, _, l_port = fields[4].rpartition(':')
                fields[4] = l_address
                fields.insert(5, l_port)

            # split the peer address:port column at the last colon
            if fields[0] in contains_colon and ':' in fields[6]:
                p_address, _, p_port = fields[6].rpartition(':')
                fields[6] = p_address
                fields.insert(7, p_port)

            output_line = dict(zip(header_list, fields))

            # post processing to pull out fields: interface, link_layer, path, pid, channel
            # Information from https://www.cyberciti.biz/files/ss.html used to define field names
            if '%' in output_line['local_address']:
                address, _, iface = output_line['local_address'].rpartition('%')
                output_line['local_address'] = address
                output_line['interface'] = iface

            if output_line['netid'] == 'nl':
                channel = output_line.pop('local_address') + ':' + output_line.pop('local_port')
                if '/' in channel:
                    channel, _, pid = channel.rpartition('/')
                    output_line['pid'] = pid
                output_line['channel'] = channel

            if output_line['netid'] == 'p_raw':
                output_line['link_layer'] = output_line.pop('local_address')
                output_line['interface'] = output_line.pop('local_port')

            if output_line['netid'] not in contains_colon:
                output_line['path'] = output_line.pop('local_address')

            raw_output.append(output_line)

    if raw:
        return raw_output
    else:
        return process(raw_output)
|
||||
265
jc/parsers/stat.py
Normal file
265
jc/parsers/stat.py
Normal file
@@ -0,0 +1,265 @@
|
||||
"""jc - JSON CLI output utility stats Parser
|
||||
|
||||
Usage:
|
||||
specify --stats as the first argument if the piped input is coming from stats
|
||||
|
||||
Examples:
|
||||
|
||||
$ stat /bin/* | jc --stat -p
|
||||
[
|
||||
{
|
||||
"file": "/bin/bash",
|
||||
"size": 1113504,
|
||||
"blocks": 2176,
|
||||
"io_blocks": 4096,
|
||||
"type": "regular file",
|
||||
"device": "802h/2050d",
|
||||
"inode": 131099,
|
||||
"links": 1,
|
||||
"access": "0755",
|
||||
"flags": "-rwxr-xr-x",
|
||||
"uid": 0,
|
||||
"user": "root",
|
||||
"gid": 0,
|
||||
"group": "root",
|
||||
"access_time": "2019-11-14 08:18:03.509681766 +0000",
|
||||
"modify_time": "2019-06-06 22:28:15.000000000 +0000",
|
||||
"change_time": "2019-08-12 17:21:29.521945390 +0000",
|
||||
"birth_time": null
|
||||
},
|
||||
{
|
||||
"file": "/bin/btrfs",
|
||||
"size": 716464,
|
||||
"blocks": 1400,
|
||||
"io_blocks": 4096,
|
||||
"type": "regular file",
|
||||
"device": "802h/2050d",
|
||||
"inode": 131100,
|
||||
"links": 1,
|
||||
"access": "0755",
|
||||
"flags": "-rwxr-xr-x",
|
||||
"uid": 0,
|
||||
"user": "root",
|
||||
"gid": 0,
|
||||
"group": "root",
|
||||
"access_time": "2019-11-14 08:18:28.990834276 +0000",
|
||||
"modify_time": "2018-03-12 23:04:27.000000000 +0000",
|
||||
"change_time": "2019-08-12 17:21:29.545944399 +0000",
|
||||
"birth_time": null
|
||||
},
|
||||
...
|
||||
]
|
||||
|
||||
$ stat /bin/* | jc --stat -p -r
|
||||
[
|
||||
{
|
||||
"file": "/bin/bash",
|
||||
"size": "1113504",
|
||||
"blocks": "2176",
|
||||
"io_blocks": "4096",
|
||||
"type": "regular file",
|
||||
"device": "802h/2050d",
|
||||
"inode": "131099",
|
||||
"links": "1",
|
||||
"access": "0755",
|
||||
"flags": "-rwxr-xr-x",
|
||||
"uid": "0",
|
||||
"user": "root",
|
||||
"gid": "0",
|
||||
"group": "root",
|
||||
"access_time": "2019-11-14 08:18:03.509681766 +0000",
|
||||
"modify_time": "2019-06-06 22:28:15.000000000 +0000",
|
||||
"change_time": "2019-08-12 17:21:29.521945390 +0000",
|
||||
"birth_time": null
|
||||
},
|
||||
{
|
||||
"file": "/bin/btrfs",
|
||||
"size": "716464",
|
||||
"blocks": "1400",
|
||||
"io_blocks": "4096",
|
||||
"type": "regular file",
|
||||
"device": "802h/2050d",
|
||||
"inode": "131100",
|
||||
"links": "1",
|
||||
"access": "0755",
|
||||
"flags": "-rwxr-xr-x",
|
||||
"uid": "0",
|
||||
"user": "root",
|
||||
"gid": "0",
|
||||
"group": "root",
|
||||
"access_time": "2019-11-14 08:18:28.990834276 +0000",
|
||||
"modify_time": "2018-03-12 23:04:27.000000000 +0000",
|
||||
"change_time": "2019-08-12 17:21:29.545944399 +0000",
|
||||
"birth_time": null
|
||||
},
|
||||
..
|
||||
]
|
||||
"""
|
||||
import jc.utils
|
||||
|
||||
|
||||
def process(proc_data):
    """
    Final processing to conform to the schema.

    Parameters:

        proc_data:   (List of Dictionaries) raw structured data to process

    Returns:

        List of Dictionaries. Structured data with the following schema:

        [
          {
            "file": string,
            "link_to" string,
            "size": integer,
            "blocks": integer,
            "io_blocks": integer,
            "type": string,
            "device": string,
            "inode": integer,
            "links": integer,
            "access": string,
            "flags": string,
            "uid": integer,
            "user": string,
            "gid": integer,
            "group": string,
            "access_time": string,    # - = null
            "modify_time": string,    # - = null
            "change_time": string,    # - = null
            "birth_time": string      # - = null
          }
        ]
    """
    int_list = ['size', 'blocks', 'io_blocks', 'inode', 'links', 'uid', 'gid']
    null_list = ['access_time', 'modify_time', 'change_time', 'birth_time']

    # single pass: convert integer fields and null out '-' time fields
    for entry in proc_data:
        for key in int_list:
            if key in entry:
                try:
                    entry[key] = int(entry[key])
                except ValueError:
                    entry[key] = None

        # turn - into null for time fields
        for key in null_list:
            if entry.get(key) == '-':
                entry[key] = None

    return proc_data
|
||||
|
||||
|
||||
def parse(data, raw=False, quiet=False):
    """
    Main text parsing function

    Parameters:

        data:        (string)  text data to parse
        raw:         (boolean) output preprocessed JSON if True
        quiet:       (boolean) suppress warning messages if True

    Returns:

        dictionary   raw or processed structured data
    """

    # compatible options: linux, darwin, cygwin, win32, aix, freebsd
    compatible = ['linux']

    if not quiet:
        jc.utils.compatibility(__name__, compatible)

    raw_output = []

    # drop empty lines
    cleandata = [line for line in data.splitlines() if line]

    if cleandata:
        # stat output repeats in groups of 8 lines per file
        for line in cleandata:

            # line #1: starts a new record
            if line.find('File:') == 2:
                output_line = {}
                output_line['file'] = line.split(maxsplit=1)[1]

                # populate link_to field if -> found
                if ' -> ' in output_line['file']:
                    target, link = output_line['file'].split(' -> ')[:2]
                    output_line['file'] = target.strip('\u2018').rstrip('\u2019')
                    output_line['link_to'] = link.strip('\u2018').rstrip('\u2019')
                else:
                    # strip the curly quotes GNU stat wraps around the name
                    output_line['file'] = output_line['file'].split(' -> ')[0].strip('\u2018').rstrip('\u2019')

                continue

            # line #2
            if line.find('Size:') == 2:
                fields = line.split(maxsplit=7)
                output_line['size'] = fields[1]
                output_line['blocks'] = fields[3]
                output_line['io_blocks'] = fields[6]
                output_line['type'] = fields[7]
                continue

            # line #3
            if line.find('Device:') == 0:
                fields = line.split()
                output_line['device'] = fields[1]
                output_line['inode'] = fields[3]
                output_line['links'] = fields[5]
                continue

            # line #4: permissions/ownership, e.g. "Access: (0755/-rwxr-xr-x)  Uid: ..."
            if line.find('Access: (') == 0:
                fields = line.replace('(', ' ').replace(')', ' ').replace('/', ' ').split()
                output_line['access'] = fields[1]
                output_line['flags'] = fields[2]
                output_line['uid'] = fields[4]
                output_line['user'] = fields[5]
                output_line['gid'] = fields[7]
                output_line['group'] = fields[8]
                continue

            # line #5: access timestamp (starts with a year, so "Access: 2")
            if line.find('Access: 2') == 0:
                output_line['access_time'] = line.split(maxsplit=1)[1]
                continue

            # line #6
            if line.find('Modify:') == 0:
                output_line['modify_time'] = line.split(maxsplit=1)[1]
                continue

            # line #7
            if line.find('Change:') == 0:
                output_line['change_time'] = line.split(maxsplit=1)[1]
                continue

            # line #8: last line of the group; emit the completed record
            if line.find('Birth:') == 1:
                output_line['birth_time'] = line.split(maxsplit=1)[1]
                raw_output.append(output_line)
                continue

    if raw:
        return raw_output
    else:
        return process(raw_output)
|
||||
109
jc/parsers/systemctl.py
Normal file
109
jc/parsers/systemctl.py
Normal file
@@ -0,0 +1,109 @@
|
||||
"""jc - JSON CLI output utility systemctl Parser
|
||||
|
||||
Usage:
|
||||
specify --systemctl as the first argument if the piped input is coming from systemctl
|
||||
|
||||
Examples:
|
||||
|
||||
$ systemctl -a | jc --systemctl -p
|
||||
[
|
||||
{
|
||||
"unit": "proc-sys-fs-binfmt_misc.automount",
|
||||
"load": "loaded",
|
||||
"active": "active",
|
||||
"sub": "waiting",
|
||||
"description": "Arbitrary Executable File Formats File System Automount Point"
|
||||
},
|
||||
{
|
||||
"unit": "dev-block-8:2.device",
|
||||
"load": "loaded",
|
||||
"active": "active",
|
||||
"sub": "plugged",
|
||||
"description": "LVM PV 3klkIj-w1qk-DkJi-0XBJ-y3o7-i2Ac-vHqWBM on /dev/sda2 2"
|
||||
},
|
||||
{
|
||||
"unit": "dev-cdrom.device",
|
||||
"load": "loaded",
|
||||
"active": "active",
|
||||
"sub": "plugged",
|
||||
"description": "VMware_Virtual_IDE_CDROM_Drive"
|
||||
},
|
||||
...
|
||||
]
|
||||
"""
|
||||
import jc.utils
|
||||
|
||||
|
||||
def process(proc_data):
    """
    Final processing to conform to the schema.

    Parameters:

        proc_data:   (List of Dictionaries) raw structured data to process

    Returns:

        List of Dictionaries. Structured data with the following schema:

        [
          {
            "unit": string,
            "load": string,
            "active": string,
            "sub": string,
            "description": string
          }
        ]
    """
    # all fields are strings already; nothing more to process
    return proc_data
|
||||
|
||||
|
||||
def parse(data, raw=False, quiet=False):
    """
    Main text parsing function

    Parameters:

        data:        (string)  text data to parse
        raw:         (boolean) output preprocessed JSON if True
        quiet:       (boolean) suppress warning messages if True

    Returns:

        dictionary   raw or processed structured data
    """

    # compatible options: linux, darwin, cygwin, win32, aix, systemctlbsd
    compatible = ['linux']

    if not quiet:
        jc.utils.compatibility(__name__, compatible)

    # drop blank lines, then strip any non-ascii characters (systemctl
    # may emit bullet glyphs in front of unit names)
    cleandata = [entry.encode('ascii', errors='ignore').decode()
                 for entry in data.splitlines() if entry]

    header_list = cleandata[0].lower().split()

    raw_output = []

    for entry in cleandata[1:]:
        # the legend section ("LOAD = ...") marks the end of the table
        if entry.find('LOAD = ') != -1:
            break

        fields = entry.rstrip().split(maxsplit=4)
        raw_output.append(dict(zip(header_list, fields)))

    if raw:
        return raw_output
    else:
        return process(raw_output)
|
||||
136
jc/parsers/systemctl_lj.py
Normal file
136
jc/parsers/systemctl_lj.py
Normal file
@@ -0,0 +1,136 @@
|
||||
"""jc - JSON CLI output utility systemctl-lj Parser
|
||||
|
||||
Usage:
|
||||
specify --systemctl-lj as the first argument if the piped input is coming from systemctl list-jobs
|
||||
|
||||
Examples:
|
||||
|
||||
$ systemctl list-jobs| jc --systemctl-lj -p
|
||||
[
|
||||
{
|
||||
"job": 3543,
|
||||
"unit": "nginxAfterGlusterfs.service",
|
||||
"type": "start",
|
||||
"state": "waiting"
|
||||
},
|
||||
{
|
||||
"job": 3545,
|
||||
"unit": "glusterReadyForLocalhostMount.service",
|
||||
"type": "start",
|
||||
"state": "running"
|
||||
},
|
||||
{
|
||||
"job": 3506,
|
||||
"unit": "nginx.service",
|
||||
"type": "start",
|
||||
"state": "waiting"
|
||||
}
|
||||
]
|
||||
|
||||
$ systemctl list-jobs| jc --systemctl-lj -p -r
|
||||
[
|
||||
{
|
||||
"job": "3543",
|
||||
"unit": "nginxAfterGlusterfs.service",
|
||||
"type": "start",
|
||||
"state": "waiting"
|
||||
},
|
||||
{
|
||||
"job": "3545",
|
||||
"unit": "glusterReadyForLocalhostMount.service",
|
||||
"type": "start",
|
||||
"state": "running"
|
||||
},
|
||||
{
|
||||
"job": "3506",
|
||||
"unit": "nginx.service",
|
||||
"type": "start",
|
||||
"state": "waiting"
|
||||
}
|
||||
]
|
||||
|
||||
"""
|
||||
import jc.utils
|
||||
|
||||
|
||||
def process(proc_data):
    """
    Final processing to conform to the schema.

    Parameters:

        proc_data:   (List of Dictionaries) raw structured data to process

    Returns:

        List of Dictionaries. Structured data with the following schema:

        [
          {
            "job": integer,
            "unit": string,
            "type": string,
            "state": string
          }
        ]
    """
    # convert job id to an integer; unparsable values become null
    int_list = ['job']

    for entry in proc_data:
        for key in int_list:
            if key in entry:
                try:
                    entry[key] = int(entry[key])
                except ValueError:
                    entry[key] = None

    return proc_data
|
||||
|
||||
|
||||
def parse(data, raw=False, quiet=False):
    """
    Main text parsing function

    Parameters:

        data:        (string)  text data to parse
        raw:         (boolean) output preprocessed JSON if True
        quiet:       (boolean) suppress warning messages if True

    Returns:

        dictionary   raw or processed structured data
    """

    # compatible options: linux, darwin, cygwin, win32, aix, systemctlbsd
    compatible = ['linux']

    if not quiet:
        jc.utils.compatibility(__name__, compatible)

    # drop blank lines, then strip any non-ascii characters
    cleandata = [entry.encode('ascii', errors='ignore').decode()
                 for entry in data.splitlines() if entry]

    header_list = cleandata[0].lower().split()

    raw_output = []

    for entry in cleandata[1:]:
        # footer text marks the end of the job table
        if entry.find('No jobs running.') != -1 or entry.find('jobs listed.') != -1:
            break

        fields = entry.split(maxsplit=4)
        raw_output.append(dict(zip(header_list, fields)))

    if raw:
        return raw_output
    else:
        return process(raw_output)
|
||||
101
jc/parsers/systemctl_ls.py
Normal file
101
jc/parsers/systemctl_ls.py
Normal file
@@ -0,0 +1,101 @@
|
||||
"""jc - JSON CLI output utility systemctl-ls Parser
|
||||
|
||||
Usage:
|
||||
specify --systemctl-ls as the first argument if the piped input is coming from systemctl list-sockets
|
||||
|
||||
Examples:
|
||||
|
||||
$ systemctl list-sockets | jc --systemctl-ls -p
|
||||
[
|
||||
{
|
||||
"listen": "/dev/log",
|
||||
"unit": "systemd-journald.socket",
|
||||
"activates": "systemd-journald.service"
|
||||
},
|
||||
{
|
||||
"listen": "/run/dbus/system_bus_socket",
|
||||
"unit": "dbus.socket",
|
||||
"activates": "dbus.service"
|
||||
},
|
||||
{
|
||||
"listen": "/run/dmeventd-client",
|
||||
"unit": "dm-event.socket",
|
||||
"activates": "dm-event.service"
|
||||
},
|
||||
...
|
||||
]
|
||||
"""
|
||||
import jc.utils
|
||||
|
||||
|
||||
def process(proc_data):
    """
    Final processing to conform to the schema.

    Parameters:

        proc_data:   (List of Dictionaries) raw structured data to process

    Returns:

        List of Dictionaries. Structured data with the following schema:

        [
          {
            "listen": string,
            "unit": string,
            "activates": string
          }
        ]
    """
    # all fields are strings already; nothing more to process
    return proc_data
|
||||
|
||||
|
||||
def parse(data, raw=False, quiet=False):
    """
    Main text parsing function

    Parameters:

        data:        (string)  text data to parse
        raw:         (boolean) output preprocessed JSON if True
        quiet:       (boolean) suppress warning messages if True

    Returns:

        dictionary   raw or processed structured data
    """

    # compatible options: linux, darwin, cygwin, win32, aix, systemctlbsd
    compatible = ['linux']

    if not quiet:
        jc.utils.compatibility(__name__, compatible)

    # drop blank lines, then strip any non-ascii characters
    cleandata = [entry.encode('ascii', errors='ignore').decode()
                 for entry in data.splitlines() if entry]

    header_list = cleandata[0].lower().split()

    raw_output = []

    for entry in cleandata[1:]:
        # footer text marks the end of the socket table
        if entry.find('sockets listed.') != -1:
            break

        # split from the right so a LISTEN path containing spaces stays intact
        fields = entry.rsplit(maxsplit=2)
        raw_output.append(dict(zip(header_list, fields)))

    if raw:
        return raw_output
    else:
        return process(raw_output)
|
||||
98
jc/parsers/systemctl_luf.py
Normal file
98
jc/parsers/systemctl_luf.py
Normal file
@@ -0,0 +1,98 @@
|
||||
"""jc - JSON CLI output utility systemctl-luf Parser
|
||||
|
||||
Usage:
|
||||
specify --systemctl-luf as the first argument if the piped input is coming from systemctl list-unit-files
|
||||
|
||||
Examples:
|
||||
|
||||
$ systemctl list-unit-files | jc --systemctl-luf -p
|
||||
[
|
||||
{
|
||||
"unit_file": "proc-sys-fs-binfmt_misc.automount",
|
||||
"state": "static"
|
||||
},
|
||||
{
|
||||
"unit_file": "dev-hugepages.mount",
|
||||
"state": "static"
|
||||
},
|
||||
{
|
||||
"unit_file": "dev-mqueue.mount",
|
||||
"state": "static"
|
||||
},
|
||||
...
|
||||
]
|
||||
"""
|
||||
import jc.utils
|
||||
|
||||
|
||||
def process(proc_data):
    """
    Final processing to conform to the schema.

    Parameters:

        proc_data:   (List of Dictionaries) raw structured data to process

    Returns:

        List of Dictionaries. Structured data with the following schema:

        [
          {
            "unit_file": string,
            "state": string
          }
        ]
    """
    # all fields are strings already; nothing more to process
    return proc_data
|
||||
|
||||
|
||||
def parse(data, raw=False, quiet=False):
    """
    Main text parsing function

    Parameters:

        data:        (string)  text data to parse
        raw:         (boolean) output preprocessed JSON if True
        quiet:       (boolean) suppress warning messages if True

    Returns:

        dictionary   raw or processed structured data
    """

    # compatible options: linux, darwin, cygwin, win32, aix, systemctlbsd
    compatible = ['linux']

    if not quiet:
        jc.utils.compatibility(__name__, compatible)

    # drop blank lines, then strip any non-ascii characters
    cleandata = [entry.encode('ascii', errors='ignore').decode()
                 for entry in data.splitlines() if entry]

    # header is "UNIT FILE STATE"; join the two-word column name
    header_list = cleandata[0].lower().replace('unit file', 'unit_file').split()

    raw_output = []

    for entry in cleandata[1:]:
        # footer text marks the end of the unit-file table
        if entry.find('unit files listed.') != -1:
            break

        fields = entry.split(maxsplit=4)
        raw_output.append(dict(zip(header_list, fields)))

    if raw:
        return raw_output
    else:
        return process(raw_output)
|
||||
@@ -8,35 +8,88 @@ Limitations:
|
||||
|
||||
Example:
|
||||
|
||||
$ uname -a | jc --uname -p
|
||||
{
|
||||
"kernel_name": "Linux",
|
||||
"node_name": "user-ubuntu",
|
||||
"kernel_release": "4.15.0-65-generic",
|
||||
"operating_system": "GNU/Linux",
|
||||
"hardware_platform": "x86_64",
|
||||
"processor": "x86_64",
|
||||
"machine": "x86_64",
|
||||
"kernel_version": "#74-Ubuntu SMP Tue Sep 17 17:06:04 UTC 2019"
|
||||
}
|
||||
$ uname -a | jc --uname -p
|
||||
{
|
||||
"kernel_name": "Linux",
|
||||
"node_name": "user-ubuntu",
|
||||
"kernel_release": "4.15.0-65-generic",
|
||||
"operating_system": "GNU/Linux",
|
||||
"hardware_platform": "x86_64",
|
||||
"processor": "x86_64",
|
||||
"machine": "x86_64",
|
||||
"kernel_version": "#74-Ubuntu SMP Tue Sep 17 17:06:04 UTC 2019"
|
||||
}
|
||||
"""
|
||||
import jc.utils
|
||||
|
||||
|
||||
def parse(data):
|
||||
output = {}
|
||||
def process(proc_data):
    """
    Final processing to conform to the schema.

    Parameters:

        proc_data:    (dictionary) raw structured data to process

    Returns:

        dictionary structured data with the following schema:

        {
          "kernel_name":        string,
          "node_name":          string,
          "kernel_release":     string,
          "operating_system":   string,
          "hardware_platform":  string,
          "processor":          string,
          "machine":            string,
          "kernel_version":     string
        }
    """
    # every field is already a string, so the raw data is schema-conformant as-is
    return proc_data
|
||||
|
||||
|
||||
def parse(data, raw=False, quiet=False):
|
||||
"""
|
||||
Main text parsing function
|
||||
|
||||
Parameters:
|
||||
|
||||
data: (string) text data to parse
|
||||
raw: (boolean) output preprocessed JSON if True
|
||||
quiet: (boolean) suppress warning messages if True
|
||||
|
||||
Returns:
|
||||
|
||||
dictionary raw or processed structured data
|
||||
"""
|
||||
|
||||
# compatible options: linux, darwin, cygwin, win32, aix, freebsd
|
||||
compatible = ['linux']
|
||||
|
||||
if not quiet:
|
||||
jc.utils.compatibility(__name__, compatible)
|
||||
|
||||
raw_output = {}
|
||||
parsed_line = data.split(maxsplit=3)
|
||||
|
||||
output['kernel_name'] = parsed_line.pop(0)
|
||||
output['node_name'] = parsed_line.pop(0)
|
||||
output['kernel_release'] = parsed_line.pop(0)
|
||||
if len(parsed_line) > 1:
|
||||
|
||||
parsed_line = parsed_line[-1].rsplit(maxsplit=4)
|
||||
raw_output['kernel_name'] = parsed_line.pop(0)
|
||||
raw_output['node_name'] = parsed_line.pop(0)
|
||||
raw_output['kernel_release'] = parsed_line.pop(0)
|
||||
|
||||
output['operating_system'] = parsed_line.pop(-1)
|
||||
output['hardware_platform'] = parsed_line.pop(-1)
|
||||
output['processor'] = parsed_line.pop(-1)
|
||||
output['machine'] = parsed_line.pop(-1)
|
||||
parsed_line = parsed_line[-1].rsplit(maxsplit=4)
|
||||
|
||||
output['kernel_version'] = parsed_line.pop(0)
|
||||
raw_output['operating_system'] = parsed_line.pop(-1)
|
||||
raw_output['hardware_platform'] = parsed_line.pop(-1)
|
||||
raw_output['processor'] = parsed_line.pop(-1)
|
||||
raw_output['machine'] = parsed_line.pop(-1)
|
||||
|
||||
return output
|
||||
raw_output['kernel_version'] = parsed_line.pop(0)
|
||||
|
||||
if raw:
|
||||
return raw_output
|
||||
else:
|
||||
return process(raw_output)
|
||||
|
||||
121
jc/parsers/uptime.py
Normal file
121
jc/parsers/uptime.py
Normal file
@@ -0,0 +1,121 @@
|
||||
"""jc - JSON CLI output utility uptime Parser
|
||||
|
||||
Usage:
|
||||
specify --uptime as the first argument if the piped input is coming from uptime
|
||||
|
||||
Example:
|
||||
|
||||
$ uptime | jc --uptime -p
|
||||
{
|
||||
"time": "11:30:44",
|
||||
"uptime": "1 day, 21:17",
|
||||
"users": 1,
|
||||
"load_1m": 0.01,
|
||||
"load_5m": 0.04,
|
||||
"load_15m": 0.05
|
||||
}
|
||||
|
||||
$ uptime | jc --uptime -p -r
|
||||
{
|
||||
"time": "11:31:09",
|
||||
"uptime": "1 day, 21:17",
|
||||
"users": "1",
|
||||
"load_1m": "0.00",
|
||||
"load_5m": "0.04",
|
||||
"load_15m": "0.05"
|
||||
}
|
||||
"""
|
||||
import jc.utils
|
||||
|
||||
|
||||
def process(proc_data):
    """
    Final processing to conform to the schema.

    Parameters:

        proc_data:    (dictionary) raw structured data to process

    Returns:

        dictionary structured data with the following schema:

        {
          "time":     string,
          "uptime":   string,
          "users":    integer,
          "load_1m":  float,
          "load_5m":  float,
          "load_15m": float
        }
    """
    # convert numeric fields in place; unparseable values become null
    # rather than raising, since upstream text may be malformed
    int_list = ['users']
    for key in int_list:
        if key in proc_data:
            try:
                proc_data[key] = int(proc_data[key])
            except ValueError:
                proc_data[key] = None

    float_list = ['load_1m', 'load_5m', 'load_15m']
    for key in float_list:
        if key in proc_data:
            try:
                proc_data[key] = float(proc_data[key])
            except ValueError:
                proc_data[key] = None

    return proc_data
|
||||
|
||||
|
||||
def parse(data, raw=False, quiet=False):
    """
    Main text parsing function

    Parameters:

        data:        (string)  text data to parse
        raw:         (boolean) output preprocessed JSON if True
        quiet:       (boolean) suppress warning messages if True

    Returns:

        dictionary raw or processed structured data
    """

    # compatible options: linux, darwin, cygwin, win32, aix, freebsd
    compatible = ['linux', 'darwin', 'cygwin', 'aix', 'freebsd']

    if not quiet:
        jc.utils.compatibility(__name__, compatible)

    raw_output = {}

    cleandata = data.splitlines()

    if cleandata:
        # only the first line of uptime output is meaningful
        parsed_line = cleandata[0].split()

        # allow space for odd times: pad the token list with ' ' placeholders
        # so the fixed field positions used below (13, 17, 18, 19) line up
        # regardless of how many tokens the uptime text produced
        while len(parsed_line) < 20:
            parsed_line.insert(2, ' ')

        # find first part of time: skip past the padding just inserted to
        # locate where the real uptime text begins
        # NOTE(review): 'marker' is only bound if a non-padding token exists
        # at index >= 2 — presumably always true for real uptime output, but
        # UnboundLocalError is possible on degenerate input; verify.
        for i, word in enumerate(parsed_line[2:]):
            if word != ' ':
                marker = i + 2
                break

        raw_output['time'] = parsed_line[0]
        # everything between the time and the user count is the uptime text
        raw_output['uptime'] = ' '.join(parsed_line[marker:13]).lstrip().rstrip(',')
        raw_output['users'] = parsed_line[13]
        raw_output['load_1m'] = parsed_line[17].rstrip(',')
        raw_output['load_5m'] = parsed_line[18].rstrip(',')
        raw_output['load_15m'] = parsed_line[19]

    if raw:
        return raw_output
    else:
        return process(raw_output)
|
||||
165
jc/parsers/w.py
Normal file
165
jc/parsers/w.py
Normal file
@@ -0,0 +1,165 @@
|
||||
"""jc - JSON CLI output utility w Parser
|
||||
|
||||
Usage:
|
||||
specify --w as the first argument if the piped input is coming from w
|
||||
|
||||
Examples:
|
||||
|
||||
$ w | jc --w -p
|
||||
[
|
||||
{
|
||||
"user": "root",
|
||||
"tty": "tty1",
|
||||
"from": null,
|
||||
"login_at": "07:49",
|
||||
"idle": "1:15m",
|
||||
"jcpu": "0.00s",
|
||||
"pcpu": "0.00s",
|
||||
"what": "-bash"
|
||||
},
|
||||
{
|
||||
"user": "root",
|
||||
"tty": "ttyS0",
|
||||
"from": null,
|
||||
"login_at": "06:24",
|
||||
"idle": "0.00s",
|
||||
"jcpu": "0.43s",
|
||||
"pcpu": "0.00s",
|
||||
"what": "w"
|
||||
},
|
||||
{
|
||||
"user": "root",
|
||||
"tty": "pts/0",
|
||||
"from": "192.168.71.1",
|
||||
"login_at": "06:29",
|
||||
"idle": "2:35m",
|
||||
"jcpu": "0.00s",
|
||||
"pcpu": "0.00s",
|
||||
"what": "-bash"
|
||||
}
|
||||
]
|
||||
|
||||
$ w | jc --w -p -r
|
||||
[
|
||||
{
|
||||
"user": "kbrazil",
|
||||
"tty": "tty1",
|
||||
"from": "-",
|
||||
"login_at": "07:49",
|
||||
"idle": "1:16m",
|
||||
"jcpu": "0.00s",
|
||||
"pcpu": "0.00s",
|
||||
"what": "-bash"
|
||||
},
|
||||
{
|
||||
"user": "kbrazil",
|
||||
"tty": "ttyS0",
|
||||
"from": "-",
|
||||
"login_at": "06:24",
|
||||
"idle": "2.00s",
|
||||
"jcpu": "0.46s",
|
||||
"pcpu": "0.00s",
|
||||
"what": "w"
|
||||
},
|
||||
{
|
||||
"user": "kbrazil",
|
||||
"tty": "pts/0",
|
||||
"from": "192.168.71.1",
|
||||
"login_at": "06:29",
|
||||
"idle": "2:36m",
|
||||
"jcpu": "0.00s",
|
||||
"pcpu": "0.00s",
|
||||
"what": "-bash"
|
||||
}
|
||||
]
|
||||
"""
|
||||
import string
|
||||
import jc.utils
|
||||
|
||||
|
||||
def process(proc_data):
    """
    Final processing to conform to the schema.

    Parameters:

        proc_data:    (dictionary) raw structured data to process

    Returns:

        dictionary structured data with the following schema:

        [
          {
            "user":     string,     # '-' = null
            "tty":      string,     # '-' = null
            "from":     string,     # '-' = null
            "login_at": string,     # '-' = null
            "idle":     string,     # '-' = null
            "jcpu":     string,
            "pcpu":     string,
            "what":     string      # '-' = null
          }
        ]
    """
    # keys whose '-' placeholder means "no data"; hoisted out of the loop
    # since the list is invariant across entries
    null_list = ['user', 'tty', 'from', 'login_at', 'idle', 'what']

    for entry in proc_data:
        for key in null_list:
            if key in entry and entry[key] == '-':
                entry[key] = None

    return proc_data
|
||||
|
||||
|
||||
def parse(data, raw=False, quiet=False):
    """
    Main text parsing function

    Parameters:

        data:        (string)  text data to parse
        raw:         (boolean) output preprocessed JSON if True
        quiet:       (boolean) suppress warning messages if True

    Returns:

        dictionary raw or processed structured data
    """

    # compatible options: linux, darwin, cygwin, win32, aix, freebsd
    compatible = ['linux', 'darwin', 'cygwin', 'aix', 'freebsd']

    if not quiet:
        jc.utils.compatibility(__name__, compatible)

    raw_output = []

    # first line of `w` output is the uptime summary; skip it
    cleandata = data.splitlines()[1:]

    # guard BEFORE touching cleandata[0]: previously empty input raised
    # IndexError because the header was indexed outside the guard
    if cleandata:
        header_text = cleandata[0].lower()

        # fixup for 'from' column that can be blank: remember its column offset
        from_col = header_text.find('from')

        # clean up 'login@' header
        # even though @ in a key is valid json, it can make things difficult
        header_text = header_text.replace('login@', 'login_at')
        headers = header_text.split()

        # parse entry lines
        for entry in cleandata[1:]:
            # limit splits so the trailing 'what' field keeps embedded spaces
            temp_line = entry.split(maxsplit=len(headers) - 1)

            # normalize data by inserting a '-' placeholder when the 'from'
            # column (always column 2) is blank for this row; bounds-check
            # the row so short lines cannot raise IndexError
            if 'from' in headers and from_col < len(entry) and entry[from_col] in string.whitespace:
                temp_line.insert(2, '-')

            output_line = dict(zip(headers, temp_line))
            raw_output.append(output_line)

    if raw:
        return raw_output
    else:
        return process(raw_output)
|
||||
62
jc/utils.py
Normal file
62
jc/utils.py
Normal file
@@ -0,0 +1,62 @@
|
||||
"""jc - JSON CLI output utility utils"""
|
||||
import textwrap
|
||||
import sys
|
||||
|
||||
|
||||
def warning_message(message):
    """
    Prints a warning message for non-fatal issues

    Parameters:

        message:    (string) text of message

    Returns:

        no return, just prints output to STDERR
    """
    # dedent strips the indentation the triple-quoted literal carries
    warning_string = f'''
    jc: Warning - {message}
    '''
    print(textwrap.dedent(warning_string), file=sys.stderr)
|
||||
|
||||
|
||||
def error_message(message):
    """
    Prints an error message for fatal issues

    Parameters:

        message:    (string) text of message

    Returns:

        no return, just prints output to STDERR
    """
    # dedent strips the indentation the triple-quoted literal carries
    fatal_string = f'''
    jc: Error - {message}
    '''
    print(textwrap.dedent(fatal_string), file=sys.stderr)
|
||||
|
||||
|
||||
def compatibility(mod_name, compatible):
    """Checks for the parser's compatibility with the running OS platform.

    Parameters:

        mod_name:     (string) __name__ of the calling module

        compatible:   (list) sys.platform name(s) compatible with the parser
                      compatible options:
                      linux, darwin, cygwin, win32, aix, freebsd

    Returns:

        no return, just prints output to STDERR
    """
    # nothing to report when the current platform is supported
    if sys.platform in compatible:
        return

    # strip the package path so only the parser's short name is shown
    parser_name = mod_name.split('.')[-1]
    platform_list = ', '.join(compatible)
    warning_message(f'{parser_name} parser not compatible with your OS ({sys.platform}).\n Compatible platforms: {platform_list}')
|
||||
3
runtests.sh
Executable file
3
runtests.sh
Executable file
@@ -0,0 +1,3 @@
|
||||
#!/bin/bash

# Run the full jc unit test suite (tests/ package) with verbose output
python3 -m unittest -v
|
||||
9
setup.py
9
setup.py
@@ -5,7 +5,7 @@ with open('README.md', 'r') as f:
|
||||
|
||||
setuptools.setup(
|
||||
name='jc',
|
||||
version='0.9.1',
|
||||
version='1.5.1',
|
||||
author='Kelly Brazil',
|
||||
author_email='kellyjonbrazil@gmail.com',
|
||||
description='This tool serializes the output of popular command line tools to structured JSON output.',
|
||||
@@ -15,17 +15,18 @@ setuptools.setup(
|
||||
license='MIT',
|
||||
long_description=long_description,
|
||||
long_description_content_type='text/markdown',
|
||||
python_requires='~=3.4',
|
||||
python_requires='~=3.6',
|
||||
url='https://github.com/kellyjonbrazil/jc',
|
||||
packages=setuptools.find_packages(),
|
||||
entry_points={
|
||||
'console_scripts': [
|
||||
'jc=jc.jc:main',
|
||||
],
|
||||
'jc=jc.cli:main'
|
||||
]
|
||||
},
|
||||
classifiers=[
|
||||
'Programming Language :: Python :: 3',
|
||||
'License :: OSI Approved :: MIT License',
|
||||
'Operating System :: OS Independent',
|
||||
'Topic :: Utilities'
|
||||
]
|
||||
)
|
||||
|
||||
0
tests/__init__.py
Normal file
0
tests/__init__.py
Normal file
1
tests/fixtures/centos-7.7/arp-a.json
vendored
Normal file
1
tests/fixtures/centos-7.7/arp-a.json
vendored
Normal file
@@ -0,0 +1 @@
|
||||
[{"name": "gateway", "address": "192.168.71.2", "hwtype": "ether", "hwaddress": "00:50:56:f7:4a:fc", "iface": "ens33"}, {"name": null, "address": "192.168.71.1", "hwtype": "ether", "hwaddress": "00:50:56:c0:00:08", "iface": "ens33"}, {"name": null, "address": "192.168.71.254", "hwtype": "ether", "hwaddress": "00:50:56:fe:7a:b4", "iface": "ens33"}]
|
||||
3
tests/fixtures/centos-7.7/arp-a.out
vendored
Normal file
3
tests/fixtures/centos-7.7/arp-a.out
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
gateway (192.168.71.2) at 00:50:56:f7:4a:fc [ether] on ens33
|
||||
? (192.168.71.1) at 00:50:56:c0:00:08 [ether] on ens33
|
||||
? (192.168.71.254) at 00:50:56:fe:7a:b4 [ether] on ens33
|
||||
1
tests/fixtures/centos-7.7/arp-v.json
vendored
Normal file
1
tests/fixtures/centos-7.7/arp-v.json
vendored
Normal file
@@ -0,0 +1 @@
|
||||
[{"address": "gateway", "hwtype": "ether", "hwaddress": "00:50:56:f7:4a:fc", "flags_mask": "C", "iface": "ens33"}, {"address": "192.168.71.254", "hwtype": "ether", "hwaddress": "00:50:56:fe:7a:b4", "flags_mask": "C", "iface": "ens33"}]
|
||||
4
tests/fixtures/centos-7.7/arp-v.out
vendored
Normal file
4
tests/fixtures/centos-7.7/arp-v.out
vendored
Normal file
@@ -0,0 +1,4 @@
|
||||
Address HWtype HWaddress Flags Mask Iface
|
||||
gateway ether 00:50:56:f7:4a:fc C ens33
|
||||
192.168.71.254 ether 00:50:56:fe:7a:b4 C ens33
|
||||
Entries: 2 Skipped: 0 Found: 2
|
||||
1
tests/fixtures/centos-7.7/arp.json
vendored
Normal file
1
tests/fixtures/centos-7.7/arp.json
vendored
Normal file
@@ -0,0 +1 @@
|
||||
[{"address": "gateway", "hwtype": "ether", "hwaddress": "00:50:56:f7:4a:fc", "flags_mask": "C", "iface": "ens33"}, {"address": "192.168.71.254", "hwtype": "ether", "hwaddress": "00:50:56:fe:7a:b4", "flags_mask": "C", "iface": "ens33"}]
|
||||
3
tests/fixtures/centos-7.7/arp.out
vendored
Normal file
3
tests/fixtures/centos-7.7/arp.out
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
Address HWtype HWaddress Flags Mask Iface
|
||||
gateway ether 00:50:56:f7:4a:fc C ens33
|
||||
192.168.71.254 ether 00:50:56:fe:7a:b4 C ens33
|
||||
1
tests/fixtures/centos-7.7/df-h.json
vendored
Normal file
1
tests/fixtures/centos-7.7/df-h.json
vendored
Normal file
@@ -0,0 +1 @@
|
||||
[{"filesystem": "devtmpfs", "size": "1.9G", "used": 0, "available": null, "use_percent": 0, "mounted_on": "/dev"}, {"filesystem": "tmpfs", "size": "1.9G", "used": 0, "available": null, "use_percent": 0, "mounted_on": "/dev/shm"}, {"filesystem": "tmpfs", "size": "1.9G", "used": null, "available": null, "use_percent": 1, "mounted_on": "/run"}, {"filesystem": "tmpfs", "size": "1.9G", "used": 0, "available": null, "use_percent": 0, "mounted_on": "/sys/fs/cgroup"}, {"filesystem": "/dev/mapper/centos-root", "size": "17G", "used": null, "available": null, "use_percent": 11, "mounted_on": "/"}, {"filesystem": "/dev/sda1", "size": "1014M", "used": null, "available": null, "use_percent": 23, "mounted_on": "/boot"}, {"filesystem": "tmpfs", "size": "378M", "used": 0, "available": null, "use_percent": 0, "mounted_on": "/run/user/1000"}]
|
||||
8
tests/fixtures/centos-7.7/df-h.out
vendored
Normal file
8
tests/fixtures/centos-7.7/df-h.out
vendored
Normal file
@@ -0,0 +1,8 @@
|
||||
Filesystem Size Used Avail Use% Mounted on
|
||||
devtmpfs 1.9G 0 1.9G 0% /dev
|
||||
tmpfs 1.9G 0 1.9G 0% /dev/shm
|
||||
tmpfs 1.9G 12M 1.9G 1% /run
|
||||
tmpfs 1.9G 0 1.9G 0% /sys/fs/cgroup
|
||||
/dev/mapper/centos-root 17G 1.8G 16G 11% /
|
||||
/dev/sda1 1014M 233M 782M 23% /boot
|
||||
tmpfs 378M 0 378M 0% /run/user/1000
|
||||
1
tests/fixtures/centos-7.7/df.json
vendored
Normal file
1
tests/fixtures/centos-7.7/df.json
vendored
Normal file
@@ -0,0 +1 @@
|
||||
[{"filesystem": "devtmpfs", "1k-blocks": 1918816, "used": 0, "available": 1918816, "use_percent": 0, "mounted_on": "/dev"}, {"filesystem": "tmpfs", "1k-blocks": 1930664, "used": 0, "available": 1930664, "use_percent": 0, "mounted_on": "/dev/shm"}, {"filesystem": "tmpfs", "1k-blocks": 1930664, "used": 11832, "available": 1918832, "use_percent": 1, "mounted_on": "/run"}, {"filesystem": "tmpfs", "1k-blocks": 1930664, "used": 0, "available": 1930664, "use_percent": 0, "mounted_on": "/sys/fs/cgroup"}, {"filesystem": "/dev/mapper/centos-root", "1k-blocks": 17811456, "used": 1805580, "available": 16005876, "use_percent": 11, "mounted_on": "/"}, {"filesystem": "/dev/sda1", "1k-blocks": 1038336, "used": 237600, "available": 800736, "use_percent": 23, "mounted_on": "/boot"}, {"filesystem": "tmpfs", "1k-blocks": 386136, "used": 0, "available": 386136, "use_percent": 0, "mounted_on": "/run/user/1000"}]
|
||||
8
tests/fixtures/centos-7.7/df.out
vendored
Normal file
8
tests/fixtures/centos-7.7/df.out
vendored
Normal file
@@ -0,0 +1,8 @@
|
||||
Filesystem 1K-blocks Used Available Use% Mounted on
|
||||
devtmpfs 1918816 0 1918816 0% /dev
|
||||
tmpfs 1930664 0 1930664 0% /dev/shm
|
||||
tmpfs 1930664 11832 1918832 1% /run
|
||||
tmpfs 1930664 0 1930664 0% /sys/fs/cgroup
|
||||
/dev/mapper/centos-root 17811456 1805580 16005876 11% /
|
||||
/dev/sda1 1038336 237600 800736 23% /boot
|
||||
tmpfs 386136 0 386136 0% /run/user/1000
|
||||
1
tests/fixtures/centos-7.7/dig-aaaa.json
vendored
Normal file
1
tests/fixtures/centos-7.7/dig-aaaa.json
vendored
Normal file
@@ -0,0 +1 @@
|
||||
[{"id": 25779, "opcode": "QUERY", "status": "NOERROR", "flags": ["qr", "rd", "ra"], "query_num": 1, "answer_num": 1, "authority_num": 0, "additional_num": 1, "question": {"name": "www.google.com.", "class": "IN", "type": "AAAA"}, "answer": [{"name": "www.google.com.", "class": "IN", "type": "AAAA", "ttl": 5, "data": "2607:f8b0:4000:808::2004"}], "query_time": 28, "server": "192.168.71.2#53(192.168.71.2)", "when": "Wed Oct 30 05:12:53 PDT 2019", "rcvd": 71}]
|
||||
20
tests/fixtures/centos-7.7/dig-aaaa.out
vendored
Normal file
20
tests/fixtures/centos-7.7/dig-aaaa.out
vendored
Normal file
@@ -0,0 +1,20 @@
|
||||
|
||||
; <<>> DiG 9.11.4-P2-RedHat-9.11.4-9.P2.el7 <<>> www.google.com AAAA
|
||||
;; global options: +cmd
|
||||
;; Got answer:
|
||||
;; ->>HEADER<<- opcode: QUERY, status: NOERROR, id: 25779
|
||||
;; flags: qr rd ra; QUERY: 1, ANSWER: 1, AUTHORITY: 0, ADDITIONAL: 1
|
||||
|
||||
;; OPT PSEUDOSECTION:
|
||||
; EDNS: version: 0, flags:; MBZ: 0x0005, udp: 4096
|
||||
;; QUESTION SECTION:
|
||||
;www.google.com. IN AAAA
|
||||
|
||||
;; ANSWER SECTION:
|
||||
www.google.com. 5 IN AAAA 2607:f8b0:4000:808::2004
|
||||
|
||||
;; Query time: 28 msec
|
||||
;; SERVER: 192.168.71.2#53(192.168.71.2)
|
||||
;; WHEN: Wed Oct 30 05:12:53 PDT 2019
|
||||
;; MSG SIZE rcvd: 71
|
||||
|
||||
1
tests/fixtures/centos-7.7/dig-x.json
vendored
Normal file
1
tests/fixtures/centos-7.7/dig-x.json
vendored
Normal file
@@ -0,0 +1 @@
|
||||
[{"id": 36298, "opcode": "QUERY", "status": "NOERROR", "flags": ["qr", "rd", "ra"], "query_num": 1, "answer_num": 1, "authority_num": 0, "additional_num": 1, "question": {"name": "1.1.1.1.in-addr.arpa.", "class": "IN", "type": "PTR"}, "answer": [{"name": "1.1.1.1.in-addr.arpa.", "class": "IN", "type": "PTR", "ttl": 5, "data": "one.one.one.one."}], "query_time": 32, "server": "192.168.71.2#53(192.168.71.2)", "when": "Wed Oct 30 05:13:36 PDT 2019", "rcvd": 78}]
|
||||
20
tests/fixtures/centos-7.7/dig-x.out
vendored
Normal file
20
tests/fixtures/centos-7.7/dig-x.out
vendored
Normal file
@@ -0,0 +1,20 @@
|
||||
|
||||
; <<>> DiG 9.11.4-P2-RedHat-9.11.4-9.P2.el7 <<>> -x 1.1.1.1
|
||||
;; global options: +cmd
|
||||
;; Got answer:
|
||||
;; ->>HEADER<<- opcode: QUERY, status: NOERROR, id: 36298
|
||||
;; flags: qr rd ra; QUERY: 1, ANSWER: 1, AUTHORITY: 0, ADDITIONAL: 1
|
||||
|
||||
;; OPT PSEUDOSECTION:
|
||||
; EDNS: version: 0, flags:; MBZ: 0x0005, udp: 4096
|
||||
;; QUESTION SECTION:
|
||||
;1.1.1.1.in-addr.arpa. IN PTR
|
||||
|
||||
;; ANSWER SECTION:
|
||||
1.1.1.1.in-addr.arpa. 5 IN PTR one.one.one.one.
|
||||
|
||||
;; Query time: 32 msec
|
||||
;; SERVER: 192.168.71.2#53(192.168.71.2)
|
||||
;; WHEN: Wed Oct 30 05:13:36 PDT 2019
|
||||
;; MSG SIZE rcvd: 78
|
||||
|
||||
1
tests/fixtures/centos-7.7/dig.json
vendored
Normal file
1
tests/fixtures/centos-7.7/dig.json
vendored
Normal file
@@ -0,0 +1 @@
|
||||
[{"id": 44295, "opcode": "QUERY", "status": "NOERROR", "flags": ["qr", "rd", "ra"], "query_num": 1, "answer_num": 2, "authority_num": 0, "additional_num": 1, "question": {"name": "www.cnn.com.", "class": "IN", "type": "A"}, "answer": [{"name": "www.cnn.com.", "class": "IN", "type": "CNAME", "ttl": 5, "data": "turner-tls.map.fastly.net."}, {"name": "turner-tls.map.fastly.net.", "class": "IN", "type": "A", "ttl": 5, "data": "151.101.189.67"}], "query_time": 25, "server": "192.168.71.2#53(192.168.71.2)", "when": "Wed Oct 30 05:13:22 PDT 2019", "rcvd": 95}, {"id": 34074, "opcode": "QUERY", "status": "NOERROR", "flags": ["qr", "rd", "ra"], "query_num": 1, "answer_num": 1, "authority_num": 0, "additional_num": 1, "question": {"name": "www.google.com.", "class": "IN", "type": "A"}, "answer": [{"name": "www.google.com.", "class": "IN", "type": "A", "ttl": 5, "data": "216.58.194.100"}], "query_time": 25, "server": "192.168.71.2#53(192.168.71.2)", "when": "Wed Oct 30 05:13:22 PDT 2019", "rcvd": 59}]
|
||||
38
tests/fixtures/centos-7.7/dig.out
vendored
Normal file
38
tests/fixtures/centos-7.7/dig.out
vendored
Normal file
@@ -0,0 +1,38 @@
|
||||
|
||||
; <<>> DiG 9.11.4-P2-RedHat-9.11.4-9.P2.el7 <<>> www.cnn.com www.google.com
|
||||
;; global options: +cmd
|
||||
;; Got answer:
|
||||
;; ->>HEADER<<- opcode: QUERY, status: NOERROR, id: 44295
|
||||
;; flags: qr rd ra; QUERY: 1, ANSWER: 2, AUTHORITY: 0, ADDITIONAL: 1
|
||||
|
||||
;; OPT PSEUDOSECTION:
|
||||
; EDNS: version: 0, flags:; MBZ: 0x0005, udp: 4096
|
||||
;; QUESTION SECTION:
|
||||
;www.cnn.com. IN A
|
||||
|
||||
;; ANSWER SECTION:
|
||||
www.cnn.com. 5 IN CNAME turner-tls.map.fastly.net.
|
||||
turner-tls.map.fastly.net. 5 IN A 151.101.189.67
|
||||
|
||||
;; Query time: 25 msec
|
||||
;; SERVER: 192.168.71.2#53(192.168.71.2)
|
||||
;; WHEN: Wed Oct 30 05:13:22 PDT 2019
|
||||
;; MSG SIZE rcvd: 95
|
||||
|
||||
;; Got answer:
|
||||
;; ->>HEADER<<- opcode: QUERY, status: NOERROR, id: 34074
|
||||
;; flags: qr rd ra; QUERY: 1, ANSWER: 1, AUTHORITY: 0, ADDITIONAL: 1
|
||||
|
||||
;; OPT PSEUDOSECTION:
|
||||
; EDNS: version: 0, flags:; MBZ: 0x0005, udp: 4096
|
||||
;; QUESTION SECTION:
|
||||
;www.google.com. IN A
|
||||
|
||||
;; ANSWER SECTION:
|
||||
www.google.com. 5 IN A 216.58.194.100
|
||||
|
||||
;; Query time: 25 msec
|
||||
;; SERVER: 192.168.71.2#53(192.168.71.2)
|
||||
;; WHEN: Wed Oct 30 05:13:22 PDT 2019
|
||||
;; MSG SIZE rcvd: 59
|
||||
|
||||
1
tests/fixtures/centos-7.7/env.json
vendored
Normal file
1
tests/fixtures/centos-7.7/env.json
vendored
Normal file
@@ -0,0 +1 @@
|
||||
[{"name": "XDG_SESSION_ID", "value": "17"}, {"name": "HOSTNAME", "value": "localhost.localdomain"}, {"name": "SELINUX_ROLE_REQUESTED", "value": ""}, {"name": "SHELL", "value": "/bin/bash"}, {"name": "TERM", "value": "xterm-256color"}, {"name": "HISTSIZE", "value": "1000"}, {"name": "SSH_CLIENT", "value": "192.168.71.1 58727 22"}, {"name": "SELINUX_USE_CURRENT_RANGE", "value": ""}, {"name": "SSH_TTY", "value": "/dev/pts/0"}, {"name": "USER", "value": "kbrazil"}, {"name": "LS_COLORS", "value": "rs=0:di=38;5;27:ln=38;5;51:mh=44;38;5;15:pi=40;38;5;11:so=38;5;13:do=38;5;5:bd=48;5;232;38;5;11:cd=48;5;232;38;5;3:or=48;5;232;38;5;9:mi=05;48;5;232;38;5;15:su=48;5;196;38;5;15:sg=48;5;11;38;5;16:ca=48;5;196;38;5;226:tw=48;5;10;38;5;16:ow=48;5;10;38;5;21:st=48;5;21;38;5;15:ex=38;5;34:*.tar=38;5;9:*.tgz=38;5;9:*.arc=38;5;9:*.arj=38;5;9:*.taz=38;5;9:*.lha=38;5;9:*.lz4=38;5;9:*.lzh=38;5;9:*.lzma=38;5;9:*.tlz=38;5;9:*.txz=38;5;9:*.tzo=38;5;9:*.t7z=38;5;9:*.zip=38;5;9:*.z=38;5;9:*.Z=38;5;9:*.dz=38;5;9:*.gz=38;5;9:*.lrz=38;5;9:*.lz=38;5;9:*.lzo=38;5;9:*.xz=38;5;9:*.bz2=38;5;9:*.bz=38;5;9:*.tbz=38;5;9:*.tbz2=38;5;9:*.tz=38;5;9:*.deb=38;5;9:*.rpm=38;5;9:*.jar=38;5;9:*.war=38;5;9:*.ear=38;5;9:*.sar=38;5;9:*.rar=38;5;9:*.alz=38;5;9:*.ace=38;5;9:*.zoo=38;5;9:*.cpio=38;5;9:*.7z=38;5;9:*.rz=38;5;9:*.cab=38;5;9:*.jpg=38;5;13:*.jpeg=38;5;13:*.gif=38;5;13:*.bmp=38;5;13:*.pbm=38;5;13:*.pgm=38;5;13:*.ppm=38;5;13:*.tga=38;5;13:*.xbm=38;5;13:*.xpm=38;5;13:*.tif=38;5;13:*.tiff=38;5;13:*.png=38;5;13:*.svg=38;5;13:*.svgz=38;5;13:*.mng=38;5;13:*.pcx=38;5;13:*.mov=38;5;13:*.mpg=38;5;13:*.mpeg=38;5;13:*.m2v=38;5;13:*.mkv=38;5;13:*.webm=38;5;13:*.ogm=38;5;13:*.mp4=38;5;13:*.m4v=38;5;13:*.mp4v=38;5;13:*.vob=38;5;13:*.qt=38;5;13:*.nuv=38;5;13:*.wmv=38;5;13:*.asf=38;5;13:*.rm=38;5;13:*.rmvb=38;5;13:*.flc=38;5;13:*.avi=38;5;13:*.fli=38;5;13:*.flv=38;5;13:*.gl=38;5;13:*.dl=38;5;13:*.xcf=38;5;13:*.xwd=38;5;13:*.yuv=38;5;13:*.cgm=38;5;13:*.emf=38;5;13:*.axv=38;5;13:*.anx=38;5;13:*.ogv=38;5;13:*.ogx=38;5;13:*.aa
c=38;5;45:*.au=38;5;45:*.flac=38;5;45:*.mid=38;5;45:*.midi=38;5;45:*.mka=38;5;45:*.mp3=38;5;45:*.mpc=38;5;45:*.ogg=38;5;45:*.ra=38;5;45:*.wav=38;5;45:*.axa=38;5;45:*.oga=38;5;45:*.spx=38;5;45:*.xspf=38;5;45:"}, {"name": "PATH", "value": "/usr/local/bin:/usr/bin:/usr/local/sbin:/usr/sbin:/home/kbrazil/.local/bin:/home/kbrazil/bin"}, {"name": "MAIL", "value": "/var/spool/mail/kbrazil"}, {"name": "PWD", "value": "/home/kbrazil/testfiles"}, {"name": "LANG", "value": "en_US.UTF-8"}, {"name": "SELINUX_LEVEL_REQUESTED", "value": ""}, {"name": "HISTCONTROL", "value": "ignoredups"}, {"name": "HOME", "value": "/home/kbrazil"}, {"name": "SHLVL", "value": "2"}, {"name": "LOGNAME", "value": "kbrazil"}, {"name": "SSH_CONNECTION", "value": "192.168.71.1 58727 192.168.71.137 22"}, {"name": "LESSOPEN", "value": "||/usr/bin/lesspipe.sh %s"}, {"name": "XDG_RUNTIME_DIR", "value": "/run/user/1000"}, {"name": "_", "value": "/usr/bin/env"}]
|
||||
24
tests/fixtures/centos-7.7/env.out
vendored
Normal file
24
tests/fixtures/centos-7.7/env.out
vendored
Normal file
@@ -0,0 +1,24 @@
|
||||
XDG_SESSION_ID=17
|
||||
HOSTNAME=localhost.localdomain
|
||||
SELINUX_ROLE_REQUESTED=
|
||||
SHELL=/bin/bash
|
||||
TERM=xterm-256color
|
||||
HISTSIZE=1000
|
||||
SSH_CLIENT=192.168.71.1 58727 22
|
||||
SELINUX_USE_CURRENT_RANGE=
|
||||
SSH_TTY=/dev/pts/0
|
||||
USER=kbrazil
|
||||
LS_COLORS=rs=0:di=38;5;27:ln=38;5;51:mh=44;38;5;15:pi=40;38;5;11:so=38;5;13:do=38;5;5:bd=48;5;232;38;5;11:cd=48;5;232;38;5;3:or=48;5;232;38;5;9:mi=05;48;5;232;38;5;15:su=48;5;196;38;5;15:sg=48;5;11;38;5;16:ca=48;5;196;38;5;226:tw=48;5;10;38;5;16:ow=48;5;10;38;5;21:st=48;5;21;38;5;15:ex=38;5;34:*.tar=38;5;9:*.tgz=38;5;9:*.arc=38;5;9:*.arj=38;5;9:*.taz=38;5;9:*.lha=38;5;9:*.lz4=38;5;9:*.lzh=38;5;9:*.lzma=38;5;9:*.tlz=38;5;9:*.txz=38;5;9:*.tzo=38;5;9:*.t7z=38;5;9:*.zip=38;5;9:*.z=38;5;9:*.Z=38;5;9:*.dz=38;5;9:*.gz=38;5;9:*.lrz=38;5;9:*.lz=38;5;9:*.lzo=38;5;9:*.xz=38;5;9:*.bz2=38;5;9:*.bz=38;5;9:*.tbz=38;5;9:*.tbz2=38;5;9:*.tz=38;5;9:*.deb=38;5;9:*.rpm=38;5;9:*.jar=38;5;9:*.war=38;5;9:*.ear=38;5;9:*.sar=38;5;9:*.rar=38;5;9:*.alz=38;5;9:*.ace=38;5;9:*.zoo=38;5;9:*.cpio=38;5;9:*.7z=38;5;9:*.rz=38;5;9:*.cab=38;5;9:*.jpg=38;5;13:*.jpeg=38;5;13:*.gif=38;5;13:*.bmp=38;5;13:*.pbm=38;5;13:*.pgm=38;5;13:*.ppm=38;5;13:*.tga=38;5;13:*.xbm=38;5;13:*.xpm=38;5;13:*.tif=38;5;13:*.tiff=38;5;13:*.png=38;5;13:*.svg=38;5;13:*.svgz=38;5;13:*.mng=38;5;13:*.pcx=38;5;13:*.mov=38;5;13:*.mpg=38;5;13:*.mpeg=38;5;13:*.m2v=38;5;13:*.mkv=38;5;13:*.webm=38;5;13:*.ogm=38;5;13:*.mp4=38;5;13:*.m4v=38;5;13:*.mp4v=38;5;13:*.vob=38;5;13:*.qt=38;5;13:*.nuv=38;5;13:*.wmv=38;5;13:*.asf=38;5;13:*.rm=38;5;13:*.rmvb=38;5;13:*.flc=38;5;13:*.avi=38;5;13:*.fli=38;5;13:*.flv=38;5;13:*.gl=38;5;13:*.dl=38;5;13:*.xcf=38;5;13:*.xwd=38;5;13:*.yuv=38;5;13:*.cgm=38;5;13:*.emf=38;5;13:*.axv=38;5;13:*.anx=38;5;13:*.ogv=38;5;13:*.ogx=38;5;13:*.aac=38;5;45:*.au=38;5;45:*.flac=38;5;45:*.mid=38;5;45:*.midi=38;5;45:*.mka=38;5;45:*.mp3=38;5;45:*.mpc=38;5;45:*.ogg=38;5;45:*.ra=38;5;45:*.wav=38;5;45:*.axa=38;5;45:*.oga=38;5;45:*.spx=38;5;45:*.xspf=38;5;45:
|
||||
PATH=/usr/local/bin:/usr/bin:/usr/local/sbin:/usr/sbin:/home/kbrazil/.local/bin:/home/kbrazil/bin
|
||||
MAIL=/var/spool/mail/kbrazil
|
||||
PWD=/home/kbrazil/testfiles
|
||||
LANG=en_US.UTF-8
|
||||
SELINUX_LEVEL_REQUESTED=
|
||||
HISTCONTROL=ignoredups
|
||||
HOME=/home/kbrazil
|
||||
SHLVL=2
|
||||
LOGNAME=kbrazil
|
||||
SSH_CONNECTION=192.168.71.1 58727 192.168.71.137 22
|
||||
LESSOPEN=||/usr/bin/lesspipe.sh %s
|
||||
XDG_RUNTIME_DIR=/run/user/1000
|
||||
_=/usr/bin/env
|
||||
1
tests/fixtures/centos-7.7/free-h.json
vendored
Normal file
1
tests/fixtures/centos-7.7/free-h.json
vendored
Normal file
@@ -0,0 +1 @@
|
||||
[{"type": "Mem", "total": null, "used": null, "free": null, "shared": null, "buff_cache": null, "available": null}, {"type": "Swap", "total": null, "used": null, "free": null}]
|
||||
3
tests/fixtures/centos-7.7/free-h.out
vendored
Normal file
3
tests/fixtures/centos-7.7/free-h.out
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
total used free shared buff/cache available
|
||||
Mem: 3.7G 217M 3.2G 11M 267M 3.2G
|
||||
Swap: 2.0G 0B 2.0G
|
||||
1
tests/fixtures/centos-7.7/free.json
vendored
Normal file
1
tests/fixtures/centos-7.7/free.json
vendored
Normal file
@@ -0,0 +1 @@
|
||||
[{"type": "Mem", "total": 3861332, "used": 222820, "free": 3364176, "shared": 11832, "buff_cache": 274336, "available": 3389588}, {"type": "Swap", "total": 2097148, "used": 0, "free": 2097148}]
|
||||
3
tests/fixtures/centos-7.7/free.out
vendored
Normal file
3
tests/fixtures/centos-7.7/free.out
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
total used free shared buff/cache available
|
||||
Mem: 3861332 222820 3364176 11832 274336 3389588
|
||||
Swap: 2097148 0 2097148
|
||||
1
tests/fixtures/centos-7.7/fstab.json
vendored
Normal file
1
tests/fixtures/centos-7.7/fstab.json
vendored
Normal file
@@ -0,0 +1 @@
|
||||
[{"fs_spec": "/dev/mapper/centos-root", "fs_file": "/", "fs_vfstype": "xfs", "fs_mntops": "defaults", "fs_freq": 0, "fs_passno": 0}, {"fs_spec": "UUID=05d927bb-5875-49e3-ada1-7f46cb31c932", "fs_file": "/boot", "fs_vfstype": "xfs", "fs_mntops": "defaults", "fs_freq": 0, "fs_passno": 0}, {"fs_spec": "/dev/mapper/centos-swap", "fs_file": "swap", "fs_vfstype": "swap", "fs_mntops": "defaults", "fs_freq": 0, "fs_passno": 0}]
|
||||
11
tests/fixtures/centos-7.7/fstab.out
vendored
Normal file
11
tests/fixtures/centos-7.7/fstab.out
vendored
Normal file
@@ -0,0 +1,11 @@
|
||||
|
||||
#
|
||||
# /etc/fstab
|
||||
# Created by anaconda on Thu Aug 15 10:53:00 2019
|
||||
#
|
||||
# Accessible filesystems, by reference, are maintained under '/dev/disk'
|
||||
# See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info
|
||||
#
|
||||
/dev/mapper/centos-root / xfs defaults 0 0
|
||||
UUID=05d927bb-5875-49e3-ada1-7f46cb31c932 /boot xfs defaults 0 0 # this is a comment
|
||||
/dev/mapper/centos-swap swap swap defaults 0 0
|
||||
1
tests/fixtures/centos-7.7/history.json
vendored
Normal file
1
tests/fixtures/centos-7.7/history.json
vendored
Normal file
File diff suppressed because one or more lines are too long
1000
tests/fixtures/centos-7.7/history.out
vendored
Normal file
1000
tests/fixtures/centos-7.7/history.out
vendored
Normal file
File diff suppressed because it is too large
Load Diff
1
tests/fixtures/centos-7.7/hosts.json
vendored
Normal file
1
tests/fixtures/centos-7.7/hosts.json
vendored
Normal file
@@ -0,0 +1 @@
|
||||
[{"ip": "127.0.0.1", "hostname": ["localhost", "localhost.localdomain", "localhost4", "localhost4.localdomain4"]}, {"ip": "::1", "hostname": ["localhost", "localhost.localdomain", "localhost6", "localhost6.localdomain6"]}]
|
||||
3
tests/fixtures/centos-7.7/hosts.out
vendored
Normal file
3
tests/fixtures/centos-7.7/hosts.out
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
# comment line
|
||||
127.0.0.1 localhost localhost.localdomain localhost4 localhost4.localdomain4
|
||||
::1 localhost localhost.localdomain localhost6 localhost6.localdomain6 # this is a comment
|
||||
1
tests/fixtures/centos-7.7/ifconfig.json
vendored
Normal file
1
tests/fixtures/centos-7.7/ifconfig.json
vendored
Normal file
@@ -0,0 +1 @@
|
||||
[{"name": "docker0", "flags": 4099, "state": "UP,BROADCAST,MULTICAST", "mtu": 1500, "ipv4_addr": "172.17.0.1", "ipv4_mask": "255.255.0.0", "ipv4_bcast": "0.0.0.0", "mac_addr": "02:42:b1:9a:ea:02", "type": "Ethernet", "rx_packets": 0, "rx_errors": 0, "rx_dropped": 0, "rx_overruns": 0, "rx_frame": 0, "tx_packets": 0, "tx_errors": 0, "tx_dropped": 0, "tx_overruns": 0, "tx_carrier": 0, "tx_collisions": 0, "ipv6_addr": null, "ipv6_mask": null, "ipv6_scope": null, "metric": null}, {"name": "ens33", "flags": 4163, "state": "UP,BROADCAST,RUNNING,MULTICAST", "mtu": 1500, "ipv4_addr": "192.168.71.137", "ipv4_mask": "255.255.255.0", "ipv4_bcast": "192.168.71.255", "ipv6_addr": "fe80::c1cb:715d:bc3e:b8a0", "ipv6_mask": 64, "ipv6_scope": "link", "mac_addr": "00:0c:29:3b:58:0e", "type": "Ethernet", "rx_packets": 8061, "rx_errors": 0, "rx_dropped": 0, "rx_overruns": 0, "rx_frame": 0, "tx_packets": 4502, "tx_errors": 0, "tx_dropped": 0, "tx_overruns": 0, "tx_carrier": 0, "tx_collisions": 0, "metric": null}, {"name": "lo", "flags": 73, "state": "UP,LOOPBACK,RUNNING", "mtu": 65536, "ipv4_addr": "127.0.0.1", "ipv4_mask": "255.0.0.0", "ipv4_bcast": null, "ipv6_addr": "::1", "ipv6_mask": 128, "ipv6_scope": "host", "mac_addr": null, "type": "Local Loopback", "rx_packets": 73, "rx_errors": 0, "rx_dropped": 0, "rx_overruns": 0, "rx_frame": 0, "tx_packets": 73, "tx_errors": 0, "tx_dropped": 0, "tx_overruns": 0, "tx_carrier": 0, "tx_collisions": 0, "metric": null}]
|
||||
26
tests/fixtures/centos-7.7/ifconfig.out
vendored
Normal file
26
tests/fixtures/centos-7.7/ifconfig.out
vendored
Normal file
@@ -0,0 +1,26 @@
|
||||
docker0: flags=4099<UP,BROADCAST,MULTICAST> mtu 1500
|
||||
inet 172.17.0.1 netmask 255.255.0.0 broadcast 0.0.0.0
|
||||
ether 02:42:b1:9a:ea:02 txqueuelen 0 (Ethernet)
|
||||
RX packets 0 bytes 0 (0.0 B)
|
||||
RX errors 0 dropped 0 overruns 0 frame 0
|
||||
TX packets 0 bytes 0 (0.0 B)
|
||||
TX errors 0 dropped 0 overruns 0 carrier 0 collisions 0
|
||||
|
||||
ens33: flags=4163<UP,BROADCAST,RUNNING,MULTICAST> mtu 1500
|
||||
inet 192.168.71.137 netmask 255.255.255.0 broadcast 192.168.71.255
|
||||
inet6 fe80::c1cb:715d:bc3e:b8a0 prefixlen 64 scopeid 0x20<link>
|
||||
ether 00:0c:29:3b:58:0e txqueuelen 1000 (Ethernet)
|
||||
RX packets 8061 bytes 1514413 (1.4 MiB)
|
||||
RX errors 0 dropped 0 overruns 0 frame 0
|
||||
TX packets 4502 bytes 866622 (846.3 KiB)
|
||||
TX errors 0 dropped 0 overruns 0 carrier 0 collisions 0
|
||||
|
||||
lo: flags=73<UP,LOOPBACK,RUNNING> mtu 65536
|
||||
inet 127.0.0.1 netmask 255.0.0.0
|
||||
inet6 ::1 prefixlen 128 scopeid 0x10<host>
|
||||
loop txqueuelen 1000 (Local Loopback)
|
||||
RX packets 73 bytes 6009 (5.8 KiB)
|
||||
RX errors 0 dropped 0 overruns 0 frame 0
|
||||
TX packets 73 bytes 6009 (5.8 KiB)
|
||||
TX errors 0 dropped 0 overruns 0 carrier 0 collisions 0
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user