mirror of https://github.com/kellyjonbrazil/jc.git (synced 2025-06-17 00:07:37 +02:00)

CHANGELOG
@@ -1,5 +1,21 @@
jc changelog

20230227 v1.23.0
- Add input slicing as a `jc` command-line option
- Add `ssh` configuration file parser
- Add `ver` Version string parser
- Add `zpool iostat` command parser
- Add `zpool status` command parser
- Fix `acpi` command parser for "will never fully discharge" battery state
- Fix `crontab` and `crontab-u` command and file parsers for cases where only
  shortcut schedule items exist
- Fix `ifconfig` command parser for older-style linux output
- Fix `xrandr` command parser for proper `is_current` output
- Fix `xrandr` command parser for infinite loop with some device configurations
- Add `reflection` key to `xrandr` parser schema
- Add display model info from EDID to `xrandr` parser
- Add `MPX-specific VMA` support for VM Flags in `/proc/<pid>/smaps` parser

20230111 v1.22.5
- Add TOML file parser
- Add INI with duplicate key support file parser
README.md
@@ -3,7 +3,7 @@

> Check out the `jc` Python [package documentation](https://github.com/kellyjonbrazil/jc/tree/master/docs) for developers

> Try the `jc` [web demo](https://jc-web.onrender.com/) and [REST API](https://github.com/kellyjonbrazil/jc-restapi)

> JC is [now available](https://galaxy.ansible.com/community/general) as an
Ansible filter plugin in the `community.general` collection. See this
@@ -44,8 +44,8 @@ $ jc dig example.com | jq -r '.[].answer[].data'
93.184.216.34
```

`jc` can also be used as a python library. In this case the returned value
will be a python dictionary, a list of dictionaries, or even a
[lazy iterable of dictionaries](#using-streaming-parsers-as-python-modules)
instead of JSON:
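A rough sketch of that library interface (the `uptime` parser and the `load_1m` field here are illustrative choices, not part of this README excerpt):

```python
# Illustrative sketch: run a command and parse it with the jc library.
# Assumes jc is installed and an `uptime` binary is available.
import subprocess
import jc

cmd_output = subprocess.check_output(['uptime'], text=True)
data = jc.parse('uptime', cmd_output)

print(type(data))           # a plain python dict for this parser
print(data.get('load_1m'))  # field name assumed from the uptime parser docs
```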
@@ -133,9 +133,9 @@ on Github.
`jc` accepts piped input from `STDIN` and outputs a JSON representation of the
previous command's output to `STDOUT`.
```bash
COMMAND | jc [SLICE] [OPTIONS] PARSER
cat FILE | jc [SLICE] [OPTIONS] PARSER
echo STRING | jc [SLICE] [OPTIONS] PARSER
```

Alternatively, the "magic" syntax can be used by prepending `jc` to the command
@@ -143,8 +143,8 @@ to be converted or in front of the absolute path for Proc files. Options can be
passed to `jc` immediately before the command or Proc file path is given.
(Note: command aliases and shell builtins are not supported)
```bash
jc [SLICE] [OPTIONS] COMMAND
jc [SLICE] [OPTIONS] /proc/<path-to-procfile>
```

The JSON output can be compact (default) or pretty formatted with the `-p`
@@ -154,143 +154,147 @@ option.

| Argument | Command or Filetype | Documentation |
|-------------------|---------------------------------------------------------|----------------------------------------------------------------------------|
| `--acpi` | `acpi` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/acpi) |
| `--airport` | `airport -I` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/airport) |
| `--airport-s` | `airport -s` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/airport_s) |
| `--arp` | `arp` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/arp) |
| `--asciitable` | ASCII and Unicode table parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/asciitable) |
| `--asciitable-m` | multi-line ASCII and Unicode table parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/asciitable_m) |
| `--blkid` | `blkid` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/blkid) |
| `--cbt` | `cbt` (Google Bigtable) command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/cbt) |
| `--cef` | CEF string parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/cef) |
| `--cef-s` | CEF string streaming parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/cef_s) |
| `--chage` | `chage --list` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/chage) |
| `--cksum` | `cksum` and `sum` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/cksum) |
| `--clf` | Common and Combined Log Format file parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/clf) |
| `--clf-s` | Common and Combined Log Format file streaming parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/clf_s) |
| `--crontab` | `crontab` command and file parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/crontab) |
| `--crontab-u` | `crontab` file parser with user support | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/crontab_u) |
| `--csv` | CSV file parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/csv) |
| `--csv-s` | CSV file streaming parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/csv_s) |
| `--date` | `date` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/date) |
| `--datetime-iso` | ISO 8601 Datetime string parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/datetime_iso) |
| `--df` | `df` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/df) |
| `--dig` | `dig` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/dig) |
| `--dir` | `dir` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/dir) |
| `--dmidecode` | `dmidecode` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/dmidecode) |
| `--dpkg-l` | `dpkg -l` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/dpkg_l) |
| `--du` | `du` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/du) |
| `--email-address` | Email Address string parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/email_address) |
| `--env` | `env` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/env) |
| `--file` | `file` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/file) |
| `--findmnt` | `findmnt` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/findmnt) |
| `--finger` | `finger` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/finger) |
| `--free` | `free` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/free) |
| `--fstab` | `/etc/fstab` file parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/fstab) |
| `--git-log` | `git log` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/git_log) |
| `--git-log-s` | `git log` command streaming parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/git_log_s) |
| `--git-ls-remote` | `git ls-remote` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/git_ls_remote) |
| `--gpg` | `gpg --with-colons` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/gpg) |
| `--group` | `/etc/group` file parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/group) |
| `--gshadow` | `/etc/gshadow` file parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/gshadow) |
| `--hash` | `hash` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/hash) |
| `--hashsum` | hashsum command parser (`md5sum`, `shasum`, etc.) | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/hashsum) |
| `--hciconfig` | `hciconfig` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/hciconfig) |
| `--history` | `history` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/history) |
| `--hosts` | `/etc/hosts` file parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/hosts) |
| `--id` | `id` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/id) |
| `--ifconfig` | `ifconfig` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/ifconfig) |
| `--ini` | INI file parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/ini) |
| `--ini-dup` | INI with duplicate key file parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/ini_dup) |
| `--iostat` | `iostat` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/iostat) |
| `--iostat-s` | `iostat` command streaming parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/iostat_s) |
| `--ip-address` | IPv4 and IPv6 Address string parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/ip_address) |
| `--iptables` | `iptables` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/iptables) |
| `--iw-scan` | `iw dev [device] scan` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/iw_scan) |
| `--iwconfig` | `iwconfig` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/iwconfig) |
| `--jar-manifest` | Java MANIFEST.MF file parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/jar_manifest) |
| `--jobs` | `jobs` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/jobs) |
| `--jwt` | JWT string parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/jwt) |
| `--kv` | Key/Value file and string parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/kv) |
| `--last` | `last` and `lastb` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/last) |
| `--ls` | `ls` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/ls) |
| `--ls-s` | `ls` command streaming parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/ls_s) |
| `--lsblk` | `lsblk` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/lsblk) |
| `--lsmod` | `lsmod` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/lsmod) |
| `--lsof` | `lsof` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/lsof) |
| `--lspci` | `lspci -mmv` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/lspci) |
| `--lsusb` | `lsusb` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/lsusb) |
| `--m3u` | M3U and M3U8 file parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/m3u) |
| `--mdadm` | `mdadm` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/mdadm) |
| `--mount` | `mount` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/mount) |
| `--mpstat` | `mpstat` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/mpstat) |
| `--mpstat-s` | `mpstat` command streaming parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/mpstat_s) |
| `--netstat` | `netstat` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/netstat) |
| `--nmcli` | `nmcli` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/nmcli) |
| `--ntpq` | `ntpq -p` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/ntpq) |
| `--openvpn` | openvpn-status.log file parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/openvpn) |
| `--os-prober` | `os-prober` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/os_prober) |
| `--passwd` | `/etc/passwd` file parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/passwd) |
| `--pci-ids` | `pci.ids` file parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/pci_ids) |
| `--pgpass` | PostgreSQL password file parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/pgpass) |
| `--pidstat` | `pidstat -H` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/pidstat) |
| `--pidstat-s` | `pidstat -H` command streaming parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/pidstat_s) |
| `--ping` | `ping` and `ping6` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/ping) |
| `--ping-s` | `ping` and `ping6` command streaming parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/ping_s) |
| `--pip-list` | `pip list` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/pip_list) |
| `--pip-show` | `pip show` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/pip_show) |
| `--plist` | PLIST file parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/plist) |
| `--postconf` | `postconf -M` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/postconf) |
| `--proc` | `/proc/` file parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/proc) |
| `--ps` | `ps` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/ps) |
| `--route` | `route` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/route) |
| `--rpm-qi` | `rpm -qi` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/rpm_qi) |
| `--rsync` | `rsync` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/rsync) |
| `--rsync-s` | `rsync` command streaming parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/rsync_s) |
| `--semver` | Semantic Version string parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/semver) |
| `--sfdisk` | `sfdisk` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/sfdisk) |
| `--shadow` | `/etc/shadow` file parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/shadow) |
| `--ss` | `ss` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/ss) |
| `--ssh-conf` | `ssh` config file and `ssh -G` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/ssh_conf) |
| `--sshd-conf` | `sshd` config file and `sshd -T` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/sshd_conf) |
| `--stat` | `stat` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/stat) |
| `--stat-s` | `stat` command streaming parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/stat_s) |
| `--sysctl` | `sysctl` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/sysctl) |
| `--syslog` | Syslog RFC 5424 string parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/syslog) |
| `--syslog-s` | Syslog RFC 5424 string streaming parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/syslog_s) |
| `--syslog-bsd` | Syslog RFC 3164 string parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/syslog_bsd) |
| `--syslog-bsd-s` | Syslog RFC 3164 string streaming parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/syslog_bsd_s) |
| `--systemctl` | `systemctl` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/systemctl) |
| `--systemctl-lj` | `systemctl list-jobs` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/systemctl_lj) |
| `--systemctl-ls` | `systemctl list-sockets` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/systemctl_ls) |
| `--systemctl-luf` | `systemctl list-unit-files` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/systemctl_luf) |
| `--systeminfo` | `systeminfo` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/systeminfo) |
| `--time` | `/usr/bin/time` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/time) |
| `--timedatectl` | `timedatectl status` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/timedatectl) |
| `--timestamp` | Unix Epoch Timestamp string parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/timestamp) |
| `--toml` | TOML file parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/toml) |
| `--top` | `top -b` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/top) |
| `--top-s` | `top -b` command streaming parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/top_s) |
| `--tracepath` | `tracepath` and `tracepath6` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/tracepath) |
| `--traceroute` | `traceroute` and `traceroute6` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/traceroute) |
| `--udevadm` | `udevadm info` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/udevadm) |
| `--ufw` | `ufw status` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/ufw) |
| `--ufw-appinfo` | `ufw app info [application]` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/ufw_appinfo) |
| `--uname` | `uname -a` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/uname) |
| `--update-alt-gs` | `update-alternatives --get-selections` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/update_alt_gs) |
| `--update-alt-q` | `update-alternatives --query` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/update_alt_q) |
| `--upower` | `upower` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/upower) |
| `--uptime` | `uptime` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/uptime) |
| `--url` | URL string parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/url) |
| `--ver` | Version string parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/ver) |
| `--vmstat` | `vmstat` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/vmstat) |
| `--vmstat-s` | `vmstat` command streaming parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/vmstat_s) |
| `--w` | `w` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/w) |
| `--wc` | `wc` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/wc) |
| `--who` | `who` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/who) |
| `--x509-cert` | X.509 PEM and DER certificate file parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/x509_cert) |
| `--xml` | XML file parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/xml) |
| `--xrandr` | `xrandr` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/xrandr) |
| `--yaml` | YAML file parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/yaml) |
| `--zipinfo` | `zipinfo` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/zipinfo) |
| `--zpool-iostat` | `zpool iostat` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/zpool_iostat) |
| `--zpool-status` | `zpool status` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/zpool_status) |

### Options

@@ -311,6 +315,54 @@ option.
| `-B` | `--bash-comp` | Generate Bash shell completion script ([more info](https://github.com/kellyjonbrazil/jc/wiki/Shell-Completions)) |
| `-Z` | `--zsh-comp` | Generate Zsh shell completion script ([more info](https://github.com/kellyjonbrazil/jc/wiki/Shell-Completions)) |

### Slice
Line slicing is supported using the `START:STOP` syntax similar to Python
slicing. This allows you to skip lines at the beginning and/or end of the
`STDIN` input you would like `jc` to convert.

`START` and `STOP` can be positive or negative integers or blank and allow
you to specify how many lines to skip and how many lines to process.
Positive and blank slices are the most memory efficient. Any negative
integers in the slice will use more memory.

For example, to skip the first and last line of the following text, you
could express the slice in a couple ways:

```bash
$ cat table.txt
### We want to skip this header ###
col1 col2
foo 1
bar 2
### We want to skip this footer ###
$ cat table.txt | jc 1:-1 --asciitable
[{"col1":"foo","col2":"1"},{"col1":"bar","col2":"2"}]
$ cat table.txt | jc 1:4 --asciitable
[{"col1":"foo","col2":"1"},{"col1":"bar","col2":"2"}]
```
In this example `1:-1` and `1:4` line slices provide the same output.

When using positive integers the index location of `STOP` is non-inclusive.
Positive slices count from the first line of the input toward the end
starting at `0` as the first line. Negative slices count from the last line
toward the beginning starting at `-1` as the last line. This is also the way
[Python's slicing](https://stackoverflow.com/questions/509211/understanding-slicing)
feature works.

Here is a breakdown of line slice options:

| Slice Notation | Input Lines Processed |
|----------------|--------------------------------------------------------------|
| `START:STOP` | lines `START` through `STOP - 1` |
| `START:` | lines `START` through the rest of the output |
| `:STOP` | lines from the beginning through `STOP - 1` |
| `-START:STOP` | `START` lines from the end through `STOP - 1` |
| `START:-STOP` | lines `START` through `STOP` lines from the end |
| `-START:-STOP` | `START` lines from the end through `STOP` lines from the end |
| `-START:` | `START` lines from the end through the rest of the output |
| `:-STOP` | lines from the beginning through `STOP` lines from the end |
| `:` | all lines |
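To make the memory note above concrete, here is a rough Python sketch (not jc's actual implementation) of why positive or blank slices can stream lazily while any negative index forces the input to be buffered:

```python
from itertools import islice

def slice_lines(lines, start, stop):
    """Rough sketch of START:STOP line slicing; not jc's real code."""
    if (start is None or start >= 0) and (stop is None or stop >= 0):
        # positive or blank indexes: lazily skip/take, nothing is buffered
        return islice(lines, start or 0, stop)
    # any negative index: the whole input must be held in memory first
    buffered = list(lines)
    return iter(buffered[slice(start, stop)])

lines = iter(['### header ###', 'col1 col2', 'foo 1', 'bar 2', '### footer ###'])
print(list(slice_lines(lines, 1, -1)))  # ['col1 col2', 'foo 1', 'bar 2']
```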
### Exit Codes
Any fatal errors within `jc` will generate an exit code of `100`, otherwise the
exit code will be `0`.
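For example, a hedged sketch of checking that exit code from Python (the `uptime | jc --uptime` pipeline is just an illustration):

```python
import subprocess

# jc exits with 100 on fatal errors and 0 otherwise (per the README text above)
proc = subprocess.run('uptime | jc --uptime', shell=True,
                      capture_output=True, text=True)
if proc.returncode == 100:
    print('jc reported a fatal error:', proc.stderr)
else:
    print(proc.stdout)
```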
@@ -3,8 +3,8 @@ _jc()
local cur prev words cword jc_commands jc_parsers jc_options \
jc_about_options jc_about_mod_options jc_help_options jc_special_options

jc_commands=(acpi airport arp blkid cbt chage cksum crontab date df dig dmidecode dpkg du env file findmnt finger free git gpg hciconfig id ifconfig iostat iptables iw iwconfig jobs last lastb ls lsblk lsmod lsof lspci lsusb md5 md5sum mdadm mount mpstat netstat nmcli ntpq os-prober pidstat ping ping6 pip pip3 postconf printenv ps route rpm rsync sfdisk sha1sum sha224sum sha256sum sha384sum sha512sum shasum ss ssh sshd stat sum sysctl systemctl systeminfo timedatectl top tracepath tracepath6 traceroute traceroute6 udevadm ufw uname update-alternatives upower uptime vdir vmstat w wc who xrandr zipinfo zpool)
jc_parsers=(--acpi --airport --airport-s --arp --asciitable --asciitable-m --blkid --cbt --cef --cef-s --chage --cksum --clf --clf-s --crontab --crontab-u --csv --csv-s --date --datetime-iso --df --dig --dir --dmidecode --dpkg-l --du --email-address --env --file --findmnt --finger --free --fstab --git-log --git-log-s --git-ls-remote --gpg --group --gshadow --hash --hashsum --hciconfig --history --hosts --id --ifconfig --ini --ini-dup --iostat --iostat-s --ip-address --iptables --iw-scan --iwconfig --jar-manifest --jobs --jwt --kv --last --ls --ls-s --lsblk --lsmod --lsof --lspci --lsusb --m3u --mdadm --mount --mpstat --mpstat-s --netstat --nmcli --ntpq --openvpn --os-prober --passwd --pci-ids --pgpass --pidstat --pidstat-s --ping --ping-s --pip-list --pip-show --plist --postconf --proc --proc-buddyinfo --proc-consoles --proc-cpuinfo --proc-crypto --proc-devices --proc-diskstats --proc-filesystems --proc-interrupts --proc-iomem --proc-ioports --proc-loadavg --proc-locks --proc-meminfo --proc-modules --proc-mtrr --proc-pagetypeinfo --proc-partitions --proc-slabinfo --proc-softirqs --proc-stat --proc-swaps --proc-uptime --proc-version --proc-vmallocinfo --proc-vmstat --proc-zoneinfo --proc-driver-rtc --proc-net-arp --proc-net-dev --proc-net-dev-mcast --proc-net-if-inet6 --proc-net-igmp --proc-net-igmp6 --proc-net-ipv6-route --proc-net-netlink --proc-net-netstat --proc-net-packet --proc-net-protocols --proc-net-route --proc-net-unix --proc-pid-fdinfo --proc-pid-io --proc-pid-maps --proc-pid-mountinfo --proc-pid-numa-maps --proc-pid-smaps --proc-pid-stat --proc-pid-statm --proc-pid-status --ps --route --rpm-qi --rsync --rsync-s --semver --sfdisk --shadow --ss --ssh-conf --sshd-conf --stat --stat-s --sysctl --syslog --syslog-s --syslog-bsd --syslog-bsd-s --systemctl --systemctl-lj --systemctl-ls --systemctl-luf --systeminfo --time --timedatectl --timestamp --toml --top --top-s --tracepath --traceroute --udevadm --ufw --ufw-appinfo --uname --update-alt-gs --update-alt-q --upower --uptime --url --ver --vmstat --vmstat-s --w --wc --who --x509-cert --xml --xrandr --yaml --zipinfo --zpool-iostat --zpool-status)
jc_options=(--force-color -C --debug -d --monochrome -m --meta-out -M --pretty -p --quiet -q --raw -r --unbuffer -u --yaml-out -y)
jc_about_options=(--about -a)
jc_about_mod_options=(--pretty -p --yaml-out -y --monochrome -m --force-color -C)

@@ -9,7 +9,7 @@ _jc() {
jc_help_options jc_help_options_describe \
jc_special_options jc_special_options_describe

jc_commands=(acpi airport arp blkid cbt chage cksum crontab date df dig dmidecode dpkg du env file findmnt finger free git gpg hciconfig id ifconfig iostat iptables iw iwconfig jobs last lastb ls lsblk lsmod lsof lspci lsusb md5 md5sum mdadm mount mpstat netstat nmcli ntpq os-prober pidstat ping ping6 pip pip3 postconf printenv ps route rpm rsync sfdisk sha1sum sha224sum sha256sum sha384sum sha512sum shasum ss ssh sshd stat sum sysctl systemctl systeminfo timedatectl top tracepath tracepath6 traceroute traceroute6 udevadm ufw uname update-alternatives upower uptime vdir vmstat w wc who xrandr zipinfo zpool)
jc_commands_describe=(
'acpi:run "acpi" command with magic syntax.'
'airport:run "airport" command with magic syntax.'
@@ -76,6 +76,7 @@ _jc() {
'sha512sum:run "sha512sum" command with magic syntax.'
'shasum:run "shasum" command with magic syntax.'
'ss:run "ss" command with magic syntax.'
'ssh:run "ssh" command with magic syntax.'
'sshd:run "sshd" command with magic syntax.'
'stat:run "stat" command with magic syntax.'
'sum:run "sum" command with magic syntax.'
@@ -101,8 +102,9 @@ _jc() {
'who:run "who" command with magic syntax.'
'xrandr:run "xrandr" command with magic syntax.'
'zipinfo:run "zipinfo" command with magic syntax.'
'zpool:run "zpool" command with magic syntax.'
)
jc_parsers=(--acpi --airport --airport-s --arp --asciitable --asciitable-m --blkid --cbt --cef --cef-s --chage --cksum --clf --clf-s --crontab --crontab-u --csv --csv-s --date --datetime-iso --df --dig --dir --dmidecode --dpkg-l --du --email-address --env --file --findmnt --finger --free --fstab --git-log --git-log-s --git-ls-remote --gpg --group --gshadow --hash --hashsum --hciconfig --history --hosts --id --ifconfig --ini --ini-dup --iostat --iostat-s --ip-address --iptables --iw-scan --iwconfig --jar-manifest --jobs --jwt --kv --last --ls --ls-s --lsblk --lsmod --lsof --lspci --lsusb --m3u --mdadm --mount --mpstat --mpstat-s --netstat --nmcli --ntpq --openvpn --os-prober --passwd --pci-ids --pgpass --pidstat --pidstat-s --ping --ping-s --pip-list --pip-show --plist --postconf --proc --proc-buddyinfo --proc-consoles --proc-cpuinfo --proc-crypto --proc-devices --proc-diskstats --proc-filesystems --proc-interrupts --proc-iomem --proc-ioports --proc-loadavg --proc-locks --proc-meminfo --proc-modules --proc-mtrr --proc-pagetypeinfo --proc-partitions --proc-slabinfo --proc-softirqs --proc-stat --proc-swaps --proc-uptime --proc-version --proc-vmallocinfo --proc-vmstat --proc-zoneinfo --proc-driver-rtc --proc-net-arp --proc-net-dev --proc-net-dev-mcast --proc-net-if-inet6 --proc-net-igmp --proc-net-igmp6 --proc-net-ipv6-route --proc-net-netlink --proc-net-netstat --proc-net-packet --proc-net-protocols --proc-net-route --proc-net-unix --proc-pid-fdinfo --proc-pid-io --proc-pid-maps --proc-pid-mountinfo --proc-pid-numa-maps --proc-pid-smaps --proc-pid-stat --proc-pid-statm --proc-pid-status --ps --route --rpm-qi --rsync --rsync-s --semver --sfdisk --shadow --ss --ssh-conf --sshd-conf --stat --stat-s --sysctl --syslog --syslog-s --syslog-bsd --syslog-bsd-s --systemctl --systemctl-lj --systemctl-ls --systemctl-luf --systeminfo --time --timedatectl --timestamp --toml --top --top-s --tracepath --traceroute --udevadm --ufw --ufw-appinfo --uname --update-alt-gs --update-alt-q --upower --uptime --url --ver --vmstat --vmstat-s --w --wc --who --x509-cert --xml --xrandr --yaml --zipinfo --zpool-iostat --zpool-status)
jc_parsers_describe=(
'--acpi:`acpi` command parser'
'--airport:`airport -I` command parser'
@@ -250,7 +252,8 @@ _jc() {
'--sfdisk:`sfdisk` command parser'
'--shadow:`/etc/shadow` file parser'
'--ss:`ss` command parser'
'--ssh-conf:`ssh` config file and `ssh -G` command parser'
'--sshd-conf:`sshd` config file and `sshd -T` command parser'
'--stat:`stat` command parser'
'--stat-s:`stat` command streaming parser'
'--sysctl:`sysctl` command parser'
@@ -280,6 +283,7 @@ _jc() {
'--upower:`upower` command parser'
'--uptime:`uptime` command parser'
'--url:URL string parser'
'--ver:Version string parser'
'--vmstat:`vmstat` command parser'
'--vmstat-s:`vmstat` command streaming parser'
|
||||||
'--w:`w` command parser'
|
'--w:`w` command parser'
|
||||||
@ -290,6 +294,8 @@ _jc() {
|
|||||||
'--xrandr:`xrandr` command parser'
|
'--xrandr:`xrandr` command parser'
|
||||||
'--yaml:YAML file parser'
|
'--yaml:YAML file parser'
|
||||||
'--zipinfo:`zipinfo` command parser'
|
'--zipinfo:`zipinfo` command parser'
|
||||||
|
'--zpool-iostat:`zpool iostat` command parser'
|
||||||
|
'--zpool-status:`zpool status` command parser'
|
||||||
)
|
)
|
||||||
jc_options=(--force-color -C --debug -d --monochrome -m --meta-out -M --pretty -p --quiet -q --raw -r --unbuffer -u --yaml-out -y)
|
jc_options=(--force-color -C --debug -d --monochrome -m --meta-out -M --pretty -p --quiet -q --raw -r --unbuffer -u --yaml-out -y)
|
||||||
jc_options_describe=(
|
jc_options_describe=(
|
||||||
|
@ -26,7 +26,7 @@ def parse(
|
|||||||
data: Union[str, bytes, Iterable[str]],
|
data: Union[str, bytes, Iterable[str]],
|
||||||
quiet: bool = False,
|
quiet: bool = False,
|
||||||
raw: bool = False,
|
raw: bool = False,
|
||||||
ignore_exceptions: bool = None,
|
ignore_exceptions: Optional[bool] = None,
|
||||||
**kwargs
|
**kwargs
|
||||||
) -> Union[JSONDictType, List[JSONDictType], Iterator[JSONDictType]]
|
) -> Union[JSONDictType, List[JSONDictType], Iterator[JSONDictType]]
|
||||||
```
|
```
|
||||||
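For orientation, here is a minimal sketch of calling this updated signature with a streaming parser (illustrative only: the `ping.out` capture file is hypothetical and `ping_s` is just one example of a streaming parser; the lazy-iterator behavior follows from the `Iterator[JSONDictType]` return type above):

```python
import jc

# "ping.out" is a hypothetical capture of `ping` command output.
# A file object satisfies the Iterable[str] form of the `data` parameter, and a
# streaming parser such as "ping_s" yields dictionaries lazily, one per line.
with open('ping.out') as f:
    for item in jc.parse('ping_s', f, quiet=True, ignore_exceptions=True):
        print(item)
```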
|
@ -250,4 +250,4 @@ Returns:
|
|||||||
### Parser Information
|
### Parser Information
|
||||||
Compatibility: linux
|
Compatibility: linux
|
||||||
|
|
||||||
Version 1.4 by Kelly Brazil (kellyjonbrazil@gmail.com)
|
Version 1.5 by Kelly Brazil (kellyjonbrazil@gmail.com)
|
||||||
|
@ -196,4 +196,4 @@ Returns:
|
|||||||
### Parser Information
|
### Parser Information
|
||||||
Compatibility: linux, darwin, aix, freebsd
|
Compatibility: linux, darwin, aix, freebsd
|
||||||
|
|
||||||
Version 1.6 by Kelly Brazil (kellyjonbrazil@gmail.com)
|
Version 1.7 by Kelly Brazil (kellyjonbrazil@gmail.com)
|
||||||
|
@ -193,4 +193,4 @@ Returns:
|
|||||||
### Parser Information
|
### Parser Information
|
||||||
Compatibility: linux, darwin, aix, freebsd
|
Compatibility: linux, darwin, aix, freebsd
|
||||||
|
|
||||||
Version 1.7 by Kelly Brazil (kellyjonbrazil@gmail.com)
|
Version 1.8 by Kelly Brazil (kellyjonbrazil@gmail.com)
|
||||||
|
@ -240,4 +240,4 @@ Returns:
|
|||||||
### Parser Information
|
### Parser Information
|
||||||
Compatibility: linux, aix, freebsd, darwin
|
Compatibility: linux, aix, freebsd, darwin
|
||||||
|
|
||||||
Version 2.2 by Kelly Brazil (kellyjonbrazil@gmail.com)
|
Version 2.3 by Kelly Brazil (kellyjonbrazil@gmail.com)
|
||||||
|
548
docs/parsers/ssh_conf.md
Normal file

@ -0,0 +1,548 @@
|
|||||||
|
[Home](https://kellyjonbrazil.github.io/jc/)
|
||||||
|
<a id="jc.parsers.ssh_conf"></a>
|
||||||
|
|
||||||
|
# jc.parsers.ssh\_conf
|
||||||
|
|
||||||
|
jc - JSON Convert `ssh` configuration file and `ssh -G` command output parser
|
||||||
|
|
||||||
|
This parser will work with `ssh` configuration files or the output of
|
||||||
|
`ssh -G`. Any `Match` blocks in the `ssh` configuration file will be
|
||||||
|
ignored.
|
||||||
|
|
||||||
|
Usage (cli):
|
||||||
|
|
||||||
|
$ ssh -G hostname | jc --ssh-conf
|
||||||
|
|
||||||
|
or
|
||||||
|
|
||||||
|
$ jc ssh -G hostname
|
||||||
|
|
||||||
|
or
|
||||||
|
|
||||||
|
$ cat ~/.ssh/config | jc --ssh-conf
|
||||||
|
|
||||||
|
Usage (module):
|
||||||
|
|
||||||
|
import jc
|
||||||
|
result = jc.parse('ssh_conf', ssh_conf_output)
|
||||||
|
|
||||||
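As a quick end-to-end illustration (hedged sketch: the `~/.ssh/config` path is only an example input; the key names used below come from the schema and examples that follow):

```python
import os
import jc

# Parse a local ssh client configuration into a list of dictionaries,
# one per Host block (Match blocks are ignored by this parser, per the note above).
with open(os.path.expanduser('~/.ssh/config')) as f:
    hosts = jc.parse('ssh_conf', f.read())

for entry in hosts:
    print(entry.get('host'), entry.get('hostname'))
```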
|
Schema:
|
||||||
|
|
||||||
|
[
|
||||||
|
{
|
||||||
|
"host": string,
|
||||||
|
"host_list": [
|
||||||
|
string
|
||||||
|
],
|
||||||
|
"addkeystoagent": string,
|
||||||
|
"addressfamily": string,
|
||||||
|
"batchmode": string,
|
||||||
|
"bindaddress": string,
|
||||||
|
"bindinterface": string,
|
||||||
|
"canonicaldomains": [
|
||||||
|
string
|
||||||
|
],
|
||||||
|
"canonicalizefallbacklocal": string,
|
||||||
|
"canonicalizehostname": string,
|
||||||
|
"canonicalizemaxdots": integer,
|
||||||
|
"canonicalizepermittedcnames": [
|
||||||
|
string
|
||||||
|
],
|
||||||
|
"casignaturealgorithms": [
|
||||||
|
string
|
||||||
|
],
|
||||||
|
"certificatefile": [
|
||||||
|
string
|
||||||
|
],
|
||||||
|
"checkhostip": string,
|
||||||
|
"ciphers": [
|
||||||
|
string
|
||||||
|
],
|
||||||
|
"clearallforwardings": string,
|
||||||
|
"compression": string,
|
||||||
|
"connectionattempts": integer,
|
||||||
|
"connecttimeout": integer,
|
||||||
|
"controlmaster": string,
|
||||||
|
"controlpath": string,
|
||||||
|
"controlpersist": string,
|
||||||
|
"dynamicforward": string,
|
||||||
|
"enableescapecommandline": string,
|
||||||
|
"enablesshkeysign": string,
|
||||||
|
"escapechar": string,
|
||||||
|
"exitonforwardfailure": string,
|
||||||
|
"fingerprinthash": string,
|
||||||
|
"forkafterauthentication": string,
|
||||||
|
"forwardagent": string,
|
||||||
|
"forwardx11": string,
|
||||||
|
"forwardx11timeout": integer,
|
||||||
|
"forwardx11trusted": string,
|
||||||
|
"gatewayports": string,
|
||||||
|
"globalknownhostsfile": [
|
||||||
|
string
|
||||||
|
],
|
||||||
|
"gssapiauthentication": string,
|
||||||
|
"gssapidelegatecredentials": string,
|
||||||
|
"hashknownhosts": string,
|
||||||
|
"hostbasedacceptedalgorithms": [
|
||||||
|
string
|
||||||
|
],
|
||||||
|
"hostbasedauthentication": string,
|
||||||
|
"hostkeyalgorithms": [
|
||||||
|
string
|
||||||
|
],
|
||||||
|
"hostkeyalias": string,
|
||||||
|
"hostname": string,
|
||||||
|
"identitiesonly": string,
|
||||||
|
"identityagent": string,
|
||||||
|
"identityfile": [
|
||||||
|
string
|
||||||
|
],
|
||||||
|
"ignoreunknown": string,
|
||||||
|
"include": [
|
||||||
|
string
|
||||||
|
],
|
||||||
|
"ipqos": [
|
||||||
|
string
|
||||||
|
],
|
||||||
|
"kbdinteractiveauthentication": string,
|
||||||
|
"kbdinteractivedevices": [
|
||||||
|
string
|
||||||
|
],
|
||||||
|
"kexalgorithms": [
|
||||||
|
string
|
||||||
|
],
|
||||||
|
"kexalgorithms_strategy": string,
|
||||||
|
"knownhostscommand": string,
|
||||||
|
"localcommand": string,
|
||||||
|
"localforward": [
|
||||||
|
string
|
||||||
|
],
|
||||||
|
"loglevel": string,
|
||||||
|
"logverbose": [
|
||||||
|
string
|
||||||
|
],
|
||||||
|
"macs": [
|
||||||
|
string
|
||||||
|
],
|
||||||
|
"macs_strategy": string,
|
||||||
|
"nohostauthenticationforlocalhost": string,
|
||||||
|
"numberofpasswordprompts": integer,
|
||||||
|
"passwordauthentication": string,
|
||||||
|
"permitlocalcommand": string,
|
||||||
|
"permitremoteopen": [
|
||||||
|
string
|
||||||
|
],
|
||||||
|
"pkcs11provider": string,
|
||||||
|
"port": integer,
|
||||||
|
"preferredauthentications": [
|
||||||
|
string
|
||||||
|
],
|
||||||
|
"protocol": integer,
|
||||||
|
"proxycommand": string,
|
||||||
|
"proxyjump": [
|
||||||
|
string
|
||||||
|
],
|
||||||
|
"proxyusefdpass": string,
|
||||||
|
"pubkeyacceptedalgorithms": [
|
||||||
|
string
|
||||||
|
],
|
||||||
|
"pubkeyacceptedalgorithms_strategy": string,
|
||||||
|
"pubkeyauthentication": string,
|
||||||
|
"rekeylimit": string,
|
||||||
|
"remotecommand": string,
|
||||||
|
"remoteforward": string,
|
||||||
|
"requesttty": string,
|
||||||
|
"requiredrsasize": integer,
|
||||||
|
"revokedhostkeys": string,
|
||||||
|
"securitykeyprovider": string,
|
||||||
|
"sendenv": [
|
||||||
|
string
|
||||||
|
],
|
||||||
|
"serveralivecountmax": integer,
|
||||||
|
"serveraliveinterval": integer,
|
||||||
|
"sessiontype": string,
|
||||||
|
"setenv": [
|
||||||
|
string
|
||||||
|
],
|
||||||
|
"stdinnull": string,
|
||||||
|
"streamlocalbindmask": string,
|
||||||
|
"streamlocalbindunlink": string,
|
||||||
|
"stricthostkeychecking": string,
|
||||||
|
"syslogfacility": string,
|
||||||
|
"tcpkeepalive": string,
|
||||||
|
"tunnel": string,
|
||||||
|
"tunneldevice": string,
|
||||||
|
"updatehostkeys": string,
|
||||||
|
"user": string,
|
||||||
|
"userknownhostsfile": [
|
||||||
|
string
|
||||||
|
],
|
||||||
|
"verifyhostkeydns": string,
|
||||||
|
"visualhostkey": string,
|
||||||
|
"xauthlocation": string
|
||||||
|
}
|
||||||
|
]
|
||||||
|
|
||||||
|
Examples:
|
||||||
|
|
||||||
|
$ ssh -G - | jc --ssh-conf -p
|
||||||
|
[
|
||||||
|
{
|
||||||
|
"user": "foo",
|
||||||
|
"hostname": "-",
|
||||||
|
"port": 22,
|
||||||
|
"addressfamily": "any",
|
||||||
|
"batchmode": "no",
|
||||||
|
"canonicalizefallbacklocal": "yes",
|
||||||
|
"canonicalizehostname": "false",
|
||||||
|
"checkhostip": "no",
|
||||||
|
"compression": "no",
|
||||||
|
"controlmaster": "false",
|
||||||
|
"enablesshkeysign": "no",
|
||||||
|
"clearallforwardings": "no",
|
||||||
|
"exitonforwardfailure": "no",
|
||||||
|
"fingerprinthash": "SHA256",
|
||||||
|
"forwardx11": "no",
|
||||||
|
"forwardx11trusted": "no",
|
||||||
|
"gatewayports": "no",
|
||||||
|
"gssapiauthentication": "no",
|
||||||
|
"gssapidelegatecredentials": "no",
|
||||||
|
"hashknownhosts": "no",
|
||||||
|
"hostbasedauthentication": "no",
|
||||||
|
"identitiesonly": "no",
|
||||||
|
"kbdinteractiveauthentication": "yes",
|
||||||
|
"nohostauthenticationforlocalhost": "no",
|
||||||
|
"passwordauthentication": "yes",
|
||||||
|
"permitlocalcommand": "no",
|
||||||
|
"proxyusefdpass": "no",
|
||||||
|
"pubkeyauthentication": "true",
|
||||||
|
"requesttty": "auto",
|
||||||
|
"sessiontype": "default",
|
||||||
|
"stdinnull": "no",
|
||||||
|
"forkafterauthentication": "no",
|
||||||
|
"streamlocalbindunlink": "no",
|
||||||
|
"stricthostkeychecking": "ask",
|
||||||
|
"tcpkeepalive": "yes",
|
||||||
|
"tunnel": "false",
|
||||||
|
"verifyhostkeydns": "false",
|
||||||
|
"visualhostkey": "no",
|
||||||
|
"updatehostkeys": "true",
|
||||||
|
"applemultipath": "no",
|
||||||
|
"canonicalizemaxdots": 1,
|
||||||
|
"connectionattempts": 1,
|
||||||
|
"forwardx11timeout": 1200,
|
||||||
|
"numberofpasswordprompts": 3,
|
||||||
|
"serveralivecountmax": 3,
|
||||||
|
"serveraliveinterval": 0,
|
||||||
|
"ciphers": [
|
||||||
|
"chacha20-poly1305@openssh.com",
|
||||||
|
"aes128-ctr",
|
||||||
|
"aes192-ctr",
|
||||||
|
"aes256-ctr",
|
||||||
|
"aes128-gcm@openssh.com",
|
||||||
|
"aes256-gcm@openssh.com"
|
||||||
|
],
|
||||||
|
"hostkeyalgorithms": [
|
||||||
|
"ssh-ed25519-cert-v01@openssh.com",
|
||||||
|
"ecdsa-sha2-nistp256-cert-v01@openssh.com",
|
||||||
|
"ecdsa-sha2-nistp384-cert-v01@openssh.com",
|
||||||
|
"ecdsa-sha2-nistp521-cert-v01@openssh.com",
|
||||||
|
"rsa-sha2-512-cert-v01@openssh.com",
|
||||||
|
"rsa-sha2-256-cert-v01@openssh.com",
|
||||||
|
"ssh-ed25519",
|
||||||
|
"ecdsa-sha2-nistp256",
|
||||||
|
"ecdsa-sha2-nistp384",
|
||||||
|
"ecdsa-sha2-nistp521",
|
||||||
|
"rsa-sha2-512",
|
||||||
|
"rsa-sha2-256"
|
||||||
|
],
|
||||||
|
"hostbasedacceptedalgorithms": [
|
||||||
|
"ssh-ed25519-cert-v01@openssh.com",
|
||||||
|
"ecdsa-sha2-nistp256-cert-v01@openssh.com",
|
||||||
|
"ecdsa-sha2-nistp384-cert-v01@openssh.com",
|
||||||
|
"ecdsa-sha2-nistp521-cert-v01@openssh.com",
|
||||||
|
"rsa-sha2-512-cert-v01@openssh.com",
|
||||||
|
"rsa-sha2-256-cert-v01@openssh.com",
|
||||||
|
"ssh-ed25519",
|
||||||
|
"ecdsa-sha2-nistp256",
|
||||||
|
"ecdsa-sha2-nistp384",
|
||||||
|
"ecdsa-sha2-nistp521",
|
||||||
|
"rsa-sha2-512",
|
||||||
|
"rsa-sha2-256"
|
||||||
|
],
|
||||||
|
"kexalgorithms": [
|
||||||
|
"sntrup761x25519-sha512@openssh.com",
|
||||||
|
"curve25519-sha256",
|
||||||
|
"curve25519-sha256@libssh.org",
|
||||||
|
"ecdh-sha2-nistp256",
|
||||||
|
"ecdh-sha2-nistp384",
|
||||||
|
"ecdh-sha2-nistp521",
|
||||||
|
"diffie-hellman-group-exchange-sha256",
|
||||||
|
"diffie-hellman-group16-sha512",
|
||||||
|
"diffie-hellman-group18-sha512",
|
||||||
|
"diffie-hellman-group14-sha256"
|
||||||
|
],
|
||||||
|
"casignaturealgorithms": [
|
||||||
|
"ssh-ed25519",
|
||||||
|
"ecdsa-sha2-nistp256",
|
||||||
|
"ecdsa-sha2-nistp384",
|
||||||
|
"ecdsa-sha2-nistp521",
|
||||||
|
"rsa-sha2-512",
|
||||||
|
"rsa-sha2-256"
|
||||||
|
],
|
||||||
|
"loglevel": "INFO",
|
||||||
|
"macs": [
|
||||||
|
"umac-64-etm@openssh.com",
|
||||||
|
"umac-128-etm@openssh.com",
|
||||||
|
"hmac-sha2-256-etm@openssh.com",
|
||||||
|
"hmac-sha2-512-etm@openssh.com",
|
||||||
|
"hmac-sha1-etm@openssh.com",
|
||||||
|
"umac-64@openssh.com",
|
||||||
|
"umac-128@openssh.com",
|
||||||
|
"hmac-sha2-256",
|
||||||
|
"hmac-sha2-512",
|
||||||
|
"hmac-sha1"
|
||||||
|
],
|
||||||
|
"securitykeyprovider": "$SSH_SK_PROVIDER",
|
||||||
|
"pubkeyacceptedalgorithms": [
|
||||||
|
"ssh-ed25519-cert-v01@openssh.com",
|
||||||
|
"ecdsa-sha2-nistp256-cert-v01@openssh.com",
|
||||||
|
"ecdsa-sha2-nistp384-cert-v01@openssh.com",
|
||||||
|
"ecdsa-sha2-nistp521-cert-v01@openssh.com",
|
||||||
|
"rsa-sha2-512-cert-v01@openssh.com",
|
||||||
|
"rsa-sha2-256-cert-v01@openssh.com",
|
||||||
|
"ssh-ed25519",
|
||||||
|
"ecdsa-sha2-nistp256",
|
||||||
|
"ecdsa-sha2-nistp384",
|
||||||
|
"ecdsa-sha2-nistp521",
|
||||||
|
"rsa-sha2-512",
|
||||||
|
"rsa-sha2-256"
|
||||||
|
],
|
||||||
|
"xauthlocation": "/usr/X11R6/bin/xauth",
|
||||||
|
"identityfile": [
|
||||||
|
"~/.ssh/id_rsa",
|
||||||
|
"~/.ssh/id_ecdsa",
|
||||||
|
"~/.ssh/id_ecdsa_sk",
|
||||||
|
"~/.ssh/id_ed25519",
|
||||||
|
"~/.ssh/id_ed25519_sk",
|
||||||
|
"~/.ssh/id_xmss",
|
||||||
|
"~/.ssh/id_dsa"
|
||||||
|
],
|
||||||
|
"canonicaldomains": [
|
||||||
|
"none"
|
||||||
|
],
|
||||||
|
"globalknownhostsfile": [
|
||||||
|
"/etc/ssh/ssh_known_hosts",
|
||||||
|
"/etc/ssh/ssh_known_hosts2"
|
||||||
|
],
|
||||||
|
"userknownhostsfile": [
|
||||||
|
"/Users/foo/.ssh/known_hosts",
|
||||||
|
"/Users/foo/.ssh/known_hosts2"
|
||||||
|
],
|
||||||
|
"sendenv": [
|
||||||
|
"LANG",
|
||||||
|
"LC_*"
|
||||||
|
],
|
||||||
|
"logverbose": [
|
||||||
|
"none"
|
||||||
|
],
|
||||||
|
"permitremoteopen": [
|
||||||
|
"any"
|
||||||
|
],
|
||||||
|
"addkeystoagent": "false",
|
||||||
|
"forwardagent": "no",
|
||||||
|
"connecttimeout": null,
|
||||||
|
"tunneldevice": "any:any",
|
||||||
|
"canonicalizepermittedcnames": [
|
||||||
|
"none"
|
||||||
|
],
|
||||||
|
"controlpersist": "no",
|
||||||
|
"escapechar": "~",
|
||||||
|
"ipqos": [
|
||||||
|
"af21",
|
||||||
|
"cs1"
|
||||||
|
],
|
||||||
|
"rekeylimit": "0 0",
|
||||||
|
"streamlocalbindmask": "0177",
|
||||||
|
"syslogfacility": "USER"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
|
||||||
|
$ cat ~/.ssh/config | jc --ssh-conf -p
|
||||||
|
[
|
||||||
|
{
|
||||||
|
"host": "server1",
|
||||||
|
"host_list": [
|
||||||
|
"server1"
|
||||||
|
],
|
||||||
|
"hostname": "server1.cyberciti.biz",
|
||||||
|
"user": "nixcraft",
|
||||||
|
"port": 4242,
|
||||||
|
"identityfile": [
|
||||||
|
"/nfs/shared/users/nixcraft/keys/server1/id_rsa"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"host": "nas01",
|
||||||
|
"host_list": [
|
||||||
|
"nas01"
|
||||||
|
],
|
||||||
|
"hostname": "192.168.1.100",
|
||||||
|
"user": "root",
|
||||||
|
"identityfile": [
|
||||||
|
"~/.ssh/nas01.key"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"host": "aws.apache",
|
||||||
|
"host_list": [
|
||||||
|
"aws.apache"
|
||||||
|
],
|
||||||
|
"hostname": "1.2.3.4",
|
||||||
|
"user": "wwwdata",
|
||||||
|
"identityfile": [
|
||||||
|
"~/.ssh/aws.apache.key"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"host": "uk.gw.lan uk.lan",
|
||||||
|
"host_list": [
|
||||||
|
"uk.gw.lan",
|
||||||
|
"uk.lan"
|
||||||
|
],
|
||||||
|
"hostname": "192.168.0.251",
|
||||||
|
"user": "nixcraft",
|
||||||
|
"proxycommand": "ssh nixcraft@gateway.uk.cyberciti.biz nc %h %p 2> /dev/null"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"host": "proxyus",
|
||||||
|
"host_list": [
|
||||||
|
"proxyus"
|
||||||
|
],
|
||||||
|
"hostname": "vps1.cyberciti.biz",
|
||||||
|
"user": "breakfree",
|
||||||
|
"identityfile": [
|
||||||
|
"~/.ssh/vps1.cyberciti.biz.key"
|
||||||
|
],
|
||||||
|
"localforward": [
|
||||||
|
"3128 127.0.0.1:3128"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"host": "*",
|
||||||
|
"host_list": [
|
||||||
|
"*"
|
||||||
|
],
|
||||||
|
"forwardagent": "no",
|
||||||
|
"forwardx11": "no",
|
||||||
|
"forwardx11trusted": "yes",
|
||||||
|
"user": "nixcraft",
|
||||||
|
"port": 22,
|
||||||
|
"protocol": 2,
|
||||||
|
"serveraliveinterval": 60,
|
||||||
|
"serveralivecountmax": 30
|
||||||
|
}
|
||||||
|
]
|
||||||
|
|
||||||
|
$ cat ~/.ssh/config | jc --ssh-conf -p -r
|
||||||
|
[
|
||||||
|
{
|
||||||
|
"host": "server1",
|
||||||
|
"host_list": [
|
||||||
|
"server1"
|
||||||
|
],
|
||||||
|
"hostname": "server1.cyberciti.biz",
|
||||||
|
"user": "nixcraft",
|
||||||
|
"port": "4242",
|
||||||
|
"identityfile": [
|
||||||
|
"/nfs/shared/users/nixcraft/keys/server1/id_rsa"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"host": "nas01",
|
||||||
|
"host_list": [
|
||||||
|
"nas01"
|
||||||
|
],
|
||||||
|
"hostname": "192.168.1.100",
|
||||||
|
"user": "root",
|
||||||
|
"identityfile": [
|
||||||
|
"~/.ssh/nas01.key"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"host": "aws.apache",
|
||||||
|
"host_list": [
|
||||||
|
"aws.apache"
|
||||||
|
],
|
||||||
|
"hostname": "1.2.3.4",
|
||||||
|
"user": "wwwdata",
|
||||||
|
"identityfile": [
|
||||||
|
"~/.ssh/aws.apache.key"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"host": "uk.gw.lan uk.lan",
|
||||||
|
"host_list": [
|
||||||
|
"uk.gw.lan",
|
||||||
|
"uk.lan"
|
||||||
|
],
|
||||||
|
"hostname": "192.168.0.251",
|
||||||
|
"user": "nixcraft",
|
||||||
|
"proxycommand": "ssh nixcraft@gateway.uk.cyberciti.biz nc %h %p 2> /dev/null"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"host": "proxyus",
|
||||||
|
"host_list": [
|
||||||
|
"proxyus"
|
||||||
|
],
|
||||||
|
"hostname": "vps1.cyberciti.biz",
|
||||||
|
"user": "breakfree",
|
||||||
|
"identityfile": [
|
||||||
|
"~/.ssh/vps1.cyberciti.biz.key"
|
||||||
|
],
|
||||||
|
"localforward": [
|
||||||
|
"3128 127.0.0.1:3128"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"host": "*",
|
||||||
|
"host_list": [
|
||||||
|
"*"
|
||||||
|
],
|
||||||
|
"forwardagent": "no",
|
||||||
|
"forwardx11": "no",
|
||||||
|
"forwardx11trusted": "yes",
|
||||||
|
"user": "nixcraft",
|
||||||
|
"port": "22",
|
||||||
|
"protocol": "2",
|
||||||
|
"serveraliveinterval": "60",
|
||||||
|
"serveralivecountmax": "30"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
|
||||||
|
<a id="jc.parsers.ssh_conf.parse"></a>
|
||||||
|
|
||||||
|
### parse
|
||||||
|
|
||||||
|
```python
|
||||||
|
def parse(data: str,
|
||||||
|
raw: bool = False,
|
||||||
|
quiet: bool = False) -> List[JSONDictType]
|
||||||
|
```
|
||||||
|
|
||||||
|
Main text parsing function
|
||||||
|
|
||||||
|
Parameters:
|
||||||
|
|
||||||
|
data: (string) text data to parse
|
||||||
|
raw: (boolean) unprocessed output if True
|
||||||
|
quiet: (boolean) suppress warning messages if True
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
|
||||||
|
List of Dictionaries. Raw or processed structured data.
|
||||||
|
|
||||||
|
### Parser Information
|
||||||
|
Compatibility: linux, darwin, freebsd
|
||||||
|
|
||||||
|
Version 1.0 by Kelly Brazil (kellyjonbrazil@gmail.com)
|
@ -3,7 +3,7 @@
|
|||||||
|
|
||||||
# jc.parsers.sshd\_conf
|
# jc.parsers.sshd\_conf
|
||||||
|
|
||||||
jc - JSON Convert sshd configuration file and `sshd -T` command output parser
|
jc - JSON Convert `sshd` configuration file and `sshd -T` command output parser
|
||||||
|
|
||||||
This parser will work with `sshd` configuration files or the output of
|
This parser will work with `sshd` configuration files or the output of
|
||||||
`sshd -T`. Any `Match` blocks in the `sshd` configuration file will be
|
`sshd -T`. Any `Match` blocks in the `sshd` configuration file will be
|
||||||
@ -504,4 +504,4 @@ Returns:
|
|||||||
### Parser Information
|
### Parser Information
|
||||||
Compatibility: linux, darwin, freebsd
|
Compatibility: linux, darwin, freebsd
|
||||||
|
|
||||||
Version 1.0 by Kelly Brazil (kellyjonbrazil@gmail.com)
|
Version 1.1 by Kelly Brazil (kellyjonbrazil@gmail.com)
|
||||||
|
@ -42,8 +42,7 @@ Parameters:
|
|||||||
underscore '_'. You should also ensure headers are
|
underscore '_'. You should also ensure headers are
|
||||||
lowercase by using .lower().
|
lowercase by using .lower().
|
||||||
|
|
||||||
Also, ensure there are no blank lines (list items)
|
Also, ensure there are no blank rows in the data.
|
||||||
in the data.
|
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
|
|
||||||
|
@ -8,6 +8,7 @@ jc - JSON Convert `xrandr` command output parser
|
|||||||
Usage (cli):
|
Usage (cli):
|
||||||
|
|
||||||
$ xrandr | jc --xrandr
|
$ xrandr | jc --xrandr
|
||||||
|
$ xrandr --properties | jc --xrandr
|
||||||
|
|
||||||
or
|
or
|
||||||
|
|
||||||
@ -49,13 +50,17 @@ Schema:
|
|||||||
"is_connected": boolean,
|
"is_connected": boolean,
|
||||||
"is_primary": boolean,
|
"is_primary": boolean,
|
||||||
"device_name": string,
|
"device_name": string,
|
||||||
|
"model_name": string,
|
||||||
|
"product_id" string,
|
||||||
|
"serial_number": string,
|
||||||
"resolution_width": integer,
|
"resolution_width": integer,
|
||||||
"resolution_height": integer,
|
"resolution_height": integer,
|
||||||
"offset_width": integer,
|
"offset_width": integer,
|
||||||
"offset_height": integer,
|
"offset_height": integer,
|
||||||
"dimension_width": integer,
|
"dimension_width": integer,
|
||||||
"dimension_height": integer,
|
"dimension_height": integer,
|
||||||
"rotation": string
|
"rotation": string,
|
||||||
|
"reflection": string
|
||||||
}
|
}
|
||||||
],
|
],
|
||||||
"unassociated_devices": [
|
"unassociated_devices": [
|
||||||
@ -132,7 +137,71 @@ Examples:
|
|||||||
"offset_height": 0,
|
"offset_height": 0,
|
||||||
"dimension_width": 310,
|
"dimension_width": 310,
|
||||||
"dimension_height": 170,
|
"dimension_height": 170,
|
||||||
"rotation": "normal"
|
"rotation": "normal",
|
||||||
|
"reflection": "normal"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"unassociated_devices": []
|
||||||
|
}
|
||||||
|
|
||||||
|
$ xrandr --properties | jc --xrandr -p
|
||||||
|
{
|
||||||
|
"screens": [
|
||||||
|
{
|
||||||
|
"screen_number": 0,
|
||||||
|
"minimum_width": 8,
|
||||||
|
"minimum_height": 8,
|
||||||
|
"current_width": 1920,
|
||||||
|
"current_height": 1080,
|
||||||
|
"maximum_width": 32767,
|
||||||
|
"maximum_height": 32767,
|
||||||
|
"associated_device": {
|
||||||
|
"associated_modes": [
|
||||||
|
{
|
||||||
|
"resolution_width": 1920,
|
||||||
|
"resolution_height": 1080,
|
||||||
|
"is_high_resolution": false,
|
||||||
|
"frequencies": [
|
||||||
|
{
|
||||||
|
"frequency": 60.03,
|
||||||
|
"is_current": true,
|
||||||
|
"is_preferred": true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"frequency": 59.93,
|
||||||
|
"is_current": false,
|
||||||
|
"is_preferred": false
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"resolution_width": 1680,
|
||||||
|
"resolution_height": 1050,
|
||||||
|
"is_high_resolution": false,
|
||||||
|
"frequencies": [
|
||||||
|
{
|
||||||
|
"frequency": 59.88,
|
||||||
|
"is_current": false,
|
||||||
|
"is_preferred": false
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"is_connected": true,
|
||||||
|
"is_primary": true,
|
||||||
|
"device_name": "eDP1",
|
||||||
|
"model_name": "ASUS VW193S",
|
||||||
|
"product_id": "54297",
|
||||||
|
"serial_number": "78L8021107",
|
||||||
|
"resolution_width": 1920,
|
||||||
|
"resolution_height": 1080,
|
||||||
|
"offset_width": 0,
|
||||||
|
"offset_height": 0,
|
||||||
|
"dimension_width": 310,
|
||||||
|
"dimension_height": 170,
|
||||||
|
"rotation": "normal",
|
||||||
|
"reflection": "normal"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
],
|
],
|
||||||
@ -162,4 +231,4 @@ Returns:
|
|||||||
### Parser Information
|
### Parser Information
|
||||||
Compatibility: linux, darwin, cygwin, aix, freebsd
|
Compatibility: linux, darwin, cygwin, aix, freebsd
|
||||||
|
|
||||||
Version 1.1 by Kevin Lyter (lyter_git at sent.com)
|
Version 1.2 by Kevin Lyter (lyter_git at sent.com)
|
||||||
|
125
docs/parsers/zpool_iostat.md
Normal file
@ -0,0 +1,125 @@
|
|||||||
|
[Home](https://kellyjonbrazil.github.io/jc/)
|
||||||
|
<a id="jc.parsers.zpool_iostat"></a>
|
||||||
|
|
||||||
|
# jc.parsers.zpool\_iostat
|
||||||
|
|
||||||
|
jc - JSON Convert `zpool iostat` command output parser
|
||||||
|
|
||||||
|
Works with or without the `-v` flag.
|
||||||
|
|
||||||
|
Usage (cli):
|
||||||
|
|
||||||
|
$ zpool iostat | jc --zpool-iostat
|
||||||
|
|
||||||
|
or
|
||||||
|
|
||||||
|
$ jc zpool iostat
|
||||||
|
|
||||||
|
Usage (module):
|
||||||
|
|
||||||
|
import jc
|
||||||
|
result = jc.parse('zpool_iostat', zpool_iostat_command_output)
|
||||||
|
|
||||||
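For example, a hedged sketch of using the module interface to report only the top-level pools (the `subprocess` invocation and the printed fields are illustrative; child vdev rows carry a `parent` key per the schema below, pool rows do not):

```python
import subprocess
import jc

# Run `zpool iostat -v`, parse it, and print bandwidth for top-level pools only.
out = subprocess.run(['zpool', 'iostat', '-v'], capture_output=True, text=True).stdout
for row in jc.parse('zpool_iostat', out):
    if 'parent' not in row:
        print(row['pool'], row['bw_read'], row['bw_read_unit'],
              row['bw_write'], row['bw_write_unit'])
```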
|
Schema:
|
||||||
|
|
||||||
|
[
|
||||||
|
{
|
||||||
|
"pool": string,
|
||||||
|
"parent": string,
|
||||||
|
"cap_alloc": float,
|
||||||
|
"cap_alloc_unit": string,
|
||||||
|
"cap_free": float,
|
||||||
|
"cap_free_unit": string,
|
||||||
|
"ops_read": integer,
|
||||||
|
"ops_write": integer,
|
||||||
|
"bw_read": float,
|
||||||
|
"bw_read_unit": string,
|
||||||
|
"bw_write": float,
|
||||||
|
"bw_write_unit": string
|
||||||
|
}
|
||||||
|
]
|
||||||
|
|
||||||
|
Examples:
|
||||||
|
|
||||||
|
$ zpool iostat -v | jc --zpool-iostat -p
|
||||||
|
[
|
||||||
|
{
|
||||||
|
"pool": "zhgstera6",
|
||||||
|
"cap_alloc": 2.89,
|
||||||
|
"cap_free": 2.2,
|
||||||
|
"ops_read": 0,
|
||||||
|
"ops_write": 2,
|
||||||
|
"bw_read": 349.0,
|
||||||
|
"bw_write": 448.0,
|
||||||
|
"cap_alloc_unit": "T",
|
||||||
|
"cap_free_unit": "T",
|
||||||
|
"bw_read_unit": "K",
|
||||||
|
"bw_write_unit": "K"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"pool": "726060ALE614-K8JAPRGN:10",
|
||||||
|
"parent": "zhgstera6",
|
||||||
|
"cap_alloc": 2.89,
|
||||||
|
"cap_free": 2.2,
|
||||||
|
"ops_read": 0,
|
||||||
|
"ops_write": 2,
|
||||||
|
"bw_read": 349.0,
|
||||||
|
"bw_write": 448.0,
|
||||||
|
"cap_alloc_unit": "T",
|
||||||
|
"cap_free_unit": "T",
|
||||||
|
"bw_read_unit": "K",
|
||||||
|
"bw_write_unit": "K"
|
||||||
|
},
|
||||||
|
...
|
||||||
|
]
|
||||||
|
|
||||||
|
$ zpool iostat -v | jc --zpool-iostat -p -r
|
||||||
|
[
|
||||||
|
{
|
||||||
|
"pool": "zhgstera6",
|
||||||
|
"cap_alloc": "2.89T",
|
||||||
|
"cap_free": "2.20T",
|
||||||
|
"ops_read": "0",
|
||||||
|
"ops_write": "2",
|
||||||
|
"bw_read": "349K",
|
||||||
|
"bw_write": "448K"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"pool": "726060ALE614-K8JAPRGN:10",
|
||||||
|
"parent": "zhgstera6",
|
||||||
|
"cap_alloc": "2.89T",
|
||||||
|
"cap_free": "2.20T",
|
||||||
|
"ops_read": "0",
|
||||||
|
"ops_write": "2",
|
||||||
|
"bw_read": "349K",
|
||||||
|
"bw_write": "448K"
|
||||||
|
},
|
||||||
|
...
|
||||||
|
]
|
||||||
|
|
||||||
|
<a id="jc.parsers.zpool_iostat.parse"></a>
|
||||||
|
|
||||||
|
### parse
|
||||||
|
|
||||||
|
```python
|
||||||
|
def parse(data: str,
|
||||||
|
raw: bool = False,
|
||||||
|
quiet: bool = False) -> List[JSONDictType]
|
||||||
|
```
|
||||||
|
|
||||||
|
Main text parsing function
|
||||||
|
|
||||||
|
Parameters:
|
||||||
|
|
||||||
|
data: (string) text data to parse
|
||||||
|
raw: (boolean) unprocessed output if True
|
||||||
|
quiet: (boolean) suppress warning messages if True
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
|
||||||
|
List of Dictionaries. Raw or processed structured data.
|
||||||
|
|
||||||
|
### Parser Information
|
||||||
|
Compatibility: linux, darwin, freebsd
|
||||||
|
|
||||||
|
Version 1.0 by Kelly Brazil (kellyjonbrazil@gmail.com)
|
163
docs/parsers/zpool_status.md
Normal file
@ -0,0 +1,163 @@
|
|||||||
|
[Home](https://kellyjonbrazil.github.io/jc/)
|
||||||
|
<a id="jc.parsers.zpool_status"></a>
|
||||||
|
|
||||||
|
# jc.parsers.zpool\_status
|
||||||
|
|
||||||
|
jc - JSON Convert `zpool status` command output parser
|
||||||
|
|
||||||
|
Works with or without the `-v` option.
|
||||||
|
|
||||||
|
Usage (cli):
|
||||||
|
|
||||||
|
$ zpool status | jc --zpool-status
|
||||||
|
|
||||||
|
or
|
||||||
|
|
||||||
|
$ jc zpool status
|
||||||
|
|
||||||
|
Usage (module):
|
||||||
|
|
||||||
|
import jc
|
||||||
|
result = jc.parse('zpool_status', zpool_status_command_output)
|
||||||
|
|
||||||
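For example, a hedged sketch that flags any vdev not reporting `ONLINE` (the `subprocess` call is illustrative; the `config`, `name`, and `state` keys follow the schema below):

```python
import subprocess
import jc

# Run `zpool status -v`, parse it, and list members that are not ONLINE.
out = subprocess.run(['zpool', 'status', '-v'], capture_output=True, text=True).stdout
for pool in jc.parse('zpool_status', out):
    bad = [dev['name'] for dev in pool.get('config', []) if dev.get('state') != 'ONLINE']
    if bad:
        print(f"{pool['pool']}: not ONLINE -> {', '.join(bad)}")
```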
|
Schema:
|
||||||
|
|
||||||
|
[
|
||||||
|
{
|
||||||
|
"pool": string,
|
||||||
|
"state": string,
|
||||||
|
"status": string,
|
||||||
|
"action": string,
|
||||||
|
"see": string,
|
||||||
|
"scan": string,
|
||||||
|
"scrub": string,
|
||||||
|
"config": [
|
||||||
|
{
|
||||||
|
"name": string,
|
||||||
|
"state": string,
|
||||||
|
"read": integer,
|
||||||
|
"write": integer,
|
||||||
|
"checksum": integer,
|
||||||
|
"errors": string,
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"errors": string
|
||||||
|
}
|
||||||
|
]
|
||||||
|
|
||||||
|
Examples:
|
||||||
|
|
||||||
|
$ zpool status -v | jc --zpool-status -p
|
||||||
|
[
|
||||||
|
{
|
||||||
|
"pool": "tank",
|
||||||
|
"state": "DEGRADED",
|
||||||
|
"status": "One or more devices could not be opened. Suffic...",
|
||||||
|
"action": "Attach the missing device and online it using 'zpool...",
|
||||||
|
"see": "http://www.sun.com/msg/ZFS-8000-2Q",
|
||||||
|
"scrub": "none requested",
|
||||||
|
"config": [
|
||||||
|
{
|
||||||
|
"name": "tank",
|
||||||
|
"state": "DEGRADED",
|
||||||
|
"read": 0,
|
||||||
|
"write": 0,
|
||||||
|
"checksum": 0
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "mirror-0",
|
||||||
|
"state": "DEGRADED",
|
||||||
|
"read": 0,
|
||||||
|
"write": 0,
|
||||||
|
"checksum": 0
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "c1t0d0",
|
||||||
|
"state": "ONLINE",
|
||||||
|
"read": 0,
|
||||||
|
"write": 0,
|
||||||
|
"checksum": 0
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "c1t1d0",
|
||||||
|
"state": "UNAVAIL",
|
||||||
|
"read": 0,
|
||||||
|
"write": 0,
|
||||||
|
"checksum": 0,
|
||||||
|
"errors": "cannot open"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"errors": "No known data errors"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
|
||||||
|
$ zpool status -v | jc --zpool-status -p -r
|
||||||
|
[
|
||||||
|
{
|
||||||
|
"pool": "tank",
|
||||||
|
"state": "DEGRADED",
|
||||||
|
"status": "One or more devices could not be opened. Sufficient...",
|
||||||
|
"action": "Attach the missing device and online it using 'zpool...",
|
||||||
|
"see": "http://www.sun.com/msg/ZFS-8000-2Q",
|
||||||
|
"scrub": "none requested",
|
||||||
|
"config": [
|
||||||
|
{
|
||||||
|
"name": "tank",
|
||||||
|
"state": "DEGRADED",
|
||||||
|
"read": "0",
|
||||||
|
"write": "0",
|
||||||
|
"checksum": "0"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "mirror-0",
|
||||||
|
"state": "DEGRADED",
|
||||||
|
"read": "0",
|
||||||
|
"write": "0",
|
||||||
|
"checksum": "0"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "c1t0d0",
|
||||||
|
"state": "ONLINE",
|
||||||
|
"read": "0",
|
||||||
|
"write": "0",
|
||||||
|
"checksum": "0"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "c1t1d0",
|
||||||
|
"state": "UNAVAIL",
|
||||||
|
"read": "0",
|
||||||
|
"write": "0",
|
||||||
|
"checksum": "0",
|
||||||
|
"errors": "cannot open"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"errors": "No known data errors"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
|
||||||
|
<a id="jc.parsers.zpool_status.parse"></a>
|
||||||
|
|
||||||
|
### parse
|
||||||
|
|
||||||
|
```python
|
||||||
|
def parse(data: str,
|
||||||
|
raw: bool = False,
|
||||||
|
quiet: bool = False) -> List[JSONDictType]
|
||||||
|
```
|
||||||
|
|
||||||
|
Main text parsing function
|
||||||
|
|
||||||
|
Parameters:
|
||||||
|
|
||||||
|
data: (string) text data to parse
|
||||||
|
raw: (boolean) unprocessed output if True
|
||||||
|
quiet: (boolean) suppress warning messages if True
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
|
||||||
|
List of Dictionaries. Raw or processed structured data.
|
||||||
|
|
||||||
|
### Parser Information
|
||||||
|
Compatibility: linux, darwin, freebsd
|
||||||
|
|
||||||
|
Version 1.0 by Kelly Brazil (kellyjonbrazil@gmail.com)
|
198
jc/cli.py
@ -5,11 +5,13 @@ JC cli module
|
|||||||
import io
|
import io
|
||||||
import sys
|
import sys
|
||||||
import os
|
import os
|
||||||
|
import re
|
||||||
|
from itertools import islice
|
||||||
from datetime import datetime, timezone
|
from datetime import datetime, timezone
|
||||||
import textwrap
|
import textwrap
|
||||||
import shlex
|
import shlex
|
||||||
import subprocess
|
import subprocess
|
||||||
from typing import List, Dict, Union, Optional, TextIO
|
from typing import List, Dict, Iterable, Union, Optional, TextIO
|
||||||
from types import ModuleType
|
from types import ModuleType
|
||||||
from .lib import (
|
from .lib import (
|
||||||
__version__, parser_info, all_parser_info, parsers, _get_parser, _parser_is_streaming,
|
__version__, parser_info, all_parser_info, parsers, _get_parser, _parser_is_streaming,
|
||||||
@ -40,6 +42,10 @@ except Exception:
|
|||||||
JC_CLEAN_EXIT: int = 0
|
JC_CLEAN_EXIT: int = 0
|
||||||
JC_ERROR_EXIT: int = 100
|
JC_ERROR_EXIT: int = 100
|
||||||
MAX_EXIT: int = 255
|
MAX_EXIT: int = 255
|
||||||
|
SLICER_PATTERN: str = r'-?[0-9]*\:-?[0-9]*$'
|
||||||
|
SLICER_RE = re.compile(SLICER_PATTERN)
|
||||||
|
NEWLINES_PATTERN: str = r'(\r\n|\r|\n)'
|
||||||
|
NEWLINES_RE = re.compile(NEWLINES_PATTERN)
|
||||||
|
|
||||||
|
|
||||||
class info():
|
class info():
|
||||||
@ -69,11 +75,11 @@ class JcCli():
|
|||||||
'help_me', 'pretty', 'quiet', 'ignore_exceptions', 'raw', 'meta_out', 'unbuffer',
|
'help_me', 'pretty', 'quiet', 'ignore_exceptions', 'raw', 'meta_out', 'unbuffer',
|
||||||
'version_info', 'yaml_output', 'bash_comp', 'zsh_comp', 'magic_found_parser',
|
'version_info', 'yaml_output', 'bash_comp', 'zsh_comp', 'magic_found_parser',
|
||||||
'magic_options', 'magic_run_command', 'magic_run_command_str', 'magic_stdout',
|
'magic_options', 'magic_run_command', 'magic_run_command_str', 'magic_stdout',
|
||||||
'magic_stderr', 'magic_returncode'
|
'magic_stderr', 'magic_returncode', 'slice_str', 'slice_start', 'slice_end'
|
||||||
)
|
)
|
||||||
|
|
||||||
def __init__(self) -> None:
|
def __init__(self) -> None:
|
||||||
self.data_in: Optional[Union[str, bytes, TextIO]] = None
|
self.data_in: Optional[Union[str, bytes, TextIO, Iterable[str]]] = None
|
||||||
self.data_out: Optional[Union[List[JSONDictType], JSONDictType]] = None
|
self.data_out: Optional[Union[List[JSONDictType], JSONDictType]] = None
|
||||||
self.options: List[str] = []
|
self.options: List[str] = []
|
||||||
self.args: List[str] = []
|
self.args: List[str] = []
|
||||||
@ -89,6 +95,11 @@ class JcCli():
|
|||||||
self.json_indent: Optional[int] = None
|
self.json_indent: Optional[int] = None
|
||||||
self.run_timestamp: Optional[datetime] = None
|
self.run_timestamp: Optional[datetime] = None
|
||||||
|
|
||||||
|
# slicer
|
||||||
|
self.slice_str: str = ''
|
||||||
|
self.slice_start: Optional[int] = None
|
||||||
|
self.slice_end: Optional[int] = None
|
||||||
|
|
||||||
# cli options
|
# cli options
|
||||||
self.about: bool = False
|
self.about: bool = False
|
||||||
self.debug: bool = False
|
self.debug: bool = False
|
||||||
@ -432,6 +443,17 @@ class JcCli():
|
|||||||
self.magic_options = []
|
self.magic_options = []
|
||||||
return
|
return
|
||||||
|
|
||||||
|
# slicer found
|
||||||
|
if ':' in arg:
|
||||||
|
if SLICER_RE.match(arg):
|
||||||
|
self.slice_str = arg
|
||||||
|
args_given.pop(0)
|
||||||
|
continue
|
||||||
|
else:
|
||||||
|
utils.warning_message(['Invalid slice syntax.'])
|
||||||
|
args_given.pop(0)
|
||||||
|
continue
|
||||||
|
|
||||||
# option found - populate option list
|
# option found - populate option list
|
||||||
if arg.startswith('-'):
|
if arg.startswith('-'):
|
||||||
self.magic_options.extend(args_given.pop(0)[1:])
|
self.magic_options.extend(args_given.pop(0)[1:])
|
||||||
@ -574,59 +596,6 @@ class JcCli():
|
|||||||
utils.error_message(['Missing piped data. Use "jc -h" for help.'])
|
utils.error_message(['Missing piped data. Use "jc -h" for help.'])
|
||||||
self.exit_error()
|
self.exit_error()
|
||||||
|
|
||||||
def streaming_parse_and_print(self) -> None:
|
|
||||||
"""only supports UTF-8 string data for now"""
|
|
||||||
self.data_in = sys.stdin
|
|
||||||
if self.parser_module:
|
|
||||||
result = self.parser_module.parse(
|
|
||||||
self.data_in,
|
|
||||||
raw=self.raw,
|
|
||||||
quiet=self.quiet,
|
|
||||||
ignore_exceptions=self.ignore_exceptions
|
|
||||||
)
|
|
||||||
|
|
||||||
for line in result:
|
|
||||||
self.data_out = line
|
|
||||||
if self.meta_out:
|
|
||||||
self.run_timestamp = datetime.now(timezone.utc)
|
|
||||||
self.add_metadata_to_output()
|
|
||||||
|
|
||||||
self.safe_print_out()
|
|
||||||
|
|
||||||
def standard_parse_and_print(self) -> None:
|
|
||||||
"""supports binary and UTF-8 string data"""
|
|
||||||
self.data_in = self.magic_stdout or sys.stdin.buffer.read()
|
|
||||||
|
|
||||||
# convert to UTF-8, if possible. Otherwise, leave as bytes
|
|
||||||
try:
|
|
||||||
if isinstance(self.data_in, bytes):
|
|
||||||
self.data_in = self.data_in.decode('utf-8')
|
|
||||||
except UnicodeDecodeError:
|
|
||||||
pass
|
|
||||||
|
|
||||||
if self.parser_module:
|
|
||||||
self.data_out = self.parser_module.parse(
|
|
||||||
self.data_in,
|
|
||||||
raw=self.raw,
|
|
||||||
quiet=self.quiet
|
|
||||||
)
|
|
||||||
|
|
||||||
if self.meta_out:
|
|
||||||
self.run_timestamp = datetime.now(timezone.utc)
|
|
||||||
self.add_metadata_to_output()
|
|
||||||
|
|
||||||
self.safe_print_out()
|
|
||||||
|
|
||||||
def exit_clean(self) -> None:
|
|
||||||
exit_code: int = self.magic_returncode + JC_CLEAN_EXIT
|
|
||||||
exit_code = min(exit_code, MAX_EXIT)
|
|
||||||
sys.exit(exit_code)
|
|
||||||
|
|
||||||
def exit_error(self) -> None:
|
|
||||||
exit_code: int = self.magic_returncode + JC_ERROR_EXIT
|
|
||||||
exit_code = min(exit_code, MAX_EXIT)
|
|
||||||
sys.exit(exit_code)
|
|
||||||
|
|
||||||
def add_metadata_to_output(self) -> None:
|
def add_metadata_to_output(self) -> None:
|
||||||
"""
|
"""
|
||||||
This function mutates data_out in place. If the _jc_meta field
|
This function mutates data_out in place. If the _jc_meta field
|
||||||
@ -641,7 +610,9 @@ class JcCli():
|
|||||||
if self.run_timestamp:
|
if self.run_timestamp:
|
||||||
meta_obj: JSONDictType = {
|
meta_obj: JSONDictType = {
|
||||||
'parser': self.parser_name,
|
'parser': self.parser_name,
|
||||||
'timestamp': self.run_timestamp.timestamp()
|
'timestamp': self.run_timestamp.timestamp(),
|
||||||
|
'slice_start': self.slice_start,
|
||||||
|
'slice_end': self.slice_end
|
||||||
}
|
}
|
||||||
|
|
||||||
if self.magic_run_command:
|
if self.magic_run_command:
|
||||||
@ -669,6 +640,116 @@ class JcCli():
|
|||||||
utils.error_message(['Parser returned an unsupported object type.'])
|
utils.error_message(['Parser returned an unsupported object type.'])
|
||||||
self.exit_error()
|
self.exit_error()
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def lazy_splitlines(text: str) -> Iterable[str]:
|
||||||
|
start = 0
|
||||||
|
for m in NEWLINES_RE.finditer(text):
|
||||||
|
begin, end = m.span()
|
||||||
|
if begin != start:
|
||||||
|
yield text[start:begin]
|
||||||
|
start = end
|
||||||
|
|
||||||
|
if text[start:]:
|
||||||
|
yield text[start:]
|
||||||
|
|
||||||
|
def slicer(self) -> None:
|
||||||
|
"""Slice input data lazily, if possible. Updates self.data_in"""
|
||||||
|
if self.slice_str:
|
||||||
|
slice_start_str, slice_end_str = self.slice_str.split(':', maxsplit=1)
|
||||||
|
if slice_start_str:
|
||||||
|
self.slice_start = int(slice_start_str)
|
||||||
|
if slice_end_str:
|
||||||
|
self.slice_end = int(slice_end_str)
|
||||||
|
|
||||||
|
if not self.slice_start is None or not self.slice_end is None:
|
||||||
|
# standard parsers UTF-8 input
|
||||||
|
if isinstance(self.data_in, str):
|
||||||
|
data_in_iter = self.lazy_splitlines(self.data_in)
|
||||||
|
|
||||||
|
# positive slices
|
||||||
|
if (self.slice_start is None or self.slice_start >= 0) \
|
||||||
|
and (self.slice_end is None or self.slice_end >= 0):
|
||||||
|
|
||||||
|
self.data_in = '\n'.join(islice(data_in_iter, self.slice_start, self.slice_end))
|
||||||
|
|
||||||
|
# negative slices found (non-lazy, uses more memory)
|
||||||
|
else:
|
||||||
|
self.data_in = '\n'.join(list(data_in_iter)[self.slice_start:self.slice_end])
|
||||||
|
|
||||||
|
# standard parsers bytes input
|
||||||
|
elif isinstance(self.data_in, bytes):
|
||||||
|
utils.warning_message(['Cannot slice bytes data.'])
|
||||||
|
|
||||||
|
# streaming parsers UTF-8 input
|
||||||
|
else:
|
||||||
|
# positive slices
|
||||||
|
if (self.slice_start is None or self.slice_start >= 0) \
|
||||||
|
and (self.slice_end is None or self.slice_end >= 0) \
|
||||||
|
and self.data_in:
|
||||||
|
|
||||||
|
self.data_in = islice(self.data_in, self.slice_start, self.slice_end)
|
||||||
|
|
||||||
|
# negative slices found (non-lazy, uses more memory)
|
||||||
|
elif self.data_in:
|
||||||
|
self.data_in = list(self.data_in)[self.slice_start:self.slice_end]
|
||||||
|
|
||||||
|
def streaming_parse_and_print(self) -> None:
|
||||||
|
"""only supports UTF-8 string data for now"""
|
||||||
|
self.data_in = sys.stdin
|
||||||
|
self.slicer()
|
||||||
|
|
||||||
|
if self.parser_module:
|
||||||
|
result = self.parser_module.parse(
|
||||||
|
self.data_in,
|
||||||
|
raw=self.raw,
|
||||||
|
quiet=self.quiet,
|
||||||
|
ignore_exceptions=self.ignore_exceptions
|
||||||
|
)
|
||||||
|
|
||||||
|
for line in result:
|
||||||
|
self.data_out = line
|
||||||
|
if self.meta_out:
|
||||||
|
self.run_timestamp = datetime.now(timezone.utc)
|
||||||
|
self.add_metadata_to_output()
|
||||||
|
|
||||||
|
self.safe_print_out()
|
||||||
|
|
||||||
|
def standard_parse_and_print(self) -> None:
|
||||||
|
"""supports binary and UTF-8 string data"""
|
||||||
|
self.data_in = self.magic_stdout or sys.stdin.buffer.read()
|
||||||
|
|
||||||
|
# convert to UTF-8, if possible. Otherwise, leave as bytes
|
||||||
|
try:
|
||||||
|
if isinstance(self.data_in, bytes):
|
||||||
|
self.data_in = self.data_in.decode('utf-8')
|
||||||
|
except UnicodeDecodeError:
|
||||||
|
pass
|
||||||
|
|
||||||
|
self.slicer()
|
||||||
|
|
||||||
|
if self.parser_module:
|
||||||
|
self.data_out = self.parser_module.parse(
|
||||||
|
self.data_in,
|
||||||
|
raw=self.raw,
|
||||||
|
quiet=self.quiet
|
||||||
|
)
|
||||||
|
|
||||||
|
if self.meta_out:
|
||||||
|
self.run_timestamp = datetime.now(timezone.utc)
|
||||||
|
self.add_metadata_to_output()
|
||||||
|
|
||||||
|
self.safe_print_out()
|
||||||
|
|
||||||
|
def exit_clean(self) -> None:
|
||||||
|
exit_code: int = self.magic_returncode + JC_CLEAN_EXIT
|
||||||
|
exit_code = min(exit_code, MAX_EXIT)
|
||||||
|
sys.exit(exit_code)
|
||||||
|
|
||||||
|
def exit_error(self) -> None:
|
||||||
|
exit_code: int = self.magic_returncode + JC_ERROR_EXIT
|
||||||
|
exit_code = min(exit_code, MAX_EXIT)
|
||||||
|
sys.exit(exit_code)
|
||||||
|
|
||||||
def _run(self) -> None:
|
def _run(self) -> None:
|
||||||
# enable colors for Windows cmd.exe terminal
|
# enable colors for Windows cmd.exe terminal
|
||||||
if sys.platform.startswith('win32'):
|
if sys.platform.startswith('win32'):
|
||||||
@ -684,6 +765,9 @@ class JcCli():
|
|||||||
# find options if magic_parser did not find a command
|
# find options if magic_parser did not find a command
|
||||||
if not self.magic_found_parser:
|
if not self.magic_found_parser:
|
||||||
for opt in self.args:
|
for opt in self.args:
|
||||||
|
if SLICER_RE.match(opt):
|
||||||
|
self.slice_str = opt
|
||||||
|
|
||||||
if opt in long_options_map:
|
if opt in long_options_map:
|
||||||
self.options.extend(long_options_map[opt][0])
|
self.options.extend(long_options_map[opt][0])
|
||||||
|
|
||||||
|
@ -63,17 +63,17 @@ Usage:
|
|||||||
|
|
||||||
Standard syntax:
|
Standard syntax:
|
||||||
|
|
||||||
COMMAND | jc [OPTIONS] PARSER
|
COMMAND | jc [SLICE] [OPTIONS] PARSER
|
||||||
|
|
||||||
cat FILE | jc [OPTIONS] PARSER
|
cat FILE | jc [SLICE] [OPTIONS] PARSER
|
||||||
|
|
||||||
echo STRING | jc [OPTIONS] PARSER
|
echo STRING | jc [SLICE] [OPTIONS] PARSER
|
||||||
|
|
||||||
Magic syntax:
|
Magic syntax:
|
||||||
|
|
||||||
jc [OPTIONS] COMMAND
|
jc [SLICE] [OPTIONS] COMMAND
|
||||||
|
|
||||||
jc [OPTIONS] /proc/<path-to-procfile>
|
jc [SLICE] [OPTIONS] /proc/<path-to-procfile>
|
||||||
|
|
||||||
Parsers:
|
Parsers:
|
||||||
'''
|
'''
|
||||||
@ -88,6 +88,9 @@ Examples:
|
|||||||
$ jc --pretty dig www.google.com
|
$ jc --pretty dig www.google.com
|
||||||
$ jc --pretty /proc/meminfo
|
$ jc --pretty /proc/meminfo
|
||||||
|
|
||||||
|
Line Slicing:
|
||||||
|
$ cat file.csv | jc :101 --csv # parse first 100 lines
|
||||||
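(Two additional, illustrative slice examples, inferred from the `slicer()` implementation earlier in this diff: indices are zero-based and end-exclusive, and negative indices force the whole input to be read into memory first.)
$ cat access.log | jc 0:100 --clf # first 100 lines
$ cat access.log | jc -50: --clf # last 50 lines (non-lazy)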
|
|
||||||
Parser Documentation:
|
Parser Documentation:
|
||||||
$ jc --help --dig
|
$ jc --help --dig
|
||||||
|
|
||||||
|
12
jc/lib.py
@ -3,13 +3,13 @@ import sys
|
|||||||
import os
|
import os
|
||||||
import re
|
import re
|
||||||
import importlib
|
import importlib
|
||||||
from typing import List, Iterable, Union, Iterator
|
from typing import List, Iterable, Optional, Union, Iterator
|
||||||
from types import ModuleType
|
from types import ModuleType
|
||||||
from .jc_types import ParserInfoType, JSONDictType
|
from .jc_types import ParserInfoType, JSONDictType
|
||||||
from jc import appdirs
|
from jc import appdirs
|
||||||
|
|
||||||
|
|
||||||
__version__ = '1.22.5'
|
__version__ = '1.23.0'
|
||||||
|
|
||||||
parsers: List[str] = [
|
parsers: List[str] = [
|
||||||
'acpi',
|
'acpi',
|
||||||
@ -159,6 +159,7 @@ parsers: List[str] = [
|
|||||||
'sfdisk',
|
'sfdisk',
|
||||||
'shadow',
|
'shadow',
|
||||||
'ss',
|
'ss',
|
||||||
|
'ssh-conf',
|
||||||
'sshd-conf',
|
'sshd-conf',
|
||||||
'stat',
|
'stat',
|
||||||
'stat-s',
|
'stat-s',
|
||||||
@ -189,6 +190,7 @@ parsers: List[str] = [
|
|||||||
'upower',
|
'upower',
|
||||||
'uptime',
|
'uptime',
|
||||||
'url',
|
'url',
|
||||||
|
'ver',
|
||||||
'vmstat',
|
'vmstat',
|
||||||
'vmstat-s',
|
'vmstat-s',
|
||||||
'w',
|
'w',
|
||||||
@ -198,7 +200,9 @@ parsers: List[str] = [
|
|||||||
'xml',
|
'xml',
|
||||||
'xrandr',
|
'xrandr',
|
||||||
'yaml',
|
'yaml',
|
||||||
'zipinfo'
|
'zipinfo',
|
||||||
|
'zpool-iostat',
|
||||||
|
'zpool-status'
|
||||||
]
|
]
|
||||||
|
|
||||||
def _cliname_to_modname(parser_cli_name: str) -> str:
|
def _cliname_to_modname(parser_cli_name: str) -> str:
|
||||||
@ -279,7 +283,7 @@ def parse(
|
|||||||
data: Union[str, bytes, Iterable[str]],
|
data: Union[str, bytes, Iterable[str]],
|
||||||
quiet: bool = False,
|
quiet: bool = False,
|
||||||
raw: bool = False,
|
raw: bool = False,
|
||||||
ignore_exceptions: bool = None,
|
ignore_exceptions: Optional[bool] = None,
|
||||||
**kwargs
|
**kwargs
|
||||||
) -> Union[JSONDictType, List[JSONDictType], Iterator[JSONDictType]]:
|
) -> Union[JSONDictType, List[JSONDictType], Iterator[JSONDictType]]:
|
||||||
"""
|
"""
|
||||||
|
@ -227,7 +227,7 @@ import jc.utils
|
|||||||
|
|
||||||
class info():
|
class info():
|
||||||
"""Provides parser metadata (version, author, etc.)"""
|
"""Provides parser metadata (version, author, etc.)"""
|
||||||
version = '1.4'
|
version = '1.5'
|
||||||
description = '`acpi` command parser'
|
description = '`acpi` command parser'
|
||||||
author = 'Kelly Brazil'
|
author = 'Kelly Brazil'
|
||||||
author_email = 'kellyjonbrazil@gmail.com'
|
author_email = 'kellyjonbrazil@gmail.com'
|
||||||
@ -336,7 +336,9 @@ def parse(data, raw=False, quiet=False):
|
|||||||
if 'Charging' in line or 'Discharging' in line or 'Full' in line:
|
if 'Charging' in line or 'Discharging' in line or 'Full' in line:
|
||||||
output_line['state'] = line.split()[2][:-1]
|
output_line['state'] = line.split()[2][:-1]
|
||||||
output_line['charge_percent'] = line.split()[3].rstrip('%,')
|
output_line['charge_percent'] = line.split()[3].rstrip('%,')
|
||||||
if 'rate information unavailable' not in line:
|
if 'will never fully discharge' in line:
|
||||||
|
pass
|
||||||
|
elif 'rate information unavailable' not in line:
|
||||||
if 'Charging' in line:
|
if 'Charging' in line:
|
||||||
output_line['until_charged'] = line.split()[4]
|
output_line['until_charged'] = line.split()[4]
|
||||||
if 'Discharging' in line:
|
if 'Discharging' in line:
|
||||||
|
@ -174,7 +174,7 @@ import jc.parsers.universal
|
|||||||
|
|
||||||
class info():
|
class info():
|
||||||
"""Provides parser metadata (version, author, etc.)"""
|
"""Provides parser metadata (version, author, etc.)"""
|
||||||
version = '1.6'
|
version = '1.7'
|
||||||
description = '`crontab` command and file parser'
|
description = '`crontab` command and file parser'
|
||||||
author = 'Kelly Brazil'
|
author = 'Kelly Brazil'
|
||||||
author_email = 'kellyjonbrazil@gmail.com'
|
author_email = 'kellyjonbrazil@gmail.com'
|
||||||
@ -273,6 +273,9 @@ def parse(data, raw=False, quiet=False):
|
|||||||
raw_output['schedule'] = cron_list
|
raw_output['schedule'] = cron_list
|
||||||
|
|
||||||
# Add shortcut entries back in
|
# Add shortcut entries back in
|
||||||
|
if 'schedule' not in raw_output:
|
||||||
|
raw_output['schedule'] = []
|
||||||
|
|
||||||
for item in shortcut_list:
|
for item in shortcut_list:
|
||||||
raw_output['schedule'].append(item)
|
raw_output['schedule'].append(item)
|
||||||
|
|
||||||
|
@ -171,7 +171,7 @@ import jc.parsers.universal
|
|||||||
|
|
||||||
class info():
|
class info():
|
||||||
"""Provides parser metadata (version, author, etc.)"""
|
"""Provides parser metadata (version, author, etc.)"""
|
||||||
version = '1.7'
|
version = '1.8'
|
||||||
description = '`crontab` file parser with user support'
|
description = '`crontab` file parser with user support'
|
||||||
author = 'Kelly Brazil'
|
author = 'Kelly Brazil'
|
||||||
author_email = 'kellyjonbrazil@gmail.com'
|
author_email = 'kellyjonbrazil@gmail.com'
|
||||||
@ -271,6 +271,9 @@ def parse(data, raw=False, quiet=False):
|
|||||||
raw_output['schedule'] = cron_list
|
raw_output['schedule'] = cron_list
|
||||||
|
|
||||||
# Add shortcut entries back in
|
# Add shortcut entries back in
|
||||||
|
if 'schedule' not in raw_output:
|
||||||
|
raw_output['schedule'] = []
|
||||||
|
|
||||||
for item in shortcut_list:
|
for item in shortcut_list:
|
||||||
raw_output['schedule'].append(item)
|
raw_output['schedule'].append(item)
|
||||||
|
|
||||||
|
@ -219,7 +219,7 @@ import jc.utils
|
|||||||
|
|
||||||
class info():
|
class info():
|
||||||
"""Provides parser metadata (version, author, etc.)"""
|
"""Provides parser metadata (version, author, etc.)"""
|
||||||
version = '2.2'
|
version = '2.3'
|
||||||
description = '`ifconfig` command parser'
|
description = '`ifconfig` command parser'
|
||||||
author = 'Kelly Brazil'
|
author = 'Kelly Brazil'
|
||||||
author_email = 'kellyjonbrazil@gmail.com'
|
author_email = 'kellyjonbrazil@gmail.com'
|
||||||
@ -425,18 +425,18 @@ def parse(
|
|||||||
# Linux syntax
|
# Linux syntax
|
||||||
re_linux_interface = re.compile(r'''
|
re_linux_interface = re.compile(r'''
|
||||||
(?P<name>[a-zA-Z0-9:._-]+)\s+
|
(?P<name>[a-zA-Z0-9:._-]+)\s+
|
||||||
Link encap:(?P<type>\S+\s?\S+)
|
Link\sencap:(?P<type>\S+\s?\S+)
|
||||||
(\s+HWaddr\s+\b(?P<mac_addr>[0-9A-Fa-f:?]+))?
|
(\s+HWaddr\s+\b(?P<mac_addr>[0-9A-Fa-f:?]+))?
|
||||||
''', re.IGNORECASE | re.VERBOSE
|
''', re.IGNORECASE | re.VERBOSE
|
||||||
)
|
)
|
||||||
re_linux_ipv4 = re.compile(r'''
|
re_linux_ipv4 = re.compile(r'''
|
||||||
inet addr:(?P<address>(?:[0-9]{1,3}\.){3}[0-9]{1,3})(\s+
|
inet\saddr:(?P<address>(?:[0-9]{1,3}\.){3}[0-9]{1,3})(\s+
|
||||||
Bcast:(?P<broadcast>(?:[0-9]{1,3}\.){3}[0-9]{1,3}))?\s+
|
Bcast:(?P<broadcast>(?:[0-9]{1,3}\.){3}[0-9]{1,3}))?\s+
|
||||||
Mask:(?P<mask>(?:[0-9]{1,3}\.){3}[0-9]{1,3})
|
Mask:(?P<mask>(?:[0-9]{1,3}\.){3}[0-9]{1,3})
|
||||||
''', re.IGNORECASE | re.VERBOSE
|
''', re.IGNORECASE | re.VERBOSE
|
||||||
)
|
)
|
||||||
re_linux_ipv6 = re.compile(r'''
|
re_linux_ipv6 = re.compile(r'''
|
||||||
inet6 addr:\s+(?P<address>\S+)/
|
inet6\saddr:\s+(?P<address>\S+)/
|
||||||
(?P<mask>[0-9]+)\s+
|
(?P<mask>[0-9]+)\s+
|
||||||
Scope:(?P<scope>Link|Host)
|
Scope:(?P<scope>Link|Host)
|
||||||
''', re.IGNORECASE | re.VERBOSE
|
''', re.IGNORECASE | re.VERBOSE
|
||||||
@ -448,7 +448,7 @@ def parse(
|
|||||||
''', re.IGNORECASE | re.VERBOSE
|
''', re.IGNORECASE | re.VERBOSE
|
||||||
)
|
)
|
||||||
re_linux_rx = re.compile(r'''
|
re_linux_rx = re.compile(r'''
|
||||||
RX packets:(?P<rx_packets>[0-9]+)\s+
|
RX\spackets:(?P<rx_packets>[0-9]+)\s+
|
||||||
errors:(?P<rx_errors>[0-9]+)\s+
|
errors:(?P<rx_errors>[0-9]+)\s+
|
||||||
dropped:(?P<rx_dropped>[0-9]+)\s+
|
dropped:(?P<rx_dropped>[0-9]+)\s+
|
||||||
overruns:(?P<rx_overruns>[0-9]+)\s+
|
overruns:(?P<rx_overruns>[0-9]+)\s+
|
||||||
@ -456,7 +456,7 @@ def parse(
|
|||||||
''', re.IGNORECASE | re.VERBOSE
|
''', re.IGNORECASE | re.VERBOSE
|
||||||
)
|
)
|
||||||
re_linux_tx = re.compile(r'''
|
re_linux_tx = re.compile(r'''
|
||||||
TX packets:(?P<tx_packets>[0-9]+)\s+
|
TX\spackets:(?P<tx_packets>[0-9]+)\s+
|
||||||
errors:(?P<tx_errors>[0-9]+)\s+
|
errors:(?P<tx_errors>[0-9]+)\s+
|
||||||
dropped:(?P<tx_dropped>[0-9]+)\s+
|
dropped:(?P<tx_dropped>[0-9]+)\s+
|
||||||
overruns:(?P<tx_overruns>[0-9]+)\s+
|
overruns:(?P<tx_overruns>[0-9]+)\s+
|
||||||
@ -464,8 +464,8 @@ def parse(
|
|||||||
''', re.IGNORECASE | re.VERBOSE
|
''', re.IGNORECASE | re.VERBOSE
|
||||||
)
|
)
|
||||||
re_linux_bytes = re.compile(r'''
|
re_linux_bytes = re.compile(r'''
|
||||||
\W+RX bytes:(?P<rx_bytes>\d+)\s+\(.*\)\s+
|
\W+RX\sbytes:(?P<rx_bytes>\d+)\s+\(.*\)\s+
|
||||||
TX bytes:(?P<tx_bytes>\d+)\s+\(.*\)
|
TX\sbytes:(?P<tx_bytes>\d+)\s+\(.*\)
|
||||||
''', re.IGNORECASE | re.VERBOSE
|
''', re.IGNORECASE | re.VERBOSE
|
||||||
)
|
)
|
||||||
re_linux_tx_stats = re.compile(r'''
|
re_linux_tx_stats = re.compile(r'''
|
||||||
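A side note on why these regex hunks replace literal spaces with `\s` (a minimal standalone sketch; the sample ifconfig line is invented for illustration): under `re.VERBOSE`, unescaped whitespace inside the pattern is ignored, so `Link encap:` effectively compiles as `Linkencap:` and never matches the older-style Linux output this fix targets.

```python
import re

line = 'eth0      Link encap:Ethernet  HWaddr 00:11:22:33:44:55'   # invented sample

# VERBOSE mode strips the unescaped space from the pattern itself...
old = re.compile(r'Link encap:(?P<type>\S+)', re.VERBOSE)
# ...so the space must be written as \s to match real whitespace in the input.
new = re.compile(r'Link\sencap:(?P<type>\S+)', re.VERBOSE)

print(old.search(line))                 # None
print(new.search(line).group('type'))   # Ethernet
```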
|
@ -109,7 +109,8 @@ Examples:
|
|||||||
"mw",
|
"mw",
|
||||||
"me",
|
"me",
|
||||||
"dw",
|
"dw",
|
||||||
"sd"
|
"sd",
|
||||||
|
"mp"
|
||||||
],
|
],
|
||||||
"VmFlags_pretty": [
|
"VmFlags_pretty": [
|
||||||
"readable",
|
"readable",
|
||||||
@ -211,6 +212,7 @@ def _process(proc_data: List[Dict]) -> List[Dict]:
|
|||||||
'mw': 'may write',
|
'mw': 'may write',
|
||||||
'me': 'may execute',
|
'me': 'may execute',
|
||||||
'ms': 'may share',
|
'ms': 'may share',
|
||||||
|
'mp': 'MPX-specific VMA',
|
||||||
'gd': 'stack segment grows down',
|
'gd': 'stack segment grows down',
|
||||||
'pf': 'pure PFN range',
|
'pf': 'pure PFN range',
|
||||||
'dw': 'disabled write to the mapped file',
|
'dw': 'disabled write to the mapped file',
|
||||||
@ -274,10 +276,10 @@ def parse(
|
|||||||
|
|
||||||
if jc.utils.has_data(data):
|
if jc.utils.has_data(data):
|
||||||
map_line = re.compile(r'''
|
map_line = re.compile(r'''
|
||||||
^(?P<start>[0-9a-f]{12,16})-
|
^(?P<start>[0-9a-f]{8,16})-
|
||||||
(?P<end>[0-9a-f]{12,16})\s
|
(?P<end>[0-9a-f]{8,16})\s
|
||||||
(?P<perms>[rwxsp\-]{4})\s
|
(?P<perms>[rwxsp\-]{4})\s
|
||||||
(?P<offset>[0-9a-f]{8})\s
|
(?P<offset>[0-9a-f]{8,9})\s
|
||||||
(?P<maj>[0-9a-f]{2}):
|
(?P<maj>[0-9a-f]{2}):
|
||||||
(?P<min>[0-9a-f]{2})\s
|
(?P<min>[0-9a-f]{2})\s
|
||||||
(?P<inode>\d+)\s+
|
(?P<inode>\d+)\s+
|
||||||
|
18
jc/parsers/pyedid/LICENSE
Normal file
@ -0,0 +1,18 @@
|
|||||||
|
Copyright 2019-2020 Jonas Lieb, Davydov Denis
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy of
|
||||||
|
this software and associated documentation files (the "Software"), to deal in
|
||||||
|
the Software without restriction, including without limitation the rights to
|
||||||
|
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
|
||||||
|
the Software, and to permit persons to whom the Software is furnished to do so,
|
||||||
|
subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in all
|
||||||
|
copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
|
||||||
|
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
|
||||||
|
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
|
||||||
|
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
|
||||||
|
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
0
jc/parsers/pyedid/__init__.py
Normal file
171
jc/parsers/pyedid/edid.py
Executable file
@ -0,0 +1,171 @@
|
|||||||
|
"""
|
||||||
|
Edid module
|
||||||
|
"""
|
||||||
|
|
||||||
|
import struct
|
||||||
|
from collections import namedtuple
|
||||||
|
from typing import ByteString
|
||||||
|
|
||||||
|
__all__ = ["Edid"]
|
||||||
|
|
||||||
|
|
||||||
|
class Edid:
|
||||||
|
"""Edid class
|
||||||
|
|
||||||
|
Raises:
|
||||||
|
`ValueError`: if invalid edid data
|
||||||
|
"""
|
||||||
|
|
||||||
|
_STRUCT_FORMAT = (
|
||||||
|
"<" # little-endian
|
||||||
|
"8s" # constant header (8 bytes)
|
||||||
|
"H" # manufacturer id (2 bytes)
|
||||||
|
"H" # product id (2 bytes)
|
||||||
|
"I" # serial number (4 bytes)
|
||||||
|
"B" # manufactoring week (1 byte)
|
||||||
|
"B" # manufactoring year (1 byte)
|
||||||
|
"B" # edid version (1 byte)
|
||||||
|
"B" # edid revision (1 byte)
|
||||||
|
"B" # video input type (1 byte)
|
||||||
|
"B" # horizontal size in cm (1 byte)
|
||||||
|
"B" # vertical size in cm (1 byte)
|
||||||
|
"B" # display gamma (1 byte)
|
||||||
|
"B" # supported features (1 byte)
|
||||||
|
"10s" # color characteristics (10 bytes)
|
||||||
|
"H" # supported timings (2 bytes)
|
||||||
|
"B" # reserved timing (1 byte)
|
||||||
|
"16s" # EDID supported timings (16 bytes)
|
||||||
|
"18s" # detailed timing block 1 (18 bytes)
|
||||||
|
"18s" # detailed timing block 2 (18 bytes)
|
||||||
|
"18s" # detailed timing block 3 (18 bytes)
|
||||||
|
"18s" # detailed timing block 4 (18 bytes)
|
||||||
|
"B" # extension flag (1 byte)
|
||||||
|
"B"
|
||||||
|
) # checksum (1 byte)
|
||||||
|
|
||||||
|
_TIMINGS = {
|
||||||
|
0: (1280, 1024, 75.0),
|
||||||
|
1: (1024, 768, 75.0),
|
||||||
|
2: (1024, 768, 70.0),
|
||||||
|
3: (1024, 768, 60.0),
|
||||||
|
4: (1024, 768, 87.0),
|
||||||
|
5: (832, 624, 75.0),
|
||||||
|
6: (800, 600, 75.0),
|
||||||
|
7: (800, 600, 72.0),
|
||||||
|
8: (800, 600, 60.0),
|
||||||
|
9: (800, 600, 56.0),
|
||||||
|
10: (640, 480, 75.0),
|
||||||
|
11: (640, 480, 72.0),
|
||||||
|
12: (640, 480, 67.0),
|
||||||
|
13: (640, 480, 60.0),
|
||||||
|
14: (720, 400, 88.0),
|
||||||
|
15: (720, 400, 70.0),
|
||||||
|
}
|
||||||
|
|
||||||
|
_ASPECT_RATIOS = {
|
||||||
|
0b00: (16, 10),
|
||||||
|
0b01: ( 4, 3),
|
||||||
|
0b10: ( 5, 4),
|
||||||
|
0b11: (16, 9),
|
||||||
|
}
|
||||||
|
|
||||||
|
_RawEdid = namedtuple("RawEdid",
|
||||||
|
("header",
|
||||||
|
"manu_id",
|
||||||
|
"prod_id",
|
||||||
|
"serial_no",
|
||||||
|
"manu_week",
|
||||||
|
"manu_year",
|
||||||
|
"edid_version",
|
||||||
|
"edid_revision",
|
||||||
|
"input_type",
|
||||||
|
"width",
|
||||||
|
"height",
|
||||||
|
"gamma",
|
||||||
|
"features",
|
||||||
|
"color",
|
||||||
|
"timings_supported",
|
||||||
|
"timings_reserved",
|
||||||
|
"timings_edid",
|
||||||
|
"timing_1",
|
||||||
|
"timing_2",
|
||||||
|
"timing_3",
|
||||||
|
"timing_4",
|
||||||
|
"extension",
|
||||||
|
"checksum")
|
||||||
|
)
|
||||||
|
|
||||||
|
def __init__(self, edid: ByteString):
|
||||||
|
self._parse_edid(edid)
|
||||||
|
|
||||||
|
def _parse_edid(self, edid: ByteString):
|
||||||
|
"""Convert edid byte string to edid object"""
|
||||||
|
if struct.calcsize(self._STRUCT_FORMAT) != 128:
|
||||||
|
raise ValueError("Wrong edid size.")
|
||||||
|
|
||||||
|
if sum(map(int, edid)) % 256 != 0:
|
||||||
|
raise ValueError("Checksum mismatch.")
|
||||||
|
|
||||||
|
unpacked = struct.unpack(self._STRUCT_FORMAT, edid)
|
||||||
|
raw_edid = self._RawEdid(*unpacked)
|
||||||
|
|
||||||
|
if raw_edid.header != b'\x00\xff\xff\xff\xff\xff\xff\x00':
|
||||||
|
raise ValueError("Invalid header.")
|
||||||
|
|
||||||
|
self.raw = edid
|
||||||
|
self.manufacturer_id = raw_edid.manu_id
|
||||||
|
self.product = raw_edid.prod_id
|
||||||
|
self.year = raw_edid.manu_year + 1990
|
||||||
|
self.edid_version = "{:d}.{:d}".format(raw_edid.edid_version, raw_edid.edid_revision)
|
||||||
|
self.type = "digital" if (raw_edid.input_type & 0xFF) else "analog"
|
||||||
|
self.width = float(raw_edid.width)
|
||||||
|
self.height = float(raw_edid.height)
|
||||||
|
self.gamma = (raw_edid.gamma+100)/100
|
||||||
|
self.dpms_standby = bool(raw_edid.features & 0xFF)
|
||||||
|
self.dpms_suspend = bool(raw_edid.features & 0x7F)
|
||||||
|
self.dpms_activeoff = bool(raw_edid.features & 0x3F)
|
||||||
|
|
||||||
|
self.resolutions = []
|
||||||
|
for i in range(16):
|
||||||
|
bit = raw_edid.timings_supported & (1 << i)
|
||||||
|
if bit:
|
||||||
|
self.resolutions.append(self._TIMINGS[i])
|
||||||
|
|
||||||
|
for i in range(8):
|
||||||
|
bytes_data = raw_edid.timings_edid[2*i:2*i+2]
|
||||||
|
if bytes_data == b'\x01\x01':
|
||||||
|
continue
|
||||||
|
byte1, byte2 = bytes_data
|
||||||
|
x_res = 8*(int(byte1)+31)
|
||||||
|
aspect_ratio = self._ASPECT_RATIOS[(byte2>>6) & 0b11]
|
||||||
|
y_res = int(x_res * aspect_ratio[1]/aspect_ratio[0])
|
||||||
|
rate = (int(byte2) & 0b00111111) + 60.0
|
||||||
|
self.resolutions.append((x_res, y_res, rate))
|
||||||
|
|
||||||
|
self.name = None
|
||||||
|
self.serial = None
|
||||||
|
|
||||||
|
for timing_bytes in (raw_edid.timing_1, raw_edid.timing_2, raw_edid.timing_3, raw_edid.timing_4):
|
||||||
|
# "other" descriptor
|
||||||
|
if timing_bytes[0:2] == b'\x00\x00':
|
||||||
|
timing_type = timing_bytes[3]
|
||||||
|
if timing_type in (0xFF, 0xFE, 0xFC):
|
||||||
|
buffer = timing_bytes[5:]
|
||||||
|
buffer = buffer.partition(b"\x0a")[0]
|
||||||
|
text = buffer.decode("cp437")
|
||||||
|
if timing_type == 0xFF:
|
||||||
|
self.serial = text
|
||||||
|
elif timing_type == 0xFC:
|
||||||
|
self.name = text
|
||||||
|
|
||||||
|
if not self.serial:
|
||||||
|
self.serial = raw_edid.serial_no
|
||||||
|
|
||||||
|
def __repr__(self):
|
||||||
|
clsname = self.__class__.__name__
|
||||||
|
attributes = []
|
||||||
|
for name in dir(self):
|
||||||
|
if not name.startswith("_"):
|
||||||
|
value = getattr(self, name)
|
||||||
|
attributes.append("\t{}={}".format(name, value))
|
||||||
|
return "{}(\n{}\n)".format(clsname, ", \n".join(attributes))
|
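The `_parse_edid` method above leans on the standard EDID integrity rule: the 128 bytes of the base block must sum to zero modulo 256. A standalone sketch of just that check (illustrative, not part of the vendored module):

```python
def edid_checksum_ok(edid: bytes) -> bool:
    """True if a 128-byte EDID base block satisfies the checksum rule that
    Edid._parse_edid enforces (all bytes sum to 0 mod 256)."""
    return len(edid) == 128 and sum(edid) % 256 == 0

# A block of all zeros trivially satisfies the rule (though it would still
# fail the fixed-header check that follows in _parse_edid).
print(edid_checksum_ok(bytes(128)))  # True
```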
jc/parsers/pyedid/helpers/__init__.py  (new file, 0 lines)

jc/parsers/pyedid/helpers/edid_helper.py  (new file, 61 lines)
@@ -0,0 +1,61 @@
|
|||||||
|
"""
|
||||||
|
EDID helper
|
||||||
|
"""
|
||||||
|
|
||||||
|
from subprocess import CalledProcessError, check_output
|
||||||
|
from typing import ByteString, List
|
||||||
|
|
||||||
|
__all__ = ["EdidHelper"]
|
||||||
|
|
||||||
|
|
||||||
|
class EdidHelper:
|
||||||
|
"""Class for working with EDID data"""
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def hex2bytes(hex_data: str) -> ByteString:
|
||||||
|
"""Convert hex EDID string to bytes
|
||||||
|
|
||||||
|
Args:
|
||||||
|
hex_data (str): hex edid string
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
ByteString: edid byte string
|
||||||
|
"""
|
||||||
|
# delete edid 1.3 additional block
|
||||||
|
if len(hex_data) > 256:
|
||||||
|
hex_data = hex_data[:256]
|
||||||
|
|
||||||
|
numbers = []
|
||||||
|
for i in range(0, len(hex_data), 2):
|
||||||
|
pair = hex_data[i : i + 2]
|
||||||
|
numbers.append(int(pair, 16))
|
||||||
|
return bytes(numbers)
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def get_edids(cls) -> List[ByteString]:
|
||||||
|
"""Get edids from xrandr
|
||||||
|
|
||||||
|
Raises:
|
||||||
|
`RuntimeError`: if error with retrieving xrandr util data
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
List[ByteString]: list with edids
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
output = check_output(["xrandr", "--verbose"])
|
||||||
|
except (CalledProcessError, FileNotFoundError) as err:
|
||||||
|
raise RuntimeError(
|
||||||
|
"Error retrieving xrandr util data: {}".format(err)
|
||||||
|
) from None
|
||||||
|
|
||||||
|
edids = []
|
||||||
|
lines = output.splitlines()
|
||||||
|
for i, line in enumerate(lines):
|
||||||
|
line = line.decode().strip()
|
||||||
|
if line.startswith("EDID:"):
|
||||||
|
selection = lines[i + 1 : i + 9]
|
||||||
|
selection = list(s.decode().strip() for s in selection)
|
||||||
|
selection = "".join(selection)
|
||||||
|
bytes_section = cls.hex2bytes(selection)
|
||||||
|
edids.append(bytes_section)
|
||||||
|
return edids
|
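A quick illustration of what `hex2bytes` above does with one 32-character line of an EDID hex dump (the sample line is taken from the EDID comment that appears in the `xrandr` parser changes further below):

```python
# Convert hex pairs to bytes the same way EdidHelper.hex2bytes does.
hex_line = "00ffffffffffff004ca3523100000000"
as_bytes = bytes(int(hex_line[i:i + 2], 16) for i in range(0, len(hex_line), 2))

print(as_bytes[:8])  # b'\x00\xff\xff\xff\xff\xff\xff\x00' -- the fixed EDID header
```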
jc/parsers/pyedid/helpers/registry.py  (new file, 136 lines)
@@ -0,0 +1,136 @@
|
|||||||
|
"""
|
||||||
|
Module for working with PNP ID REGISTRY
|
||||||
|
"""
|
||||||
|
|
||||||
|
import csv
|
||||||
|
import string
|
||||||
|
from html.parser import HTMLParser
|
||||||
|
from urllib import request
|
||||||
|
|
||||||
|
__all__ = ["Registry"]
|
||||||
|
|
||||||
|
|
||||||
|
class WebPnpIdParser(HTMLParser):
|
||||||
|
"""Parser pnp id from https://uefi.org/PNP_ID_List
|
||||||
|
|
||||||
|
Examples:
|
||||||
|
p = WebPnpIdParser()
|
||||||
|
p.feed(html_data)
|
||||||
|
p.result
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, *args, **kwargs):
|
||||||
|
super().__init__(*args, **kwargs)
|
||||||
|
self._find_table = False
|
||||||
|
self._find_row = False
|
||||||
|
# first -- company name, second -- pnp id, third -- approved date
|
||||||
|
self._last_field = []
|
||||||
|
# key -- pnp id, value -- tuple (company_name, approved_date)
|
||||||
|
self.result = {}
|
||||||
|
|
||||||
|
def handle_starttag(self, tag, attrs):
|
||||||
|
if tag == "tbody":
|
||||||
|
self._find_table = True
|
||||||
|
elif self._find_table and tag == "tr":
|
||||||
|
self._find_row = True
|
||||||
|
|
||||||
|
def handle_endtag(self, tag):
|
||||||
|
if tag == "tbody":
|
||||||
|
self._find_table = False
|
||||||
|
elif self._find_table and tag == "tr":
|
||||||
|
self._find_row = False
|
||||||
|
# add table row to result
|
||||||
|
self.result[self._last_field[1]] = (
|
||||||
|
self._last_field[0],
|
||||||
|
self._last_field[-1],
|
||||||
|
)
|
||||||
|
self._last_field.clear()
|
||||||
|
|
||||||
|
def handle_data(self, data):
|
||||||
|
# skip processing until table is found
|
||||||
|
if not self._find_table:
|
||||||
|
return
|
||||||
|
|
||||||
|
if self._find_row:
|
||||||
|
data = data.strip()
|
||||||
|
if data:
|
||||||
|
self._last_field.append(data)
|
||||||
|
|
||||||
|
def error(self, message):
|
||||||
|
super().close()
|
||||||
|
|
||||||
|
|
||||||
|
class Registry(dict):
|
||||||
|
"""Registry pnp id data dictionary
|
||||||
|
|
||||||
|
key -- pnp_id
|
||||||
|
value -- company name
|
||||||
|
"""
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def from_web(cls, filter_by_id: str = None):
|
||||||
|
"""Get registry from https://uefi.org/PNP_ID_List
|
||||||
|
|
||||||
|
Args:
|
||||||
|
filter_by_id (str), optional: filter registry by id
|
||||||
|
|
||||||
|
Raises:
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
|
||||||
|
"""
|
||||||
|
url = "https://uefi.org/PNP_ID_List"
|
||||||
|
if filter_by_id:
|
||||||
|
url += "?search={}".format(filter_by_id)
|
||||||
|
|
||||||
|
with request.urlopen(url) as req:
|
||||||
|
parse = WebPnpIdParser()
|
||||||
|
parse.feed(req.read().decode())
|
||||||
|
|
||||||
|
registry = cls()
|
||||||
|
for key, value in parse.result.items():
|
||||||
|
# skip invalid search value
|
||||||
|
if filter_by_id and key != filter_by_id:
|
||||||
|
continue
|
||||||
|
registry[key] = value[0]
|
||||||
|
return registry
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def from_csv(cls, csv_path: str, filter_by_id: str = None):
|
||||||
|
"""Get registry by csv local file
|
||||||
|
|
||||||
|
Args:
|
||||||
|
csv_path (str): path to csv file
|
||||||
|
filter_by_id (str), optional: filter registry by id
|
||||||
|
|
||||||
|
Raises:
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
|
||||||
|
"""
|
||||||
|
registry = cls()
|
||||||
|
with open(csv_path, "r") as file:
|
||||||
|
reader = csv.reader(file)
|
||||||
|
for line in reader:
|
||||||
|
# filter
|
||||||
|
if filter_by_id and filter_by_id != line[0]:
|
||||||
|
continue
|
||||||
|
registry[line[0]] = line[1]
|
||||||
|
return registry
|
||||||
|
|
||||||
|
def to_csv(self, csv_path: str):
|
||||||
|
"""Dump registry to csv file"""
|
||||||
|
with open(csv_path, "w") as csv_file:
|
||||||
|
writer = csv.writer(csv_file)
|
||||||
|
writer.writerows(self.items())
|
||||||
|
return self
|
||||||
|
|
||||||
|
def get_company_from_id(self, pnp_id: str) -> str:
|
||||||
|
"""Convert PNP id to company name"""
|
||||||
|
return self.get(pnp_id, "Unknown")
|
||||||
|
|
||||||
|
def get_company_from_raw(self, raw: int) -> str:
|
||||||
|
"""Convert raw edid value to company name"""
|
||||||
|
tmp = [(raw >> 10) & 31, (raw >> 5) & 31, raw & 31]
|
||||||
|
pnp_id = "".join(string.ascii_uppercase[n - 1] for n in tmp)
|
||||||
|
return self.get_company_from_id(pnp_id)
|
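The `get_company_from_raw` method above unpacks three letters from a 15-bit value, five bits per letter with 1 = 'A'. A worked sketch of that decoding with a hand-built value (the company-name lookup itself still needs the registry data):

```python
import string

def pnp_id_from_raw(raw: int) -> str:
    """Decode a packed 15-bit manufacturer value into a 3-letter PNP ID,
    mirroring Registry.get_company_from_raw."""
    letters = [(raw >> 10) & 31, (raw >> 5) & 31, raw & 31]
    return "".join(string.ascii_uppercase[n - 1] for n in letters)

# (19 << 10) | (5 << 5) | 3 == 0x4CA3 packs the letters 'S', 'E', 'C'
print(pnp_id_from_raw(0x4CA3))  # 'SEC'
```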
jc/parsers/pyedid/main.py  (new file, 29 lines)
@@ -0,0 +1,29 @@
"""
Entrypoint
"""

from pyedid.edid import Edid
from pyedid.helpers.edid_helper import EdidHelper
from pyedid.helpers.registry import Registry


def main():
    """Main func"""

    edid_csv_cache = "/tmp/pyedid-database.csv"

    try:
        registry = Registry.from_csv(edid_csv_cache)
    except FileNotFoundError:
        print("Loading registry from web...")
        registry = Registry.from_web()
        print("Done!\n")
        registry.to_csv(edid_csv_cache)

    for raw in EdidHelper.get_edids():
        edid = Edid(raw, registry)
        print(edid)


if __name__ == "__main__":
    main()
@@ -2,6 +2,8 @@
 
 This parser conforms to the specification at https://semver.org/
 
+See Also: `ver` parser.
+
 Usage (cli):
 
     $ echo 1.2.3-rc.1+44837 | jc --semver

jc/parsers/ssh_conf.py  (new file, 688 lines)
@@ -0,0 +1,688 @@
|
|||||||
|
"""jc - JSON Convert `ssh` configuration file and `ssh -G` command output parser
|
||||||
|
|
||||||
|
This parser will work with `ssh` configuration files or the output of
|
||||||
|
`ssh -G`. Any `Match` blocks in the `ssh` configuration file will be
|
||||||
|
ignored.
|
||||||
|
|
||||||
|
Usage (cli):
|
||||||
|
|
||||||
|
$ ssh -G hostname | jc --ssh-conf
|
||||||
|
|
||||||
|
or
|
||||||
|
|
||||||
|
$ jc ssh -G hostname
|
||||||
|
|
||||||
|
or
|
||||||
|
|
||||||
|
$ cat ~/.ssh/config | jc --ssh-conf
|
||||||
|
|
||||||
|
Usage (module):
|
||||||
|
|
||||||
|
import jc
|
||||||
|
result = jc.parse('ssh_conf', ssh_conf_output)
|
||||||
|
|
||||||
|
Schema:
|
||||||
|
|
||||||
|
[
|
||||||
|
{
|
||||||
|
"host": string,
|
||||||
|
"host_list": [
|
||||||
|
string
|
||||||
|
],
|
||||||
|
"addkeystoagent": string,
|
||||||
|
"addressfamily": string,
|
||||||
|
"batchmode": string,
|
||||||
|
"bindaddress": string,
|
||||||
|
"bindinterface": string,
|
||||||
|
"canonicaldomains": [
|
||||||
|
string
|
||||||
|
],
|
||||||
|
"canonicalizefallbacklocal": string,
|
||||||
|
"canonicalizehostname": string,
|
||||||
|
"canonicalizemaxdots": integer,
|
||||||
|
"canonicalizepermittedcnames": [
|
||||||
|
string
|
||||||
|
],
|
||||||
|
"casignaturealgorithms": [
|
||||||
|
string
|
||||||
|
],
|
||||||
|
"certificatefile": [
|
||||||
|
string
|
||||||
|
],
|
||||||
|
"checkhostip": string,
|
||||||
|
"ciphers": [
|
||||||
|
string
|
||||||
|
],
|
||||||
|
"clearallforwardings": string,
|
||||||
|
"compression": string,
|
||||||
|
"connectionattempts": integer,
|
||||||
|
"connecttimeout": integer,
|
||||||
|
"controlmaster": string,
|
||||||
|
"controlpath": string,
|
||||||
|
"controlpersist": string,
|
||||||
|
"dynamicforward": string,
|
||||||
|
"enableescapecommandline": string,
|
||||||
|
"enablesshkeysign": string,
|
||||||
|
"escapechar": string,
|
||||||
|
"exitonforwardfailure": string,
|
||||||
|
"fingerprinthash": string,
|
||||||
|
"forkafterauthentication": string,
|
||||||
|
"forwardagent": string,
|
||||||
|
"forwardx11": string,
|
||||||
|
"forwardx11timeout": integer,
|
||||||
|
"forwardx11trusted": string,
|
||||||
|
"gatewayports": string,
|
||||||
|
"globalknownhostsfile": [
|
||||||
|
string
|
||||||
|
],
|
||||||
|
"gssapiauthentication": string,
|
||||||
|
"gssapidelegatecredentials": string,
|
||||||
|
"hashknownhosts": string,
|
||||||
|
"hostbasedacceptedalgorithms": [
|
||||||
|
string
|
||||||
|
],
|
||||||
|
"hostbasedauthentication": string,
|
||||||
|
"hostkeyalgorithms": [
|
||||||
|
string
|
||||||
|
],
|
||||||
|
"hostkeyalias": string,
|
||||||
|
"hostname": string,
|
||||||
|
"identitiesonly": string,
|
||||||
|
"identityagent": string,
|
||||||
|
"identityfile": [
|
||||||
|
string
|
||||||
|
],
|
||||||
|
"ignoreunknown": string,
|
||||||
|
"include": [
|
||||||
|
string
|
||||||
|
],
|
||||||
|
"ipqos": [
|
||||||
|
string
|
||||||
|
],
|
||||||
|
"kbdinteractiveauthentication": string,
|
||||||
|
"kbdinteractivedevices": [
|
||||||
|
string
|
||||||
|
],
|
||||||
|
"kexalgorithms": [
|
||||||
|
string
|
||||||
|
],
|
||||||
|
"kexalgorithms_strategy": string,
|
||||||
|
"knownhostscommand": string,
|
||||||
|
"localcommand": string,
|
||||||
|
"localforward": [
|
||||||
|
string
|
||||||
|
],
|
||||||
|
"loglevel": string,
|
||||||
|
"logverbose": [
|
||||||
|
string
|
||||||
|
],
|
||||||
|
"macs": [
|
||||||
|
string
|
||||||
|
],
|
||||||
|
"macs_strategy": string,
|
||||||
|
"nohostauthenticationforlocalhost": string,
|
||||||
|
"numberofpasswordprompts": integer,
|
||||||
|
"passwordauthentication": string,
|
||||||
|
"permitlocalcommand": string,
|
||||||
|
"permitremoteopen": [
|
||||||
|
string
|
||||||
|
],
|
||||||
|
"pkcs11provider": string,
|
||||||
|
"port": integer,
|
||||||
|
"preferredauthentications": [
|
||||||
|
string
|
||||||
|
],
|
||||||
|
"protocol": integer,
|
||||||
|
"proxycommand": string,
|
||||||
|
"proxyjump": [
|
||||||
|
string
|
||||||
|
],
|
||||||
|
"proxyusefdpass": string,
|
||||||
|
"pubkeyacceptedalgorithms": [
|
||||||
|
string
|
||||||
|
],
|
||||||
|
"pubkeyacceptedalgorithms_strategy": string,
|
||||||
|
"pubkeyauthentication": string,
|
||||||
|
"rekeylimit": string,
|
||||||
|
"remotecommand": string,
|
||||||
|
"remoteforward": string,
|
||||||
|
"requesttty": string,
|
||||||
|
"requiredrsasize": integer,
|
||||||
|
"revokedhostkeys": string,
|
||||||
|
"securitykeyprovider": string,
|
||||||
|
"sendenv": [
|
||||||
|
string
|
||||||
|
],
|
||||||
|
"serveralivecountmax": integer,
|
||||||
|
"serveraliveinterval": integer,
|
||||||
|
"sessiontype": string,
|
||||||
|
"setenv": [
|
||||||
|
string
|
||||||
|
],
|
||||||
|
"stdinnull": string,
|
||||||
|
"streamlocalbindmask": string,
|
||||||
|
"streamlocalbindunlink": string,
|
||||||
|
"stricthostkeychecking": string,
|
||||||
|
"syslogfacility": string,
|
||||||
|
"tcpkeepalive": string,
|
||||||
|
"tunnel": string,
|
||||||
|
"tunneldevice": string,
|
||||||
|
"updatehostkeys": string,
|
||||||
|
"user": string,
|
||||||
|
"userknownhostsfile": [
|
||||||
|
string
|
||||||
|
],
|
||||||
|
"verifyhostkeydns": string,
|
||||||
|
"visualhostkey": string,
|
||||||
|
"xauthlocation": string
|
||||||
|
}
|
||||||
|
]
|
||||||
|
|
||||||
|
Examples:
|
||||||
|
|
||||||
|
$ ssh -G - | jc --ssh-conf -p
|
||||||
|
[
|
||||||
|
{
|
||||||
|
"user": "foo",
|
||||||
|
"hostname": "-",
|
||||||
|
"port": 22,
|
||||||
|
"addressfamily": "any",
|
||||||
|
"batchmode": "no",
|
||||||
|
"canonicalizefallbacklocal": "yes",
|
||||||
|
"canonicalizehostname": "false",
|
||||||
|
"checkhostip": "no",
|
||||||
|
"compression": "no",
|
||||||
|
"controlmaster": "false",
|
||||||
|
"enablesshkeysign": "no",
|
||||||
|
"clearallforwardings": "no",
|
||||||
|
"exitonforwardfailure": "no",
|
||||||
|
"fingerprinthash": "SHA256",
|
||||||
|
"forwardx11": "no",
|
||||||
|
"forwardx11trusted": "no",
|
||||||
|
"gatewayports": "no",
|
||||||
|
"gssapiauthentication": "no",
|
||||||
|
"gssapidelegatecredentials": "no",
|
||||||
|
"hashknownhosts": "no",
|
||||||
|
"hostbasedauthentication": "no",
|
||||||
|
"identitiesonly": "no",
|
||||||
|
"kbdinteractiveauthentication": "yes",
|
||||||
|
"nohostauthenticationforlocalhost": "no",
|
||||||
|
"passwordauthentication": "yes",
|
||||||
|
"permitlocalcommand": "no",
|
||||||
|
"proxyusefdpass": "no",
|
||||||
|
"pubkeyauthentication": "true",
|
||||||
|
"requesttty": "auto",
|
||||||
|
"sessiontype": "default",
|
||||||
|
"stdinnull": "no",
|
||||||
|
"forkafterauthentication": "no",
|
||||||
|
"streamlocalbindunlink": "no",
|
||||||
|
"stricthostkeychecking": "ask",
|
||||||
|
"tcpkeepalive": "yes",
|
||||||
|
"tunnel": "false",
|
||||||
|
"verifyhostkeydns": "false",
|
||||||
|
"visualhostkey": "no",
|
||||||
|
"updatehostkeys": "true",
|
||||||
|
"applemultipath": "no",
|
||||||
|
"canonicalizemaxdots": 1,
|
||||||
|
"connectionattempts": 1,
|
||||||
|
"forwardx11timeout": 1200,
|
||||||
|
"numberofpasswordprompts": 3,
|
||||||
|
"serveralivecountmax": 3,
|
||||||
|
"serveraliveinterval": 0,
|
||||||
|
"ciphers": [
|
||||||
|
"chacha20-poly1305@openssh.com",
|
||||||
|
"aes128-ctr",
|
||||||
|
"aes192-ctr",
|
||||||
|
"aes256-ctr",
|
||||||
|
"aes128-gcm@openssh.com",
|
||||||
|
"aes256-gcm@openssh.com"
|
||||||
|
],
|
||||||
|
"hostkeyalgorithms": [
|
||||||
|
"ssh-ed25519-cert-v01@openssh.com",
|
||||||
|
"ecdsa-sha2-nistp256-cert-v01@openssh.com",
|
||||||
|
"ecdsa-sha2-nistp384-cert-v01@openssh.com",
|
||||||
|
"ecdsa-sha2-nistp521-cert-v01@openssh.com",
|
||||||
|
"rsa-sha2-512-cert-v01@openssh.com",
|
||||||
|
"rsa-sha2-256-cert-v01@openssh.com",
|
||||||
|
"ssh-ed25519",
|
||||||
|
"ecdsa-sha2-nistp256",
|
||||||
|
"ecdsa-sha2-nistp384",
|
||||||
|
"ecdsa-sha2-nistp521",
|
||||||
|
"rsa-sha2-512",
|
||||||
|
"rsa-sha2-256"
|
||||||
|
],
|
||||||
|
"hostbasedacceptedalgorithms": [
|
||||||
|
"ssh-ed25519-cert-v01@openssh.com",
|
||||||
|
"ecdsa-sha2-nistp256-cert-v01@openssh.com",
|
||||||
|
"ecdsa-sha2-nistp384-cert-v01@openssh.com",
|
||||||
|
"ecdsa-sha2-nistp521-cert-v01@openssh.com",
|
||||||
|
"rsa-sha2-512-cert-v01@openssh.com",
|
||||||
|
"rsa-sha2-256-cert-v01@openssh.com",
|
||||||
|
"ssh-ed25519",
|
||||||
|
"ecdsa-sha2-nistp256",
|
||||||
|
"ecdsa-sha2-nistp384",
|
||||||
|
"ecdsa-sha2-nistp521",
|
||||||
|
"rsa-sha2-512",
|
||||||
|
"rsa-sha2-256"
|
||||||
|
],
|
||||||
|
"kexalgorithms": [
|
||||||
|
"sntrup761x25519-sha512@openssh.com",
|
||||||
|
"curve25519-sha256",
|
||||||
|
"curve25519-sha256@libssh.org",
|
||||||
|
"ecdh-sha2-nistp256",
|
||||||
|
"ecdh-sha2-nistp384",
|
||||||
|
"ecdh-sha2-nistp521",
|
||||||
|
"diffie-hellman-group-exchange-sha256",
|
||||||
|
"diffie-hellman-group16-sha512",
|
||||||
|
"diffie-hellman-group18-sha512",
|
||||||
|
"diffie-hellman-group14-sha256"
|
||||||
|
],
|
||||||
|
"casignaturealgorithms": [
|
||||||
|
"ssh-ed25519",
|
||||||
|
"ecdsa-sha2-nistp256",
|
||||||
|
"ecdsa-sha2-nistp384",
|
||||||
|
"ecdsa-sha2-nistp521",
|
||||||
|
"rsa-sha2-512",
|
||||||
|
"rsa-sha2-256"
|
||||||
|
],
|
||||||
|
"loglevel": "INFO",
|
||||||
|
"macs": [
|
||||||
|
"umac-64-etm@openssh.com",
|
||||||
|
"umac-128-etm@openssh.com",
|
||||||
|
"hmac-sha2-256-etm@openssh.com",
|
||||||
|
"hmac-sha2-512-etm@openssh.com",
|
||||||
|
"hmac-sha1-etm@openssh.com",
|
||||||
|
"umac-64@openssh.com",
|
||||||
|
"umac-128@openssh.com",
|
||||||
|
"hmac-sha2-256",
|
||||||
|
"hmac-sha2-512",
|
||||||
|
"hmac-sha1"
|
||||||
|
],
|
||||||
|
"securitykeyprovider": "$SSH_SK_PROVIDER",
|
||||||
|
"pubkeyacceptedalgorithms": [
|
||||||
|
"ssh-ed25519-cert-v01@openssh.com",
|
||||||
|
"ecdsa-sha2-nistp256-cert-v01@openssh.com",
|
||||||
|
"ecdsa-sha2-nistp384-cert-v01@openssh.com",
|
||||||
|
"ecdsa-sha2-nistp521-cert-v01@openssh.com",
|
||||||
|
"rsa-sha2-512-cert-v01@openssh.com",
|
||||||
|
"rsa-sha2-256-cert-v01@openssh.com",
|
||||||
|
"ssh-ed25519",
|
||||||
|
"ecdsa-sha2-nistp256",
|
||||||
|
"ecdsa-sha2-nistp384",
|
||||||
|
"ecdsa-sha2-nistp521",
|
||||||
|
"rsa-sha2-512",
|
||||||
|
"rsa-sha2-256"
|
||||||
|
],
|
||||||
|
"xauthlocation": "/usr/X11R6/bin/xauth",
|
||||||
|
"identityfile": [
|
||||||
|
"~/.ssh/id_rsa",
|
||||||
|
"~/.ssh/id_ecdsa",
|
||||||
|
"~/.ssh/id_ecdsa_sk",
|
||||||
|
"~/.ssh/id_ed25519",
|
||||||
|
"~/.ssh/id_ed25519_sk",
|
||||||
|
"~/.ssh/id_xmss",
|
||||||
|
"~/.ssh/id_dsa"
|
||||||
|
],
|
||||||
|
"canonicaldomains": [
|
||||||
|
"none"
|
||||||
|
],
|
||||||
|
"globalknownhostsfile": [
|
||||||
|
"/etc/ssh/ssh_known_hosts",
|
||||||
|
"/etc/ssh/ssh_known_hosts2"
|
||||||
|
],
|
||||||
|
"userknownhostsfile": [
|
||||||
|
"/Users/foo/.ssh/known_hosts",
|
||||||
|
"/Users/foo/.ssh/known_hosts2"
|
||||||
|
],
|
||||||
|
"sendenv": [
|
||||||
|
"LANG",
|
||||||
|
"LC_*"
|
||||||
|
],
|
||||||
|
"logverbose": [
|
||||||
|
"none"
|
||||||
|
],
|
||||||
|
"permitremoteopen": [
|
||||||
|
"any"
|
||||||
|
],
|
||||||
|
"addkeystoagent": "false",
|
||||||
|
"forwardagent": "no",
|
||||||
|
"connecttimeout": null,
|
||||||
|
"tunneldevice": "any:any",
|
||||||
|
"canonicalizepermittedcnames": [
|
||||||
|
"none"
|
||||||
|
],
|
||||||
|
"controlpersist": "no",
|
||||||
|
"escapechar": "~",
|
||||||
|
"ipqos": [
|
||||||
|
"af21",
|
||||||
|
"cs1"
|
||||||
|
],
|
||||||
|
"rekeylimit": "0 0",
|
||||||
|
"streamlocalbindmask": "0177",
|
||||||
|
"syslogfacility": "USER"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
|
||||||
|
$ cat ~/.ssh/config | jc --ssh-conf -p
|
||||||
|
[
|
||||||
|
{
|
||||||
|
"host": "server1",
|
||||||
|
"host_list": [
|
||||||
|
"server1"
|
||||||
|
],
|
||||||
|
"hostname": "server1.cyberciti.biz",
|
||||||
|
"user": "nixcraft",
|
||||||
|
"port": 4242,
|
||||||
|
"identityfile": [
|
||||||
|
"/nfs/shared/users/nixcraft/keys/server1/id_rsa"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"host": "nas01",
|
||||||
|
"host_list": [
|
||||||
|
"nas01"
|
||||||
|
],
|
||||||
|
"hostname": "192.168.1.100",
|
||||||
|
"user": "root",
|
||||||
|
"identityfile": [
|
||||||
|
"~/.ssh/nas01.key"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"host": "aws.apache",
|
||||||
|
"host_list": [
|
||||||
|
"aws.apache"
|
||||||
|
],
|
||||||
|
"hostname": "1.2.3.4",
|
||||||
|
"user": "wwwdata",
|
||||||
|
"identityfile": [
|
||||||
|
"~/.ssh/aws.apache.key"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"host": "uk.gw.lan uk.lan",
|
||||||
|
"host_list": [
|
||||||
|
"uk.gw.lan",
|
||||||
|
"uk.lan"
|
||||||
|
],
|
||||||
|
"hostname": "192.168.0.251",
|
||||||
|
"user": "nixcraft",
|
||||||
|
"proxycommand": "ssh nixcraft@gateway.uk.cyberciti.biz nc %h %p 2> /dev/null"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"host": "proxyus",
|
||||||
|
"host_list": [
|
||||||
|
"proxyus"
|
||||||
|
],
|
||||||
|
"hostname": "vps1.cyberciti.biz",
|
||||||
|
"user": "breakfree",
|
||||||
|
"identityfile": [
|
||||||
|
"~/.ssh/vps1.cyberciti.biz.key"
|
||||||
|
],
|
||||||
|
"localforward": [
|
||||||
|
"3128 127.0.0.1:3128"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"host": "*",
|
||||||
|
"host_list": [
|
||||||
|
"*"
|
||||||
|
],
|
||||||
|
"forwardagent": "no",
|
||||||
|
"forwardx11": "no",
|
||||||
|
"forwardx11trusted": "yes",
|
||||||
|
"user": "nixcraft",
|
||||||
|
"port": 22,
|
||||||
|
"protocol": 2,
|
||||||
|
"serveraliveinterval": 60,
|
||||||
|
"serveralivecountmax": 30
|
||||||
|
}
|
||||||
|
]
|
||||||
|
|
||||||
|
$ cat ~/.ssh/config | jc --ssh-conf -p -r
|
||||||
|
[
|
||||||
|
{
|
||||||
|
"host": "server1",
|
||||||
|
"host_list": [
|
||||||
|
"server1"
|
||||||
|
],
|
||||||
|
"hostname": "server1.cyberciti.biz",
|
||||||
|
"user": "nixcraft",
|
||||||
|
"port": "4242",
|
||||||
|
"identityfile": [
|
||||||
|
"/nfs/shared/users/nixcraft/keys/server1/id_rsa"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"host": "nas01",
|
||||||
|
"host_list": [
|
||||||
|
"nas01"
|
||||||
|
],
|
||||||
|
"hostname": "192.168.1.100",
|
||||||
|
"user": "root",
|
||||||
|
"identityfile": [
|
||||||
|
"~/.ssh/nas01.key"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"host": "aws.apache",
|
||||||
|
"host_list": [
|
||||||
|
"aws.apache"
|
||||||
|
],
|
||||||
|
"hostname": "1.2.3.4",
|
||||||
|
"user": "wwwdata",
|
||||||
|
"identityfile": [
|
||||||
|
"~/.ssh/aws.apache.key"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"host": "uk.gw.lan uk.lan",
|
||||||
|
"host_list": [
|
||||||
|
"uk.gw.lan",
|
||||||
|
"uk.lan"
|
||||||
|
],
|
||||||
|
"hostname": "192.168.0.251",
|
||||||
|
"user": "nixcraft",
|
||||||
|
"proxycommand": "ssh nixcraft@gateway.uk.cyberciti.biz nc %h %p 2> /dev/null"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"host": "proxyus",
|
||||||
|
"host_list": [
|
||||||
|
"proxyus"
|
||||||
|
],
|
||||||
|
"hostname": "vps1.cyberciti.biz",
|
||||||
|
"user": "breakfree",
|
||||||
|
"identityfile": [
|
||||||
|
"~/.ssh/vps1.cyberciti.biz.key"
|
||||||
|
],
|
||||||
|
"localforward": [
|
||||||
|
"3128 127.0.0.1:3128"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"host": "*",
|
||||||
|
"host_list": [
|
||||||
|
"*"
|
||||||
|
],
|
||||||
|
"forwardagent": "no",
|
||||||
|
"forwardx11": "no",
|
||||||
|
"forwardx11trusted": "yes",
|
||||||
|
"user": "nixcraft",
|
||||||
|
"port": "22",
|
||||||
|
"protocol": "2",
|
||||||
|
"serveraliveinterval": "60",
|
||||||
|
"serveralivecountmax": "30"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
"""
|
||||||
|
from typing import Set, List, Dict
|
||||||
|
from jc.jc_types import JSONDictType
|
||||||
|
import jc.utils
|
||||||
|
|
||||||
|
|
||||||
|
class info():
|
||||||
|
"""Provides parser metadata (version, author, etc.)"""
|
||||||
|
version = '1.0'
|
||||||
|
description = '`ssh` config file and `ssh -G` command parser'
|
||||||
|
author = 'Kelly Brazil'
|
||||||
|
author_email = 'kellyjonbrazil@gmail.com'
|
||||||
|
compatible = ['linux', 'darwin', 'freebsd']
|
||||||
|
magic_commands = ['ssh -G']
|
||||||
|
tags = ['command', 'file']
|
||||||
|
|
||||||
|
|
||||||
|
__version__ = info.version
|
||||||
|
|
||||||
|
|
||||||
|
def _process(proc_data: List[JSONDictType]) -> List[JSONDictType]:
|
||||||
|
"""
|
||||||
|
Final processing to conform to the schema.
|
||||||
|
|
||||||
|
Parameters:
|
||||||
|
|
||||||
|
proc_data: (List of Dictionaries) raw structured data to process
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
|
||||||
|
List of Dictionaries. Structured to conform to the schema.
|
||||||
|
"""
|
||||||
|
split_fields_space: Set[str] = {
|
||||||
|
'canonicaldomains', 'globalknownhostsfile', 'include', 'ipqos',
|
||||||
|
'permitremoteopen', 'sendenv', 'setenv', 'userknownhostsfile'
|
||||||
|
}
|
||||||
|
|
||||||
|
split_fields_comma: Set[str] = {
|
||||||
|
'canonicalizepermittedcnames', 'casignaturealgorithms', 'ciphers',
|
||||||
|
'hostbasedacceptedalgorithms', 'hostkeyalgorithms',
|
||||||
|
'kbdinteractivedevices', 'kexalgorithms', 'logverbose', 'macs',
|
||||||
|
'preferredauthentications', 'proxyjump', 'pubkeyacceptedalgorithms'
|
||||||
|
}
|
||||||
|
|
||||||
|
int_list: Set[str] = {
|
||||||
|
'canonicalizemaxdots', 'connectionattempts', 'connecttimeout',
|
||||||
|
'forwardx11timeout', 'numberofpasswordprompts', 'port', 'protocol',
|
||||||
|
'requiredrsasize', 'serveralivecountmax', 'serveraliveinterval'
|
||||||
|
}
|
||||||
|
|
||||||
|
for host in proc_data:
|
||||||
|
dict_copy = host.copy()
|
||||||
|
for key, val in dict_copy.items():
|
||||||
|
# these are list values
|
||||||
|
if key == 'sendenv' or key == 'setenv' or key == 'include':
|
||||||
|
new_list: List[str] = []
|
||||||
|
for item in val:
|
||||||
|
new_list.extend(item.split())
|
||||||
|
host[key] = new_list
|
||||||
|
continue
|
||||||
|
|
||||||
|
if key in split_fields_space:
|
||||||
|
host[key] = val.split()
|
||||||
|
continue
|
||||||
|
|
||||||
|
if key in split_fields_comma:
|
||||||
|
host[key] = val.split(',')
|
||||||
|
continue
|
||||||
|
|
||||||
|
for key, val in host.items():
|
||||||
|
if key in int_list:
|
||||||
|
host[key] = jc.utils.convert_to_int(val)
|
||||||
|
|
||||||
|
return proc_data
|
||||||
|
|
||||||
|
|
||||||
|
def parse(
|
||||||
|
data: str,
|
||||||
|
raw: bool = False,
|
||||||
|
quiet: bool = False
|
||||||
|
) -> List[JSONDictType]:
|
||||||
|
"""
|
||||||
|
Main text parsing function
|
||||||
|
|
||||||
|
Parameters:
|
||||||
|
|
||||||
|
data: (string) text data to parse
|
||||||
|
raw: (boolean) unprocessed output if True
|
||||||
|
quiet: (boolean) suppress warning messages if True
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
|
||||||
|
List of Dictionaries. Raw or processed structured data.
|
||||||
|
"""
|
||||||
|
jc.utils.compatibility(__name__, info.compatible, quiet)
|
||||||
|
jc.utils.input_type_check(data)
|
||||||
|
|
||||||
|
raw_output: List = []
|
||||||
|
host: Dict = {}
|
||||||
|
|
||||||
|
multi_fields: Set[str] = {
|
||||||
|
'certificatefile', 'identityfile', 'include', 'localforward',
|
||||||
|
'sendenv', 'setenv'
|
||||||
|
}
|
||||||
|
|
||||||
|
modified_fields: Set[str] = {
|
||||||
|
'casignaturealgorithms', 'ciphers', 'hostbasedacceptedalgorithms',
|
||||||
|
'HostKeyAlgorithms', 'kexalgorithms', 'macs',
|
||||||
|
'pubkeyacceptedalgorithms'
|
||||||
|
}
|
||||||
|
|
||||||
|
modifiers: Set[str] = {'+', '-', '^'}
|
||||||
|
|
||||||
|
match_block_found = False
|
||||||
|
|
||||||
|
if jc.utils.has_data(data):
|
||||||
|
|
||||||
|
for line in filter(None, data.splitlines()):
|
||||||
|
# skip any lines with only whitespace
|
||||||
|
if not line.strip():
|
||||||
|
continue
|
||||||
|
|
||||||
|
# support configuration file by skipping commented lines
|
||||||
|
if line.strip().startswith('#'):
|
||||||
|
continue
|
||||||
|
|
||||||
|
if line.strip().startswith('Host '):
|
||||||
|
if host:
|
||||||
|
raw_output.append(host)
|
||||||
|
|
||||||
|
hostnames = line.split(maxsplit=1)[1]
|
||||||
|
host = {
|
||||||
|
'host': hostnames,
|
||||||
|
'host_list': hostnames.split()
|
||||||
|
}
|
||||||
|
|
||||||
|
# support configuration file by ignoring all lines between
|
||||||
|
# Match xxx and Match any
|
||||||
|
if line.strip().startswith('Match all'):
|
||||||
|
match_block_found = False
|
||||||
|
continue
|
||||||
|
|
||||||
|
if line.strip().startswith('Match'):
|
||||||
|
match_block_found = True
|
||||||
|
continue
|
||||||
|
|
||||||
|
if match_block_found:
|
||||||
|
continue
|
||||||
|
|
||||||
|
key, val = line.split(maxsplit=1)
|
||||||
|
|
||||||
|
# support configuration file by converting to lower case
|
||||||
|
key = key.lower()
|
||||||
|
|
||||||
|
if key in multi_fields:
|
||||||
|
if key not in host:
|
||||||
|
host[key] = []
|
||||||
|
host[key].append(val)
|
||||||
|
continue
|
||||||
|
|
||||||
|
if key in modified_fields and val[0] in modifiers:
|
||||||
|
host[key] = val[1:]
|
||||||
|
host[key + '_strategy'] = val[0]
|
||||||
|
continue
|
||||||
|
|
||||||
|
host[key] = val
|
||||||
|
continue
|
||||||
|
|
||||||
|
if host:
|
||||||
|
raw_output.append(host)
|
||||||
|
|
||||||
|
return raw_output if raw else _process(raw_output)
|
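A short module-level usage sketch for the new parser (the host names and values here are illustrative):

```python
import json
import jc

ssh_config_text = """
Host server1
    HostName server1.example.com
    User admin
    Port 4242
"""

result = jc.parse('ssh_conf', ssh_config_text)
print(json.dumps(result, indent=2))
# [{"host": "server1", "host_list": ["server1"],
#   "hostname": "server1.example.com", "user": "admin", "port": 4242}]
```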
@@ -1,4 +1,4 @@
-"""jc - JSON Convert sshd configuration file and `sshd -T` command output parser
+"""jc - JSON Convert `sshd` configuration file and `sshd -T` command output parser
 
 This parser will work with `sshd` configuration files or the output of
 `sshd -T`. Any `Match` blocks in the `sshd` configuration file will be
@@ -483,13 +483,13 @@ import jc.utils
 
 class info():
     """Provides parser metadata (version, author, etc.)"""
-    version = '1.0'
-    description = 'sshd config file and `sshd -T` command parser'
+    version = '1.1'
+    description = '`sshd` config file and `sshd -T` command parser'
     author = 'Kelly Brazil'
     author_email = 'kellyjonbrazil@gmail.com'
     compatible = ['linux', 'darwin', 'freebsd']
     magic_commands = ['sshd -T']
-    tags = ['file']
+    tags = ['command', 'file']
 
 
 __version__ = info.version
@@ -622,6 +622,10 @@ def parse(
     if jc.utils.has_data(data):
 
         for line in filter(None, data.splitlines()):
+            # skip any lines with only whitespace
+            if not line.strip():
+                continue
+
             # support configuration file by skipping commented lines
             if line.strip().startswith('#'):
                 continue
@@ -28,8 +28,7 @@ def simple_table_parse(data: Iterable[str]) -> List[Dict]:
                 underscore '_'. You should also ensure headers are
                 lowercase by using .lower().
 
-                Also, ensure there are no blank lines (list items)
-                in the data.
+                Also, ensure there are no blank rows in the data.
 
     Returns:
 

jc/parsers/ver.py  (new file, 209 lines)
@@ -0,0 +1,209 @@
|
"""jc - JSON Convert Version string output parser
|
||||||
|
|
||||||
|
Best-effort attempt to parse various styles of version numbers. This parser
|
||||||
|
is based off of the version parser included in the CPython distutils
|
||||||
|
libary.
|
||||||
|
|
||||||
|
If the version string conforms to some de facto-standard versioning rules
|
||||||
|
followed by many developers a `strict` key will be present in the output
|
||||||
|
with a value of `true` along with the named parsed components.
|
||||||
|
|
||||||
|
All other version strings will have a `strict` value of `false` and a
|
||||||
|
`components` key will contain a list of detected parts of the version
|
||||||
|
string.
|
||||||
|
|
||||||
|
See Also: `semver` parser.
|
||||||
|
|
||||||
|
Usage (cli):
|
||||||
|
|
||||||
|
$ echo 1.2a1 | jc --ver
|
||||||
|
|
||||||
|
Usage (module):
|
||||||
|
|
||||||
|
import jc
|
||||||
|
result = jc.parse('ver', version_string_output)
|
||||||
|
|
||||||
|
Schema:
|
||||||
|
|
||||||
|
{
|
||||||
|
"major": integer,
|
||||||
|
"minor": integer,
|
||||||
|
"patch": integer,
|
||||||
|
"prerelease": string,
|
||||||
|
"prerelease_num": integer,
|
||||||
|
"components": [
|
||||||
|
integer/string
|
||||||
|
],
|
||||||
|
"strict": boolean
|
||||||
|
}
|
||||||
|
|
||||||
|
Examples:
|
||||||
|
|
||||||
|
$ echo 1.2a1 | jc --ver -p
|
||||||
|
{
|
||||||
|
"major": 1,
|
||||||
|
"minor": 2,
|
||||||
|
"patch": 0,
|
||||||
|
"prerelease": "a",
|
||||||
|
"prerelease_num": 1,
|
||||||
|
"strict": true
|
||||||
|
}
|
||||||
|
|
||||||
|
$ echo 1.2a1 | jc --ver -p -r
|
||||||
|
{
|
||||||
|
"major": "1",
|
||||||
|
"minor": "2",
|
||||||
|
"patch": "0",
|
||||||
|
"prerelease": "a",
|
||||||
|
"prerelease_num": "1",
|
||||||
|
"strict": true
|
||||||
|
}
|
||||||
|
|
||||||
|
$ echo 1.2beta3 | jc --ver -p
|
||||||
|
{
|
||||||
|
"components": [
|
||||||
|
1,
|
||||||
|
2,
|
||||||
|
"beta",
|
||||||
|
3
|
||||||
|
],
|
||||||
|
"strict": false
|
||||||
|
}
|
||||||
|
|
||||||
|
$ echo 1.2beta3 | jc --ver -p -r
|
||||||
|
{
|
||||||
|
"components": [
|
||||||
|
"1",
|
||||||
|
"2",
|
||||||
|
"beta",
|
||||||
|
"3"
|
||||||
|
],
|
||||||
|
"strict": false
|
||||||
|
}
|
||||||
|
"""
|
||||||
|
import re
|
||||||
|
from typing import Dict
|
||||||
|
from jc.jc_types import JSONDictType
|
||||||
|
import jc.utils
|
||||||
|
|
||||||
|
|
||||||
|
class info():
|
||||||
|
"""Provides parser metadata (version, author, etc.)"""
|
||||||
|
version = '1.0'
|
||||||
|
description = 'Version string parser'
|
||||||
|
author = 'Kelly Brazil'
|
||||||
|
author_email = 'kellyjonbrazil@gmail.com'
|
||||||
|
details = 'Based on distutils/version.py from CPython 3.9.5.'
|
||||||
|
compatible = ['linux', 'darwin', 'cygwin', 'win32', 'aix', 'freebsd']
|
||||||
|
tags = ['generic', 'string']
|
||||||
|
|
||||||
|
|
||||||
|
__version__ = info.version
|
||||||
|
|
||||||
|
|
||||||
|
def _process(proc_data: JSONDictType) -> JSONDictType:
|
||||||
|
"""
|
||||||
|
Final processing to conform to the schema.
|
||||||
|
|
||||||
|
Parameters:
|
||||||
|
|
||||||
|
proc_data: (List of Dictionaries) raw structured data to process
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
|
||||||
|
List of Dictionaries. Structured to conform to the schema.
|
||||||
|
"""
|
||||||
|
int_list = {'major', 'minor', 'patch', 'prerelease', 'prerelease_num'}
|
||||||
|
|
||||||
|
for k, v in proc_data.items():
|
||||||
|
if k in int_list:
|
||||||
|
try:
|
||||||
|
proc_data[k] = int(v)
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
|
|
||||||
|
if 'components' in proc_data:
|
||||||
|
for i, obj in enumerate(proc_data['components']):
|
||||||
|
try:
|
||||||
|
proc_data['components'][i] = int(obj)
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
|
|
||||||
|
return proc_data
|
||||||
|
|
||||||
|
|
||||||
|
def strict_parse(vstring):
|
||||||
|
version_re = re.compile(r'^(\d+) \. (\d+) (\. (\d+))? ([ab](\d+))?$', re.VERBOSE)
|
||||||
|
match = version_re.match(vstring)
|
||||||
|
if not match:
|
||||||
|
raise ValueError("invalid version number '%s'" % vstring)
|
||||||
|
|
||||||
|
(major, minor, patch, prerelease, prerelease_num) = \
|
||||||
|
match.group(1, 2, 4, 5, 6)
|
||||||
|
|
||||||
|
if not patch:
|
||||||
|
patch = '0'
|
||||||
|
|
||||||
|
if prerelease:
|
||||||
|
prerelease = prerelease[0]
|
||||||
|
else:
|
||||||
|
prerelease = None
|
||||||
|
|
||||||
|
return {
|
||||||
|
'major': major,
|
||||||
|
'minor': minor,
|
||||||
|
'patch': patch,
|
||||||
|
'prerelease': prerelease,
|
||||||
|
'prerelease_num': prerelease_num
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
def loose_parse(vstring):
|
||||||
|
component_re = re.compile(r'(\d+ | [a-z]+ | \.)', re.VERBOSE)
|
||||||
|
components = [x for x in component_re.split(vstring) if x and x != '.']
|
||||||
|
|
||||||
|
return components
|
||||||
|
|
||||||
|
|
||||||
|
def parse(
|
||||||
|
data: str,
|
||||||
|
raw: bool = False,
|
||||||
|
quiet: bool = False
|
||||||
|
) -> JSONDictType:
|
||||||
|
"""
|
||||||
|
Main text parsing function
|
||||||
|
|
||||||
|
Parameters:
|
||||||
|
|
||||||
|
data: (string) text data to parse
|
||||||
|
raw: (boolean) unprocessed output if True
|
||||||
|
quiet: (boolean) suppress warning messages if True
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
|
||||||
|
List of Dictionaries. Raw or processed structured data.
|
||||||
|
"""
|
||||||
|
jc.utils.compatibility(__name__, info.compatible, quiet)
|
||||||
|
jc.utils.input_type_check(data)
|
||||||
|
|
||||||
|
raw_output: Dict = {}
|
||||||
|
strict = True
|
||||||
|
|
||||||
|
if jc.utils.has_data(data):
|
||||||
|
|
||||||
|
# based on distutils/version.py from CPython 3.9.5
|
||||||
|
# PSF License (see https://opensource.org/licenses/Python-2.0)
|
||||||
|
|
||||||
|
data = data.strip()
|
||||||
|
|
||||||
|
try:
|
||||||
|
raw_output = strict_parse(data)
|
||||||
|
|
||||||
|
except ValueError:
|
||||||
|
raw_output['components'] = loose_parse(data)
|
||||||
|
strict = False
|
||||||
|
|
||||||
|
if raw_output:
|
||||||
|
raw_output['strict'] = strict
|
||||||
|
|
||||||
|
return raw_output if raw else _process(raw_output)
|
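And the module-level equivalent of the CLI examples above:

```python
import jc

print(jc.parse('ver', '1.2a1'))
# {'major': 1, 'minor': 2, 'patch': 0, 'prerelease': 'a',
#  'prerelease_num': 1, 'strict': True}

print(jc.parse('ver', '1.2beta3'))
# {'components': [1, 2, 'beta', 3], 'strict': False}
```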
@@ -3,6 +3,7 @@
 Usage (cli):
 
     $ xrandr | jc --xrandr
+    $ xrandr --properties | jc --xrandr
 
     or
 
@@ -44,13 +45,17 @@ Schema:
       "is_connected": boolean,
       "is_primary": boolean,
       "device_name": string,
+      "model_name": string,
+      "product_id" string,
+      "serial_number": string,
       "resolution_width": integer,
       "resolution_height": integer,
       "offset_width": integer,
       "offset_height": integer,
       "dimension_width": integer,
       "dimension_height": integer,
-      "rotation": string
+      "rotation": string,
+      "reflection": string
     }
   ],
   "unassociated_devices": [
@ -127,7 +132,71 @@ Examples:
|
|||||||
"offset_height": 0,
|
"offset_height": 0,
|
||||||
"dimension_width": 310,
|
"dimension_width": 310,
|
||||||
"dimension_height": 170,
|
"dimension_height": 170,
|
||||||
"rotation": "normal"
|
"rotation": "normal",
|
||||||
|
"reflection": "normal"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"unassociated_devices": []
|
||||||
|
}
|
||||||
|
|
||||||
|
$ xrandr --properties | jc --xrandr -p
|
||||||
|
{
|
||||||
|
"screens": [
|
||||||
|
{
|
||||||
|
"screen_number": 0,
|
||||||
|
"minimum_width": 8,
|
||||||
|
"minimum_height": 8,
|
||||||
|
"current_width": 1920,
|
||||||
|
"current_height": 1080,
|
||||||
|
"maximum_width": 32767,
|
||||||
|
"maximum_height": 32767,
|
||||||
|
"associated_device": {
|
||||||
|
"associated_modes": [
|
||||||
|
{
|
||||||
|
"resolution_width": 1920,
|
||||||
|
"resolution_height": 1080,
|
||||||
|
"is_high_resolution": false,
|
||||||
|
"frequencies": [
|
||||||
|
{
|
||||||
|
"frequency": 60.03,
|
||||||
|
"is_current": true,
|
||||||
|
"is_preferred": true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"frequency": 59.93,
|
||||||
|
"is_current": false,
|
||||||
|
"is_preferred": false
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"resolution_width": 1680,
|
||||||
|
"resolution_height": 1050,
|
||||||
|
"is_high_resolution": false,
|
||||||
|
"frequencies": [
|
||||||
|
{
|
||||||
|
"frequency": 59.88,
|
||||||
|
"is_current": false,
|
||||||
|
"is_preferred": false
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"is_connected": true,
|
||||||
|
"is_primary": true,
|
||||||
|
"device_name": "eDP1",
|
||||||
|
"model_name": "ASUS VW193S",
|
||||||
|
"product_id": "54297",
|
||||||
|
"serial_number": "78L8021107",
|
||||||
|
"resolution_width": 1920,
|
||||||
|
"resolution_height": 1080,
|
||||||
|
"offset_width": 0,
|
||||||
|
"offset_height": 0,
|
||||||
|
"dimension_width": 310,
|
||||||
|
"dimension_height": 170,
|
||||||
|
"rotation": "normal",
|
||||||
|
"reflection": "normal"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
],
|
],
|
||||||
@@ -137,14 +206,17 @@ Examples:
 import re
 from typing import Dict, List, Optional, Union
 import jc.utils
+from jc.parsers.pyedid.edid import Edid
+from jc.parsers.pyedid.helpers.edid_helper import EdidHelper
 
 
 class info:
     """Provides parser metadata (version, author, etc.)"""
-    version = "1.1"
+    version = "1.2"
     description = "`xrandr` command parser"
     author = "Kevin Lyter"
     author_email = "lyter_git at sent.com"
+    details = 'Using parts of the pyedid library at https://github.com/jojonas/pyedid.'
     compatible = ["linux", "darwin", "cygwin", "aix", "freebsd"]
     magic_commands = ["xrandr"]
     tags = ['command']
@@ -172,10 +244,21 @@ try:
             "frequencies": List[Frequency],
         },
     )
+    Model = TypedDict(
+        "Model",
+        {
+            "name": str,
+            "product_id": str,
+            "serial_number": str,
+        },
+    )
     Device = TypedDict(
         "Device",
         {
             "device_name": str,
+            "model_name": str,
+            "product_id": str,
+            "serial_number": str,
             "is_connected": bool,
             "is_primary": bool,
             "resolution_width": int,
@@ -185,6 +268,8 @@ try:
             "dimension_width": int,
             "dimension_height": int,
             "associated_modes": List[Mode],
+            "rotation": str,
+            "reflection": str,
         },
     )
     Screen = TypedDict(
@@ -212,6 +297,7 @@ except ImportError:
     Device = Dict[str, Union[str, int, bool]]
     Frequency = Dict[str, Union[float, bool]]
     Mode = Dict[str, Union[int, bool, List[Frequency]]]
+    Model = Dict[str, str]
     Response = Dict[str, Union[Device, Mode, Screen]]
 
 
@@ -252,7 +338,8 @@ _device_pattern = (
     + r"(?P<is_primary> primary)? ?"
     + r"((?P<resolution_width>\d+)x(?P<resolution_height>\d+)"
    + r"\+(?P<offset_width>\d+)\+(?P<offset_height>\d+))? "
-    + r"(?P<rotation>(inverted|left|right))? ?"
+    + r"(?P<rotation>(normal|right|left|inverted)?) ?"
+    + r"(?P<reflection>(X axis|Y axis|X and Y axis)?) ?"
     + r"\(normal left inverted right x axis y axis\)"
     + r"( ((?P<dimension_width>\d+)mm x (?P<dimension_height>\d+)mm)?)?"
 )
@@ -277,9 +364,10 @@ def _parse_device(next_lines: List[str], quiet: bool = False) -> Optional[Device
         and len(matches["is_primary"]) > 0,
         "device_name": matches["device_name"],
         "rotation": matches["rotation"] or "normal",
+        "reflection": matches["reflection"] or "normal",
     }
     for k, v in matches.items():
-        if k not in {"is_connected", "is_primary", "device_name", "rotation"}:
+        if k not in {"is_connected", "is_primary", "device_name", "rotation", "reflection"}:
             try:
                 if v:
                     device[k] = int(v)
@@ -288,15 +376,67 @@ def _parse_device(next_lines: List[str], quiet: bool = False) -> Optional[Device
                     [f"{next_line} : {k} - {v} is not int-able"]
                 )
 
+    model: Optional[Model] = _parse_model(next_lines, quiet)
+    if model:
+        device["model_name"] = model["name"]
+        device["product_id"] = model["product_id"]
+        device["serial_number"] = model["serial_number"]
+
     while next_lines:
         next_line = next_lines.pop()
         next_mode: Optional[Mode] = _parse_mode(next_line)
         if next_mode:
             device["associated_modes"].append(next_mode)
         else:
-            next_lines.append(next_line)
-            break
+            if re.match(_device_pattern, next_line):
+                next_lines.append(next_line)
+                break
     return device
 
 
+# EDID:
+#     00ffffffffffff004ca3523100000000
+#     0014010380221378eac8959e57549226
+#     0f505400000001010101010101010101
+#     010101010101381d56d4500016303020
+#     250058c2100000190000000f00000000
+#     000000000025d9066a00000000fe0053
+#     414d53554e470a204ca34154000000fe
+#     004c544e313536415432343430310018
+_edid_head_pattern = r"\s*EDID:\s*"
+_edid_line_pattern = r"\s*(?P<edid_line>[0-9a-fA-F]{32})\s*"
+
+
+def _parse_model(next_lines: List[str], quiet: bool = False) -> Optional[Model]:
+    if not next_lines:
+        return None
+
+    next_line = next_lines.pop()
+    if not re.match(_edid_head_pattern, next_line):
+        next_lines.append(next_line)
+        return None
+
+    edid_hex_value = ""
+
+    while next_lines:
+        next_line = next_lines.pop()
+        result = re.match(_edid_line_pattern, next_line)
+
+        if not result:
+            next_lines.append(next_line)
+            break
+
+        matches = result.groupdict()
+        edid_hex_value += matches["edid_line"]
+
+    edid = Edid(EdidHelper.hex2bytes(edid_hex_value))
+
+    model: Model = {
+        "name": edid.name or "Generic",
+        "product_id": str(edid.product),
+        "serial_number": str(edid.serial),
+    }
+    return model
+
+
 # 1920x1080i 60.03*+ 59.93
@@ -330,8 +470,8 @@ def _parse_mode(line: str) -> Optional[Mode]:
     for match in result:
         d = match.groupdict()
         frequency = float(d["frequency"])
-        is_current = len(d["star"]) > 0
-        is_preferred = len(d["plus"]) > 0
+        is_current = len(d["star"].strip()) > 0
+        is_preferred = len(d["plus"].strip()) > 0
         f: Frequency = {
             "frequency": frequency,
             "is_current": is_current,
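For reference, a minimal sketch (not the parser's actual regex) of what the `*`/`+` markers on an `xrandr` mode line mean and how they map to the `is_current`/`is_preferred` booleans fixed in the hunk above:

```python
def parse_frequency_token(token: str) -> dict:
    """Split a token such as '60.03*+' into frequency plus current/preferred flags."""
    return {
        "frequency": float(token.rstrip('*+')),
        "is_current": '*' in token,    # '*' marks the mode currently in use
        "is_preferred": '+' in token,  # '+' marks the display's preferred mode
    }

print(parse_frequency_token("60.03*+"))
# {'frequency': 60.03, 'is_current': True, 'is_preferred': True}
print(parse_frequency_token("59.93"))
# {'frequency': 59.93, 'is_current': False, 'is_preferred': False}
```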
jc/parsers/zpool_iostat.py  (new file, 202 lines)
@@ -0,0 +1,202 @@
"""jc - JSON Convert `zpool iostat` command output parser
|
||||||
|
|
||||||
|
Supports with or without the `-v` flag.
|
||||||
|
|
||||||
|
Usage (cli):
|
||||||
|
|
||||||
|
$ zpool iostat | jc --zpool-iostat
|
||||||
|
|
||||||
|
or
|
||||||
|
|
||||||
|
$ jc zpool iostat
|
||||||
|
|
||||||
|
Usage (module):
|
||||||
|
|
||||||
|
import jc
|
||||||
|
result = jc.parse('zpool_iostat', zpool_iostat_command_output)
|
||||||
|
|
||||||
|
Schema:
|
||||||
|
|
||||||
|
[
|
||||||
|
{
|
||||||
|
"pool": string,
|
||||||
|
"parent": string,
|
||||||
|
"cap_alloc": float,
|
||||||
|
"cap_alloc_unit": string,
|
||||||
|
"cap_free": float,
|
||||||
|
"cap_free_unit": string,
|
||||||
|
"ops_read": integer,
|
||||||
|
"ops_write": integer,
|
||||||
|
"bw_read": float,
|
||||||
|
"bw_read_unit": string,
|
||||||
|
"bw_write": float,
|
||||||
|
"bw_write_unit": string
|
||||||
|
}
|
||||||
|
]
|
||||||
|
|
||||||
|
Examples:
|
||||||
|
|
||||||
|
$ zpool iostat -v | jc --zpool-iostat -p
|
||||||
|
[
|
||||||
|
{
|
||||||
|
"pool": "zhgstera6",
|
||||||
|
"cap_alloc": 2.89,
|
||||||
|
"cap_free": 2.2,
|
||||||
|
"ops_read": 0,
|
||||||
|
"ops_write": 2,
|
||||||
|
"bw_read": 349.0,
|
||||||
|
"bw_write": 448.0,
|
||||||
|
"cap_alloc_unit": "T",
|
||||||
|
"cap_free_unit": "T",
|
||||||
|
"bw_read_unit": "K",
|
||||||
|
"bw_write_unit": "K"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"pool": "726060ALE614-K8JAPRGN:10",
|
||||||
|
"parent": "zhgstera6",
|
||||||
|
"cap_alloc": 2.89,
|
||||||
|
"cap_free": 2.2,
|
||||||
|
"ops_read": 0,
|
||||||
|
"ops_write": 2,
|
||||||
|
"bw_read": 349.0,
|
||||||
|
"bw_write": 448.0,
|
||||||
|
"cap_alloc_unit": "T",
|
||||||
|
"cap_free_unit": "T",
|
||||||
|
"bw_read_unit": "K",
|
||||||
|
"bw_write_unit": "K"
|
||||||
|
},
|
||||||
|
...
|
||||||
|
]
|
||||||
|
|
||||||
|
$ zpool iostat -v | jc --zpool-iostat -p -r
|
||||||
|
[
|
||||||
|
{
|
||||||
|
"pool": "zhgstera6",
|
||||||
|
"cap_alloc": "2.89T",
|
||||||
|
"cap_free": "2.20T",
|
||||||
|
"ops_read": "0",
|
||||||
|
"ops_write": "2",
|
||||||
|
"bw_read": "349K",
|
||||||
|
"bw_write": "448K"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"pool": "726060ALE614-K8JAPRGN:10",
|
||||||
|
"parent": "zhgstera6",
|
||||||
|
"cap_alloc": "2.89T",
|
||||||
|
"cap_free": "2.20T",
|
||||||
|
"ops_read": "0",
|
||||||
|
"ops_write": "2",
|
||||||
|
"bw_read": "349K",
|
||||||
|
"bw_write": "448K"
|
||||||
|
},
|
||||||
|
...
|
||||||
|
]
|
||||||
|
"""
|
||||||
|
from typing import List, Dict
|
||||||
|
from jc.jc_types import JSONDictType
|
||||||
|
import jc.utils
|
||||||
|
|
||||||
|
|
||||||
|
class info():
|
||||||
|
"""Provides parser metadata (version, author, etc.)"""
|
||||||
|
version = '1.0'
|
||||||
|
description = '`zpool iostat` command parser'
|
||||||
|
author = 'Kelly Brazil'
|
||||||
|
author_email = 'kellyjonbrazil@gmail.com'
|
||||||
|
compatible = ['linux', 'darwin', 'freebsd']
|
||||||
|
tags = ['command']
|
||||||
|
magic_commands = ['zpool iostat']
|
||||||
|
|
||||||
|
|
||||||
|
__version__ = info.version
|
||||||
|
|
||||||
|
|
||||||
|
def _process(proc_data: List[JSONDictType]) -> List[JSONDictType]:
|
||||||
|
"""
|
||||||
|
Final processing to conform to the schema.
|
||||||
|
|
||||||
|
Parameters:
|
||||||
|
|
||||||
|
proc_data: (List of Dictionaries) raw structured data to process
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
|
||||||
|
List of Dictionaries. Structured to conform to the schema.
|
||||||
|
"""
|
||||||
|
unit_values = {'cap_alloc', 'cap_free', 'bw_read', 'bw_write'}
|
||||||
|
int_list = {'ops_read', 'ops_write'}
|
||||||
|
|
||||||
|
for obj in proc_data:
|
||||||
|
for k, v in obj.copy().items():
|
||||||
|
if k in unit_values:
|
||||||
|
obj[k + '_unit'] = v[-1]
|
||||||
|
obj[k] = jc.utils.convert_to_float(v[:-1])
|
||||||
|
|
||||||
|
if k in int_list:
|
||||||
|
obj[k] = jc.utils.convert_to_int(v)
|
||||||
|
|
||||||
|
return proc_data
|
||||||
|
|
||||||
|
|
||||||
|
def parse(
|
||||||
|
data: str,
|
||||||
|
raw: bool = False,
|
||||||
|
quiet: bool = False
|
||||||
|
) -> List[JSONDictType]:
|
||||||
|
"""
|
||||||
|
Main text parsing function
|
||||||
|
|
||||||
|
Parameters:
|
||||||
|
|
||||||
|
data: (string) text data to parse
|
||||||
|
raw: (boolean) unprocessed output if True
|
||||||
|
quiet: (boolean) suppress warning messages if True
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
|
||||||
|
List of Dictionaries. Raw or processed structured data.
|
||||||
|
"""
|
||||||
|
jc.utils.compatibility(__name__, info.compatible, quiet)
|
||||||
|
jc.utils.input_type_check(data)
|
||||||
|
|
||||||
|
raw_output: List[Dict] = []
|
||||||
|
output_line: Dict = {}
|
||||||
|
pool_parent = ''
|
||||||
|
|
||||||
|
if jc.utils.has_data(data):
|
||||||
|
|
||||||
|
for line in filter(None, data.splitlines()):
|
||||||
|
|
||||||
|
# skip non-data lines
|
||||||
|
if '---' in line or \
|
||||||
|
line.strip().endswith('bandwidth') or \
|
||||||
|
line.strip().endswith('write'):
|
||||||
|
continue
|
||||||
|
|
||||||
|
# data lines
|
||||||
|
line_list = line.strip().split()
|
||||||
|
if line.startswith(' '):
|
||||||
|
output_line = {
|
||||||
|
"pool": line_list[0],
|
||||||
|
"parent": pool_parent
|
||||||
|
}
|
||||||
|
|
||||||
|
else:
|
||||||
|
pool_parent = line_list[0]
|
||||||
|
output_line = {
|
||||||
|
"pool": pool_parent
|
||||||
|
}
|
||||||
|
|
||||||
|
output_line.update(
|
||||||
|
{
|
||||||
|
'cap_alloc': line_list[1],
|
||||||
|
'cap_free': line_list[2],
|
||||||
|
'ops_read': line_list[3],
|
||||||
|
'ops_write': line_list[4],
|
||||||
|
'bw_read': line_list[5],
|
||||||
|
'bw_write': line_list[6]
|
||||||
|
}
|
||||||
|
)
|
||||||
|
raw_output.append(output_line)
|
||||||
|
|
||||||
|
return raw_output if raw else _process(raw_output)
|
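
Since the docstring above documents the `zpool_iostat` schema, here is a minimal sketch of using the new parser as a Python library. The `subprocess` capture is illustrative and assumes the ZFS tools are installed; the key names come straight from the schema:

```python
import subprocess
import jc

# capture `zpool iostat -v` output (assumes `zpool` is available)
cmd_output = subprocess.run(
    ['zpool', 'iostat', '-v'], capture_output=True, text=True
).stdout

pools = jc.parse('zpool_iostat', cmd_output)

for entry in pools:
    # _process() splits values like "2.89T" into 2.89 plus a "T" unit key
    print(entry['pool'],
          entry.get('cap_alloc'), entry.get('cap_alloc_unit'),
          entry.get('ops_read'), entry.get('ops_write'))
```
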
254
jc/parsers/zpool_status.py
Normal file
@ -0,0 +1,254 @@
"""jc - JSON Convert `zpool status` command output parser

Works with or without the `-v` option.

Usage (cli):

    $ zpool status | jc --zpool-status

or

    $ jc zpool status

Usage (module):

    import jc
    result = jc.parse('zpool_status', zpool_status_command_output)

Schema:

    [
      {
        "pool": string,
        "state": string,
        "status": string,
        "action": string,
        "see": string,
        "scan": string,
        "scrub": string,
        "config": [
          {
            "name": string,
            "state": string,
            "read": integer,
            "write": integer,
            "checksum": integer,
            "errors": string,
          }
        ],
        "errors": string
      }
    ]

Examples:

    $ zpool status -v | jc --zpool-status -p
    [
      {
        "pool": "tank",
        "state": "DEGRADED",
        "status": "One or more devices could not be opened.  Suffic...",
        "action": "Attach the missing device and online it using 'zpool...",
        "see": "http://www.sun.com/msg/ZFS-8000-2Q",
        "scrub": "none requested",
        "config": [
          {
            "name": "tank",
            "state": "DEGRADED",
            "read": 0,
            "write": 0,
            "checksum": 0
          },
          {
            "name": "mirror-0",
            "state": "DEGRADED",
            "read": 0,
            "write": 0,
            "checksum": 0
          },
          {
            "name": "c1t0d0",
            "state": "ONLINE",
            "read": 0,
            "write": 0,
            "checksum": 0
          },
          {
            "name": "c1t1d0",
            "state": "UNAVAIL",
            "read": 0,
            "write": 0,
            "checksum": 0,
            "errors": "cannot open"
          }
        ],
        "errors": "No known data errors"
      }
    ]

    $ zpool status -v | jc --zpool-status -p -r
    [
      {
        "pool": "tank",
        "state": "DEGRADED",
        "status": "One or more devices could not be opened.  Sufficient...",
        "action": "Attach the missing device and online it using 'zpool...",
        "see": "http://www.sun.com/msg/ZFS-8000-2Q",
        "scrub": "none requested",
        "config": [
          {
            "name": "tank",
            "state": "DEGRADED",
            "read": "0",
            "write": "0",
            "checksum": "0"
          },
          {
            "name": "mirror-0",
            "state": "DEGRADED",
            "read": "0",
            "write": "0",
            "checksum": "0"
          },
          {
            "name": "c1t0d0",
            "state": "ONLINE",
            "read": "0",
            "write": "0",
            "checksum": "0"
          },
          {
            "name": "c1t1d0",
            "state": "UNAVAIL",
            "read": "0",
            "write": "0",
            "checksum": "0",
            "errors": "cannot open"
          }
        ],
        "errors": "No known data errors"
      }
    ]
"""
from typing import List, Dict
from jc.jc_types import JSONDictType
import jc.utils
from jc.parsers.kv import parse as kv_parse


class info():
    """Provides parser metadata (version, author, etc.)"""
    version = '1.0'
    description = '`zpool status` command parser'
    author = 'Kelly Brazil'
    author_email = 'kellyjonbrazil@gmail.com'
    compatible = ['linux', 'darwin', 'freebsd']
    tags = ['command']
    magic_commands = ['zpool status']


__version__ = info.version


def _process(proc_data: List[JSONDictType]) -> List[JSONDictType]:
    """
    Final processing to conform to the schema.

    Parameters:

        proc_data:   (List of Dictionaries) raw structured data to process

    Returns:

        List of Dictionaries. Structured to conform to the schema.
    """
    int_list = {'read', 'write', 'checksum'}

    for obj in proc_data:
        if 'config' in obj:
            for conf in obj['config']:
                for k, v in conf.items():
                    if k in int_list:
                        conf[k] = jc.utils.convert_to_int(v)

    return proc_data


def _build_config_list(string: str) -> List[Dict]:
    config_list: List = []
    for line in filter(None, string.splitlines()):
        if line.strip().endswith('READ WRITE CKSUM'):
            continue

        line_list = line.strip().split(maxsplit=5)
        config_obj: Dict = {}
        config_obj['name'] = line_list[0]
        config_obj['state'] = line_list[1]
        config_obj['read'] = line_list[2]
        config_obj['write'] = line_list[3]
        config_obj['checksum'] = line_list[4]
        if len(line_list) == 6:
            config_obj['errors'] = line_list[5]
        config_list.append(config_obj)

    return config_list


def parse(
    data: str,
    raw: bool = False,
    quiet: bool = False
) -> List[JSONDictType]:
    """
    Main text parsing function

    Parameters:

        data:        (string)  text data to parse
        raw:         (boolean) unprocessed output if True
        quiet:       (boolean) suppress warning messages if True

    Returns:

        List of Dictionaries. Raw or processed structured data.
    """
    jc.utils.compatibility(__name__, info.compatible, quiet)
    jc.utils.input_type_check(data)

    raw_output: List[Dict] = []
    pool_str: str = ''
    pool_obj: Dict = {}

    if jc.utils.has_data(data):

        for line in filter(None, data.splitlines()):

            if line.lstrip().startswith('pool: '):
                if pool_str:
                    pool_obj = kv_parse(pool_str)
                    if 'config' in pool_obj:
                        pool_obj['config'] = _build_config_list(pool_obj['config'])
                    raw_output.append(pool_obj)
                    pool_str = ''
                pool_str += line + '\n'
                continue

            # preserve indentation in continuation lines
            if line.startswith(' '):
                pool_str += line + '\n'
                continue

            # indent path lines for errors field
            if line.startswith('/'):
                pool_str += ' ' + line + '\n'
                continue

            # remove initial spaces from field start lines so we don't confuse line continuation
            pool_str += line.strip() + '\n'

        if pool_str:
            pool_obj = kv_parse(pool_str)
            if 'config' in pool_obj:
                pool_obj['config'] = _build_config_list(pool_obj['config'])
            raw_output.append(pool_obj)

    return raw_output if raw else _process(raw_output)
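
Given the `zpool_status` schema documented above, a small, hedged example of consuming the parsed output from Python (the command capture is illustrative and assumes the ZFS tools are installed; the key names come from the schema):

```python
import subprocess
import jc

# capture `zpool status -v` output (assumes `zpool` is available)
cmd_output = subprocess.run(
    ['zpool', 'status', '-v'], capture_output=True, text=True
).stdout

for pool in jc.parse('zpool_status', cmd_output):
    print(f"{pool['pool']}: {pool.get('state')}")
    # list any devices in the config section that are not healthy
    for dev in pool.get('config', []):
        if dev.get('state') != 'ONLINE':
            print('  degraded device:', dev['name'], dev.get('errors', ''))
```
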
232
man/jc.1
@ -1,28 +1,35 @@
.TH jc 1 2023-02-27 1.23.0 "JSON Convert"
.SH NAME
\fBjc\fP \- JSON Convert JSONifies the output of many CLI tools, file-types,
and strings
.SH SYNOPSIS

Standard syntax:

.RS
COMMAND | \fBjc\fP [SLICE] [OPTIONS] PARSER

cat FILE | \fBjc\fP [SLICE] [OPTIONS] PARSER

echo STRING | \fBjc\fP [SLICE] [OPTIONS] PARSER
.RE

Magic syntax:

.RS
\fBjc\fP [SLICE] [OPTIONS] COMMAND

\fBjc\fP [SLICE] [OPTIONS] /proc/<path-to-procfile>
.RE

.SH DESCRIPTION
\fBjc\fP JSONifies the output of many CLI tools, file-types, and common strings
for easier parsing in scripts. \fBjc\fP accepts piped input from \fBSTDIN\fP and
outputs a JSON representation of the previous command's output to \fBSTDOUT\fP.
Alternatively, the "Magic" syntax can be used by prepending \fBjc\fP to the
command to be converted. Options can be passed to \fBjc\fP immediately before
the command is given. (Note: "Magic" syntax does not support shell builtins or
command aliases)

.SH OPTIONS
.B
@ -765,10 +772,15 @@ Semantic Version string parser
\fB--ss\fP
`ss` command parser

.TP
.B
\fB--ssh-conf\fP
`ssh` config file and `ssh -G` command parser

.TP
.B
\fB--sshd-conf\fP
`sshd` config file and `sshd -T` command parser

.TP
.B
@ -915,6 +927,11 @@ TOML file parser
\fB--url\fP
URL string parser

.TP
.B
\fB--ver\fP
Version string parser

.TP
.B
\fB--vmstat\fP
@ -965,6 +982,16 @@ YAML file parser
\fB--zipinfo\fP
`zipinfo` command parser

.TP
.B
\fB--zpool-iostat\fP
`zpool iostat` command parser

.TP
.B
\fB--zpool-status\fP
`zpool status` command parser


.RE
.PP
@ -979,7 +1006,8 @@ About \fBjc\fP (JSON or YAML output)
.TP
.B
\fB-C\fP, \fB--force-color\fP
Force color output even when using pipes (overrides \fB-m\fP and the
\fBNO_COLOR\fP env variable)
.TP
.B
\fB-d\fP, \fB--debug\fP
@ -987,7 +1015,8 @@ Debug - show traceback (use \fB-dd\fP for verbose traceback)
.TP
.B
\fB-h\fP, \fB--help\fP
Help (\fB--help --parser_name\fP for parser documentation). Use twice to show
hidden parsers (e.g. \fB-hh\fP)
.TP
.B
\fB-m\fP, \fB--monochrome\fP
@ -995,7 +1024,8 @@ Monochrome output
.TP
.B
\fB-M\fP, \fB--meta-out\fP
Add metadata to output including timestamp, parser name, magic command, magic
command exit code, etc.
.TP
.B
\fB-p\fP, \fB--pretty\fP
@ -1003,11 +1033,13 @@ Pretty print output
.TP
.B
\fB-q\fP, \fB--quiet\fP
Quiet mode. Suppresses parser warning messages (use -qq to ignore streaming
parser errors)
.TP
.B
\fB-r\fP, \fB--raw\fP
Raw output. Provides more literal output, typically with string values and no
additional semantic processing
.TP
.B
\fB-u\fP, \fB--unbuffer\fP
@ -1029,10 +1061,93 @@ Generate Bash shell completion script
\fB-Z\fP, \fB--zsh-comp\fP
Generate Zsh shell completion script

.RE
.PP
.B
Slice:
.RS
Line slicing is supported using the \fBSTART:STOP\fP syntax similar to Python
slicing. This allows you to skip lines at the beginning and/or end of the
\fBSTDIN\fP input you would like \fBjc\fP to convert.

\fBSTART\fP and \fBSTOP\fP can be positive or negative integers or blank and
allow you to specify how many lines to skip and how many lines to process.
Positive and blank slices are the most memory efficient. Any negative
integers in the slice will use more memory.

For example, to skip the first and last line of the following text, you
could express the slice in a couple ways:

.RS
.nf
$ cat table.txt
  ### We want to skip this header ###
  col1  col2
  foo   1
  bar   2
  ### We want to skip this footer ###
$ cat table.txt | jc 1:-1 --asciitable
[{"col1":"foo","col2":"1"},{"col1":"bar","col2":"2"}]
$ cat table.txt | jc 1:4 --asciitable
[{"col1":"foo","col2":"1"},{"col1":"bar","col2":"2"}]
.fi
.RE

In this example \fB1:-1\fP and \fB1:4\fP line slices provide the same output.

When using positive integers the index location of \fBSTOP\fP is non-inclusive.
Positive slices count from the first line of the input toward the end
starting at \fB0\fP as the first line. Negative slices count from the last line
toward the beginning starting at \fB-1\fP as the last line. This is also the way
Python's slicing feature works.

Here is a breakdown of line slice options:

.TP
.B
\fBSTART:STOP\fP
lines \fBSTART\fP through \fBSTOP - 1\fP
.TP
.B
\fBSTART:\fP
lines \fBSTART\fP through the rest of the output
.TP
.B
\fB:STOP\fP
lines from the beginning through \fBSTOP - 1\fP
.TP
.B
\fB-START:STOP\fP
\fBSTART\fP lines from the end through \fBSTOP - 1\fP
.TP
.B
\fBSTART:-STOP\fP
lines \fBSTART\fP through \fBSTOP\fP lines from the end
.TP
.B
\fB-START:-STOP\fP
\fBSTART\fP lines from the end through \fBSTOP\fP lines from the end
.TP
.B
\fB-START:\fP
\fBSTART\fP lines from the end through the rest of the output
.TP
.B
\fB:-STOP\fP
lines from the beginning through \fBSTOP\fP lines from the end
.TP
.B
\fB:\fP
all lines

.SH EXIT CODES
Any fatal errors within \fBjc\fP will generate an exit code of \fB100\fP,
otherwise the exit code will be \fB0\fP.

When using the "magic" syntax (e.g. \fBjc ifconfig eth0\fP), \fBjc\fP will store
the exit code of the program being parsed and add it to the \fBjc\fP exit code.
This way it is easier to determine if an error was from the parsed program or
\fBjc\fP.

Consider the following examples using \fBifconfig\fP:

@ -1047,9 +1162,9 @@ ifconfig exit code = \fB1\fP, jc exit code = \fB100\fP, combined exit code = \fB
.RE

When using the "magic" syntax you can also retrieve the exit code of the called
program by using the \fB--meta-out\fP or \fB-M\fP option. This will append a
\fB_jc_meta\fP object to the output that will include the magic command
information, including the exit code.

Here is an example with \fBping\fP:
.RS
@ -1091,11 +1206,16 @@ $ echo $?

\fBCustom Colors\fP

You can specify custom colors via the \fBJC_COLORS\fP environment variable. The
\fBJC_COLORS\fP environment variable takes four comma separated string values in
the following format:

JC_COLORS=<keyname_color>,<keyword_color>,<number_color>,<string_color>

Where colors are: \fBblack\fP, \fBred\fP, \fBgreen\fP, \fByellow\fP, \fBblue\fP,
\fBmagenta\fP, \fBcyan\fP, \fBgray\fP, \fBbrightblack\fP, \fBbrightred\fP,
\fBbrightgreen\fP, \fBbrightyellow\fP, \fBbrightblue\fP, \fBbrightmagenta\fP,
\fBbrightcyan\fP, \fBwhite\fP, or \fBdefault\fP

For example, to set to the default colors:

@ -1109,10 +1229,20 @@ JC_COLORS=default,default,default,default

\fBDisable Color Output\fP

You can set the \fBNO_COLOR\fP environment variable to any value to disable
color output in \fBjc\fP. Note that using the \fB-C\fP option to force color
output will override both the \fBNO_COLOR\fP environment variable and the
\fB-m\fP option.

.SH STREAMING PARSERS
Most parsers load all of the data from \fBSTDIN\fP, parse it, then output the
entire JSON document serially. There are some streaming parsers (e.g.
\fBls-s\fP, \fBping-s\fP, etc.) that immediately start processing and outputting
the data line-by-line as JSON Lines (aka NDJSON) while it is being received from
\fBSTDIN\fP. This can significantly reduce the amount of memory required to
parse large amounts of command output (e.g. \fBls -lR /\fP) and can sometimes
process the data more quickly. Streaming parsers have slightly different
behavior than standard parsers as outlined below.

.RS
Note: Streaming parsers cannot be used with the "magic" syntax
@ -1120,7 +1250,14 @@ Note: Streaming parsers cannot be used with the "magic" syntax

\fBIgnoring Errors\fP

You may want to ignore parsing errors when using streaming parsers since these
may be used in long-lived processing pipelines and errors can break the pipe. To
ignore parsing errors, use the \fB-qq\fP cli option. This will add a
\fB_jc_meta\fP object to the JSON output with a \fBsuccess\fP attribute. If
\fBsuccess\fP is \fBtrue\fP, then there were no issues parsing the line. If
\fBsuccess\fP is \fBfalse\fP, then a parsing issue was found and \fBerror\fP and
\fBline\fP fields will be added to include a short error description and the
contents of the unparsable line, respectively:

.RS
Successfully parsed line with \fB-qq\fP option:
@ -1151,7 +1288,11 @@ Unsuccessfully parsed line with \fB-qq\fP option:
.RE
\fBUnbuffering Output\fP

Most operating systems will buffer output that is being piped from process to
process. The buffer is usually around 4KB. When viewing the output in the
terminal the OS buffer is not engaged so output is immediately displayed on the
screen. When piping multiple processes together, though, it may seem as if the
output is hanging when the input data is very slow (e.g. \fBping\fP):

.RS
.nf
@ -1160,7 +1301,9 @@ $ ping 1.1.1.1 | jc \fB--ping-s\fP | jq
.fi
.RE

This is because the OS engages the 4KB buffer between \fBjc\fP and \fBjq\fP in
this example. To display the data on the terminal in realtime, you can disable
the buffer with the \fB-u\fP (unbuffer) cli option:

.RS
.nf
@ -1174,7 +1317,8 @@ Note: Unbuffered output can be slower for large data streams.
.RE

.SH CUSTOM PARSERS
Custom local parser plugins may be placed in a \fBjc/jcparsers\fP folder in your
local "App data directory":

.RS
.nf
@ -1184,11 +1328,16 @@ Custom local parser plugins may be placed in a \fBjc/jcparsers\fP folder in your
.fi
.RE

Local parser plugins are standard python module files. Use the
\fBjc/parsers/foo.py\fP or \fBjc/parsers/foo_s.py\fP (streaming) parser as a
template and simply place a \fB.py\fP file in the \fBjcparsers\fP subfolder.

Local plugin filenames must be valid python module names and therefore must
start with a letter and consist entirely of alphanumerics and underscores. Local
plugins may override default parsers.

Note: The application data directory follows the \fBXDG Base Directory
Specification\fP
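
As a rough illustration of what such a local plugin module can look like (a sketch only, modeled on the parser files shown earlier in this commit; the parser name `foo`, the author details, and the per-line logic are placeholders, not part of the man page or the jc source):

```python
"""jc - JSON Convert `foo` command output parser (local plugin sketch)"""
from typing import List, Dict
import jc.utils


class info():
    """Provides parser metadata (version, author, etc.)"""
    version = '1.0'
    description = '`foo` command parser (local plugin)'
    author = 'your name'                 # placeholder
    author_email = 'you@example.com'     # placeholder
    compatible = ['linux', 'darwin', 'freebsd']


__version__ = info.version


def parse(data: str, raw: bool = False, quiet: bool = False) -> List[Dict]:
    """Main text parsing function"""
    jc.utils.compatibility(__name__, info.compatible, quiet)
    jc.utils.input_type_check(data)

    raw_output: List[Dict] = []

    if jc.utils.has_data(data):
        for line in filter(None, data.splitlines()):
            # placeholder logic: one object per non-blank line
            raw_output.append({'line': line.strip()})

    return raw_output  # add a _process() step here to convert to a schema
```

Dropped into the `jcparsers` subfolder of the app data directory (for example, `~/.local/share/jc/jcparsers/foo.py` on Linux), it would then be selectable as `jc --foo`.
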

.SH CAVEATS
\fBLocale\fP
@ -1213,9 +1362,13 @@ escape sequences if the \fBC\fP locale does not support UTF-8 encoding.

\fBTimezones\fP

Some parsers have calculated epoch timestamp fields added to the output. Unless
a timestamp field name has a \fB_utc\fP suffix it is considered naive. (i.e.
based on the local timezone of the system the \fBjc\fP parser was run on).

If a UTC timezone can be detected in the text of the command output, the
timestamp will be timezone aware and have a \fB_utc\fP suffix on the key name.
(e.g. \fBepoch_utc\fP) No other timezones are supported for aware timestamps.

.SH EXAMPLES
Standard Syntax:
@ -1232,10 +1385,23 @@ $ jc \fB--pretty\fP dig www.google.com
$ jc \fB--pretty\fP /proc/meminfo
.RE

Line Slicing:
.RS
$ cat file.csv | jc \fB:101\fP \fB--csv\fP # parse first 100 lines
.RE

For parser documentation:
.RS
$ jc \fB--help\fP \fB--dig\fP
.RE

More Help:
.RS
$ jc \fB-hh\fP # show hidden parsers

$ jc \fB-hhh\fP # list parsers by category tags
.RE

.SH AUTHOR
Kelly Brazil (kellyjonbrazil@gmail.com)
2
setup.py
@ -5,7 +5,7 @@ with open('README.md', 'r') as f:

setuptools.setup(
    name='jc',
    version='1.23.0',
    author='Kelly Brazil',
    author_email='kellyjonbrazil@gmail.com',
    description='Converts the output of popular command-line tools and file-types to JSON.',
@ -1,28 +1,35 @@
.TH jc 1 {{ today }} {{ jc.version}} "JSON Convert"
.SH NAME
\fBjc\fP \- JSON Convert JSONifies the output of many CLI tools, file-types,
and strings
.SH SYNOPSIS

Standard syntax:

.RS
COMMAND | \fBjc\fP [SLICE] [OPTIONS] PARSER

cat FILE | \fBjc\fP [SLICE] [OPTIONS] PARSER

echo STRING | \fBjc\fP [SLICE] [OPTIONS] PARSER
.RE

Magic syntax:

.RS
\fBjc\fP [SLICE] [OPTIONS] COMMAND

\fBjc\fP [SLICE] [OPTIONS] /proc/<path-to-procfile>
.RE

.SH DESCRIPTION
\fBjc\fP JSONifies the output of many CLI tools, file-types, and common strings
for easier parsing in scripts. \fBjc\fP accepts piped input from \fBSTDIN\fP and
outputs a JSON representation of the previous command's output to \fBSTDOUT\fP.
Alternatively, the "Magic" syntax can be used by prepending \fBjc\fP to the
command to be converted. Options can be passed to \fBjc\fP immediately before
the command is given. (Note: "Magic" syntax does not support shell builtins or
command aliases)

.SH OPTIONS
.B
@ -49,7 +56,8 @@ About \fBjc\fP (JSON or YAML output)
.TP
.B
\fB-C\fP, \fB--force-color\fP
Force color output even when using pipes (overrides \fB-m\fP and the
\fBNO_COLOR\fP env variable)
.TP
.B
\fB-d\fP, \fB--debug\fP
@ -57,7 +65,8 @@ Debug - show traceback (use \fB-dd\fP for verbose traceback)
.TP
.B
\fB-h\fP, \fB--help\fP
Help (\fB--help --parser_name\fP for parser documentation). Use twice to show
hidden parsers (e.g. \fB-hh\fP)
.TP
.B
\fB-m\fP, \fB--monochrome\fP
@ -65,7 +74,8 @@ Monochrome output
.TP
.B
\fB-M\fP, \fB--meta-out\fP
Add metadata to output including timestamp, parser name, magic command, magic
command exit code, etc.
.TP
.B
\fB-p\fP, \fB--pretty\fP
@ -73,11 +83,13 @@ Pretty print output
.TP
.B
\fB-q\fP, \fB--quiet\fP
Quiet mode. Suppresses parser warning messages (use -qq to ignore streaming
parser errors)
.TP
.B
\fB-r\fP, \fB--raw\fP
Raw output. Provides more literal output, typically with string values and no
additional semantic processing
.TP
.B
\fB-u\fP, \fB--unbuffer\fP
@ -99,10 +111,93 @@ Generate Bash shell completion script
\fB-Z\fP, \fB--zsh-comp\fP
Generate Zsh shell completion script

.RE
.PP
.B
Slice:
.RS
Line slicing is supported using the \fBSTART:STOP\fP syntax similar to Python
slicing. This allows you to skip lines at the beginning and/or end of the
\fBSTDIN\fP input you would like \fBjc\fP to convert.

\fBSTART\fP and \fBSTOP\fP can be positive or negative integers or blank and
allow you to specify how many lines to skip and how many lines to process.
Positive and blank slices are the most memory efficient. Any negative
integers in the slice will use more memory.

For example, to skip the first and last line of the following text, you
could express the slice in a couple ways:

.RS
.nf
$ cat table.txt
  ### We want to skip this header ###
  col1  col2
  foo   1
  bar   2
  ### We want to skip this footer ###
$ cat table.txt | jc 1:-1 --asciitable
[{"col1":"foo","col2":"1"},{"col1":"bar","col2":"2"}]
$ cat table.txt | jc 1:4 --asciitable
[{"col1":"foo","col2":"1"},{"col1":"bar","col2":"2"}]
.fi
.RE

In this example \fB1:-1\fP and \fB1:4\fP line slices provide the same output.

When using positive integers the index location of \fBSTOP\fP is non-inclusive.
Positive slices count from the first line of the input toward the end
starting at \fB0\fP as the first line. Negative slices count from the last line
toward the beginning starting at \fB-1\fP as the last line. This is also the way
Python's slicing feature works.

Here is a breakdown of line slice options:

.TP
.B
\fBSTART:STOP\fP
lines \fBSTART\fP through \fBSTOP - 1\fP
.TP
.B
\fBSTART:\fP
lines \fBSTART\fP through the rest of the output
.TP
.B
\fB:STOP\fP
lines from the beginning through \fBSTOP - 1\fP
.TP
.B
\fB-START:STOP\fP
\fBSTART\fP lines from the end through \fBSTOP - 1\fP
.TP
.B
\fBSTART:-STOP\fP
lines \fBSTART\fP through \fBSTOP\fP lines from the end
.TP
.B
\fB-START:-STOP\fP
\fBSTART\fP lines from the end through \fBSTOP\fP lines from the end
.TP
.B
\fB-START:\fP
\fBSTART\fP lines from the end through the rest of the output
.TP
.B
\fB:-STOP\fP
lines from the beginning through \fBSTOP\fP lines from the end
.TP
.B
\fB:\fP
all lines

.SH EXIT CODES
Any fatal errors within \fBjc\fP will generate an exit code of \fB100\fP,
otherwise the exit code will be \fB0\fP.

When using the "magic" syntax (e.g. \fBjc ifconfig eth0\fP), \fBjc\fP will store
the exit code of the program being parsed and add it to the \fBjc\fP exit code.
This way it is easier to determine if an error was from the parsed program or
\fBjc\fP.

Consider the following examples using \fBifconfig\fP:

@ -117,9 +212,9 @@ ifconfig exit code = \fB1\fP, jc exit code = \fB100\fP, combined exit code = \fB
.RE

When using the "magic" syntax you can also retrieve the exit code of the called
program by using the \fB--meta-out\fP or \fB-M\fP option. This will append a
\fB_jc_meta\fP object to the output that will include the magic command
information, including the exit code.

Here is an example with \fBping\fP:
.RS
@ -161,11 +256,16 @@ $ echo $?

\fBCustom Colors\fP

You can specify custom colors via the \fBJC_COLORS\fP environment variable. The
\fBJC_COLORS\fP environment variable takes four comma separated string values in
the following format:

JC_COLORS=<keyname_color>,<keyword_color>,<number_color>,<string_color>

Where colors are: \fBblack\fP, \fBred\fP, \fBgreen\fP, \fByellow\fP, \fBblue\fP,
\fBmagenta\fP, \fBcyan\fP, \fBgray\fP, \fBbrightblack\fP, \fBbrightred\fP,
\fBbrightgreen\fP, \fBbrightyellow\fP, \fBbrightblue\fP, \fBbrightmagenta\fP,
\fBbrightcyan\fP, \fBwhite\fP, or \fBdefault\fP

For example, to set to the default colors:

@ -179,10 +279,20 @@ JC_COLORS=default,default,default,default

\fBDisable Color Output\fP

You can set the \fBNO_COLOR\fP environment variable to any value to disable
color output in \fBjc\fP. Note that using the \fB-C\fP option to force color
output will override both the \fBNO_COLOR\fP environment variable and the
\fB-m\fP option.

.SH STREAMING PARSERS
Most parsers load all of the data from \fBSTDIN\fP, parse it, then output the
entire JSON document serially. There are some streaming parsers (e.g.
\fBls-s\fP, \fBping-s\fP, etc.) that immediately start processing and outputting
the data line-by-line as JSON Lines (aka NDJSON) while it is being received from
\fBSTDIN\fP. This can significantly reduce the amount of memory required to
parse large amounts of command output (e.g. \fBls -lR /\fP) and can sometimes
process the data more quickly. Streaming parsers have slightly different
behavior than standard parsers as outlined below.

.RS
Note: Streaming parsers cannot be used with the "magic" syntax
@ -190,7 +300,14 @@ Note: Streaming parsers cannot be used with the "magic" syntax

\fBIgnoring Errors\fP

You may want to ignore parsing errors when using streaming parsers since these
may be used in long-lived processing pipelines and errors can break the pipe. To
ignore parsing errors, use the \fB-qq\fP cli option. This will add a
\fB_jc_meta\fP object to the JSON output with a \fBsuccess\fP attribute. If
\fBsuccess\fP is \fBtrue\fP, then there were no issues parsing the line. If
\fBsuccess\fP is \fBfalse\fP, then a parsing issue was found and \fBerror\fP and
\fBline\fP fields will be added to include a short error description and the
contents of the unparsable line, respectively:

.RS
Successfully parsed line with \fB-qq\fP option:
@ -221,7 +338,11 @@ Unsuccessfully parsed line with \fB-qq\fP option:
.RE
\fBUnbuffering Output\fP

Most operating systems will buffer output that is being piped from process to
process. The buffer is usually around 4KB. When viewing the output in the
terminal the OS buffer is not engaged so output is immediately displayed on the
screen. When piping multiple processes together, though, it may seem as if the
output is hanging when the input data is very slow (e.g. \fBping\fP):

.RS
.nf
@ -230,7 +351,9 @@ $ ping 1.1.1.1 | jc \fB--ping-s\fP | jq
.fi
.RE

This is because the OS engages the 4KB buffer between \fBjc\fP and \fBjq\fP in
this example. To display the data on the terminal in realtime, you can disable
the buffer with the \fB-u\fP (unbuffer) cli option:

.RS
.nf
@ -244,7 +367,8 @@ Note: Unbuffered output can be slower for large data streams.
.RE

.SH CUSTOM PARSERS
Custom local parser plugins may be placed in a \fBjc/jcparsers\fP folder in your
local "App data directory":

.RS
.nf
@ -254,11 +378,16 @@ Custom local parser plugins may be placed in a \fBjc/jcparsers\fP folder in your
.fi
.RE

Local parser plugins are standard python module files. Use the
\fBjc/parsers/foo.py\fP or \fBjc/parsers/foo_s.py\fP (streaming) parser as a
template and simply place a \fB.py\fP file in the \fBjcparsers\fP subfolder.

Local plugin filenames must be valid python module names and therefore must
start with a letter and consist entirely of alphanumerics and underscores. Local
plugins may override default parsers.

Note: The application data directory follows the \fBXDG Base Directory
Specification\fP

.SH CAVEATS
\fBLocale\fP
@ -283,9 +412,13 @@ escape sequences if the \fBC\fP locale does not support UTF-8 encoding.

\fBTimezones\fP

Some parsers have calculated epoch timestamp fields added to the output. Unless
a timestamp field name has a \fB_utc\fP suffix it is considered naive. (i.e.
based on the local timezone of the system the \fBjc\fP parser was run on).

If a UTC timezone can be detected in the text of the command output, the
timestamp will be timezone aware and have a \fB_utc\fP suffix on the key name.
(e.g. \fBepoch_utc\fP) No other timezones are supported for aware timestamps.

.SH EXAMPLES
Standard Syntax:
@ -302,10 +435,23 @@ $ jc \fB--pretty\fP dig www.google.com
$ jc \fB--pretty\fP /proc/meminfo
.RE

Line Slicing:
.RS
$ cat file.csv | jc \fB:101\fP \fB--csv\fP # parse first 100 lines
.RE

For parser documentation:
.RS
$ jc \fB--help\fP \fB--dig\fP
.RE

More Help:
.RS
$ jc \fB-hh\fP # show hidden parsers

$ jc \fB-hhh\fP # list parsers by category tags
.RE

.SH AUTHOR
{{ jc.author }} ({{ jc.author_email }})
@ -3,7 +3,7 @@

> Check out the `jc` Python [package documentation](https://github.com/kellyjonbrazil/jc/tree/master/docs) for developers

> Try the `jc` [web demo](https://jc-web.onrender.com/) and [REST API](https://github.com/kellyjonbrazil/jc-restapi)

> JC is [now available](https://galaxy.ansible.com/community/general) as an
Ansible filter plugin in the `community.general` collection. See this
@ -44,8 +44,8 @@ $ jc dig example.com | jq -r '.[].answer[].data'
93.184.216.34
```

`jc` can also be used as a python library. In this case the returned value
will be a python dictionary, a list of dictionaries, or even a
[lazy iterable of dictionaries](#using-streaming-parsers-as-python-modules)
instead of JSON:
```python
@ -133,9 +133,9 @@ on Github.
`jc` accepts piped input from `STDIN` and outputs a JSON representation of the
previous command's output to `STDOUT`.
```bash
COMMAND | jc [SLICE] [OPTIONS] PARSER
cat FILE | jc [SLICE] [OPTIONS] PARSER
echo STRING | jc [SLICE] [OPTIONS] PARSER
```

Alternatively, the "magic" syntax can be used by prepending `jc` to the command
@ -143,8 +143,8 @@ to be converted or in front of the absolute path for Proc files. Options can be
passed to `jc` immediately before the command or Proc file path is given.
(Note: command aliases and shell builtins are not supported)
```bash
jc [SLICE] [OPTIONS] COMMAND
jc [SLICE] [OPTIONS] /proc/<path-to-procfile>
```

The JSON output can be compact (default) or pretty formatted with the `-p`
@ -154,7 +154,7 @@ option.

| Argument | Command or Filetype | Documentation |
|-------------------|---------------------------------------------------------|----------------------------------------------------------------------------|{% for parser in parsers %}
| {{ "{:>17}".format("`" + parser.argument + "`") }} | {{ "{:<55}".format(parser.description) }} | {{ "{:<74}".format("[details](https://kellyjonbrazil.github.io/jc/docs/parsers/" + parser.name + ")") }} |{% endfor %}

### Options

@ -175,6 +175,54 @@ option.
| `-B` | `--bash-comp` | Generate Bash shell completion script ([more info](https://github.com/kellyjonbrazil/jc/wiki/Shell-Completions)) |
| `-Z` | `--zsh-comp` | Generate Zsh shell completion script ([more info](https://github.com/kellyjonbrazil/jc/wiki/Shell-Completions)) |

### Slice
Line slicing is supported using the `START:STOP` syntax similar to Python
slicing. This allows you to skip lines at the beginning and/or end of the
`STDIN` input you would like `jc` to convert.

`START` and `STOP` can be positive or negative integers or blank and allow
you to specify how many lines to skip and how many lines to process.
Positive and blank slices are the most memory efficient. Any negative
integers in the slice will use more memory.

For example, to skip the first and last line of the following text, you
could express the slice in a couple ways:

```bash
$ cat table.txt
  ### We want to skip this header ###
  col1  col2
  foo   1
  bar   2
  ### We want to skip this footer ###
$ cat table.txt | jc 1:-1 --asciitable
[{"col1":"foo","col2":"1"},{"col1":"bar","col2":"2"}]
$ cat table.txt | jc 1:4 --asciitable
[{"col1":"foo","col2":"1"},{"col1":"bar","col2":"2"}]
```
In this example `1:-1` and `1:4` line slices provide the same output.

When using positive integers the index location of `STOP` is non-inclusive.
Positive slices count from the first line of the input toward the end
starting at `0` as the first line. Negative slices count from the last line
toward the beginning starting at `-1` as the last line. This is also the way
[Python's slicing](https://stackoverflow.com/questions/509211/understanding-slicing)
feature works.

Here is a breakdown of line slice options:

| Slice Notation | Input Lines Processed                                         |
|----------------|---------------------------------------------------------------|
| `START:STOP`   | lines `START` through `STOP - 1`                              |
| `START:`       | lines `START` through the rest of the output                  |
| `:STOP`        | lines from the beginning through `STOP - 1`                   |
| `-START:STOP`  | `START` lines from the end through `STOP - 1`                 |
| `START:-STOP`  | lines `START` through `STOP` lines from the end               |
| `-START:-STOP` | `START` lines from the end through `STOP` lines from the end  |
| `-START:`      | `START` lines from the end through the rest of the output     |
| `:-STOP`       | lines from the beginning through `STOP` lines from the end    |
| `:`            | all lines                                                     |
|
||||||
|
|
||||||
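Since the `START:STOP` notation follows Python's slicing rules, the selections in the table above can be pictured with a short Python sketch. This is an illustration only, not `jc`'s internal code, and it assumes the whole input is already in memory (which, per the note above, is only necessary when negative indexes are involved):

```python
import sys
from typing import Optional


def slice_lines(text: str, start: Optional[int], stop: Optional[int]) -> str:
    """Select input lines the way a START:STOP slice would."""
    lines = text.splitlines()
    return '\n'.join(lines[start:stop])


if __name__ == '__main__':
    data = sys.stdin.read()
    # roughly what `... | jc 1:-1 --asciitable` selects before parsing
    print(slice_lines(data, 1, -1))
```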
### Exit Codes
Any fatal errors within `jc` will generate an exit code of `100`, otherwise the
exit code will be `0`.
1
tests/fixtures/generic/acpi-V-never-fully-discharge.json
@ -0,0 +1 @@
[{"type":"Battery","id":0,"state":"Discharging","charge_percent":87,"design_capacity_mah":2110,"last_full_capacity":2271,"last_full_capacity_percent":100},{"type":"Battery","id":1,"state":"Discharging","charge_percent":98,"charge_remaining":"01:43:14","design_capacity_mah":4400,"last_full_capacity":3013,"last_full_capacity_percent":68,"charge_remaining_hours":1,"charge_remaining_minutes":43,"charge_remaining_seconds":14,"charge_remaining_total_seconds":6194},{"type":"Battery","id":2,"state":"Discharging","charge_percent":0},{"type":"Battery","id":3,"state":"Full","charge_percent":100},{"type":"Adapter","id":0,"on-line":true},{"type":"Adapter","id":1,"on-line":false},{"type":"Thermal","id":0,"mode":"ok","temperature":46.0,"temperature_unit":"C","trip_points":[{"id":0,"switches_to_mode":"critical","temperature":127.0,"temperature_unit":"C"},{"id":1,"switches_to_mode":"hot","temperature":127.0,"temperature_unit":"C"}]},{"type":"Thermal","id":1,"mode":"ok","temperature":55.0,"temperature_unit":"C","trip_points":[{"id":0,"switches_to_mode":"critical","temperature":130.0,"temperature_unit":"C"},{"id":1,"switches_to_mode":"hot","temperature":100.0,"temperature_unit":"C"}]},{"type":"Cooling","id":0,"messages":["Processor 0 of 10"]},{"type":"Cooling","id":1,"messages":["Processor 0 of 10"]},{"type":"Cooling","id":2,"messages":["x86_pkg_temp no state information available"]},{"type":"Cooling","id":3,"messages":["Processor 0 of 10"]},{"type":"Cooling","id":4,"messages":["intel_powerclamp no state information available","another message"]},{"type":"Cooling","id":5,"messages":["Processor 0 of 10"]}]
21
tests/fixtures/generic/acpi-V-never-fully-discharge.out
@ -0,0 +1,21 @@
Battery 0: Discharging, 87%, discharging at zero rate - will never fully discharge.
Battery 0: design capacity 2110 mAh, last full capacity 2271 mAh = 100%
Battery 1: Discharging, 98%, 01:43:14 remaining
Battery 1: design capacity 4400 mAh, last full capacity 3013 mAh = 68%
Battery 2: Discharging, 0%, rate information unavailable
Battery 3: Full, 100%
Adapter 0: on-line
Adapter 1: off-line
Thermal 0: ok, 46.0 degrees C
Thermal 0: trip point 0 switches to mode critical at temperature 127.0 degrees C
Thermal 0: trip point 1 switches to mode hot at temperature 127.0 degrees C
Thermal 1: ok, 55.0 degrees C
Thermal 1: trip point 0 switches to mode critical at temperature 130.0 degrees C
Thermal 1: trip point 1 switches to mode hot at temperature 100.0 degrees C
Cooling 0: Processor 0 of 10
Cooling 1: Processor 0 of 10
Cooling 2: x86_pkg_temp no state information available
Cooling 3: Processor 0 of 10
Cooling 4: intel_powerclamp no state information available
Cooling 4: another message
Cooling 5: Processor 0 of 10
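This fixture pair is what the new unit test feeds through the parser: the `acpi -V` text above is the input and the JSON file before it is the expected result. The same round trip can be reproduced with the `jc` Python API (the file paths here are hypothetical local copies of the fixtures):

```python
import json
import jc  # the fix for this battery state message ships in jc 1.23.0

# hypothetical local copies of the two fixture files shown above
with open('acpi-V-never-fully-discharge.out', encoding='utf-8') as f:
    raw_output = f.read()

with open('acpi-V-never-fully-discharge.json', encoding='utf-8') as f:
    expected = json.load(f)

result = jc.parse('acpi', raw_output, quiet=True)
assert result == expected  # mirrors the assertion in the test added below
print(json.dumps(result, indent=2))
```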
1
tests/fixtures/generic/crontab-no-normal-entries.json
@ -0,0 +1 @@
{"variables":[],"schedule":[{"occurrence":"daily","command":"/bin/sh do_the_thing"}]}
2
tests/fixtures/generic/crontab-no-normal-entries.out
@ -0,0 +1,2 @@
#this is a test for the jc module
@daily /bin/sh do_the_thing
1
tests/fixtures/generic/crontab-u-no-normal-entries.json
@ -0,0 +1 @@
{"variables":[],"schedule":[{"occurrence":"daily","user":"root","command":"/bin/sh do_the_thing"}]}
2
tests/fixtures/generic/crontab-u-no-normal-entries.out
@ -0,0 +1,2 @@
#this is a test for the jc module
@daily root /bin/sh do_the_thing
45
tests/fixtures/generic/ssh_config1
@ -0,0 +1,45 @@
## override as per host ##
Host server1
HostName server1.cyberciti.biz
User nixcraft
Port 4242
IdentityFile /nfs/shared/users/nixcraft/keys/server1/id_rsa

## Home nas server ##
Host nas01
HostName 192.168.1.100
User root
IdentityFile ~/.ssh/nas01.key

## Login AWS Cloud ##
Host aws.apache
HostName 1.2.3.4
User wwwdata
IdentityFile ~/.ssh/aws.apache.key

## Login to internal lan server at 192.168.0.251 via our public uk office ssh based gateway using ##
## $ ssh uk.gw.lan ##
Host uk.gw.lan uk.lan
HostName 192.168.0.251
User nixcraft
ProxyCommand ssh nixcraft@gateway.uk.cyberciti.biz nc %h %p 2> /dev/null

## Our Us Proxy Server ##
## Forward all local port 3128 traffic to port 3128 on the remote vps1.cyberciti.biz server ##
## $ ssh -f -N proxyus ##
Host proxyus
HostName vps1.cyberciti.biz
User breakfree
IdentityFile ~/.ssh/vps1.cyberciti.biz.key
LocalForward 3128 127.0.0.1:3128

### default for all ##
Host *
ForwardAgent no
ForwardX11 no
ForwardX11Trusted yes
User nixcraft
Port 22
Protocol 2
ServerAliveInterval 60
ServerAliveCountMax 30
1
tests/fixtures/generic/ssh_config1.json
@ -0,0 +1 @@
[{"host":"server1","host_list":["server1"],"hostname":"server1.cyberciti.biz","user":"nixcraft","port":4242,"identityfile":["/nfs/shared/users/nixcraft/keys/server1/id_rsa"]},{"host":"nas01","host_list":["nas01"],"hostname":"192.168.1.100","user":"root","identityfile":["~/.ssh/nas01.key"]},{"host":"aws.apache","host_list":["aws.apache"],"hostname":"1.2.3.4","user":"wwwdata","identityfile":["~/.ssh/aws.apache.key"]},{"host":"uk.gw.lan uk.lan","host_list":["uk.gw.lan","uk.lan"],"hostname":"192.168.0.251","user":"nixcraft","proxycommand":"ssh nixcraft@gateway.uk.cyberciti.biz nc %h %p 2> /dev/null"},{"host":"proxyus","host_list":["proxyus"],"hostname":"vps1.cyberciti.biz","user":"breakfree","identityfile":["~/.ssh/vps1.cyberciti.biz.key"],"localforward":["3128 127.0.0.1:3128"]},{"host":"*","host_list":["*"],"forwardagent":"no","forwardx11":"no","forwardx11trusted":"yes","user":"nixcraft","port":22,"protocol":2,"serveraliveinterval":60,"serveralivecountmax":30}]
21
tests/fixtures/generic/ssh_config2
@ -0,0 +1,21 @@
Host targaryen
HostName 192.168.1.10
User daenerys
Port 7654
IdentityFile ~/.ssh/targaryen.key

Host tyrell
HostName 192.168.10.20

Host martell
HostName 192.168.10.50

Host *ell
user oberyn

Host * !martell
LogLevel INFO

Host *
User root
Compression yes
1
tests/fixtures/generic/ssh_config2.json
@ -0,0 +1 @@
[{"host":"targaryen","host_list":["targaryen"],"hostname":"192.168.1.10","user":"daenerys","port":7654,"identityfile":["~/.ssh/targaryen.key"]},{"host":"tyrell","host_list":["tyrell"],"hostname":"192.168.10.20"},{"host":"martell","host_list":["martell"],"hostname":"192.168.10.50"},{"host":"*ell","host_list":["*ell"],"user":"oberyn"},{"host":"* !martell","host_list":["*","!martell"],"loglevel":"INFO"},{"host":"*","host_list":["*"],"user":"root","compression":"yes"}]
33
tests/fixtures/generic/ssh_config3
@ -0,0 +1,33 @@
Host server1
HostName server1.cyberciti.biz
User nixcraft
Port 4242
IdentityFile /nfs/shared/users/nixcraft/keys/server1/id_rsa

## Home nas server ##
Host nas01
HostName 192.168.1.100
User root
IdentityFile ~/.ssh/nas01.key

## Login AWS Cloud ##
Host aws.apache
HostName 1.2.3.4
User wwwdata
IdentityFile ~/.ssh/aws.apache.key

## Login to internal lan server at 192.168.0.251 via our public uk office ssh based gateway using ##
## $ ssh uk.gw.lan ##
Host uk.gw.lan uk.lan
HostName 192.168.0.251
User nixcraft
ProxyCommand ssh nixcraft@gateway.uk.cyberciti.biz nc %h %p 2> /dev/null

## Our Us Proxy Server ##
## Forward all local port 3128 traffic to port 3128 on the remote vps1.cyberciti.biz server ##
## $ ssh -f -N proxyus ##
Host proxyus
HostName vps1.cyberciti.biz
User breakfree
IdentityFile ~/.ssh/vps1.cyberciti.biz.key
LocalForward 3128 127.0.0.1:3128
1
tests/fixtures/generic/ssh_config3.json
@ -0,0 +1 @@
[{"host":"server1","host_list":["server1"],"hostname":"server1.cyberciti.biz","user":"nixcraft","port":4242,"identityfile":["/nfs/shared/users/nixcraft/keys/server1/id_rsa"]},{"host":"nas01","host_list":["nas01"],"hostname":"192.168.1.100","user":"root","identityfile":["~/.ssh/nas01.key"]},{"host":"aws.apache","host_list":["aws.apache"],"hostname":"1.2.3.4","user":"wwwdata","identityfile":["~/.ssh/aws.apache.key"]},{"host":"uk.gw.lan uk.lan","host_list":["uk.gw.lan","uk.lan"],"hostname":"192.168.0.251","user":"nixcraft","proxycommand":"ssh nixcraft@gateway.uk.cyberciti.biz nc %h %p 2> /dev/null"},{"host":"proxyus","host_list":["proxyus"],"hostname":"vps1.cyberciti.biz","user":"breakfree","identityfile":["~/.ssh/vps1.cyberciti.biz.key"],"localforward":["3128 127.0.0.1:3128"]}]
105
tests/fixtures/generic/ssh_config4
@ -0,0 +1,105 @@
Host *
AddKeysToAgent ask
AddressFamily inet
BatchMode no
BindAddress 1.1.1.1
BindInterface en0
CanonicalDomains abc.com xyz.com
CanonicalizeFallbackLocal yes
CanonicalizeHostname none
CanonicalizeMaxDots 2
CanonicalizePermittedCNAMEs *.a.example.com:*.b.example.com,*.c.example.com
CASignatureAlgorithms ssh-ed25519,ecdsa-sha2-nistp256,ecdsa-sha2-nistp384,ecdsa-sha2-nistp521,sk-ssh-ed25519@openssh.com
CertificateFile ~/certificates/cert1.pem
CertificateFile ~/certificates/cert2.pem
CheckHostIP yes
Ciphers 3des-cbc,aes128-cbc,aes192-cbc
ClearAllForwardings yes
Compression yes
ConnectionAttempts 9
ConnectTimeout 30
ControlMaster ask
ControlPath none
ControlPersist yes
DynamicForward 1.1.1.1:443
EnableEscapeCommandline no
EnableSSHKeysign yes
EscapeChar none
ExitOnForwardFailure yes
FingerprintHash md5
ForkAfterAuthentication yes
ForwardAgent $mypath
ForwardX11 no
ForwardX11Timeout 500
ForwardX11Trusted yes
GatewayPorts yes
GlobalKnownHostsFile /etc/ssh/ssh_known_hosts /etc/ssh/ssh_known_hosts2
GSSAPIAuthentication yes
GSSAPIDelegateCredentials yes
HashKnownHosts yes
HostbasedAcceptedAlgorithms ssh-ed25519-cert-v01@openssh.com,ecdsa-sha2-nistp256-cert-v01@openssh.com
HostbasedAuthentication yes
HostKeyAlgorithms ssh-ed25519-cert-v01@openssh.com,ecdsa-sha2-nistp256-cert-v01@openssh.com
HostKeyAlias foobar
Hostname localhost
IdentitiesOnly yes
IdentityAgent SSH_AUTH_SOCK
IdentityFile ~/.ssh/vps1.cyberciti.biz.key
IdentityFile ~/.ssh/vps2.cyberciti.biz.key
IgnoreUnknown helloworld
Include ~/.ssh/config-extras ~/foo/bar
Include ~/.ssh/config-extra-extras
IPQoS af11 af12
KbdInteractiveAuthentication yes
KbdInteractiveDevices bsdauth,pam,skey
KexAlgorithms +sntrup761x25519-sha512@openssh.com,curve25519-sha256,curve25519-sha256@libssh.org
KnownHostsCommand ~/checkknownhosts
LocalCommand ~/mycommand
LocalForward 3128 127.0.0.1:3128
LocalForward 3129 127.0.0.1:3129
LogLevel INFO
LogVerbose kex.c:*:1000,*:kex_exchange_identification():*,packet.c:*
MACs ^umac-64-etm@openssh.com,umac-128-etm@openssh.com,hmac-sha2-256-etm@openssh.com,hmac-sha2-512-etm@openssh.com
NoHostAuthenticationForLocalhost yes
NumberOfPasswordPrompts 3
PasswordAuthentication yes
PermitLocalCommand yes
PermitRemoteOpen 1.1.1.1:443 2.2.2.2:443
PKCS11Provider ~/pkcs11provider
Port 22
PreferredAuthentications gssapi-with-mic,hostbased,publickey,keyboard-interactive,password
Protocol 2
ProxyCommand ssh nixcraft@gateway.uk.cyberciti.biz nc %h %p 2> /dev/null
ProxyJump 1.1.1.1:22,2.2.2.2:22
ProxyUseFdpass yes
PubkeyAcceptedAlgorithms -ssh-ed25519-cert-v01@openssh.com,ecdsa-sha2-nistp256-cert-v01@openssh.com
PubkeyAuthentication unbound
RekeyLimit 4G
RemoteCommand ~/mycommand
RemoteForward 1.1.1.1:22 2.2.2.2:22
RequestTTY force
RequiredRSASize 2048
RevokedHostKeys ~/revokedkeyfile
SecurityKeyProvider ~/keyprovider
SendEnv ENV1 ENV2
SendEnv ENV3
ServerAliveCountMax 3
ServerAliveInterval 3
SessionType none
SetEnv ENV1 ENV2
SetEnv ENV3
StdinNull yes
StreamLocalBindMask 0000
StreamLocalBindUnlink yes
StrictHostKeyChecking ask
SyslogFacility USER
TCPKeepAlive yes
Tunnel ethernet
TunnelDevice tun1:tun2
UpdateHostKeys ask
User nixcraft
UserKnownHostsFile ~/.ssh/knownhosts1 ~/.ssh/knownhosts2
VerifyHostKeyDNS ask
VisualHostKey yes
XAuthLocation /usr/X11R6/bin/xauth

1
tests/fixtures/generic/ssh_config4.json
@ -0,0 +1 @@
[{"host":"*","host_list":["*"],"addkeystoagent":"ask","addressfamily":"inet","batchmode":"no","bindaddress":"1.1.1.1","bindinterface":"en0","canonicaldomains":["abc.com","xyz.com"],"canonicalizefallbacklocal":"yes","canonicalizehostname":"none","canonicalizemaxdots":2,"canonicalizepermittedcnames":["*.a.example.com:*.b.example.com","*.c.example.com"],"casignaturealgorithms":["ssh-ed25519","ecdsa-sha2-nistp256","ecdsa-sha2-nistp384","ecdsa-sha2-nistp521","sk-ssh-ed25519@openssh.com"],"certificatefile":["~/certificates/cert1.pem","~/certificates/cert2.pem"],"checkhostip":"yes","ciphers":["3des-cbc","aes128-cbc","aes192-cbc"],"clearallforwardings":"yes","compression":"yes","connectionattempts":9,"connecttimeout":30,"controlmaster":"ask","controlpath":"none","controlpersist":"yes","dynamicforward":"1.1.1.1:443","enableescapecommandline":"no","enablesshkeysign":"yes","escapechar":"none","exitonforwardfailure":"yes","fingerprinthash":"md5","forkafterauthentication":"yes","forwardagent":"$mypath","forwardx11":"no","forwardx11timeout":500,"forwardx11trusted":"yes","gatewayports":"yes","globalknownhostsfile":["/etc/ssh/ssh_known_hosts","/etc/ssh/ssh_known_hosts2"],"gssapiauthentication":"yes","gssapidelegatecredentials":"yes","hashknownhosts":"yes","hostbasedacceptedalgorithms":["ssh-ed25519-cert-v01@openssh.com","ecdsa-sha2-nistp256-cert-v01@openssh.com"],"hostbasedauthentication":"yes","hostkeyalgorithms":["ssh-ed25519-cert-v01@openssh.com","ecdsa-sha2-nistp256-cert-v01@openssh.com"],"hostkeyalias":"foobar","hostname":"localhost","identitiesonly":"yes","identityagent":"SSH_AUTH_SOCK","identityfile":["~/.ssh/vps1.cyberciti.biz.key","~/.ssh/vps2.cyberciti.biz.key"],"ignoreunknown":"helloworld","include":["~/.ssh/config-extras","~/foo/bar","~/.ssh/config-extra-extras"],"ipqos":["af11","af12"],"kbdinteractiveauthentication":"yes","kbdinteractivedevices":["bsdauth","pam","skey"],"kexalgorithms":["sntrup761x25519-sha512@openssh.com","curve25519-sha256","curve25519-sha256@libssh.org"],"kexalgorithms_strategy":"+","knownhostscommand":"~/checkknownhosts","localcommand":"~/mycommand","localforward":["3128 127.0.0.1:3128","3129 127.0.0.1:3129"],"loglevel":"INFO","logverbose":["kex.c:*:1000","*:kex_exchange_identification():*","packet.c:*"],"macs":["umac-64-etm@openssh.com","umac-128-etm@openssh.com","hmac-sha2-256-etm@openssh.com","hmac-sha2-512-etm@openssh.com"],"macs_strategy":"^","nohostauthenticationforlocalhost":"yes","numberofpasswordprompts":3,"passwordauthentication":"yes","permitlocalcommand":"yes","permitremoteopen":["1.1.1.1:443","2.2.2.2:443"],"pkcs11provider":"~/pkcs11provider","port":22,"preferredauthentications":["gssapi-with-mic","hostbased","publickey","keyboard-interactive","password"],"protocol":2,"proxycommand":"ssh nixcraft@gateway.uk.cyberciti.biz nc %h %p 2> /dev/null","proxyjump":["1.1.1.1:22","2.2.2.2:22"],"proxyusefdpass":"yes","pubkeyacceptedalgorithms":["ssh-ed25519-cert-v01@openssh.com","ecdsa-sha2-nistp256-cert-v01@openssh.com"],"pubkeyacceptedalgorithms_strategy":"-","pubkeyauthentication":"unbound","rekeylimit":"4G","remotecommand":"~/mycommand","remoteforward":"1.1.1.1:22 
2.2.2.2:22","requesttty":"force","requiredrsasize":2048,"revokedhostkeys":"~/revokedkeyfile","securitykeyprovider":"~/keyprovider","sendenv":["ENV1","ENV2","ENV3"],"serveralivecountmax":3,"serveraliveinterval":3,"sessiontype":"none","setenv":["ENV1","ENV2","ENV3"],"stdinnull":"yes","streamlocalbindmask":"0000","streamlocalbindunlink":"yes","stricthostkeychecking":"ask","syslogfacility":"USER","tcpkeepalive":"yes","tunnel":"ethernet","tunneldevice":"tun1:tun2","updatehostkeys":"ask","user":"nixcraft","userknownhostsfile":["~/.ssh/knownhosts1","~/.ssh/knownhosts2"],"verifyhostkeydns":"ask","visualhostkey":"yes","xauthlocation":"/usr/X11R6/bin/xauth"}]
14
tests/fixtures/generic/ssh_config5
@ -0,0 +1,14 @@

# comment
Host *
User something

# comment 2
Host svu
Hostname www.svuniversity.ac.in
# within-host-comment
Port 22
ProxyCommand nc -w 300 -x localhost:9050 %h %p

# another comment
# bla bla
1
tests/fixtures/generic/ssh_config5.json
@ -0,0 +1 @@
[{"host":"*","host_list":["*"],"user":"something"},{"host":"svu","host_list":["svu"],"hostname":"www.svuniversity.ac.in","port":22,"proxycommand":"nc -w 300 -x localhost:9050 %h %p"}]
1
tests/fixtures/generic/xrandr_fix_spaces.json
File diff suppressed because one or more lines are too long
44
tests/fixtures/generic/xrandr_fix_spaces.out
@ -0,0 +1,44 @@
Screen 0: minimum 320 x 200, current 2806 x 900, maximum 8192 x 8192
LVDS-1 connected primary 1366x768+0+0 (normal left inverted right x axis y axis) 344mm x 194mm
   1366x768      60.00*+
   1280x720      60.00    59.99    59.86    59.74
   1024x768      60.04    60.00
   960x720       60.00
   928x696       60.05
   896x672       60.01
   1024x576      59.95    59.96    59.90    59.82
   960x600       59.93    60.00
   960x540       59.96    59.99    59.63    59.82
   800x600       60.00    60.32    56.25
   840x525       60.01    59.88
   864x486       59.92    59.57
   700x525       59.98
   800x450       59.95    59.82
   640x512       60.02
   700x450       59.96    59.88
   640x480       60.00    59.94
   720x405       59.51    58.99
   684x384       59.88    59.85
   640x400       59.88    59.98
   640x360       59.86    59.83    59.84    59.32
   512x384       60.00
   512x288       60.00    59.92
   480x270       59.63    59.82
   400x300       60.32    56.34
   432x243       59.92    59.57
   320x240       60.05
   360x202       59.51    59.13
   320x180       59.84    59.32
VGA-1 connected 1440x900+1366+0 normal Y axis (normal left inverted right x axis y axis) 408mm x 255mm
   1440x900      59.89*+  74.98
   1280x1024     75.02    60.02
   1280x960      60.00
   1280x800      74.93    59.81
   1152x864      75.00
   1024x768      75.03    70.07    60.00
   832x624       74.55
   800x600       72.19    75.00    60.32    56.25
   640x480       75.00    72.81    66.67    59.94
   720x400       70.08
HDMI-1 disconnected (normal left inverted right x axis y axis)
DP-1 disconnected (normal left inverted right x axis y axis)
1
tests/fixtures/generic/xrandr_is_current_fix.json
File diff suppressed because one or more lines are too long
44
tests/fixtures/generic/xrandr_is_current_fix.out
@ -0,0 +1,44 @@
Screen 0: minimum 320 x 200, current 1846 x 768, maximum 8192 x 8192
LVDS-1 connected primary 1366x768+0+0 (normal left inverted right x axis y axis) 344mm x 194mm
   1366x768      60.00*+
   1280x720      60.00    59.99    59.86    59.74
   1024x768      60.04    60.00
   960x720       60.00
   928x696       60.05
   896x672       60.01
   1024x576      59.95    59.96    59.90    59.82
   960x600       59.93    60.00
   960x540       59.96    59.99    59.63    59.82
   800x600       60.00    60.32    56.25
   840x525       60.01    59.88
   864x486       59.92    59.57
   700x525       59.98
   800x450       59.95    59.82
   640x512       60.02
   700x450       59.96    59.88
   640x480       60.00    59.94
   720x405       59.51    58.99
   684x384       59.88    59.85
   640x400       59.88    59.98
   640x360       59.86    59.83    59.84    59.32
   512x384       60.00
   512x288       60.00    59.92
   480x270       59.63    59.82
   400x300       60.32    56.34
   432x243       59.92    59.57
   320x240       60.05
   360x202       59.51    59.13
   320x180       59.84    59.32
VGA-1 connected 480x640+1366+0 left (normal left inverted right x axis y axis) 408mm x 255mm
   1440x900      59.89 +  74.98
   1280x1024     75.02    60.02
   1280x960      60.00
   1280x800      74.93    59.81
   1152x864      75.00
   1024x768      75.03    70.07    60.00
   832x624       74.55
   800x600       72.19    75.00    60.32    56.25
   640x480       75.00*   72.81    66.67    59.94
   720x400       70.08
HDMI-1 disconnected (normal left inverted right x axis y axis)
DP-1 disconnected (normal left inverted right x axis y axis)
110
tests/fixtures/generic/xrandr_properties.out
@ -0,0 +1,110 @@
Screen 0: minimum 320 x 200, current 2806 x 900, maximum 8192 x 8192
LVDS-1 connected primary 1366x768+0+0 (normal left inverted right x axis y axis) 344mm x 194mm
        EDID:
                00ffffffffffff004ca3523100000000
                0014010380221378eac8959e57549226
                0f505400000001010101010101010101
                010101010101381d56d4500016303020
                250058c2100000190000000f00000000
                000000000025d9066a00000000fe0053
                414d53554e470a204ca34154000000fe
                004c544e313536415432343430310018
        scaling mode: Full aspect
                supported: Full, Center, Full aspect
        link-status: Good
                supported: Good, Bad
        CONNECTOR_ID: 61
                supported: 61
        non-desktop: 0
                range: (0, 1)
   1366x768      60.00*+
   1280x720      60.00    59.99    59.86    59.74
   1024x768      60.04    60.00
   960x720       60.00
   928x696       60.05
   896x672       60.01
   1024x576      59.95    59.96    59.90    59.82
   960x600       59.93    60.00
   960x540       59.96    59.99    59.63    59.82
   800x600       60.00    60.32    56.25
   840x525       60.01    59.88
   864x486       59.92    59.57
   700x525       59.98
   800x450       59.95    59.82
   640x512       60.02
   700x450       59.96    59.88
   640x480       60.00    59.94
   720x405       59.51    58.99
   684x384       59.88    59.85
   640x400       59.88    59.98
   640x360       59.86    59.83    59.84    59.32
   512x384       60.00
   512x288       60.00    59.92
   480x270       59.63    59.82
   400x300       60.32    56.34
   432x243       59.92    59.57
   320x240       60.05
   360x202       59.51    59.13
   320x180       59.84    59.32
VGA-1 connected 1440x900+1366+0 (normal left inverted right x axis y axis) 408mm x 255mm
        EDID:
                00ffffffffffff000469d41901010101
                2011010308291a78ea8585a6574a9c26
                125054bfef80714f8100810f81408180
                9500950f01019a29a0d0518422305098
                360098ff1000001c000000fd00374b1e
                530f000a202020202020000000fc0041
                535553205657313933530a20000000ff
                0037384c383032313130370a20200077
        link-status: Good
                supported: Good, Bad
        CONNECTOR_ID: 64
                supported: 64
        non-desktop: 0
                range: (0, 1)
   1440x900      59.89*+  74.98
   1280x1024     75.02    60.02
   1280x960      60.00
   1280x800      74.93    59.81
   1152x864      75.00
   1024x768      75.03    70.07    60.00
   832x624       74.55
   800x600       72.19    75.00    60.32    56.25
   640x480       75.00    72.81    66.67    59.94
   720x400       70.08
HDMI-1 disconnected (normal left inverted right x axis y axis)
        max bpc: 12
                range: (8, 12)
        content type: No Data
                supported: No Data, Graphics, Photo, Cinema, Game
        Colorspace: Default
                supported: Default, SMPTE_170M_YCC, BT709_YCC, XVYCC_601, XVYCC_709, SYCC_601, opYCC_601, opRGB, BT2020_CYCC, BT2020_RGB, BT2020_YCC, DCI-P3_RGB_D65, DCI-P3_RGB_Theater
        aspect ratio: Automatic
                supported: Automatic, 4:3, 16:9
        Broadcast RGB: Automatic
                supported: Automatic, Full, Limited 16:235
        audio: auto
                supported: force-dvi, off, auto, on
        link-status: Good
                supported: Good, Bad
        CONNECTOR_ID: 68
                supported: 68
        non-desktop: 0
                range: (0, 1)
DP-1 disconnected (normal left inverted right x axis y axis)
        Colorspace: Default
                supported: Default, RGB_Wide_Gamut_Fixed_Point, RGB_Wide_Gamut_Floating_Point, opRGB, DCI-P3_RGB_D65, BT2020_RGB, BT601_YCC, BT709_YCC, XVYCC_601, XVYCC_709, SYCC_601, opYCC_601, BT2020_CYCC, BT2020_YCC
        max bpc: 12
                range: (6, 12)
        Broadcast RGB: Automatic
                supported: Automatic, Full, Limited 16:235
        audio: auto
                supported: force-dvi, off, auto, on
        subconnector: Unknown
                supported: Unknown, VGA, DVI-D, HDMI, DP, Wireless, Native
        link-status: Good
                supported: Good, Bad
        CONNECTOR_ID: 76
                supported: 76
        non-desktop: 0
                range: (0, 1)
1
tests/fixtures/generic/xrandr_simple.json
@ -44,6 +44,7 @@
"is_primary": true,
"device_name": "eDP1",
"rotation": "normal",
"reflection": "normal",
"resolution_width": 1920,
"resolution_height": 1080,
"offset_width": 0,
1
tests/fixtures/generic/zpool-iostat-v.json
@ -0,0 +1 @@
[{"pool":"zhgstera6","cap_alloc":2.89,"cap_free":2.2,"ops_read":0,"ops_write":2,"bw_read":349.0,"bw_write":448.0,"cap_alloc_unit":"T","cap_free_unit":"T","bw_read_unit":"K","bw_write_unit":"K"},{"pool":"726060ALE614-K8JAPRGN:10","parent":"zhgstera6","cap_alloc":2.89,"cap_free":2.2,"ops_read":0,"ops_write":2,"bw_read":349.0,"bw_write":448.0,"cap_alloc_unit":"T","cap_free_unit":"T","bw_read_unit":"K","bw_write_unit":"K"},{"pool":"zint500","cap_alloc":230.0,"cap_free":24.0,"ops_read":0,"ops_write":9,"bw_read":112.0,"bw_write":318.0,"cap_alloc_unit":"G","cap_free_unit":"G","bw_read_unit":"K","bw_write_unit":"K"},{"pool":"ST3500418AS-5VMSTSSX:5","parent":"zint500","cap_alloc":230.0,"cap_free":24.0,"ops_read":0,"ops_write":9,"bw_read":112.0,"bw_write":318.0,"cap_alloc_unit":"G","cap_free_unit":"G","bw_read_unit":"K","bw_write_unit":"K"},{"pool":"zsam53","cap_alloc":211.0,"cap_free":24.5,"ops_read":0,"ops_write":0,"bw_read":14.7,"bw_write":74.4,"cap_alloc_unit":"G","cap_free_unit":"G","bw_read_unit":"K","bw_write_unit":"K"},{"pool":"Portable_SSD_T5-S49WNP0N120517B:8","parent":"zsam53","cap_alloc":211.0,"cap_free":24.5,"ops_read":0,"ops_write":0,"bw_read":14.7,"bw_write":74.4,"cap_alloc_unit":"G","cap_free_unit":"G","bw_read_unit":"K","bw_write_unit":"K"}]
12
tests/fixtures/generic/zpool-iostat-v.out
@ -0,0 +1,12 @@
                                       capacity     operations     bandwidth
pool                                 alloc   free   read  write   read  write
-----------------------------------  -----  -----  -----  -----  -----  -----
zhgstera6                            2.89T  2.20T      0      2   349K   448K
  726060ALE614-K8JAPRGN:10           2.89T  2.20T      0      2   349K   448K
-----------------------------------  -----  -----  -----  -----  -----  -----
zint500                               230G  24.0G      0      9   112K   318K
  ST3500418AS-5VMSTSSX:5              230G  24.0G      0      9   112K   318K
-----------------------------------  -----  -----  -----  -----  -----  -----
zsam53                                211G  24.5G      0      0  14.7K  74.4K
  Portable_SSD_T5-S49WNP0N120517B:8   211G  24.5G      0      0  14.7K  74.4K
-----------------------------------  -----  -----  -----  -----  -----  -----
1
tests/fixtures/generic/zpool-iostat.json
@ -0,0 +1 @@
[{"pool":"zhgstera6","cap_alloc":2.89,"cap_free":2.2,"ops_read":0,"ops_write":2,"bw_read":349.0,"bw_write":448.0,"cap_alloc_unit":"T","cap_free_unit":"T","bw_read_unit":"K","bw_write_unit":"K"},{"pool":"zint500","cap_alloc":230.0,"cap_free":24.0,"ops_read":0,"ops_write":9,"bw_read":112.0,"bw_write":318.0,"cap_alloc_unit":"G","cap_free_unit":"G","bw_read_unit":"K","bw_write_unit":"K"},{"pool":"zsam53","cap_alloc":211.0,"cap_free":24.5,"ops_read":0,"ops_write":0,"bw_read":14.7,"bw_write":74.4,"cap_alloc_unit":"G","cap_free_unit":"G","bw_read_unit":"K","bw_write_unit":"K"}]
7
tests/fixtures/generic/zpool-iostat.out
@ -0,0 +1,7 @@
              capacity     operations     bandwidth
pool        alloc   free   read  write   read  write
----------  -----  -----  -----  -----  -----  -----
zhgstera6   2.89T  2.20T      0      2   349K   448K
zint500      230G  24.0G      0      9   112K   318K
zsam53       211G  24.5G      0      0  14.7K  74.4K
----------  -----  -----  -----  -----  -----  -----
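As with the other fixtures, the `zpool iostat` samples above pair raw command output with the JSON the new parser is expected to emit. Assuming the parser module is named `zpool_iostat` (CLI flag `--zpool-iostat`), it can be driven like this:

```python
import jc

# hypothetical local copy of the zpool-iostat.out fixture shown above
with open('zpool-iostat.out', encoding='utf-8') as f:
    output = f.read()

pools = jc.parse('zpool_iostat', output)

for pool in pools:
    # keys match the fixture JSON: pool, cap_alloc, cap_free, ops_*, bw_*
    print(pool['pool'], pool['cap_alloc'], pool['cap_alloc_unit'])
```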
1
tests/fixtures/generic/zpool-status-v.json
@ -0,0 +1 @@
[{"pool":"zhgstera6","state":"ONLINE","scan":"scrub canceled on Fri Aug 19 15:33:19 2022","config":[{"name":"zhgstera6","state":"ONLINE","read":0,"write":0,"checksum":0},{"name":"726060ALE614-K8JAPRGN:10","state":"ONLINE","read":0,"write":0,"checksum":0}],"errors":"No known data errors"},{"pool":"zint500","state":"ONLINE","scan":"scrub repaired 0 in 0 days 00:19:47 with 0 errors on Tue Aug 16 00:20:50 2022","config":[{"name":"zint500","state":"ONLINE","read":0,"write":0,"checksum":0},{"name":"ST3500418AS-5VMSTSSX:5","state":"ONLINE","read":0,"write":0,"checksum":0}],"errors":"No known data errors"},{"pool":"zsam53","state":"ONLINE","scan":"scrub repaired 0 in 0 days 01:25:43 with 0 errors on Mon Aug 15 01:26:46 2022","config":[{"name":"zsam53","state":"ONLINE","read":0,"write":0,"checksum":0},{"name":"Portable_SSD_T5-S49WNP0N120517B:8","state":"ONLINE","read":0,"write":0,"checksum":0}],"errors":"No known data errors"}]
24
tests/fixtures/generic/zpool-status-v.out
@ -0,0 +1,24 @@
  pool: zhgstera6
 state: ONLINE
  scan: scrub canceled on Fri Aug 19 15:33:19 2022
config:
        NAME                                 STATE     READ WRITE CKSUM
        zhgstera6                            ONLINE       0     0     0
          726060ALE614-K8JAPRGN:10           ONLINE       0     0     0
errors: No known data errors
  pool: zint500
 state: ONLINE
  scan: scrub repaired 0 in 0 days 00:19:47 with 0 errors on Tue Aug 16 00:20:50 2022
config:
        NAME                                 STATE     READ WRITE CKSUM
        zint500                              ONLINE       0     0     0
          ST3500418AS-5VMSTSSX:5             ONLINE       0     0     0
errors: No known data errors
  pool: zsam53
 state: ONLINE
  scan: scrub repaired 0 in 0 days 01:25:43 with 0 errors on Mon Aug 15 01:26:46 2022
config:
        NAME                                 STATE     READ WRITE CKSUM
        zsam53                               ONLINE       0     0     0
          Portable_SSD_T5-S49WNP0N120517B:8  ONLINE       0     0     0
errors: No known data errors
1
tests/fixtures/generic/zpool-status-v2.json
@ -0,0 +1 @@
[{"pool":"tank","state":"DEGRADED","status":"One or more devices could not be opened. Sufficient replicas exist for\nthe pool to continue functioning in a degraded state.","action":"Attach the missing device and online it using 'zpool online'.","see":"http://www.sun.com/msg/ZFS-8000-2Q","scrub":"none requested","config":[{"name":"tank","state":"DEGRADED","read":0,"write":0,"checksum":0},{"name":"mirror-0","state":"DEGRADED","read":0,"write":0,"checksum":0},{"name":"c1t0d0","state":"ONLINE","read":0,"write":0,"checksum":0},{"name":"c1t1d0","state":"UNAVAIL","read":0,"write":0,"checksum":0,"errors":"cannot open"}],"errors":"No known data errors"}]
17
tests/fixtures/generic/zpool-status-v2.out
@ -0,0 +1,17 @@
  pool: tank
 state: DEGRADED
status: One or more devices could not be opened. Sufficient replicas exist for
        the pool to continue functioning in a degraded state.
action: Attach the missing device and online it using 'zpool online'.
   see: http://www.sun.com/msg/ZFS-8000-2Q
 scrub: none requested
config:

        NAME        STATE     READ WRITE CKSUM
        tank        DEGRADED     0     0     0
          mirror-0  DEGRADED     0     0     0
            c1t0d0  ONLINE       0     0     0
            c1t1d0  UNAVAIL      0     0     0  cannot open

errors: No known data errors

1
tests/fixtures/generic/zpool-status-v3.json
@ -0,0 +1 @@
[{"pool":"tank","state":"UNAVAIL","status":"One or more devices are faulted in response to IO failures.","action":"Make sure the affected devices are connected, then run 'zpool clear'.","see":"http://www.sun.com/msg/ZFS-8000-HC","scrub":"scrub completed after 0h0m with 0 errors on Tue Feb 2 13:08:42 2010","config":[{"name":"tank","state":"UNAVAIL","read":0,"write":0,"checksum":0,"errors":"insufficient replicas"},{"name":"c1t0d0","state":"ONLINE","read":0,"write":0,"checksum":0},{"name":"c1t1d0","state":"UNAVAIL","read":4,"write":1,"checksum":0,"errors":"cannot open"}],"errors":"Permanent errors have been detected in the following files:\n/tank/data/aaa\n/tank/data/bbb\n/tank/data/ccc"}]
18
tests/fixtures/generic/zpool-status-v3.out
@ -0,0 +1,18 @@
  pool: tank
 state: UNAVAIL
status: One or more devices are faulted in response to IO failures.
action: Make sure the affected devices are connected, then run 'zpool clear'.
   see: http://www.sun.com/msg/ZFS-8000-HC
 scrub: scrub completed after 0h0m with 0 errors on Tue Feb 2 13:08:42 2010
config:

        NAME        STATE     READ WRITE CKSUM
        tank        UNAVAIL      0     0     0  insufficient replicas
          c1t0d0    ONLINE       0     0     0
          c1t1d0    UNAVAIL      4     1     0  cannot open

errors: Permanent errors have been detected in the following files:

       /tank/data/aaa
       /tank/data/bbb
       /tank/data/ccc
1
tests/fixtures/ubuntu-16.04/ifconfig.json
@ -0,0 +1 @@
[{"name":"ens33","flags":null,"state":["UP BROADCAST RUNNING MULTICAST "],"mtu":1500,"type":"Ethernet","mac_addr":"00:0c:29:c2:c8:63","ipv4_addr":"192.168.248.129","ipv4_mask":"255.255.255.0","ipv4_bcast":"192.168.248.255","ipv6_addr":"fe80::c1ca:3dee:39f7:5937","ipv6_mask":64,"ipv6_scope":"Link","ipv6_type":null,"metric":1,"rx_packets":36,"rx_errors":0,"rx_dropped":0,"rx_overruns":0,"rx_frame":0,"tx_packets":152,"tx_errors":0,"tx_dropped":0,"tx_overruns":0,"tx_carrier":0,"tx_collisions":0,"rx_bytes":5602,"tx_bytes":13935,"ipv4":[{"address":"192.168.248.129","broadcast":"192.168.248.255","mask":"255.255.255.0"}],"ipv6":[{"address":"fe80::c1ca:3dee:39f7:5937","mask":64,"scope":"Link"}]},{"name":"lo","flags":null,"state":["UP LOOPBACK RUNNING "],"mtu":65536,"type":"Local Loopback","mac_addr":null,"ipv4_addr":"127.0.0.1","ipv4_mask":"255.0.0.0","ipv4_bcast":null,"ipv6_addr":"::1","ipv6_mask":128,"ipv6_scope":"Host","ipv6_type":null,"metric":1,"rx_packets":208,"rx_errors":0,"rx_dropped":0,"rx_overruns":0,"rx_frame":0,"tx_packets":208,"tx_errors":0,"tx_dropped":0,"tx_overruns":0,"tx_carrier":0,"tx_collisions":0,"rx_bytes":17363,"tx_bytes":17363,"ipv4":[{"address":"127.0.0.1","broadcast":null,"mask":"255.0.0.0"}],"ipv6":[{"address":"::1","mask":128,"scope":"Host"}]}]
17
tests/fixtures/ubuntu-16.04/ifconfig.out
@ -0,0 +1,17 @@
ens33     Link encap:Ethernet  HWaddr 00:0c:29:c2:c8:63
          inet addr:192.168.248.129  Bcast:192.168.248.255  Mask:255.255.255.0
          inet6 addr: fe80::c1ca:3dee:39f7:5937/64 Scope:Link
          UP BROADCAST RUNNING MULTICAST  MTU:1500  Metric:1
          RX packets:36 errors:0 dropped:0 overruns:0 frame:0
          TX packets:152 errors:0 dropped:0 overruns:0 carrier:0
          collisions:0 txqueuelen:1000
          RX bytes:5602 (5.6 KB)  TX bytes:13935 (13.9 KB)

lo        Link encap:Local Loopback
          inet addr:127.0.0.1  Mask:255.0.0.0
          inet6 addr: ::1/128 Scope:Host
          UP LOOPBACK RUNNING  MTU:65536  Metric:1
          RX packets:208 errors:0 dropped:0 overruns:0 frame:0
          TX packets:208 errors:0 dropped:0 overruns:0 carrier:0
          collisions:0 txqueuelen:1000
          RX bytes:17363 (17.3 KB)  TX bytes:17363 (17.3 KB)
tests/test_acpi.py
@ -24,6 +24,9 @@ class MyTests(unittest.TestCase):
    with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/ubuntu-18.04/acpi-V.out'), 'r', encoding='utf-8') as f:
        ubuntu_18_04_acpi_V = f.read()

    with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/generic/acpi-V-never-fully-discharge.out'), 'r', encoding='utf-8') as f:
        acpi_V_never_fully_discharge = f.read()

    # output
    with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/generic/acpi-V.json'), 'r', encoding='utf-8') as f:
        generic_acpi_V_json = json.loads(f.read())
@ -40,6 +43,9 @@ class MyTests(unittest.TestCase):
    with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/ubuntu-18.04/acpi-V.json'), 'r', encoding='utf-8') as f:
        ubuntu_18_04_acpi_V_json = json.loads(f.read())

    with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/generic/acpi-V-never-fully-discharge.json'), 'r', encoding='utf-8') as f:
        acpi_V_never_fully_discharge_json = json.loads(f.read())

    def test_acpi_nodata(self):
        """
        Test 'acpi' with no data
@ -76,6 +82,12 @@ class MyTests(unittest.TestCase):
        """
        self.assertEqual(jc.parsers.acpi.parse(self.ubuntu_18_04_acpi_V, quiet=True), self.ubuntu_18_04_acpi_V_json)

    def test_acpi_V_never_fully_discharge(self):
        """
        Test 'acpi -V' with "never fully discharge" message
        """
        self.assertEqual(jc.parsers.acpi.parse(self.acpi_V_never_fully_discharge, quiet=True), self.acpi_V_never_fully_discharge_json)


if __name__ == '__main__':
    unittest.main()
tests/test_crontab.py
@ -12,10 +12,16 @@ class MyTests(unittest.TestCase):
    with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/centos-7.7/crontab.out'), 'r', encoding='utf-8') as f:
        centos_7_7_crontab = f.read()

    with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/generic/crontab-no-normal-entries.out'), 'r', encoding='utf-8') as f:
        generic_crontab_no_normal_entries = f.read()

    # output
    with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/centos-7.7/crontab.json'), 'r', encoding='utf-8') as f:
        centos_7_7_crontab_json = json.loads(f.read())

    with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/generic/crontab-no-normal-entries.json'), 'r', encoding='utf-8') as f:
        generic_crontab_no_normal_entries_json = json.loads(f.read())


    def test_crontab_nodata(self):
        """
@ -29,6 +35,12 @@ class MyTests(unittest.TestCase):
        """
        self.assertEqual(jc.parsers.crontab.parse(self.centos_7_7_crontab, quiet=True), self.centos_7_7_crontab_json)

    def test_crontab_no_normal_entries(self):
        """
        Test 'crontab' with no normal entries - only shortcuts
        """
        self.assertEqual(jc.parsers.crontab.parse(self.generic_crontab_no_normal_entries, quiet=True), self.generic_crontab_no_normal_entries_json)


if __name__ == '__main__':
    unittest.main()
tests/test_crontab_u.py
@ -18,6 +18,9 @@ class MyTests(unittest.TestCase):
    with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/debian10/crontab-u.out'), 'r', encoding='utf-8') as f:
        debian10_crontab_u = f.read()

    with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/generic/crontab-u-no-normal-entries.out'), 'r', encoding='utf-8') as f:
        generic_crontab_u_no_normal_entries = f.read()

    # output
    with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/ubuntu-18.04/crontab-u.json'), 'r', encoding='utf-8') as f:
        ubuntu_18_4_crontab_u_json = json.loads(f.read())
@ -28,6 +31,9 @@ class MyTests(unittest.TestCase):
    with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/debian10/crontab-u.json'), 'r', encoding='utf-8') as f:
        debian10_crontab_u_json = json.loads(f.read())

    with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/generic/crontab-u-no-normal-entries.json'), 'r', encoding='utf-8') as f:
        generic_crontab_u_no_normal_entries_json = json.loads(f.read())


    def test_crontab_u_nodata(self):
        """
@ -53,6 +59,11 @@ class MyTests(unittest.TestCase):
        """
        self.assertEqual(jc.parsers.crontab_u.parse(self.debian10_crontab_u, quiet=True), self.debian10_crontab_u_json)

    def test_crontab_u_no_normal_entries(self):
        """
        Test 'crontab' with no normal entries - only shortcut entries (has a user field)
        """
        self.assertEqual(jc.parsers.crontab_u.parse(self.generic_crontab_u_no_normal_entries, quiet=True), self.generic_crontab_u_no_normal_entries_json)

if __name__ == '__main__':
    unittest.main()
tests/test_ifconfig.py
@ -12,6 +12,9 @@ class MyTests(unittest.TestCase):
    with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/centos-7.7/ifconfig.out'), 'r', encoding='utf-8') as f:
        centos_7_7_ifconfig = f.read()

    with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/ubuntu-16.04/ifconfig.out'), 'r', encoding='utf-8') as f:
        ubuntu_16_4_ifconfig = f.read()

    with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/ubuntu-18.04/ifconfig.out'), 'r', encoding='utf-8') as f:
        ubuntu_18_4_ifconfig = f.read()

@ -43,6 +46,9 @@ class MyTests(unittest.TestCase):
    with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/centos-7.7/ifconfig.json'), 'r', encoding='utf-8') as f:
        centos_7_7_ifconfig_json = json.loads(f.read())

    with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/ubuntu-16.04/ifconfig.json'), 'r', encoding='utf-8') as f:
        ubuntu_16_4_ifconfig_json = json.loads(f.read())

    with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/ubuntu-18.04/ifconfig.json'), 'r', encoding='utf-8') as f:
        ubuntu_18_4_ifconfig_json = json.loads(f.read())

@ -82,6 +88,12 @@ class MyTests(unittest.TestCase):
        """
        self.assertEqual(jc.parsers.ifconfig.parse(self.centos_7_7_ifconfig, quiet=True), self.centos_7_7_ifconfig_json)

    def test_ifconfig_ubuntu_16_4(self):
        """
        Test 'ifconfig' on Ubuntu 16.4
        """
        self.assertEqual(jc.parsers.ifconfig.parse(self.ubuntu_16_4_ifconfig, quiet=True), self.ubuntu_16_4_ifconfig_json)

    def test_ifconfig_ubuntu_18_4(self):
        """
        Test 'ifconfig' on Ubuntu 18.4
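The parser test updates above, along with the CLI slicer tests that follow, can be run in isolation with the standard library test runner. The module paths below assume the repo's usual `tests/test_*.py` layout:

```python
import unittest

# assumed module names based on the hunks shown in this change set
suite = unittest.defaultTestLoader.loadTestsFromNames([
    'tests.test_acpi',
    'tests.test_crontab',
    'tests.test_crontab_u',
    'tests.test_ifconfig',
    'tests.test_cli',
])
unittest.TextTestRunner(verbosity=2).run(suite)
```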
@@ -292,7 +292,7 @@ class MyTests(unittest.TestCase):
         cli.magic_returncode = 2
         cli.magic_run_command = ['ping', '-c3', '192.168.1.123']
         cli.parser_name = 'ping'
-        expected = {'a': 1, 'b': 2, '_jc_meta': {'parser': 'ping', 'magic_command': ['ping', '-c3', '192.168.1.123'], 'magic_command_exit': 2, 'timestamp': 1659659829.273349}}
+        expected = {'a': 1, 'b': 2, '_jc_meta': {'parser': 'ping', 'magic_command': ['ping', '-c3', '192.168.1.123'], 'magic_command_exit': 2, 'timestamp': 1659659829.273349, 'slice_start': None, 'slice_end': None}}
         cli.add_metadata_to_output()
         self.assertEqual(cli.data_out, expected)

@@ -303,7 +303,7 @@ class MyTests(unittest.TestCase):
         cli.magic_returncode = 2
         cli.magic_run_command = ['ping', '-c3', '192.168.1.123']
         cli.parser_name = 'ping'
-        expected = [{'a': 1, 'b': 2, '_jc_meta': {'parser': 'ping', 'magic_command': ['ping', '-c3', '192.168.1.123'], 'magic_command_exit': 2, 'timestamp': 1659659829.273349}}, {'a': 3, 'b': 4, '_jc_meta': {'parser': 'ping', 'magic_command': ['ping', '-c3', '192.168.1.123'], 'magic_command_exit': 2, 'timestamp': 1659659829.273349}}]
+        expected = [{'a': 1, 'b': 2, '_jc_meta': {'parser': 'ping', 'magic_command': ['ping', '-c3', '192.168.1.123'], 'magic_command_exit': 2, 'timestamp': 1659659829.273349, 'slice_start': None, 'slice_end': None}}, {'a': 3, 'b': 4, '_jc_meta': {'parser': 'ping', 'magic_command': ['ping', '-c3', '192.168.1.123'], 'magic_command_exit': 2, 'timestamp': 1659659829.273349, 'slice_start': None, 'slice_end': None}}]
         cli.add_metadata_to_output()
         self.assertEqual(cli.data_out, expected)

@@ -314,7 +314,7 @@ class MyTests(unittest.TestCase):
         cli.data_out = {'a': 1, 'b': 2, '_jc_meta': {'foo': 'bar'}}
         cli.run_timestamp = datetime(2022, 8, 5, 0, 37, 9, 273349, tzinfo=timezone.utc)
         cli.parser_name = 'ping'
-        expected = {'a': 1, 'b': 2, '_jc_meta': {'foo': 'bar', 'parser': 'ping', 'magic_command': ['ping', '-c3', '192.168.1.123'], 'magic_command_exit': 2, 'timestamp': 1659659829.273349}}
+        expected = {'a': 1, 'b': 2, '_jc_meta': {'foo': 'bar', 'parser': 'ping', 'magic_command': ['ping', '-c3', '192.168.1.123'], 'magic_command_exit': 2, 'timestamp': 1659659829.273349, 'slice_start': None, 'slice_end': None}}
         cli.add_metadata_to_output()
         self.assertEqual(cli.data_out, expected)

@@ -325,9 +325,133 @@ class MyTests(unittest.TestCase):
         cli.magic_returncode = 2
         cli.magic_run_command = ['ping', '-c3', '192.168.1.123']
         cli.parser_name = 'ping'
-        expected = [{'a': 1, 'b': 2, '_jc_meta': {'foo': 'bar', 'parser': 'ping', 'magic_command': ['ping', '-c3', '192.168.1.123'], 'magic_command_exit': 2, 'timestamp': 1659659829.273349}}, {'a': 3, 'b': 4, '_jc_meta': {'foo': 'bar', 'parser': 'ping', 'magic_command': ['ping', '-c3', '192.168.1.123'], 'magic_command_exit': 2, 'timestamp': 1659659829.273349}}]
+        expected = [{'a': 1, 'b': 2, '_jc_meta': {'foo': 'bar', 'parser': 'ping', 'magic_command': ['ping', '-c3', '192.168.1.123'], 'magic_command_exit': 2, 'timestamp': 1659659829.273349, 'slice_start': None, 'slice_end': None}}, {'a': 3, 'b': 4, '_jc_meta': {'foo': 'bar', 'parser': 'ping', 'magic_command': ['ping', '-c3', '192.168.1.123'], 'magic_command_exit': 2, 'timestamp': 1659659829.273349, 'slice_start': None, 'slice_end': None}}]
         cli.add_metadata_to_output()
         self.assertEqual(cli.data_out, expected)

+    def test_slice_none_str(self):
+        cli = JcCli()
+        cli.slice_start = None
+        cli.slice_end = None
+        cli.data_in = '''\
+row0
+row1
+row2
+row3
+row4
+row5'''
+        expected = '''\
+row0
+row1
+row2
+row3
+row4
+row5'''
+        cli.slicer()
+        self.assertEqual(cli.data_in, expected)
+
+    def test_slice_positive_str(self):
+        cli = JcCli()
+        cli.slice_start = 1
+        cli.slice_end = 5
+        cli.data_in = '''\
+row0
+row1
+row2
+row3
+row4
+row5'''
+        expected = '''\
+row1
+row2
+row3
+row4'''
+        cli.slicer()
+        self.assertEqual(cli.data_in, expected)
+
+    def test_slice_negative_str(self):
+        cli = JcCli()
+        cli.slice_start = 1
+        cli.slice_end = -1
+        cli.data_in = '''\
+row0
+row1
+row2
+row3
+row4
+row5'''
+        expected = '''\
+row1
+row2
+row3
+row4'''
+        cli.slicer()
+        self.assertEqual(cli.data_in, expected)
+
+    def test_slice_none_iter(self):
+        cli = JcCli()
+        cli.slice_start = None
+        cli.slice_end = None
+        cli.data_in = [
+            'row0',
+            'row1',
+            'row2',
+            'row3',
+            'row4',
+            'row5'
+        ]
+        expected = [
+            'row0',
+            'row1',
+            'row2',
+            'row3',
+            'row4',
+            'row5'
+        ]
+        cli.slicer()
+        self.assertEqual(cli.data_in, expected)
+
+    def test_slice_positive_iter(self):
+        cli = JcCli()
+        cli.slice_start = 1
+        cli.slice_end = 5
+        cli.data_in = [
+            'row0',
+            'row1',
+            'row2',
+            'row3',
+            'row4',
+            'row5'
+        ]
+        expected = [
+            'row1',
+            'row2',
+            'row3',
+            'row4'
+        ]
+        cli.slicer()
+        self.assertEqual(list(cli.data_in), expected)
+
+    def test_slice_negative_iter(self):
+        cli = JcCli()
+        cli.slice_start = 1
+        cli.slice_end = -1
+        cli.data_in = [
+            'row0',
+            'row1',
+            'row2',
+            'row3',
+            'row4',
+            'row5'
+        ]
+        expected = [
+            'row1',
+            'row2',
+            'row3',
+            'row4'
+        ]
+        cli.slicer()
+        self.assertEqual(list(cli.data_in), expected)
+
 if __name__ == '__main__':
     unittest.main()
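The new slicer tests above exercise plain Python slice semantics applied to the input lines: `slice_start=1, slice_end=5` and `slice_start=1, slice_end=-1` both keep rows 1 through 4 of a six-line input, and `None` bounds leave the data unchanged. A minimal stand-alone sketch of that behavior, using a hypothetical `slice_lines` helper rather than jc's actual `slicer()` implementation:

```python
from typing import Optional


def slice_lines(text: str, start: Optional[int], end: Optional[int]) -> str:
    """Apply Python slice semantics to the lines of a string."""
    lines = text.splitlines()
    return '\n'.join(lines[slice(start, end)])


sample = 'row0\nrow1\nrow2\nrow3\nrow4\nrow5'
assert slice_lines(sample, 1, 5) == 'row1\nrow2\nrow3\nrow4'    # positive end index
assert slice_lines(sample, 1, -1) == 'row1\nrow2\nrow3\nrow4'   # negative end index
assert slice_lines(sample, None, None) == sample                # no slicing
```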

tests/test_ssh_conf.py (new file, 95 lines)
@@ -0,0 +1,95 @@
+import os
+import unittest
+import json
+from typing import Dict
+from jc.parsers.ssh_conf import parse
+
+THIS_DIR = os.path.dirname(os.path.abspath(__file__))
+
+
+class MyTests(unittest.TestCase):
+    f_in: Dict = {}
+    f_json: Dict = {}
+
+    @classmethod
+    def setUpClass(cls):
+        fixtures = {
+            'ssh_config1': (
+                'fixtures/generic/ssh_config1',
+                'fixtures/generic/ssh_config1.json'),
+            'ssh_config2': (
+                'fixtures/generic/ssh_config2',
+                'fixtures/generic/ssh_config2.json'),
+            'ssh_config3': (
+                'fixtures/generic/ssh_config3',
+                'fixtures/generic/ssh_config3.json'),
+            'ssh_config4': (
+                'fixtures/generic/ssh_config4',
+                'fixtures/generic/ssh_config4.json'),
+            'ssh_config5': (
+                'fixtures/generic/ssh_config5',
+                'fixtures/generic/ssh_config5.json')
+        }
+
+        for file, filepaths in fixtures.items():
+            with open(os.path.join(THIS_DIR, filepaths[0]), 'r', encoding='utf-8') as a, \
+                 open(os.path.join(THIS_DIR, filepaths[1]), 'r', encoding='utf-8') as b:
+                cls.f_in[file] = a.read()
+                cls.f_json[file] = json.loads(b.read())
+
+
+    def test_ssh_nodata(self):
+        """
+        Test 'ssh' with no data
+        """
+        self.assertEqual(parse('', quiet=True), [])
+
+
+    def test_ssh_config1(self):
+        """
+        Test 'ssh' config 1
+        """
+        self.assertEqual(
+            parse(self.f_in['ssh_config1'], quiet=True),
+            self.f_json['ssh_config1']
+        )
+
+    def test_ssh_config2(self):
+        """
+        Test 'ssh' config 2
+        """
+        self.assertEqual(
+            parse(self.f_in['ssh_config2'], quiet=True),
+            self.f_json['ssh_config2']
+        )
+
+    def test_ssh_config3(self):
+        """
+        Test 'ssh' config 3
+        """
+        self.assertEqual(
+            parse(self.f_in['ssh_config3'], quiet=True),
+            self.f_json['ssh_config3']
+        )
+
+    def test_ssh_config4(self):
+        """
+        Test 'ssh' config 4
+        """
+        self.assertEqual(
+            parse(self.f_in['ssh_config4'], quiet=True),
+            self.f_json['ssh_config4']
+        )
+
+    def test_ssh_config5(self):
+        """
+        Test 'ssh' config 5
+        """
+        self.assertEqual(
+            parse(self.f_in['ssh_config5'], quiet=True),
+            self.f_json['ssh_config5']
+        )
+
+
+if __name__ == '__main__':
+    unittest.main()
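As the new test module shows, the ssh configuration file parser is importable as `jc.parsers.ssh_conf` and returns a list of dictionaries (empty input yields `[]`). A short usage sketch; the file path here is only illustrative:

```python
import json
from jc.parsers.ssh_conf import parse

# Illustrative path: any ssh client configuration file can be fed in as text.
with open('/etc/ssh/ssh_config', 'r', encoding='utf-8') as f:
    result = parse(f.read(), quiet=True)  # list of dictionaries

print(json.dumps(result, indent=2))
```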

tests/test_ver.py (new file, 45 lines)
@@ -0,0 +1,45 @@
+import unittest
+from jc.parsers.ver import parse
+
+
+class MyTests(unittest.TestCase):
+
+    def test_ver_nodata(self):
+        """
+        Test 'ver' with no data
+        """
+        self.assertEqual(parse('', quiet=True), {})
+
+
+    def test_ver_strict_strings(self):
+        strict_strings = {
+            '0.4': {'major': 0, 'minor': 4, 'patch': 0, 'prerelease': None, 'prerelease_num': None, 'strict': True},
+            '0.4.0': {'major': 0, 'minor': 4, 'patch': 0, 'prerelease': None, 'prerelease_num': None, 'strict': True},
+            '0.4.1': {'major': 0, 'minor': 4, 'patch': 1, 'prerelease': None, 'prerelease_num': None, 'strict': True},
+            '0.5a1': {'major': 0, 'minor': 5, 'patch': 0, 'prerelease': 'a', 'prerelease_num': 1, 'strict': True},
+            '0.5b3': {'major': 0, 'minor': 5, 'patch': 0, 'prerelease': 'b', 'prerelease_num': 3, 'strict': True},
+            '0.5': {'major': 0, 'minor': 5, 'patch': 0, 'prerelease': None, 'prerelease_num': None, 'strict': True},
+            '0.9.6': {'major': 0, 'minor': 9, 'patch': 6, 'prerelease': None, 'prerelease_num': None, 'strict': True},
+            '1.0': {'major': 1, 'minor': 0, 'patch': 0, 'prerelease': None, 'prerelease_num': None, 'strict': True},
+            '1.0.4a3': {'major': 1, 'minor': 0, 'patch': 4, 'prerelease': 'a', 'prerelease_num': 3, 'strict': True},
+            '1.0.4b1': {'major': 1, 'minor': 0, 'patch': 4, 'prerelease': 'b', 'prerelease_num': 1, 'strict': True},
+            '1.0.4': {'major': 1, 'minor': 0, 'patch': 4, 'prerelease': None, 'prerelease_num': None, 'strict': True}
+        }
+
+        for ver_string, expected in strict_strings.items():
+            self.assertEqual(parse(ver_string, quiet=True), expected)
+
+    def test_ver_loose_strings(self):
+        loose_strings = {
+            '1': {'components': [1], 'strict': False},
+            '2.7.2.2': {'components': [2, 7, 2, 2], 'strict': False},
+            '1.3.a4': {'components': [1, 3, 'a', 4], 'strict': False},
+            '1.3pl1': {'components': [1, 3, 'pl', 1], 'strict': False},
+            '1.3c4': {'components': [1, 3, 'c', 4], 'strict': False}
+        }
+
+        for ver_string, expected in loose_strings.items():
+            self.assertEqual(parse(ver_string, quiet=True), expected)
+
+if __name__ == '__main__':
+    unittest.main()
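The expected values in the strict and loose test tables appear to mirror the tokenization of the old `distutils` version classes: strict strings split into major/minor/patch plus an optional `a`/`b` prerelease, while loose strings split on dots and on letter/digit boundaries. A small sketch of the loose tokenization only, as an independent illustration rather than jc's parser code:

```python
import re

# Split on dots and letter/digit boundaries, keeping numeric parts as ints.
_component_re = re.compile(r'(\d+|[a-z]+|\.)')


def loose_components(version: str) -> list:
    parts = [p for p in _component_re.split(version) if p and p != '.']
    return [int(p) if p.isdigit() else p for p in parts]


assert loose_components('2.7.2.2') == [2, 7, 2, 2]
assert loose_components('1.3pl1') == [1, 3, 'pl', 1]
assert loose_components('1.3.a4') == [1, 3, 'a', 4]
```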

@@ -7,12 +7,16 @@ from jc.parsers.xrandr import (
     _parse_screen,
     _parse_device,
     _parse_mode,
+    _parse_model,
     _device_pattern,
     _screen_pattern,
     _mode_pattern,
     _frequencies_pattern,
+    _edid_head_pattern,
+    _edid_line_pattern,
     parse,
     Mode,
+    Model,
     Device,
     Screen,
 )
@@ -32,6 +36,8 @@ class XrandrTests(unittest.TestCase):
             "eDP1 connected primary 1920x1080+0+0 (normal left inverted right x axis y axis) 310mm x 170mm",
             "eDP-1 connected primary 1920x1080+0+0 (normal left inverted right x axis y axis) 309mm x 174mm",
             "HDMI-0 connected 2160x3840+3840+0 right (normal left inverted right x axis y axis) 609mm x 349mm",
+            "LVDS-1 connected primary 1366x768+0+0 normal X axis (normal left inverted right x axis y axis) 609mm x 349mm",
+            "VGA-1 connected 1280x1024+0+0 left X and Y axis (normal left inverted right x axis y axis) 609mm x 349mm",
         ]
         for device in devices:
             self.assertIsNotNone(re.match(_device_pattern, device))
@@ -57,6 +63,27 @@ class XrandrTests(unittest.TestCase):
             if match:
                 rest = match.groupdict()["rest"]
                 self.assertIsNotNone(re.match(_frequencies_pattern, rest))
+
+        edid_lines = [
+            " EDID: ",
+            " 00ffffffffffff000469d41901010101 ",
+            " 2011010308291a78ea8585a6574a9c26 ",
+            " 125054bfef80714f8100810f81408180 ",
+            " 9500950f01019a29a0d0518422305098 ",
+            " 360098ff1000001c000000fd00374b1e ",
+            " 530f000a202020202020000000fc0041 ",
+            " 535553205657313933530a20000000ff ",
+            " 0037384c383032313130370a20200077 "
+        ]
+
+        for i in range(len(edid_lines)):
+            line = edid_lines[i]
+            if i == 0:
+                match = re.match(_edid_head_pattern, line)
+            else:
+                match = re.match(_edid_line_pattern, line)
+
+            self.assertIsNotNone(match)

     def test_screens(self):
         sample = "Screen 0: minimum 8 x 8, current 1920 x 1080, maximum 32767 x 32767"
@@ -118,6 +145,30 @@ class XrandrTests(unittest.TestCase):
             59.94, device["associated_modes"][12]["frequencies"][4]["frequency"]
         )

+    def test_device_with_reflect(self):
+        sample = "VGA-1 connected primary 1920x1080+0+0 left X and Y axis (normal left inverted right x axis y axis) 310mm x 170mm"
+        actual: Optional[Device] = _parse_device([sample])
+
+        expected = {
+            "device_name": "VGA-1",
+            "is_connected": True,
+            "is_primary": True,
+            "resolution_width": 1920,
+            "resolution_height": 1080,
+            "offset_width": 0,
+            "offset_height": 0,
+            "dimension_width": 310,
+            "dimension_height": 170,
+            "rotation": "left",
+            "reflection": "X and Y axis",
+        }
+
+        self.assertIsNotNone(actual)
+
+        if actual:
+            for k, v in expected.items():
+                self.assertEqual(v, actual[k], f"Devices regex failed on {k}")
+
     def test_mode(self):
         sample_1 = "1920x1080 60.03*+ 59.93"
         expected = {
@@ -170,14 +221,98 @@ class XrandrTests(unittest.TestCase):
             txt = f.read()
         actual = parse(txt, quiet=True)

-        with open("tests/fixtures/generic/xrandr_simple.json", "w") as f:
-            json.dump(actual, f, indent=True)
-
         self.assertEqual(1, len(actual["screens"]))
         self.assertEqual(0, len(actual["unassociated_devices"]))
         self.assertEqual(
             2, len(actual["screens"][0]["associated_device"]["associated_modes"])
         )

+        with open("tests/fixtures/generic/xrandr_properties.out", "r") as f:
+            txt = f.read()
+        actual = parse(txt, quiet=True)
+
+        self.assertEqual(1, len(actual["screens"]))
+        self.assertEqual(3, len(actual["unassociated_devices"]))
+        self.assertEqual(
+            29, len(actual["screens"][0]["associated_device"]["associated_modes"])
+        )
+
+    def test_infinite_loop_fix(self):
+        with open("tests/fixtures/generic/xrandr_fix_spaces.out", "r") as f:
+            txt = f.read()
+        actual = parse(txt, quiet=True)
+
+        with open("tests/fixtures/generic/xrandr_fix_spaces.json", "r") as f:
+            json_dict = json.loads(f.read())
+
+        self.assertEqual(actual, json_dict)
+
+    def test_is_current_fix(self):
+        with open("tests/fixtures/generic/xrandr_is_current_fix.out", "r") as f:
+            txt = f.read()
+        actual = parse(txt, quiet=True)
+
+        with open("tests/fixtures/generic/xrandr_is_current_fix.json", "r") as f:
+            json_dict = json.loads(f.read())
+
+        self.assertEqual(actual, json_dict)
+
+    def test_model(self):
+        asus_edid = [
+            " EDID: ",
+            " 00ffffffffffff000469d41901010101",
+            " 2011010308291a78ea8585a6574a9c26",
+            " 125054bfef80714f8100810f81408180",
+            " 9500950f01019a29a0d0518422305098",
+            " 360098ff1000001c000000fd00374b1e",
+            " 530f000a202020202020000000fc0041",
+            " 535553205657313933530a20000000ff",
+            " 0037384c383032313130370a20200077"
+        ]
+        asus_edid.reverse()
+
+        expected = {
+            "name": "ASUS VW193S",
+            "product_id": "6612",
+            "serial_number": "78L8021107",
+        }
+
+        actual: Optional[Model] = _parse_model(asus_edid)
+        self.assertIsNotNone(actual)
+
+        if actual:
+            for k, v in expected.items():
+                self.assertEqual(v, actual[k], f"mode regex failed on {k}")
+
+        generic_edid = [
+            " EDID: ",
+            " 00ffffffffffff004ca3523100000000",
+            " 0014010380221378eac8959e57549226",
+            " 0f505400000001010101010101010101",
+            " 010101010101381d56d4500016303020",
+            " 250058c2100000190000000f00000000",
+            " 000000000025d9066a00000000fe0053",
+            " 414d53554e470a204ca34154000000fe",
+            " 004c544e313536415432343430310018"
+        ]
+        generic_edid.reverse()
+
+        expected = {
+            "name": "Generic",
+            "product_id": "12626",
+            "serial_number": "0",
+        }
+
+        actual: Optional[Model] = _parse_model(generic_edid)
+        self.assertIsNotNone(actual)
+
+        if actual:
+            for k, v in expected.items():
+                self.assertEqual(v, actual[k], f"mode regex failed on {k}")
+
+        empty_edid = [""]
+        actual: Optional[Model] = _parse_model(empty_edid)
+        self.assertIsNone(actual)
+
 if __name__ == '__main__':
     unittest.main()

tests/test_zpool_iostat.py (new file, 59 lines)
@@ -0,0 +1,59 @@
+import os
+import unittest
+import json
+from typing import Dict
+from jc.parsers.zpool_iostat import parse
+
+THIS_DIR = os.path.dirname(os.path.abspath(__file__))
+
+
+class MyTests(unittest.TestCase):
+    f_in: Dict = {}
+    f_json: Dict = {}
+
+    @classmethod
+    def setUpClass(cls):
+        fixtures = {
+            'zpool_iostat': (
+                'fixtures/generic/zpool-iostat.out',
+                'fixtures/generic/zpool-iostat.json'),
+            'zpool_iostat_v': (
+                'fixtures/generic/zpool-iostat-v.out',
+                'fixtures/generic/zpool-iostat-v.json')
+        }
+
+        for file, filepaths in fixtures.items():
+            with open(os.path.join(THIS_DIR, filepaths[0]), 'r', encoding='utf-8') as a, \
+                 open(os.path.join(THIS_DIR, filepaths[1]), 'r', encoding='utf-8') as b:
+                cls.f_in[file] = a.read()
+                cls.f_json[file] = json.loads(b.read())
+
+
+    def test_zpool_iostat_nodata(self):
+        """
+        Test 'zpool iostat' with no data
+        """
+        self.assertEqual(parse('', quiet=True), [])
+
+
+    def test_zpool_iostat(self):
+        """
+        Test 'zpool iostat'
+        """
+        self.assertEqual(
+            parse(self.f_in['zpool_iostat'], quiet=True),
+            self.f_json['zpool_iostat']
+        )
+
+    def test_zpool_iostat_v(self):
+        """
+        Test 'zpool iostat -v'
+        """
+        self.assertEqual(
+            parse(self.f_in['zpool_iostat_v'], quiet=True),
+            self.f_json['zpool_iostat_v']
+        )
+
+
+if __name__ == '__main__':
+    unittest.main()

tests/test_zpool_status.py (new file, 71 lines)
@@ -0,0 +1,71 @@
+import os
+import unittest
+import json
+from typing import Dict
+from jc.parsers.zpool_status import parse
+
+THIS_DIR = os.path.dirname(os.path.abspath(__file__))
+
+
+class MyTests(unittest.TestCase):
+    f_in: Dict = {}
+    f_json: Dict = {}
+
+    @classmethod
+    def setUpClass(cls):
+        fixtures = {
+            'zpool_status': (
+                'fixtures/generic/zpool-status-v.out',
+                'fixtures/generic/zpool-status-v.json'),
+            'zpool_status2': (
+                'fixtures/generic/zpool-status-v2.out',
+                'fixtures/generic/zpool-status-v2.json'),
+            'zpool_status3': (
+                'fixtures/generic/zpool-status-v3.out',
+                'fixtures/generic/zpool-status-v3.json')
+        }
+
+        for file, filepaths in fixtures.items():
+            with open(os.path.join(THIS_DIR, filepaths[0]), 'r', encoding='utf-8') as a, \
+                 open(os.path.join(THIS_DIR, filepaths[1]), 'r', encoding='utf-8') as b:
+                cls.f_in[file] = a.read()
+                cls.f_json[file] = json.loads(b.read())
+
+
+    def test_zpool_status_nodata(self):
+        """
+        Test 'zpool_status' with no data
+        """
+        self.assertEqual(parse('', quiet=True), [])
+
+
+    def test_zpool_status_v(self):
+        """
+        Test 'zpool status -v'
+        """
+        self.assertEqual(
+            parse(self.f_in['zpool_status'], quiet=True),
+            self.f_json['zpool_status']
+        )
+
+    def test_zpool_status_v_2(self):
+        """
+        Test 'zpool status -v' #2
+        """
+        self.assertEqual(
+            parse(self.f_in['zpool_status2'], quiet=True),
+            self.f_json['zpool_status2']
+        )
+
+    def test_zpool_status_v_3(self):
+        """
+        Test 'zpool status -v' #3
+        """
+        self.assertEqual(
+            parse(self.f_in['zpool_status3'], quiet=True),
+            self.f_json['zpool_status3']
+        )
+
+
+if __name__ == '__main__':
+    unittest.main()