CHANGELOG
@ -1,5 +1,19 @@
jc changelog

20221024 v1.22.1
- add `udevadm` command parser
- add `lspci` command parser
- add `pci.ids` file parser
- fix `proc-pid-stat` parser for command names with spaces and newlines
- enhance `ip-address` parser to add `ip_split` field
- rename `iso-datetime` parser to `datetime-iso`. A deprecation warning will
  display until `iso-datetime` is removed in a future version.
- refactor cli module
- optimize performance of calculated timestamps
- add more type annotations
- add support for deprecating parsers
- move jc-web demo site from heroku to render.com

20220926 v1.22.0
- Add /proc file parsers for linux. Support for the following files:
  `/proc/buddyinfo`
@ -1707,6 +1707,12 @@ echo 192.168.2.10/24 | jc --ip-address -p
  "ip": "192.168.2.10",
  "ip_compressed": "192.168.2.10",
  "ip_exploded": "192.168.2.10",
  "ip_split": [
    "192",
    "168",
    "2",
    "10"
  ],
  "scope_id": null,
  "ipv4_mapped": null,
  "six_to_four": null,
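The new `ip_split` field is also easy to reach from Python. A minimal sketch, using the same CIDR string as above:

```python
import jc

# Minimal sketch: `ip_split` (added in v1.22.1) holds the address broken
# into its dotted-quad octets (or hextets for IPv6) as strings.
addr = jc.parse('ip_address', '192.168.2.10/24')
print(addr['ip_split'])   # ['192', '168', '2', '10']
```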
@ -1819,7 +1825,7 @@ iptables --line-numbers -v -L -t nat | jc --iptables -p # or: jc -p ip
```
### ISO Datetime string
```bash
echo "2022-07-20T14:52:45Z" | jc --iso-datetime -p
echo "2022-07-20T14:52:45Z" | jc --datetime-iso -p
```
```json
{
@ -1,2 +1,3 @@
include jc/py.typed
include man/jc.1
include CHANGELOG
@ -3,7 +3,7 @@

> Check out the `jc` Python [package documentation](https://github.com/kellyjonbrazil/jc/tree/master/docs) for developers

> Try the `jc` [web demo](https://jc-web-demo.herokuapp.com/)
> Try the `jc` [web demo](https://jc-web.onrender.com/)

> JC is [now available](https://galaxy.ansible.com/community/general) as an
Ansible filter plugin in the `community.general` collection. See this
@ -170,6 +170,7 @@ option.
|
||||
| ` --csv` | CSV file parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/csv) |
|
||||
| ` --csv-s` | CSV file streaming parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/csv_s) |
|
||||
| ` --date` | `date` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/date) |
|
||||
| ` --datetime-iso` | ISO 8601 Datetime string parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/datetime_iso) |
|
||||
| ` --df` | `df` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/df) |
|
||||
| ` --dig` | `dig` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/dig) |
|
||||
| ` --dir` | `dir` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/dir) |
|
||||
@ -199,7 +200,6 @@ option.
|
||||
| ` --iostat-s` | `iostat` command streaming parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/iostat_s) |
|
||||
| ` --ip-address` | IPv4 and IPv6 Address string parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/ip_address) |
|
||||
| ` --iptables` | `iptables` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/iptables) |
|
||||
| ` --iso-datetime` | ISO 8601 Datetime string parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/iso_datetime) |
|
||||
| ` --iw-scan` | `iw dev [device] scan` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/iw_scan) |
|
||||
| ` --jar-manifest` | Java MANIFEST.MF file parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/jar_manifest) |
|
||||
| ` --jobs` | `jobs` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/jobs) |
|
||||
@ -211,6 +211,7 @@ option.
|
||||
| ` --lsblk` | `lsblk` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/lsblk) |
|
||||
| ` --lsmod` | `lsmod` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/lsmod) |
|
||||
| ` --lsof` | `lsof` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/lsof) |
|
||||
| ` --lspci` | `lspci -mmv` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/lspci) |
|
||||
| ` --lsusb` | `lsusb` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/lsusb) |
|
||||
| ` --m3u` | M3U and M3U8 file parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/m3u) |
|
||||
| ` --mdadm` | `mdadm` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/mdadm) |
|
||||
@ -221,6 +222,7 @@ option.
|
||||
| ` --nmcli` | `nmcli` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/nmcli) |
|
||||
| ` --ntpq` | `ntpq -p` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/ntpq) |
|
||||
| ` --passwd` | `/etc/passwd` file parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/passwd) |
|
||||
| ` --pci-ids` | `pci.ids` file parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/pci_ids) |
|
||||
| ` --pidstat` | `pidstat -H` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/pidstat) |
|
||||
| ` --pidstat-s` | `pidstat -H` command streaming parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/pidstat_s) |
|
||||
| ` --ping` | `ping` and `ping6` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/ping) |
|
||||
@ -257,6 +259,7 @@ option.
|
||||
| ` --top-s` | `top -b` command streaming parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/top_s) |
|
||||
| ` --tracepath` | `tracepath` and `tracepath6` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/tracepath) |
|
||||
| ` --traceroute` | `traceroute` and `traceroute6` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/traceroute) |
|
||||
| ` --udevadm` | `udevadm info` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/udevadm) |
|
||||
| ` --ufw` | `ufw status` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/ufw) |
|
||||
| ` --ufw-appinfo` | `ufw app info [application]` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/ufw_appinfo) |
|
||||
| ` --uname` | `uname -a` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/uname) |
|
||||
|
@ -3,8 +3,8 @@ _jc()
|
||||
local cur prev words cword jc_commands jc_parsers jc_options \
|
||||
jc_about_options jc_about_mod_options jc_help_options jc_special_options
|
||||
|
||||
jc_commands=(acpi airport arp blkid chage cksum crontab date df dig dmidecode dpkg du env file finger free git gpg hciconfig id ifconfig iostat iptables iw jobs last lastb ls lsblk lsmod lsof lsusb md5 md5sum mdadm mount mpstat netstat nmcli ntpq pidstat ping ping6 pip pip3 postconf printenv ps route rpm rsync sfdisk sha1sum sha224sum sha256sum sha384sum sha512sum shasum ss stat sum sysctl systemctl systeminfo timedatectl top tracepath tracepath6 traceroute traceroute6 ufw uname update-alternatives upower uptime vdir vmstat w wc who xrandr zipinfo)
|
||||
jc_parsers=(--acpi --airport --airport-s --arp --asciitable --asciitable-m --blkid --cef --cef-s --chage --cksum --crontab --crontab-u --csv --csv-s --date --df --dig --dir --dmidecode --dpkg-l --du --email-address --env --file --finger --free --fstab --git-log --git-log-s --gpg --group --gshadow --hash --hashsum --hciconfig --history --hosts --id --ifconfig --ini --iostat --iostat-s --ip-address --iptables --iso-datetime --iw-scan --jar-manifest --jobs --jwt --kv --last --ls --ls-s --lsblk --lsmod --lsof --lsusb --m3u --mdadm --mount --mpstat --mpstat-s --netstat --nmcli --ntpq --passwd --pidstat --pidstat-s --ping --ping-s --pip-list --pip-show --plist --postconf --proc --proc-buddyinfo --proc-consoles --proc-cpuinfo --proc-crypto --proc-devices --proc-diskstats --proc-filesystems --proc-interrupts --proc-iomem --proc-ioports --proc-loadavg --proc-locks --proc-meminfo --proc-modules --proc-mtrr --proc-pagetypeinfo --proc-partitions --proc-slabinfo --proc-softirqs --proc-stat --proc-swaps --proc-uptime --proc-version --proc-vmallocinfo --proc-vmstat --proc-zoneinfo --proc-driver-rtc --proc-net-arp --proc-net-dev --proc-net-dev-mcast --proc-net-if-inet6 --proc-net-igmp --proc-net-igmp6 --proc-net-ipv6-route --proc-net-netlink --proc-net-netstat --proc-net-packet --proc-net-protocols --proc-net-route --proc-net-unix --proc-pid-fdinfo --proc-pid-io --proc-pid-maps --proc-pid-mountinfo --proc-pid-numa-maps --proc-pid-smaps --proc-pid-stat --proc-pid-statm --proc-pid-status --ps --route --rpm-qi --rsync --rsync-s --sfdisk --shadow --ss --stat --stat-s --sysctl --syslog --syslog-s --syslog-bsd --syslog-bsd-s --systemctl --systemctl-lj --systemctl-ls --systemctl-luf --systeminfo --time --timedatectl --timestamp --top --top-s --tracepath --traceroute --ufw --ufw-appinfo --uname --update-alt-gs --update-alt-q --upower --uptime --url --vmstat --vmstat-s --w --wc --who --x509-cert --xml --xrandr --yaml --zipinfo)
|
||||
jc_commands=(acpi airport arp blkid chage cksum crontab date df dig dmidecode dpkg du env file finger free git gpg hciconfig id ifconfig iostat iptables iw jobs last lastb ls lsblk lsmod lsof lspci lsusb md5 md5sum mdadm mount mpstat netstat nmcli ntpq pidstat ping ping6 pip pip3 postconf printenv ps route rpm rsync sfdisk sha1sum sha224sum sha256sum sha384sum sha512sum shasum ss stat sum sysctl systemctl systeminfo timedatectl top tracepath tracepath6 traceroute traceroute6 udevadm ufw uname update-alternatives upower uptime vdir vmstat w wc who xrandr zipinfo)
|
||||
jc_parsers=(--acpi --airport --airport-s --arp --asciitable --asciitable-m --blkid --cef --cef-s --chage --cksum --crontab --crontab-u --csv --csv-s --date --datetime-iso --df --dig --dir --dmidecode --dpkg-l --du --email-address --env --file --finger --free --fstab --git-log --git-log-s --gpg --group --gshadow --hash --hashsum --hciconfig --history --hosts --id --ifconfig --ini --iostat --iostat-s --ip-address --iptables --iw-scan --jar-manifest --jobs --jwt --kv --last --ls --ls-s --lsblk --lsmod --lsof --lspci --lsusb --m3u --mdadm --mount --mpstat --mpstat-s --netstat --nmcli --ntpq --passwd --pci-ids --pidstat --pidstat-s --ping --ping-s --pip-list --pip-show --plist --postconf --proc --proc-buddyinfo --proc-consoles --proc-cpuinfo --proc-crypto --proc-devices --proc-diskstats --proc-filesystems --proc-interrupts --proc-iomem --proc-ioports --proc-loadavg --proc-locks --proc-meminfo --proc-modules --proc-mtrr --proc-pagetypeinfo --proc-partitions --proc-slabinfo --proc-softirqs --proc-stat --proc-swaps --proc-uptime --proc-version --proc-vmallocinfo --proc-vmstat --proc-zoneinfo --proc-driver-rtc --proc-net-arp --proc-net-dev --proc-net-dev-mcast --proc-net-if-inet6 --proc-net-igmp --proc-net-igmp6 --proc-net-ipv6-route --proc-net-netlink --proc-net-netstat --proc-net-packet --proc-net-protocols --proc-net-route --proc-net-unix --proc-pid-fdinfo --proc-pid-io --proc-pid-maps --proc-pid-mountinfo --proc-pid-numa-maps --proc-pid-smaps --proc-pid-stat --proc-pid-statm --proc-pid-status --ps --route --rpm-qi --rsync --rsync-s --sfdisk --shadow --ss --stat --stat-s --sysctl --syslog --syslog-s --syslog-bsd --syslog-bsd-s --systemctl --systemctl-lj --systemctl-ls --systemctl-luf --systeminfo --time --timedatectl --timestamp --top --top-s --tracepath --traceroute --udevadm --ufw --ufw-appinfo --uname --update-alt-gs --update-alt-q --upower --uptime --url --vmstat --vmstat-s --w --wc --who --x509-cert --xml --xrandr --yaml --zipinfo)
|
||||
jc_options=(--force-color -C --debug -d --monochrome -m --meta-out -M --pretty -p --quiet -q --raw -r --unbuffer -u --yaml-out -y)
|
||||
jc_about_options=(--about -a)
|
||||
jc_about_mod_options=(--pretty -p --yaml-out -y --monochrome -m --force-color -C)
|
||||
|
@ -9,7 +9,7 @@ _jc() {
|
||||
jc_help_options jc_help_options_describe \
|
||||
jc_special_options jc_special_options_describe
|
||||
|
||||
jc_commands=(acpi airport arp blkid chage cksum crontab date df dig dmidecode dpkg du env file finger free git gpg hciconfig id ifconfig iostat iptables iw jobs last lastb ls lsblk lsmod lsof lsusb md5 md5sum mdadm mount mpstat netstat nmcli ntpq pidstat ping ping6 pip pip3 postconf printenv ps route rpm rsync sfdisk sha1sum sha224sum sha256sum sha384sum sha512sum shasum ss stat sum sysctl systemctl systeminfo timedatectl top tracepath tracepath6 traceroute traceroute6 ufw uname update-alternatives upower uptime vdir vmstat w wc who xrandr zipinfo)
|
||||
jc_commands=(acpi airport arp blkid chage cksum crontab date df dig dmidecode dpkg du env file finger free git gpg hciconfig id ifconfig iostat iptables iw jobs last lastb ls lsblk lsmod lsof lspci lsusb md5 md5sum mdadm mount mpstat netstat nmcli ntpq pidstat ping ping6 pip pip3 postconf printenv ps route rpm rsync sfdisk sha1sum sha224sum sha256sum sha384sum sha512sum shasum ss stat sum sysctl systemctl systeminfo timedatectl top tracepath tracepath6 traceroute traceroute6 udevadm ufw uname update-alternatives upower uptime vdir vmstat w wc who xrandr zipinfo)
|
||||
jc_commands_describe=(
|
||||
'acpi:run "acpi" command with magic syntax.'
|
||||
'airport:run "airport" command with magic syntax.'
|
||||
@ -43,6 +43,7 @@ _jc() {
|
||||
'lsblk:run "lsblk" command with magic syntax.'
|
||||
'lsmod:run "lsmod" command with magic syntax.'
|
||||
'lsof:run "lsof" command with magic syntax.'
|
||||
'lspci:run "lspci" command with magic syntax.'
|
||||
'lsusb:run "lsusb" command with magic syntax.'
|
||||
'md5:run "md5" command with magic syntax.'
|
||||
'md5sum:run "md5sum" command with magic syntax.'
|
||||
@ -82,6 +83,7 @@ _jc() {
|
||||
'tracepath6:run "tracepath6" command with magic syntax.'
|
||||
'traceroute:run "traceroute" command with magic syntax.'
|
||||
'traceroute6:run "traceroute6" command with magic syntax.'
|
||||
'udevadm:run "udevadm" command with magic syntax.'
|
||||
'ufw:run "ufw" command with magic syntax.'
|
||||
'uname:run "uname" command with magic syntax.'
|
||||
'update-alternatives:run "update-alternatives" command with magic syntax.'
|
||||
@ -95,7 +97,7 @@ _jc() {
|
||||
'xrandr:run "xrandr" command with magic syntax.'
|
||||
'zipinfo:run "zipinfo" command with magic syntax.'
|
||||
)
|
||||
jc_parsers=(--acpi --airport --airport-s --arp --asciitable --asciitable-m --blkid --cef --cef-s --chage --cksum --crontab --crontab-u --csv --csv-s --date --df --dig --dir --dmidecode --dpkg-l --du --email-address --env --file --finger --free --fstab --git-log --git-log-s --gpg --group --gshadow --hash --hashsum --hciconfig --history --hosts --id --ifconfig --ini --iostat --iostat-s --ip-address --iptables --iso-datetime --iw-scan --jar-manifest --jobs --jwt --kv --last --ls --ls-s --lsblk --lsmod --lsof --lsusb --m3u --mdadm --mount --mpstat --mpstat-s --netstat --nmcli --ntpq --passwd --pidstat --pidstat-s --ping --ping-s --pip-list --pip-show --plist --postconf --proc --proc-buddyinfo --proc-consoles --proc-cpuinfo --proc-crypto --proc-devices --proc-diskstats --proc-filesystems --proc-interrupts --proc-iomem --proc-ioports --proc-loadavg --proc-locks --proc-meminfo --proc-modules --proc-mtrr --proc-pagetypeinfo --proc-partitions --proc-slabinfo --proc-softirqs --proc-stat --proc-swaps --proc-uptime --proc-version --proc-vmallocinfo --proc-vmstat --proc-zoneinfo --proc-driver-rtc --proc-net-arp --proc-net-dev --proc-net-dev-mcast --proc-net-if-inet6 --proc-net-igmp --proc-net-igmp6 --proc-net-ipv6-route --proc-net-netlink --proc-net-netstat --proc-net-packet --proc-net-protocols --proc-net-route --proc-net-unix --proc-pid-fdinfo --proc-pid-io --proc-pid-maps --proc-pid-mountinfo --proc-pid-numa-maps --proc-pid-smaps --proc-pid-stat --proc-pid-statm --proc-pid-status --ps --route --rpm-qi --rsync --rsync-s --sfdisk --shadow --ss --stat --stat-s --sysctl --syslog --syslog-s --syslog-bsd --syslog-bsd-s --systemctl --systemctl-lj --systemctl-ls --systemctl-luf --systeminfo --time --timedatectl --timestamp --top --top-s --tracepath --traceroute --ufw --ufw-appinfo --uname --update-alt-gs --update-alt-q --upower --uptime --url --vmstat --vmstat-s --w --wc --who --x509-cert --xml --xrandr --yaml --zipinfo)
|
||||
jc_parsers=(--acpi --airport --airport-s --arp --asciitable --asciitable-m --blkid --cef --cef-s --chage --cksum --crontab --crontab-u --csv --csv-s --date --datetime-iso --df --dig --dir --dmidecode --dpkg-l --du --email-address --env --file --finger --free --fstab --git-log --git-log-s --gpg --group --gshadow --hash --hashsum --hciconfig --history --hosts --id --ifconfig --ini --iostat --iostat-s --ip-address --iptables --iw-scan --jar-manifest --jobs --jwt --kv --last --ls --ls-s --lsblk --lsmod --lsof --lspci --lsusb --m3u --mdadm --mount --mpstat --mpstat-s --netstat --nmcli --ntpq --passwd --pci-ids --pidstat --pidstat-s --ping --ping-s --pip-list --pip-show --plist --postconf --proc --proc-buddyinfo --proc-consoles --proc-cpuinfo --proc-crypto --proc-devices --proc-diskstats --proc-filesystems --proc-interrupts --proc-iomem --proc-ioports --proc-loadavg --proc-locks --proc-meminfo --proc-modules --proc-mtrr --proc-pagetypeinfo --proc-partitions --proc-slabinfo --proc-softirqs --proc-stat --proc-swaps --proc-uptime --proc-version --proc-vmallocinfo --proc-vmstat --proc-zoneinfo --proc-driver-rtc --proc-net-arp --proc-net-dev --proc-net-dev-mcast --proc-net-if-inet6 --proc-net-igmp --proc-net-igmp6 --proc-net-ipv6-route --proc-net-netlink --proc-net-netstat --proc-net-packet --proc-net-protocols --proc-net-route --proc-net-unix --proc-pid-fdinfo --proc-pid-io --proc-pid-maps --proc-pid-mountinfo --proc-pid-numa-maps --proc-pid-smaps --proc-pid-stat --proc-pid-statm --proc-pid-status --ps --route --rpm-qi --rsync --rsync-s --sfdisk --shadow --ss --stat --stat-s --sysctl --syslog --syslog-s --syslog-bsd --syslog-bsd-s --systemctl --systemctl-lj --systemctl-ls --systemctl-luf --systeminfo --time --timedatectl --timestamp --top --top-s --tracepath --traceroute --udevadm --ufw --ufw-appinfo --uname --update-alt-gs --update-alt-q --upower --uptime --url --vmstat --vmstat-s --w --wc --who --x509-cert --xml --xrandr --yaml --zipinfo)
|
||||
jc_parsers_describe=(
|
||||
'--acpi:`acpi` command parser'
|
||||
'--airport:`airport -I` command parser'
|
||||
@ -113,6 +115,7 @@ _jc() {
|
||||
'--csv:CSV file parser'
|
||||
'--csv-s:CSV file streaming parser'
|
||||
'--date:`date` command parser'
|
||||
'--datetime-iso:ISO 8601 Datetime string parser'
|
||||
'--df:`df` command parser'
|
||||
'--dig:`dig` command parser'
|
||||
'--dir:`dir` command parser'
|
||||
@ -142,7 +145,6 @@ _jc() {
|
||||
'--iostat-s:`iostat` command streaming parser'
|
||||
'--ip-address:IPv4 and IPv6 Address string parser'
|
||||
'--iptables:`iptables` command parser'
|
||||
'--iso-datetime:ISO 8601 Datetime string parser'
|
||||
'--iw-scan:`iw dev [device] scan` command parser'
|
||||
'--jar-manifest:Java MANIFEST.MF file parser'
|
||||
'--jobs:`jobs` command parser'
|
||||
@ -154,6 +156,7 @@ _jc() {
|
||||
'--lsblk:`lsblk` command parser'
|
||||
'--lsmod:`lsmod` command parser'
|
||||
'--lsof:`lsof` command parser'
|
||||
'--lspci:`lspci -mmv` command parser'
|
||||
'--lsusb:`lsusb` command parser'
|
||||
'--m3u:M3U and M3U8 file parser'
|
||||
'--mdadm:`mdadm` command parser'
|
||||
@ -164,6 +167,7 @@ _jc() {
|
||||
'--nmcli:`nmcli` command parser'
|
||||
'--ntpq:`ntpq -p` command parser'
|
||||
'--passwd:`/etc/passwd` file parser'
|
||||
'--pci-ids:`pci.ids` file parser'
|
||||
'--pidstat:`pidstat -H` command parser'
|
||||
'--pidstat-s:`pidstat -H` command streaming parser'
|
||||
'--ping:`ping` and `ping6` command parser'
|
||||
@ -249,6 +253,7 @@ _jc() {
|
||||
'--top-s:`top -b` command streaming parser'
|
||||
'--tracepath:`tracepath` and `tracepath6` command parser'
|
||||
'--traceroute:`traceroute` and `traceroute6` command parser'
|
||||
'--udevadm:`udevadm info` command parser'
|
||||
'--ufw:`ufw status` command parser'
|
||||
'--ufw-appinfo:`ufw app info [application]` command parser'
|
||||
'--uname:`uname -a` command parser'
|
||||
|
docs/lib.md
@ -21,15 +21,18 @@ jc - JSON Convert lib module
|
||||
### parse
|
||||
|
||||
```python
|
||||
def parse(parser_mod_name: str,
|
||||
data: Union[str, bytes, Iterable[str]],
|
||||
quiet: bool = False,
|
||||
raw: bool = False,
|
||||
ignore_exceptions: bool = None,
|
||||
**kwargs) -> Union[Dict, List[Dict], Iterator[Dict]]
|
||||
def parse(
|
||||
parser_mod_name: Union[str, ModuleType],
|
||||
data: Union[str, bytes, Iterable[str]],
|
||||
quiet: bool = False,
|
||||
raw: bool = False,
|
||||
ignore_exceptions: bool = None,
|
||||
**kwargs
|
||||
) -> Union[JSONDictType, List[JSONDictType], Iterator[JSONDictType]]
|
||||
```
|
||||
|
||||
Parse the string data using the supplied parser module.
|
||||
Parse the data (string or bytes) using the supplied parser (string or
|
||||
module object).
|
||||
|
||||
This function provides a high-level API to simplify parser use. This
|
||||
function will call built-in parsers and custom plugin parsers.
|
||||
@ -53,6 +56,14 @@ Example (streaming parsers):
|
||||
|
||||
To get a list of available parser module names, use `parser_mod_list()`.
|
||||
|
||||
Alternatively, a parser module object can be supplied:
|
||||
|
||||
>>> import jc
|
||||
>>> import jc.parsers.date as jc_date
|
||||
>>> date_obj = jc.parse(jc_date, 'Tue Jan 18 10:23:07 PST 2022')
|
||||
>>> print(f'The year is: {date_obj["year"]}')
|
||||
The year is: 2022
|
||||
|
||||
You can also use the lower-level parser modules directly:
|
||||
|
||||
>>> import jc.parsers.date
|
||||
@ -73,11 +84,14 @@ parsers without this API:
|
||||
|
||||
Parameters:
|
||||
|
||||
parser_mod_name: (string) name of the parser module. This
|
||||
function will accept module_name,
|
||||
parser_mod_name: (string or name of the parser module. This
|
||||
Module) function will accept module_name,
|
||||
cli-name, and --argument-name
|
||||
variants of the module name.
|
||||
|
||||
A Module object can also be passed
|
||||
directly or via _get_parser()
|
||||
|
||||
data: (string or data to parse (string or bytes for
|
||||
bytes or standard parsers, iterable of
|
||||
iterable) strings for streaming parsers)
|
||||
@ -99,7 +113,8 @@ Returns:
|
||||
### parser\_mod\_list
|
||||
|
||||
```python
|
||||
def parser_mod_list() -> List[str]
|
||||
def parser_mod_list(show_hidden: bool = False,
|
||||
show_deprecated: bool = False) -> List[str]
|
||||
```
|
||||
|
||||
Returns a list of all available parser module names.
|
||||
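A quick illustration of the new keyword arguments (a minimal sketch, assuming jc 1.22.1 or later is installed):

```python
import jc

# Parsers marked hidden or deprecated in their metadata are left out of the
# list by default; the new keyword arguments opt back in.
visible = jc.parser_mod_list()
everything = jc.parser_mod_list(show_hidden=True, show_deprecated=True)

print(len(everything) - len(visible), 'hidden/deprecated parsers')
```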
@ -109,7 +124,8 @@ Returns a list of all available parser module names.
|
||||
### plugin\_parser\_mod\_list
|
||||
|
||||
```python
|
||||
def plugin_parser_mod_list() -> List[str]
|
||||
def plugin_parser_mod_list(show_hidden: bool = False,
|
||||
show_deprecated: bool = False) -> List[str]
|
||||
```
|
||||
|
||||
Returns a list of plugin parser module names. This function is a
|
||||
@ -120,7 +136,8 @@ subset of `parser_mod_list()`.
|
||||
### standard\_parser\_mod\_list
|
||||
|
||||
```python
|
||||
def standard_parser_mod_list() -> List[str]
|
||||
def standard_parser_mod_list(show_hidden: bool = False,
|
||||
show_deprecated: bool = False) -> List[str]
|
||||
```
|
||||
|
||||
Returns a list of standard parser module names. This function is a
|
||||
@ -132,7 +149,8 @@ parsers.
|
||||
### streaming\_parser\_mod\_list
|
||||
|
||||
```python
|
||||
def streaming_parser_mod_list() -> List[str]
|
||||
def streaming_parser_mod_list(show_hidden: bool = False,
|
||||
show_deprecated: bool = False) -> List[str]
|
||||
```
|
||||
|
||||
Returns a list of streaming parser module names. This function is a
|
||||
@ -143,7 +161,8 @@ subset of `parser_mod_list()`.
|
||||
### parser\_info
|
||||
|
||||
```python
|
||||
def parser_info(parser_mod_name: str, documentation: bool = False) -> Dict
|
||||
def parser_info(parser_mod_name: str,
|
||||
documentation: bool = False) -> ParserInfoType
|
||||
```
|
||||
|
||||
Returns a dictionary that includes the parser module metadata.
|
||||
@ -163,17 +182,21 @@ Parameters:
|
||||
|
||||
```python
|
||||
def all_parser_info(documentation: bool = False,
|
||||
show_hidden: bool = False) -> List[Dict]
|
||||
show_hidden: bool = False,
|
||||
show_deprecated: bool = False) -> List[ParserInfoType]
|
||||
```
|
||||
|
||||
Returns a list of dictionaries that includes metadata for all parser
|
||||
modules.
|
||||
modules. By default only non-hidden, non-deprecated parsers are
|
||||
returned.
|
||||
|
||||
Parameters:
|
||||
|
||||
documentation: (boolean) include parser docstrings if True
|
||||
show_hidden: (boolean) also show parsers marked as hidden
|
||||
in their info metadata.
|
||||
show_deprecated: (boolean) also show parsers marked as
|
||||
deprecated in their info metadata.
|
||||
|
||||
<a id="jc.lib.get_help"></a>
|
||||
|
||||
|
docs/parsers/datetime_iso.md (new file)
@ -0,0 +1,91 @@
|
||||
[Home](https://kellyjonbrazil.github.io/jc/)
|
||||
<a id="jc.parsers.datetime_iso"></a>
|
||||
|
||||
# jc.parsers.datetime\_iso
|
||||
|
||||
jc - JSON Convert ISO 8601 Datetime string parser
|
||||
|
||||
This parser supports standard ISO 8601 strings that include both date and
|
||||
time. If no timezone or offset information is available in the string, then
|
||||
UTC timezone is used.
|
||||
|
||||
Usage (cli):
|
||||
|
||||
$ echo "2022-07-20T14:52:45Z" | jc --iso-datetime
|
||||
|
||||
Usage (module):
|
||||
|
||||
import jc
|
||||
result = jc.parse('iso_datetime', iso_8601_string)
|
||||
|
||||
Schema:
|
||||
|
||||
{
|
||||
"year": integer,
|
||||
"month": string,
|
||||
"month_num": integer,
|
||||
"day": integer,
|
||||
"weekday": string,
|
||||
"weekday_num": integer,
|
||||
"hour": integer,
|
||||
"hour_24": integer,
|
||||
"minute": integer,
|
||||
"second": integer,
|
||||
"microsecond": integer,
|
||||
"period": string,
|
||||
"utc_offset": string,
|
||||
"day_of_year": integer,
|
||||
"week_of_year": integer,
|
||||
"iso": string,
|
||||
"timestamp": integer # [0]
|
||||
}
|
||||
|
||||
[0] timezone aware UNIX timestamp expressed in UTC
|
||||
|
||||
Examples:
|
||||
|
||||
$ echo "2022-07-20T14:52:45Z" | jc --iso-datetime -p
|
||||
{
|
||||
"year": 2022,
|
||||
"month": "Jul",
|
||||
"month_num": 7,
|
||||
"day": 20,
|
||||
"weekday": "Wed",
|
||||
"weekday_num": 3,
|
||||
"hour": 2,
|
||||
"hour_24": 14,
|
||||
"minute": 52,
|
||||
"second": 45,
|
||||
"microsecond": 0,
|
||||
"period": "PM",
|
||||
"utc_offset": "+0000",
|
||||
"day_of_year": 201,
|
||||
"week_of_year": 29,
|
||||
"iso": "2022-07-20T14:52:45+00:00",
|
||||
"timestamp": 1658328765
|
||||
}
|
||||
|
||||
<a id="jc.parsers.datetime_iso.parse"></a>
|
||||
|
||||
### parse
|
||||
|
||||
```python
|
||||
def parse(data, raw=False, quiet=False)
|
||||
```
|
||||
|
||||
Main text parsing function
|
||||
|
||||
Parameters:
|
||||
|
||||
data: (string) text data to parse
|
||||
raw: (boolean) unprocessed output if True
|
||||
quiet: (boolean) suppress warning messages if True
|
||||
|
||||
Returns:
|
||||
|
||||
Dictionary. Raw or processed structured data.
|
||||
|
||||
### Parser Information
|
||||
Compatibility: linux, aix, freebsd, darwin, win32, cygwin
|
||||
|
||||
Version 1.0 by Kelly Brazil (kellyjonbrazil@gmail.com)
|
@ -26,6 +26,9 @@ Schema:
|
||||
"ip": string,
|
||||
"ip_compressed": string,
|
||||
"ip_exploded": string,
|
||||
"ip_split": [
|
||||
string
|
||||
],
|
||||
"scope_id": string/null,
|
||||
"ipv4_mapped": string/null,
|
||||
"six_to_four": string/null,
|
||||
@ -83,6 +86,12 @@ Examples:
|
||||
"ip": "192.168.2.10",
|
||||
"ip_compressed": "192.168.2.10",
|
||||
"ip_exploded": "192.168.2.10",
|
||||
"ip_split": [
|
||||
"192",
|
||||
"168",
|
||||
"2",
|
||||
"10"
|
||||
],
|
||||
"scope_id": null,
|
||||
"ipv4_mapped": null,
|
||||
"six_to_four": null,
|
||||
@ -138,6 +147,12 @@ Examples:
|
||||
"ip": "192.168.2.10",
|
||||
"ip_compressed": "192.168.2.10",
|
||||
"ip_exploded": "192.168.2.10",
|
||||
"ip_split": [
|
||||
"192",
|
||||
"168",
|
||||
"2",
|
||||
"10"
|
||||
],
|
||||
"scope_id": null,
|
||||
"ipv4_mapped": null,
|
||||
"six_to_four": null,
|
||||
@ -191,14 +206,24 @@ Examples:
|
||||
"version": 6,
|
||||
"max_prefix_length": 128,
|
||||
"ip": "127:0:de::1",
|
||||
"ip_compressed": "127:0:de::1%128",
|
||||
"ip_compressed": "127:0:de::1",
|
||||
"ip_exploded": "0127:0000:00de:0000:0000:0000:0000:0001",
|
||||
"ip_split": [
|
||||
"0127",
|
||||
"0000",
|
||||
"00de",
|
||||
"0000",
|
||||
"0000",
|
||||
"0000",
|
||||
"0000",
|
||||
"0001"
|
||||
],
|
||||
"scope_id": "128",
|
||||
"ipv4_mapped": null,
|
||||
"six_to_four": null,
|
||||
"teredo_client": null,
|
||||
"teredo_server": null,
|
||||
"dns_ptr": "1.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.....0.7.2.1.0.ip6.arpa",
|
||||
"dns_ptr": "1.0.0.0.0.0...0.0.0.e.d.0.0.0.0.0.0.7.2.1.0.ip6.arpa",
|
||||
"network": "127:0:de::",
|
||||
"broadcast": "127:0:de::ffff:ffff",
|
||||
"hostmask": "::ffff:ffff",
|
||||
@ -231,13 +256,13 @@ Examples:
|
||||
"last_host": "01:27:00:00:00:de:00:00:00:00:00:00:ff:ff:ff:fe"
|
||||
},
|
||||
"bin": {
|
||||
"ip": "000000010010011100000000000000000000000011011110000000...",
|
||||
"network": "0000000100100111000000000000000000000000110111100...",
|
||||
"broadcast": "00000001001001110000000000000000000000001101111...",
|
||||
"hostmask": "000000000000000000000000000000000000000000000000...",
|
||||
"netmask": "1111111111111111111111111111111111111111111111111...",
|
||||
"first_host": "0000000100100111000000000000000000000000110111...",
|
||||
"last_host": "00000001001001110000000000000000000000001101111..."
|
||||
"ip": "0000000100100111000000000000000000000000110...000000000001",
|
||||
"network": "00000001001001110000000000000000000000...000000000000",
|
||||
"broadcast": "000000010010011100000000000000000000...111111111111",
|
||||
"hostmask": "0000000000000000000000000000000000000...111111111111",
|
||||
"netmask": "11111111111111111111111111111111111111...000000000000",
|
||||
"first_host": "00000001001001110000000000000000000...000000000001",
|
||||
"last_host": "000000010010011100000000000000000000...1111111111110"
|
||||
}
|
||||
}
|
||||
|
||||
@ -248,12 +273,22 @@ Examples:
|
||||
"ip": "127:0:de::1",
|
||||
"ip_compressed": "127:0:de::1",
|
||||
"ip_exploded": "0127:0000:00de:0000:0000:0000:0000:0001",
|
||||
"ip_split": [
|
||||
"0127",
|
||||
"0000",
|
||||
"00de",
|
||||
"0000",
|
||||
"0000",
|
||||
"0000",
|
||||
"0000",
|
||||
"0001"
|
||||
],
|
||||
"scope_id": null,
|
||||
"ipv4_mapped": null,
|
||||
"six_to_four": null,
|
||||
"teredo_client": null,
|
||||
"teredo_server": null,
|
||||
"dns_ptr": "1.0.0.0.0.0.0....0.0.0.e.d.0.0.0.0.0.0.7.2.1.0.ip6.arpa",
|
||||
"dns_ptr": "1.0.0.0.0.0....0.0.0.0.e.d.0.0.0.0.0.0.7.2.1.0.ip6.arpa",
|
||||
"network": "127:0:de::1",
|
||||
"broadcast": "127:0:de::1",
|
||||
"hostmask": "::",
|
||||
@ -286,13 +321,13 @@ Examples:
|
||||
"last_host": "01:27:00:00:00:de:00:00:00:00:00:00:00:00:00:01"
|
||||
},
|
||||
"bin": {
|
||||
"ip": "0000000100100111000000000000000000000000110111100000000...",
|
||||
"network": "00000001001001110000000000000000000000001101111000...",
|
||||
"broadcast": "000000010010011100000000000000000000000011011110...",
|
||||
"hostmask": "0000000000000000000000000000000000000000000000000...",
|
||||
"netmask": "11111111111111111111111111111111111111111111111111...",
|
||||
"first_host": "00000001001001110000000000000000000000001101111...",
|
||||
"last_host": "000000010010011100000000000000000000000011011110..."
|
||||
"ip": "0000000100100111000000000000000000000000110111100...000001",
|
||||
"network": "00000001001001110000000000000000000000001101...000001",
|
||||
"broadcast": "000000010010011100000000000000000000000011...000001",
|
||||
"hostmask": "0000000000000000000000000000000000000000000...000000",
|
||||
"netmask": "11111111111111111111111111111111111111111111...111111",
|
||||
"first_host": "00000001001001110000000000000000000000001...000001",
|
||||
"last_host": "000000010010011100000000000000000000000011...0000001"
|
||||
}
|
||||
}
|
||||
|
||||
@ -304,12 +339,22 @@ Examples:
|
||||
"ip": "::ffff:c0a8:123",
|
||||
"ip_compressed": "::ffff:c0a8:123",
|
||||
"ip_exploded": "0000:0000:0000:0000:0000:ffff:c0a8:0123",
|
||||
"ip_split": [
|
||||
"0000",
|
||||
"0000",
|
||||
"0000",
|
||||
"0000",
|
||||
"0000",
|
||||
"ffff",
|
||||
"c0a8",
|
||||
"0123"
|
||||
],
|
||||
"scope_id": null,
|
||||
"ipv4_mapped": "192.168.1.35",
|
||||
"six_to_four": null,
|
||||
"teredo_client": null,
|
||||
"teredo_server": null,
|
||||
"dns_ptr": "3.2.1.0.8.a.0.c.f.f.f.f.0.0.0....0.0.0.0.0.0.0.ip6.arpa",
|
||||
"dns_ptr": "3.2.1.0.8.a.0.c.f.f.f.f.0.0....0.0.0.0.0.0.ip6.arpa",
|
||||
"network": "::ffff:c0a8:123",
|
||||
"broadcast": "::ffff:c0a8:123",
|
||||
"hostmask": "::",
|
||||
@ -342,13 +387,13 @@ Examples:
|
||||
"last_host": "00:00:00:00:00:00:00:00:00:00:ff:ff:c0:a8:01:23"
|
||||
},
|
||||
"bin": {
|
||||
"ip": "0000000000000000000000000000000000000000000000000000000...",
|
||||
"network": "00000000000000000000000000000000000000000000000000...",
|
||||
"broadcast": "000000000000000000000000000000000000000000000000...",
|
||||
"hostmask": "0000000000000000000000000000000000000000000000000...",
|
||||
"netmask": "11111111111111111111111111111111111111111111111111...",
|
||||
"first_host": "00000000000000000000000000000000000000000000000...",
|
||||
"last_host": "000000000000000000000000000000000000000000000000..."
|
||||
"ip": "000000000000000000000000000000000000000000000...100100011",
|
||||
"network": "0000000000000000000000000000000000000000...000100011",
|
||||
"broadcast": "00000000000000000000000000000000000000...000100011",
|
||||
"hostmask": "000000000000000000000000000000000000000...000000000",
|
||||
"netmask": "1111111111111111111111111111111111111111...111111111",
|
||||
"first_host": "0000000000000000000000000000000000000...100100011",
|
||||
"last_host": "00000000000000000000000000000000000000...0100100011"
|
||||
}
|
||||
}
|
||||
|
||||
@ -360,12 +405,22 @@ Examples:
|
||||
"ip": "2002:c000:204::",
|
||||
"ip_compressed": "2002:c000:204::",
|
||||
"ip_exploded": "2002:c000:0204:0000:0000:0000:0000:0000",
|
||||
"ip_split": [
|
||||
"2002",
|
||||
"c000",
|
||||
"0204",
|
||||
"0000",
|
||||
"0000",
|
||||
"0000",
|
||||
"0000",
|
||||
"0000"
|
||||
],
|
||||
"scope_id": null,
|
||||
"ipv4_mapped": null,
|
||||
"six_to_four": "192.0.2.4",
|
||||
"teredo_client": null,
|
||||
"teredo_server": null,
|
||||
"dns_ptr": "0.0.0.0.0.0.0.0......0.4.0.2.0.0.0.0.c.2.0.0.2.ip6.arpa",
|
||||
"dns_ptr": "0.0.0.0.0.0.0...0.0.0.4.0.2.0.0.0.0.c.2.0.0.2.ip6.arpa",
|
||||
"network": "2002:c000:204::",
|
||||
"broadcast": "2002:c000:204:ffff:ffff:ffff:ffff:ffff",
|
||||
"hostmask": "::ffff:ffff:ffff:ffff:ffff",
|
||||
@ -398,13 +453,13 @@ Examples:
|
||||
"last_host": "20:02:c0:00:02:04:ff:ff:ff:ff:ff:ff:ff:ff:ff:fe"
|
||||
},
|
||||
"bin": {
|
||||
"ip": "0010000000000010110000000000000000000010000001000000000...",
|
||||
"network": "00100000000000101100000000000000000000100000010000...",
|
||||
"broadcast": "001000000000001011000000000000000000001000000100...",
|
||||
"hostmask": "0000000000000000000000000000000000000000000000001...",
|
||||
"netmask": "11111111111111111111111111111111111111111111111100...",
|
||||
"first_host": "00100000000000101100000000000000000000100000010...",
|
||||
"last_host": "001000000000001011000000000000000000001000000100..."
|
||||
"ip": "00100000000000101100000000000000000000100000010...00000000",
|
||||
"network": "001000000000001011000000000000000000001000...00000000",
|
||||
"broadcast": "0010000000000010110000000000000000000010...11111111",
|
||||
"hostmask": "00000000000000000000000000000000000000000...11111111",
|
||||
"netmask": "111111111111111111111111111111111111111111...00000000",
|
||||
"first_host": "001000000000001011000000000000000000001...00000001",
|
||||
"last_host": "0010000000000010110000000000000000000010...111111110"
|
||||
}
|
||||
}
|
||||
|
||||
@ -416,12 +471,22 @@ Examples:
|
||||
"ip": "2001:0:4136:e378:8000:63bf:3fff:fdd2",
|
||||
"ip_compressed": "2001:0:4136:e378:8000:63bf:3fff:fdd2",
|
||||
"ip_exploded": "2001:0000:4136:e378:8000:63bf:3fff:fdd2",
|
||||
"ip_split": [
|
||||
"2001",
|
||||
"0000",
|
||||
"4136",
|
||||
"e378",
|
||||
"8000",
|
||||
"63bf",
|
||||
"3fff",
|
||||
"fdd2"
|
||||
],
|
||||
"scope_id": null,
|
||||
"ipv4_mapped": null,
|
||||
"six_to_four": null,
|
||||
"teredo_client": "192.0.2.45",
|
||||
"teredo_server": "65.54.227.120",
|
||||
"dns_ptr": "2.d.d.f.f.f.f.3.f.b.3.6.0.0.0....0.0.0.1.0.0.2.ip6.arpa",
|
||||
"dns_ptr": "2.d.d.f.f.f.f.3.f.b.3.6.0.0.0.8.8....0.1.0.0.2.ip6.arpa",
|
||||
"network": "2001:0:4136:e378:8000:63bf:3fff:fdd2",
|
||||
"broadcast": "2001:0:4136:e378:8000:63bf:3fff:fdd2",
|
||||
"hostmask": "::",
|
||||
@ -454,13 +519,13 @@ Examples:
|
||||
"last_host": "20:01:00:00:41:36:e3:78:80:00:63:bf:3f:ff:fd:d2"
|
||||
},
|
||||
"bin": {
|
||||
"ip": "001000000000000100000000000000000100000100110110111000...",
|
||||
"network": "0010000000000001000000000000000001000001001101101...",
|
||||
"broadcast": "00100000000000010000000000000000010000010011011...",
|
||||
"hostmask": "000000000000000000000000000000000000000000000000...",
|
||||
"netmask": "1111111111111111111111111111111111111111111111111...",
|
||||
"first_host": "0010000000000001000000000000000001000001001101...",
|
||||
"last_host": "00100000000000010000000000000000010000010011011..."
|
||||
"ip": "0010000000000001000000000000000001000001001...110111010010",
|
||||
"network": "00100000000000010000000000000000010000...110111010010",
|
||||
"broadcast": "001000000000000100000000000000000100...110111010010",
|
||||
"hostmask": "0000000000000000000000000000000000000...000000000000",
|
||||
"netmask": "11111111111111111111111111111111111111...111111111111",
|
||||
"first_host": "00100000000000010000000000000000010...110111010010",
|
||||
"last_host": "001000000000000100000000000000000100...110111010010"
|
||||
}
|
||||
}
|
||||
|
||||
@ -487,4 +552,4 @@ Returns:
|
||||
### Parser Information
|
||||
Compatibility: linux, darwin, cygwin, win32, aix, freebsd
|
||||
|
||||
Version 1.2 by Kelly Brazil (kellyjonbrazil@gmail.com)
|
||||
Version 1.3 by Kelly Brazil (kellyjonbrazil@gmail.com)
|
||||
|
@ -5,65 +5,10 @@
|
||||
|
||||
jc - JSON Convert ISO 8601 Datetime string parser
|
||||
|
||||
This parser supports standard ISO 8601 strings that include both date and
|
||||
time. If no timezone or offset information is available in the string, then
|
||||
UTC timezone is used.
|
||||
This parser has been renamed to datetime-iso (cli) or datetime_iso (module).
|
||||
|
||||
Usage (cli):
|
||||
|
||||
$ echo "2022-07-20T14:52:45Z" | jc --iso-datetime
|
||||
|
||||
Usage (module):
|
||||
|
||||
import jc
|
||||
result = jc.parse('iso_datetime', iso_8601_string)
|
||||
|
||||
Schema:
|
||||
|
||||
{
|
||||
"year": integer,
|
||||
"month": string,
|
||||
"month_num": integer,
|
||||
"day": integer,
|
||||
"weekday": string,
|
||||
"weekday_num": integer,
|
||||
"hour": integer,
|
||||
"hour_24": integer,
|
||||
"minute": integer,
|
||||
"second": integer,
|
||||
"microsecond": integer,
|
||||
"period": string,
|
||||
"utc_offset": string,
|
||||
"day_of_year": integer,
|
||||
"week_of_year": integer,
|
||||
"iso": string,
|
||||
"timestamp": integer # [0]
|
||||
}
|
||||
|
||||
[0] timezone aware UNIX timestamp expressed in UTC
|
||||
|
||||
Examples:
|
||||
|
||||
$ echo "2022-07-20T14:52:45Z" | jc --iso-datetime -p
|
||||
{
|
||||
"year": 2022,
|
||||
"month": "Jul",
|
||||
"month_num": 7,
|
||||
"day": 20,
|
||||
"weekday": "Wed",
|
||||
"weekday_num": 3,
|
||||
"hour": 2,
|
||||
"hour_24": 14,
|
||||
"minute": 52,
|
||||
"second": 45,
|
||||
"microsecond": 0,
|
||||
"period": "PM",
|
||||
"utc_offset": "+0000",
|
||||
"day_of_year": 201,
|
||||
"week_of_year": 29,
|
||||
"iso": "2022-07-20T14:52:45+00:00",
|
||||
"timestamp": 1658328765
|
||||
}
|
||||
This parser will be removed in a future version, so please start using
|
||||
the new parser name.
|
||||
|
||||
<a id="jc.parsers.iso_datetime.parse"></a>
|
||||
|
||||
@ -73,7 +18,8 @@ Examples:
|
||||
def parse(data, raw=False, quiet=False)
|
||||
```
|
||||
|
||||
Main text parsing function
|
||||
This parser is deprecated and calls datetime_iso. Please use datetime_iso
|
||||
directly. This parser will be removed in the future.
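For readers curious how such a deprecation is wired up, here is a hedged sketch of the delegating pattern (illustrative only, not the actual jc source):

```python
# Illustrative sketch of a deprecated parser delegating to its replacement;
# the real module lives at jc/parsers/iso_datetime.py and may differ.
from jc.parsers import datetime_iso
from jc.utils import warning_message

def parse(data, raw=False, quiet=False):
    if not quiet:
        warning_message(['iso-datetime is deprecated. Please use datetime-iso.'])
    return datetime_iso.parse(data, raw=raw, quiet=quiet)
```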
|
||||
|
||||
Parameters:
|
||||
|
||||
@ -88,4 +34,4 @@ Returns:
|
||||
### Parser Information
|
||||
Compatibility: linux, aix, freebsd, darwin, win32, cygwin
|
||||
|
||||
Version 1.0 by Kelly Brazil (kellyjonbrazil@gmail.com)
|
||||
Version 1.1 by Kelly Brazil (kellyjonbrazil@gmail.com)
|
||||
|
docs/parsers/lspci.md (new file)
@ -0,0 +1,148 @@
|
||||
[Home](https://kellyjonbrazil.github.io/jc/)
|
||||
<a id="jc.parsers.lspci"></a>
|
||||
|
||||
# jc.parsers.lspci
|
||||
|
||||
jc - JSON Convert `lspci -mmv` command output parser
|
||||
|
||||
This parser supports the following `lspci` options:
|
||||
- `-mmv`
|
||||
- `-nmmv`
|
||||
- `-nnmmv`
|
||||
|
||||
Usage (cli):
|
||||
|
||||
$ lspci -nnmmv | jc --lspci
|
||||
|
||||
or
|
||||
|
||||
$ jc lspci -nnmmv
|
||||
|
||||
Usage (module):
|
||||
|
||||
import jc
|
||||
result = jc.parse('lspci', lspci_command_output)
|
||||
|
||||
Schema:
|
||||
|
||||
[
|
||||
{
|
||||
"slot": string,
|
||||
"domain": string,
|
||||
"domain_int": integer,
|
||||
"bus": string,
|
||||
"bus_int": integer,
|
||||
"dev": string,
|
||||
"dev_int": integer,
|
||||
"function": string,
|
||||
"function_int": integer,
|
||||
"class": string,
|
||||
"class_id": string,
|
||||
"class_id_int": integer,
|
||||
"vendor": string,
|
||||
"vendor_id": string,
|
||||
"vendor_id_int": integer,
|
||||
"device": string,
|
||||
"device_id": string,
|
||||
"device_id_int": integer,
|
||||
"svendor": string,
|
||||
"svendor_id": string,
|
||||
"svendor_id_int": integer,
|
||||
"sdevice": string,
|
||||
"sdevice_id": string,
|
||||
"sdevice_id_int": integer,
|
||||
"rev": string,
|
||||
"physlot": string,
|
||||
"physlot_int": integer,
|
||||
"progif": string,
|
||||
"progif_int": integer
|
||||
}
|
||||
]
|
||||
|
||||
Examples:
|
||||
|
||||
$ lspci -nnmmv | jc --lspci -p
|
||||
[
|
||||
{
|
||||
"slot": "ff:02:05.0",
|
||||
"domain": "ff",
|
||||
"domain_int": 255,
|
||||
"bus": "02",
|
||||
"bus_int": 2,
|
||||
"dev": "05",
|
||||
"dev_int": 5,
|
||||
"function": "0",
|
||||
"function_int": 0,
|
||||
"class": "SATA controller",
|
||||
"class_id": "0106",
|
||||
"class_id_int": 262,
|
||||
"vendor": "VMware",
|
||||
"vendor_id": "15ad",
|
||||
"vendor_id_int": 5549,
|
||||
"device": "SATA AHCI controller",
|
||||
"device_id": "07e0",
|
||||
"device_id_int": 2016,
|
||||
"svendor": "VMware",
|
||||
"svendor_id": "15ad",
|
||||
"svendor_id_int": 5549,
|
||||
"sdevice": "SATA AHCI controller",
|
||||
"sdevice_id": "07e0",
|
||||
"sdevice_id_int": 2016,
|
||||
"physlot": "37",
|
||||
"physlot_int": 55,
|
||||
"progif": "01",
|
||||
"progif_int": 1
|
||||
},
|
||||
...
|
||||
]
|
||||
|
||||
$ lspci -nnmmv | jc --lspci -p -r
|
||||
[
|
||||
{
|
||||
"slot": "ff:02:05.0",
|
||||
"domain": "ff",
|
||||
"bus": "02",
|
||||
"dev": "05",
|
||||
"function": "0",
|
||||
"class": "SATA controller",
|
||||
"class_id": "0106",
|
||||
"vendor": "VMware",
|
||||
"vendor_id": "15ad",
|
||||
"device": "SATA AHCI controller",
|
||||
"device_id": "07e0",
|
||||
"svendor": "VMware",
|
||||
"svendor_id": "15ad",
|
||||
"sdevice": "SATA AHCI controller",
|
||||
"sdevice_id": "07e0",
|
||||
"physlot": "37",
|
||||
"progif": "01"
|
||||
},
|
||||
...
|
||||
]
|
||||
|
||||
<a id="jc.parsers.lspci.parse"></a>
|
||||
|
||||
### parse
|
||||
|
||||
```python
|
||||
def parse(data: str,
|
||||
raw: bool = False,
|
||||
quiet: bool = False) -> List[JSONDictType]
|
||||
```
|
||||
|
||||
Main text parsing function
|
||||
|
||||
Parameters:
|
||||
|
||||
data: (string) text data to parse
|
||||
raw: (boolean) unprocessed output if True
|
||||
quiet: (boolean) suppress warning messages if True
|
||||
|
||||
Returns:
|
||||
|
||||
List of Dictionaries. Raw or processed structured data.
|
||||
|
||||
### Parser Information
|
||||
Compatibility: linux
|
||||
|
||||
Version 1.0 by Kelly Brazil (kellyjonbrazil@gmail.com)
|
docs/parsers/pci_ids.md (new file)
@ -0,0 +1,99 @@
|
||||
[Home](https://kellyjonbrazil.github.io/jc/)
|
||||
<a id="jc.parsers.pci_ids"></a>
|
||||
|
||||
# jc.parsers.pci\_ids
|
||||
|
||||
jc - JSON Convert `pci.ids` file parser
|
||||
|
||||
This parser converts the pci.ids database file.
|
||||
|
||||
https://raw.githubusercontent.com/pciutils/pciids/master/pci.ids
|
||||
|
||||
A nested schema allows straightforward queries with tools like `jq`. Hex id
|
||||
numbers are prefixed with an underscore (`_`) so bracket notation is not
|
||||
necessary when referencing. For example:
|
||||
|
||||
$ cat pci.ids | jc --pci-ids | jq '.vendors._9005._0053._9005._ffff.subsystem_name'
|
||||
"AIC-7896 SCSI Controller mainboard implementation"
|
||||
|
||||
Here are the vendor and class mappings:
|
||||
|
||||
jq '.vendors._001c._0001._001c._0005.subsystem_name'
|
||||
| | | |
|
||||
| | | subdevice
|
||||
| | subvendor
|
||||
| device
|
||||
vendor
|
||||
|
||||
jq '.classes._0c._03._40'
|
||||
| | |
|
||||
| | prog_if
|
||||
| subclass
|
||||
class
|
||||
|
||||
Usage (cli):
|
||||
|
||||
$ cat pci.ids | jc --pci-ids
|
||||
|
||||
Usage (module):
|
||||
|
||||
import jc
|
||||
result = jc.parse('pci_ids', pci_ids_file_output)
|
||||
|
||||
Schema:
|
||||
|
||||
{
|
||||
"vendors": {
|
||||
"_<vendor_id>": {
|
||||
"vendor_name": string,
|
||||
"_<device_id>": {
|
||||
"device_name": string,
|
||||
"_<subvendor_id>": {
|
||||
"_<subdevice_id": string
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"classes": {
|
||||
"_<class_id>": {
|
||||
"class_name": string,
|
||||
"_<subclass_id>": {
|
||||
"subclass_name": string,
|
||||
"_<prog_if>": string
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Examples:
|
||||
|
||||
$ cat pci.ids | jc --pci-ids | jq '.vendors._001c._0001._001c._0005.subsystem_name'
|
||||
"2 Channel CAN Bus SJC1000 (Optically Isolated)"
|
||||
|
||||
$ cat pci.ids | jc --pci-ids | jq '.classes._0c._03._40'
|
||||
"USB4 Host Interface"
|
||||
|
||||
<a id="jc.parsers.pci_ids.parse"></a>
|
||||
|
||||
### parse
|
||||
|
||||
```python
|
||||
def parse(data: str, raw: bool = False, quiet: bool = False) -> JSONDictType
|
||||
```
|
||||
|
||||
Main text parsing function
|
||||
|
||||
Parameters:
|
||||
|
||||
data: (string) text data to parse
|
||||
raw: (boolean) unprocessed output if True
|
||||
quiet: (boolean) suppress warning messages if True
|
||||
|
||||
Returns:
|
||||
|
||||
Dictionary. Raw or processed structured data.
|
||||
|
||||
### Parser Information
|
||||
Compatibility: linux, darwin, cygwin, win32, aix, freebsd
|
||||
|
||||
Version 1.0 by Kelly Brazil (kellyjonbrazil@gmail.com)
|
@ -223,4 +223,4 @@ Returns:
|
||||
### Parser Information
|
||||
Compatibility: linux
|
||||
|
||||
Version 1.0 by Kelly Brazil (kellyjonbrazil@gmail.com)
|
||||
Version 1.1 by Kelly Brazil (kellyjonbrazil@gmail.com)
|
||||
|
@ -83,7 +83,10 @@ Examples:
|
||||
|
||||
```python
|
||||
@add_jc_meta
|
||||
def parse(data, raw=False, quiet=False, ignore_exceptions=False)
|
||||
def parse(data: Iterable[str],
|
||||
raw: bool = False,
|
||||
quiet: bool = False,
|
||||
ignore_exceptions: bool = False) -> StreamingOutputType
|
||||
```
|
||||
|
||||
Main text parsing generator function. Returns an iterable object.
|
||||
@ -104,4 +107,4 @@ Returns:
|
||||
### Parser Information
|
||||
Compatibility: linux, darwin, freebsd
|
||||
|
||||
Version 1.1 by Kelly Brazil (kellyjonbrazil@gmail.com)
|
||||
Version 1.2 by Kelly Brazil (kellyjonbrazil@gmail.com)
|
||||
|
docs/parsers/udevadm.md (new file)
@ -0,0 +1,143 @@
|
||||
[Home](https://kellyjonbrazil.github.io/jc/)
|
||||
<a id="jc.parsers.udevadm"></a>
|
||||
|
||||
# jc.parsers.udevadm
|
||||
|
||||
jc - JSON Convert `udevadm info` command output parser
|
||||
|
||||
Usage (cli):
|
||||
|
||||
$ udevadm info --query=all /dev/sda | jc --udevadm
|
||||
|
||||
or
|
||||
|
||||
$ jc udevadm info --query=all /dev/sda
|
||||
|
||||
Usage (module):
|
||||
|
||||
import jc
|
||||
result = jc.parse('udevadm', udevadm_command_output)
|
||||
|
||||
Schema:
|
||||
|
||||
{
|
||||
"P": string,
|
||||
"N": string,
|
||||
"L": integer,
|
||||
"S": [
|
||||
string
|
||||
],
|
||||
"E": {
|
||||
"<key>": string
|
||||
}
|
||||
}
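A short module-level sketch of pulling values out of the flattened `E` (device property) object shown above; the device path is arbitrary:

```python
import subprocess
import jc

# Minimal sketch: run `udevadm info` for a block device and read a couple
# of the parsed "E" properties.
out = subprocess.run(
    ['udevadm', 'info', '--query=all', '/dev/sda'],
    capture_output=True, text=True, check=True
).stdout

info = jc.parse('udevadm', out)
print(info['E']['DEVNAME'], info['E'].get('ID_PART_TABLE_TYPE'))
```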
|
||||
|
||||
|
||||
Examples:
|
||||
|
||||
$ udevadm info --query=all /dev/sda | jc --udevadm -p
|
||||
{
|
||||
"P": "/devices/pci0000:00/0000:00:10.0/host32/target32:0:0/32:0:0:0/block/sda",
|
||||
"N": "sda",
|
||||
"L": 0,
|
||||
"S": [
|
||||
"disk/by-path/pci-0000:00:10.0-scsi-0:0:0:0"
|
||||
],
|
||||
"E": {
|
||||
"DEVPATH": "/devices/pci0000:00/0000:00:10.0/host32/target32:0:0/32:0:0:0/block/sda",
|
||||
"DEVNAME": "/dev/sda",
|
||||
"DEVTYPE": "disk",
|
||||
"MAJOR": "8",
|
||||
"MINOR": "0",
|
||||
"SUBSYSTEM": "block",
|
||||
"USEC_INITIALIZED": "6100111",
|
||||
"SCSI_TPGS": "0",
|
||||
"SCSI_TYPE": "disk",
|
||||
"SCSI_VENDOR": "VMware,",
|
||||
"SCSI_VENDOR_ENC": "VMware,\\x20",
|
||||
"SCSI_MODEL": "VMware_Virtual_S",
|
||||
"SCSI_MODEL_ENC": "VMware\\x20Virtual\\x20S",
|
||||
"SCSI_REVISION": "1.0",
|
||||
"ID_SCSI": "1",
|
||||
"ID_VENDOR": "VMware_",
|
||||
"ID_VENDOR_ENC": "VMware\\x2c\\x20",
|
||||
"ID_MODEL": "VMware_Virtual_S",
|
||||
"ID_MODEL_ENC": "VMware\\x20Virtual\\x20S",
|
||||
"ID_REVISION": "1.0",
|
||||
"ID_TYPE": "disk",
|
||||
"MPATH_SBIN_PATH": "/sbin",
|
||||
"ID_BUS": "scsi",
|
||||
"ID_PATH": "pci-0000:00:10.0-scsi-0:0:0:0",
|
||||
"ID_PATH_TAG": "pci-0000_00_10_0-scsi-0_0_0_0",
|
||||
"ID_PART_TABLE_UUID": "a5bd0c01-4210-46f2-b558-5c11c209a8f7",
|
||||
"ID_PART_TABLE_TYPE": "gpt",
|
||||
"DEVLINKS": "/dev/disk/by-path/pci-0000:00:10.0-scsi-0:0:0:0",
|
||||
"TAGS": ":systemd:"
|
||||
}
|
||||
}
|
||||
|
||||
$ udevadm info --query=all /dev/sda | jc --udevadm -p -r
|
||||
{
|
||||
"P": "/devices/pci0000:00/0000:00:10.0/host32/target32:0:0/32:0:0:0/block/sda",
|
||||
"N": "sda",
|
||||
"L": "0",
|
||||
"S": [
|
||||
"disk/by-path/pci-0000:00:10.0-scsi-0:0:0:0"
|
||||
],
|
||||
"E": {
|
||||
"DEVPATH": "/devices/pci0000:00/0000:00:10.0/host32/target32:0:0/32:0:0:0/block/sda",
|
||||
"DEVNAME": "/dev/sda",
|
||||
"DEVTYPE": "disk",
|
||||
"MAJOR": "8",
|
||||
"MINOR": "0",
|
||||
"SUBSYSTEM": "block",
|
||||
"USEC_INITIALIZED": "6100111",
|
||||
"SCSI_TPGS": "0",
|
||||
"SCSI_TYPE": "disk",
|
||||
"SCSI_VENDOR": "VMware,",
|
||||
"SCSI_VENDOR_ENC": "VMware,\\x20",
|
||||
"SCSI_MODEL": "VMware_Virtual_S",
|
||||
"SCSI_MODEL_ENC": "VMware\\x20Virtual\\x20S",
|
||||
"SCSI_REVISION": "1.0",
|
||||
"ID_SCSI": "1",
|
||||
"ID_VENDOR": "VMware_",
|
||||
"ID_VENDOR_ENC": "VMware\\x2c\\x20",
|
||||
"ID_MODEL": "VMware_Virtual_S",
|
||||
"ID_MODEL_ENC": "VMware\\x20Virtual\\x20S",
|
||||
"ID_REVISION": "1.0",
|
||||
"ID_TYPE": "disk",
|
||||
"MPATH_SBIN_PATH": "/sbin",
|
||||
"ID_BUS": "scsi",
|
||||
"ID_PATH": "pci-0000:00:10.0-scsi-0:0:0:0",
|
||||
"ID_PATH_TAG": "pci-0000_00_10_0-scsi-0_0_0_0",
|
||||
"ID_PART_TABLE_UUID": "a5bd0c01-4210-46f2-b558-5c11c209a8f7",
|
||||
"ID_PART_TABLE_TYPE": "gpt",
|
||||
"DEVLINKS": "/dev/disk/by-path/pci-0000:00:10.0-scsi-0:0:0:0",
|
||||
"TAGS": ":systemd:"
|
||||
}
|
||||
}
|
||||
|
||||
<a id="jc.parsers.udevadm.parse"></a>
|
||||
|
||||
### parse
|
||||
|
||||
```python
|
||||
def parse(data: str, raw: bool = False, quiet: bool = False) -> JSONDictType
|
||||
```
|
||||
|
||||
Main text parsing function
|
||||
|
||||
Parameters:
|
||||
|
||||
data: (string) text data to parse
|
||||
raw: (boolean) unprocessed output if True
|
||||
quiet: (boolean) suppress warning messages if True
|
||||
|
||||
Returns:
|
||||
|
||||
Dictionary. Raw or processed structured data.
|
||||
|
||||
### Parser Information
|
||||
Compatibility: linux
|
||||
|
||||
Version 1.0 by Kelly Brazil (kellyjonbrazil@gmail.com)
|
@ -19,7 +19,7 @@ jc - JSON Convert streaming utils
|
||||
### streaming\_input\_type\_check
|
||||
|
||||
```python
|
||||
def streaming_input_type_check(data: Iterable) -> None
|
||||
def streaming_input_type_check(data: Iterable[Union[str, bytes]]) -> None
|
||||
```
|
||||
|
||||
Ensure input data is an iterable, but not a string or bytes. Raises
|
||||
@ -40,7 +40,8 @@ Ensure each line is a string. Raises `TypeError` if not.
|
||||
### stream\_success
|
||||
|
||||
```python
|
||||
def stream_success(output_line: Dict, ignore_exceptions: bool) -> Dict
|
||||
def stream_success(output_line: JSONDictType,
|
||||
ignore_exceptions: bool) -> JSONDictType
|
||||
```
|
||||
|
||||
Add `_jc_meta` object to output line if `ignore_exceptions=True`
|
||||
@ -50,7 +51,7 @@ Add `_jc_meta` object to output line if `ignore_exceptions=True`
|
||||
### stream\_error
|
||||
|
||||
```python
|
||||
def stream_error(e: BaseException, line: str) -> Dict
|
||||
def stream_error(e: BaseException, line: str) -> Dict[str, MetadataType]
|
||||
```
|
||||
|
||||
Return an error `_jc_meta` field.
|
||||
@ -60,7 +61,7 @@ Return an error `_jc_meta` field.
|
||||
### add\_jc\_meta
|
||||
|
||||
```python
|
||||
def add_jc_meta(func)
|
||||
def add_jc_meta(func: F) -> F
|
||||
```
|
||||
|
||||
Decorator for streaming parsers to add stream_success and stream_error
|
||||
@ -106,7 +107,7 @@ In all cases above:
|
||||
|
||||
```python
|
||||
def raise_or_yield(ignore_exceptions: bool, e: BaseException,
|
||||
line: str) -> tuple
|
||||
line: str) -> Tuple[BaseException, str]
|
||||
```
|
||||
|
||||
Return the exception object and line string if ignore_exceptions is
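Taken together, `add_jc_meta` and `raise_or_yield` give streaming parsers a common error-handling shape. A hedged sketch of that shape (a toy parser, not one shipped with jc):

```python
from jc.streaming import (
    add_jc_meta, streaming_input_type_check,
    streaming_line_input_type_check, raise_or_yield
)

# Toy streaming parser: one dict per non-blank input line. The decorator
# wraps each yielded dict (or (exception, line) tuple) with the `_jc_meta`
# bookkeeping when ignore_exceptions=True.
@add_jc_meta
def parse(data, raw=False, quiet=False, ignore_exceptions=False):
    streaming_input_type_check(data)
    for line in data:
        try:
            streaming_line_input_type_check(line)
            if line.strip():
                yield {'line': line.strip()}
        except Exception as e:
            yield raise_or_yield(ignore_exceptions, e, line)
```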
|
||||
|
@ -64,7 +64,7 @@ Returns:
|
||||
### is\_compatible
|
||||
|
||||
```python
|
||||
def is_compatible(compatible: List) -> bool
|
||||
def is_compatible(compatible: List[str]) -> bool
|
||||
```
|
||||
|
||||
Returns True if the parser is compatible with the running OS platform.
|
||||
@ -75,7 +75,7 @@ Returns True if the parser is compatible with the running OS platform.
|
||||
|
||||
```python
|
||||
def compatibility(mod_name: str,
|
||||
compatible: List,
|
||||
compatible: List[str],
|
||||
quiet: bool = False) -> None
|
||||
```
|
||||
|
||||
@ -125,7 +125,7 @@ Returns:
|
||||
### convert\_to\_int
|
||||
|
||||
```python
|
||||
def convert_to_int(value: Union[str, float]) -> Optional[int]
|
||||
def convert_to_int(value: object) -> Optional[int]
|
||||
```
|
||||
|
||||
Converts string and float input to int. Strips all non-numeric
|
||||
@ -144,7 +144,7 @@ Returns:
|
||||
### convert\_to\_float
|
||||
|
||||
```python
|
||||
def convert_to_float(value: Union[str, int]) -> Optional[float]
|
||||
def convert_to_float(value: object) -> Optional[float]
|
||||
```
|
||||
|
||||
Converts string and int input to float. Strips all non-numeric
|
||||
@ -163,7 +163,7 @@ Returns:
|
||||
### convert\_to\_bool
|
||||
|
||||
```python
|
||||
def convert_to_bool(value: Union[str, int, float]) -> bool
|
||||
def convert_to_bool(value: object) -> bool
|
||||
```
|
||||
|
||||
Converts string, integer, or float input to boolean by checking
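A small hedged sketch of the three converters side by side (inputs are arbitrary; the broadened `object` annotations do not change runtime behavior):

```python
from jc.utils import convert_to_int, convert_to_float, convert_to_bool

# Non-numeric characters are stripped before conversion; None (or False for
# the bool helper) comes back when nothing sensible remains.
print(convert_to_int('2,048 kB'))    # expected: 2048
print(convert_to_float('97.5%'))     # expected: 97.5
print(convert_to_bool('true'))       # expected: True
print(convert_to_int('n/a'))         # expected: None
```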
|
||||
@ -201,8 +201,8 @@ class timestamp()
|
||||
### \_\_init\_\_
|
||||
|
||||
```python
|
||||
def __init__(datetime_string: str,
|
||||
format_hint: Optional[Iterable] = None) -> None
|
||||
def __init__(datetime_string: Optional[str],
|
||||
format_hint: Optional[Iterable[int]] = None) -> None
|
||||
```
|
||||
|
||||
Input a datetime text string of several formats and convert to a
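A minimal, hedged sketch of using the class (the date string is arbitrary; `format_hint` takes jc's internal format IDs and is left at its default here):

```python
from jc.utils import timestamp

# Convert a free-form datetime string; `naive` is the locally-interpreted
# epoch value and `utc` is only populated when a UTC timezone is detected.
ts = timestamp('Tue Jan 18 10:23:07 PST 2022')
print(ts.naive, ts.utc)
```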
|
||||
|
@ -1,4 +1,5 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# type: ignore
|
||||
# Copyright (c) 2005-2010 ActiveState Software Inc.
|
||||
# Copyright (c) 2013 Eddy Petrișor
|
||||
|
||||
|
@ -18,7 +18,7 @@ long_options_map: Dict[str, List[str]] = {
|
||||
'--zsh-comp': ['Z', 'gen Zsh completion: jc -Z > "${fpath[1]}/_jc"']
|
||||
}
|
||||
|
||||
new_pygments_colors = {
|
||||
new_pygments_colors: Dict[str, str] = {
|
||||
'black': 'ansiblack',
|
||||
'red': 'ansired',
|
||||
'green': 'ansigreen',
|
||||
@ -37,7 +37,7 @@ new_pygments_colors = {
|
||||
'white': 'ansiwhite',
|
||||
}
|
||||
|
||||
old_pygments_colors = {
|
||||
old_pygments_colors: Dict[str, str] = {
|
||||
'black': '#ansiblack',
|
||||
'red': '#ansidarkred',
|
||||
'green': '#ansidarkgreen',
|
||||
@ -55,3 +55,42 @@ old_pygments_colors = {
|
||||
'brightcyan': '#ansiturquoise',
|
||||
'white': '#ansiwhite',
|
||||
}
|
||||
|
||||
helptext_preamble_string: str = f'''\
|
||||
jc converts the output of many commands, file-types, and strings to JSON or YAML
|
||||
|
||||
Usage:
|
||||
|
||||
Standard syntax:
|
||||
|
||||
COMMAND | jc [OPTIONS] PARSER
|
||||
|
||||
cat FILE | jc [OPTIONS] PARSER
|
||||
|
||||
echo STRING | jc [OPTIONS] PARSER
|
||||
|
||||
Magic syntax:
|
||||
|
||||
jc [OPTIONS] COMMAND
|
||||
|
||||
jc [OPTIONS] /proc/<path-to-procfile>
|
||||
|
||||
Parsers:
|
||||
'''
|
||||
|
||||
helptext_end_string: str = '''\
|
||||
Examples:
|
||||
Standard Syntax:
|
||||
$ dig www.google.com | jc --pretty --dig
|
||||
$ cat /proc/meminfo | jc --pretty --proc
|
||||
|
||||
Magic Syntax:
|
||||
$ jc --pretty dig www.google.com
|
||||
$ jc --pretty /proc/meminfo
|
||||
|
||||
Parser Documentation:
|
||||
$ jc --help --dig
|
||||
|
||||
Show Hidden Parsers:
|
||||
$ jc -hh
|
||||
'''
|
jc/jc_types.py (new file, 56 lines)
@ -0,0 +1,56 @@
|
||||
"""jc - JSON Convert lib module"""
|
||||
|
||||
import sys
|
||||
from datetime import datetime
|
||||
from typing import Dict, List, Tuple, Iterator, Optional, Union
|
||||
|
||||
JSONDictType = Dict[str, Union[str, int, float, bool, List, Dict, None]]
|
||||
MetadataType = Dict[str, Optional[Union[str, int, float, List[str], datetime]]]
|
||||
StreamingOutputType = Iterator[Union[JSONDictType, Tuple[BaseException, str]]]
|
||||
|
||||
if sys.version_info >= (3, 8):
|
||||
from typing import TypedDict
|
||||
|
||||
ParserInfoType = TypedDict(
|
||||
'ParserInfoType',
|
||||
{
|
||||
"name": str,
|
||||
"argument": str,
|
||||
"version": str,
|
||||
"description": str,
|
||||
"author": str,
|
||||
"author_email": str,
|
||||
"compatible": List[str],
|
||||
"magic_commands": List[str],
|
||||
"documentation": str,
|
||||
"streaming": bool,
|
||||
"plugin": bool,
|
||||
"hidden": bool,
|
||||
"deprecated": bool
|
||||
},
|
||||
total=False
|
||||
)
|
||||
|
||||
TimeStampFormatType = TypedDict(
|
||||
'TimeStampFormatType',
|
||||
{
|
||||
'id': int,
|
||||
'format': str,
|
||||
'locale': Optional[str]
|
||||
}
|
||||
)
|
||||
|
||||
else:
|
||||
ParserInfoType = Dict
|
||||
TimeStampFormatType = Dict
|
||||
|
||||
|
||||
AboutJCType = Dict[str, Union[str, int, List[ParserInfoType]]]
|
||||
|
||||
|
||||
try:
|
||||
from pygments.token import (Name, Number, String, Keyword)
|
||||
CustomColorType = Dict[Union[Name.Tag, Number, String, Keyword], str]
|
||||
|
||||
except Exception:
|
||||
CustomColorType = Dict # type: ignore
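A minimal sketch (not part of this commit) of how a parser module might use these aliases; the parser name and fields below are hypothetical:

```python
from typing import List
from jc.jc_types import JSONDictType, ParserInfoType

# partial dicts are fine: the TypedDict is declared with total=False
example_info: ParserInfoType = {
    'name': 'example',
    'argument': '--example',
    'version': '1.0',
    'compatible': ['linux', 'darwin'],
    'streaming': False,
}

def parse(data: str) -> List[JSONDictType]:
    """Toy parser: one dictionary per non-blank input line."""
    return [{'line': line} for line in data.splitlines() if line]
```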
|
jc/lib.py (197 lines changed)
@ -3,12 +3,15 @@ import sys
|
||||
import os
|
||||
import re
|
||||
import importlib
|
||||
from typing import Dict, List, Iterable, Union, Iterator
|
||||
from typing import List, Iterable, Union, Iterator
|
||||
from types import ModuleType
|
||||
from .jc_types import ParserInfoType, JSONDictType
|
||||
from jc import appdirs
|
||||
|
||||
__version__ = '1.22.0'
|
||||
|
||||
parsers = [
|
||||
__version__ = '1.22.1'
|
||||
|
||||
parsers: List[str] = [
|
||||
'acpi',
|
||||
'airport',
|
||||
'airport-s',
|
||||
@ -25,6 +28,7 @@ parsers = [
|
||||
'csv',
|
||||
'csv-s',
|
||||
'date',
|
||||
'datetime-iso',
|
||||
'df',
|
||||
'dig',
|
||||
'dir',
|
||||
@ -66,6 +70,7 @@ parsers = [
|
||||
'lsblk',
|
||||
'lsmod',
|
||||
'lsof',
|
||||
'lspci',
|
||||
'lsusb',
|
||||
'm3u',
|
||||
'mdadm',
|
||||
@ -76,6 +81,7 @@ parsers = [
|
||||
'nmcli',
|
||||
'ntpq',
|
||||
'passwd',
|
||||
'pci-ids',
|
||||
'pidstat',
|
||||
'pidstat-s',
|
||||
'ping',
|
||||
@ -161,6 +167,7 @@ parsers = [
|
||||
'top-s',
|
||||
'tracepath',
|
||||
'traceroute',
|
||||
'udevadm',
|
||||
'ufw',
|
||||
'ufw-appinfo',
|
||||
'uname',
|
||||
@ -181,19 +188,19 @@ parsers = [
|
||||
'zipinfo'
|
||||
]
|
||||
|
||||
def _cliname_to_modname(parser_cli_name):
|
||||
def _cliname_to_modname(parser_cli_name: str) -> str:
|
||||
"""Return real module name (dashes converted to underscores)"""
|
||||
return parser_cli_name.replace('--', '').replace('-', '_')
|
||||
|
||||
def _modname_to_cliname(parser_mod_name):
|
||||
def _modname_to_cliname(parser_mod_name: str) -> str:
|
||||
"""Return module's cli name (underscores converted to dashes)"""
|
||||
return parser_mod_name.replace('_', '-')
|
||||
|
||||
# Create the local_parsers list. This is a list of custom or
|
||||
# override parsers from <user_data_dir>/jc/jcparsers/*.py.
|
||||
# Once this list is created, extend the parsers list with it.
|
||||
local_parsers = []
|
||||
data_dir = appdirs.user_data_dir('jc', 'jc')
|
||||
local_parsers: List[str] = []
|
||||
data_dir = appdirs.user_data_dir('jc', 'jc') # type: ignore
|
||||
local_parsers_dir = os.path.join(data_dir, 'jcparsers')
|
||||
if os.path.isdir(local_parsers_dir):
|
||||
sys.path.append(data_dir)
|
||||
@ -208,21 +215,20 @@ if os.path.isdir(local_parsers_dir):
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
def _parser_argument(parser_mod_name):
|
||||
def _parser_argument(parser_mod_name: str) -> str:
|
||||
"""Return short name of the parser with dashes and with -- prefix"""
|
||||
parser = _modname_to_cliname(parser_mod_name)
|
||||
return f'--{parser}'
|
||||
|
||||
def _get_parser(parser_mod_name):
|
||||
def _get_parser(parser_mod_name: str) -> ModuleType:
|
||||
"""Return the parser module object"""
|
||||
# ensure parser_mod_name is a true module name and not a cli name
|
||||
parser_mod_name = _cliname_to_modname(parser_mod_name)
|
||||
|
||||
parser_cli_name = _modname_to_cliname(parser_mod_name)
|
||||
modpath = 'jcparsers.' if parser_cli_name in local_parsers else 'jc.parsers.'
|
||||
modpath: str = 'jcparsers.' if parser_cli_name in local_parsers else 'jc.parsers.'
|
||||
return importlib.import_module(f'{modpath}{parser_mod_name}')
|
||||
|
||||
def _parser_is_streaming(parser):
|
||||
def _parser_is_streaming(parser: ModuleType) -> bool:
|
||||
"""
|
||||
Returns True if this is a streaming parser, else False
|
||||
|
||||
@ -233,16 +239,39 @@ def _parser_is_streaming(parser):
|
||||
|
||||
return False
|
||||
|
||||
def _parser_is_hidden(parser: ModuleType) -> bool:
|
||||
"""
|
||||
Returns True if this is a hidden parser, else False
|
||||
|
||||
parser is a parser module object.
|
||||
"""
|
||||
if getattr(parser.info, 'hidden', None):
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
def _parser_is_deprecated(parser: ModuleType) -> bool:
|
||||
"""
|
||||
Returns True if this is a deprecated parser, else False
|
||||
|
||||
parser is a parser module object.
|
||||
"""
|
||||
if getattr(parser.info, 'deprecated', None):
|
||||
return True
|
||||
|
||||
return False
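# Illustrative note (not in the source): a parser opts in to these checks by
# setting a flag on its `info` class, e.g.:
#
#   class info():
#       ...
#       hidden = True        # or: deprecated = True
#
# The renamed iso-datetime parser later in this commit sets `deprecated = True`.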
|
||||
|
||||
def parse(
|
||||
parser_mod_name: str,
|
||||
parser_mod_name: Union[str, ModuleType],
|
||||
data: Union[str, bytes, Iterable[str]],
|
||||
quiet: bool = False,
|
||||
raw: bool = False,
|
||||
ignore_exceptions: bool = None,
|
||||
**kwargs
|
||||
) -> Union[Dict, List[Dict], Iterator[Dict]]:
|
||||
) -> Union[JSONDictType, List[JSONDictType], Iterator[JSONDictType]]:
|
||||
"""
|
||||
Parse the string data using the supplied parser module.
|
||||
Parse the data (string or bytes) using the supplied parser (string or
|
||||
module object).
|
||||
|
||||
This function provides a high-level API to simplify parser use. This
|
||||
function will call built-in parsers and custom plugin parsers.
|
||||
@ -266,6 +295,14 @@ def parse(
|
||||
|
||||
To get a list of available parser module names, use `parser_mod_list()`.
|
||||
|
||||
Alternatively, a parser module object can be supplied:
|
||||
|
||||
>>> import jc
|
||||
>>> import jc.parsers.date as jc_date
|
||||
>>> date_obj = jc.parse(jc_date, 'Tue Jan 18 10:23:07 PST 2022')
|
||||
>>> print(f'The year is: {date_obj["year"]}')
|
||||
The year is: 2022
|
||||
|
||||
You can also use the lower-level parser modules directly:
|
||||
|
||||
>>> import jc.parsers.date
|
||||
@ -286,11 +323,14 @@ def parse(
|
||||
|
||||
Parameters:
|
||||
|
||||
parser_mod_name: (string) name of the parser module. This
|
||||
function will accept module_name,
|
||||
parser_mod_name: (string or name of the parser module. This
|
||||
Module) function will accept module_name,
|
||||
cli-name, and --argument-name
|
||||
variants of the module name.
|
||||
|
||||
A Module object can also be passed
|
||||
directly or via _get_parser()
|
||||
|
||||
data: (string or data to parse (string or bytes for
|
||||
bytes or standard parsers, iterable of
|
||||
iterable) strings for streaming parsers)
|
||||
@ -307,51 +347,113 @@ def parse(
|
||||
Standard Parsers: Dictionary or List of Dictionaries
|
||||
Streaming Parsers: Generator Object containing Dictionaries
|
||||
"""
|
||||
jc_parser = _get_parser(parser_mod_name)
|
||||
if isinstance(parser_mod_name, ModuleType):
|
||||
jc_parser = parser_mod_name
|
||||
else:
|
||||
jc_parser = _get_parser(parser_mod_name)
|
||||
|
||||
if ignore_exceptions is not None:
|
||||
return jc_parser.parse(data, quiet=quiet, raw=raw,
|
||||
ignore_exceptions=ignore_exceptions, **kwargs)
|
||||
return jc_parser.parse(
|
||||
data,
|
||||
quiet=quiet,
|
||||
raw=raw,
|
||||
ignore_exceptions=ignore_exceptions,
|
||||
**kwargs
|
||||
)
|
||||
|
||||
return jc_parser.parse(data, quiet=quiet, raw=raw, **kwargs)
|
||||
|
||||
def parser_mod_list() -> List[str]:
|
||||
def parser_mod_list(
|
||||
show_hidden: bool = False,
|
||||
show_deprecated: bool = False
|
||||
) -> List[str]:
|
||||
"""Returns a list of all available parser module names."""
|
||||
return [_cliname_to_modname(p) for p in parsers]
|
||||
plist: List[str] = []
|
||||
for p in parsers:
|
||||
parser = _get_parser(p)
|
||||
|
||||
def plugin_parser_mod_list() -> List[str]:
|
||||
if not show_hidden and _parser_is_hidden(parser):
|
||||
continue
|
||||
|
||||
if not show_deprecated and _parser_is_deprecated(parser):
|
||||
continue
|
||||
|
||||
plist.append(_cliname_to_modname(p))
|
||||
|
||||
return plist
|
||||
|
||||
def plugin_parser_mod_list(
|
||||
show_hidden: bool = False,
|
||||
show_deprecated: bool = False
|
||||
) -> List[str]:
|
||||
"""
|
||||
Returns a list of plugin parser module names. This function is a
|
||||
subset of `parser_mod_list()`.
|
||||
"""
|
||||
return [_cliname_to_modname(p) for p in local_parsers]
|
||||
plist: List[str] = []
|
||||
for p in local_parsers:
|
||||
parser = _get_parser(p)
|
||||
|
||||
def standard_parser_mod_list() -> List[str]:
|
||||
if not show_hidden and _parser_is_hidden(parser):
|
||||
continue
|
||||
|
||||
if not show_deprecated and _parser_is_deprecated(parser):
|
||||
continue
|
||||
|
||||
plist.append(_cliname_to_modname(p))
|
||||
|
||||
return plist
|
||||
|
||||
def standard_parser_mod_list(
|
||||
show_hidden: bool = False,
|
||||
show_deprecated: bool = False
|
||||
) -> List[str]:
|
||||
"""
|
||||
Returns a list of standard parser module names. This function is a
|
||||
subset of `parser_mod_list()` and does not contain any streaming
|
||||
parsers.
|
||||
"""
|
||||
plist = []
|
||||
plist: List[str] = []
|
||||
for p in parsers:
|
||||
parser = _get_parser(p)
|
||||
|
||||
if not _parser_is_streaming(parser):
|
||||
|
||||
if not show_hidden and _parser_is_hidden(parser):
|
||||
continue
|
||||
|
||||
if not show_deprecated and _parser_is_deprecated(parser):
|
||||
continue
|
||||
|
||||
plist.append(_cliname_to_modname(p))
|
||||
|
||||
return plist
|
||||
|
||||
def streaming_parser_mod_list() -> List[str]:
|
||||
def streaming_parser_mod_list(
|
||||
show_hidden: bool = False,
|
||||
show_deprecated: bool = False
|
||||
) -> List[str]:
|
||||
"""
|
||||
Returns a list of streaming parser module names. This function is a
|
||||
subset of `parser_mod_list()`.
|
||||
"""
|
||||
plist = []
|
||||
plist: List[str] = []
|
||||
for p in parsers:
|
||||
parser = _get_parser(p)
|
||||
|
||||
if _parser_is_streaming(parser):
|
||||
|
||||
if not show_hidden and _parser_is_hidden(parser):
|
||||
continue
|
||||
|
||||
if not show_deprecated and _parser_is_deprecated(parser):
|
||||
continue
|
||||
|
||||
plist.append(_cliname_to_modname(p))
|
||||
|
||||
return plist
|
||||
|
||||
def parser_info(parser_mod_name: str, documentation: bool = False) -> Dict:
|
||||
def parser_info(parser_mod_name: str, documentation: bool = False) -> ParserInfoType:
|
||||
"""
|
||||
Returns a dictionary that includes the parser module metadata.
|
||||
|
||||
@ -367,7 +469,7 @@ def parser_info(parser_mod_name: str, documentation: bool = False) -> Dict:
|
||||
# ensure parser_mod_name is a true module name and not a cli name
|
||||
parser_mod_name = _cliname_to_modname(parser_mod_name)
|
||||
parser_mod = _get_parser(parser_mod_name)
|
||||
info_dict: Dict = {}
|
||||
info_dict: ParserInfoType = {}
|
||||
|
||||
if hasattr(parser_mod, 'info'):
|
||||
info_dict['name'] = parser_mod_name
|
||||
@ -376,7 +478,7 @@ def parser_info(parser_mod_name: str, documentation: bool = False) -> Dict:
|
||||
|
||||
for k, v in parser_entry.items():
|
||||
if not k.startswith('__'):
|
||||
info_dict[k] = v
|
||||
info_dict[k] = v # type: ignore
|
||||
|
||||
if _modname_to_cliname(parser_mod_name) in local_parsers:
|
||||
info_dict['plugin'] = True
|
||||
@ -389,32 +491,39 @@ def parser_info(parser_mod_name: str, documentation: bool = False) -> Dict:
|
||||
|
||||
return info_dict
|
||||
|
||||
def all_parser_info(documentation: bool = False,
|
||||
show_hidden: bool = False
|
||||
) -> List[Dict]:
|
||||
def all_parser_info(
|
||||
documentation: bool = False,
|
||||
show_hidden: bool = False,
|
||||
show_deprecated: bool = False
|
||||
) -> List[ParserInfoType]:
|
||||
"""
|
||||
Returns a list of dictionaries that includes metadata for all parser
|
||||
modules.
|
||||
modules. By default only non-hidden, non-deprecated parsers are
|
||||
returned.
|
||||
|
||||
Parameters:
|
||||
|
||||
documentation: (boolean) include parser docstrings if True
|
||||
show_hidden: (boolean) also show parsers marked as hidden
|
||||
in their info metadata.
|
||||
show_deprecated: (boolean) also show parsers marked as
|
||||
deprecated in their info metadata.
|
||||
"""
|
||||
temp_list = [parser_info(p, documentation=documentation) for p in parsers]
|
||||
plist: List[str] = []
|
||||
for p in parsers:
|
||||
parser = _get_parser(p)
|
||||
|
||||
p_list = []
|
||||
if show_hidden:
|
||||
p_list = temp_list
|
||||
if not show_hidden and _parser_is_hidden(parser):
|
||||
continue
|
||||
|
||||
else:
|
||||
for item in temp_list:
|
||||
if not item.get('hidden', None):
|
||||
p_list.append(item)
|
||||
if not show_deprecated and _parser_is_deprecated(parser):
|
||||
continue
|
||||
|
||||
return p_list
|
||||
plist.append(_cliname_to_modname(p))
|
||||
|
||||
p_info_list: List[ParserInfoType] = [parser_info(p, documentation=documentation) for p in plist]
|
||||
|
||||
return p_info_list
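# Usage sketch (illustrative, not part of lib.py): callers can now opt in to
# seeing hidden or deprecated parsers through these keyword arguments, e.g.:
#
#   import jc
#   jc.parser_mod_list()                          # default: visible parsers only
#   jc.parser_mod_list(show_deprecated=True)      # also include e.g. 'iso_datetime'
#   jc.all_parser_info(show_hidden=True, show_deprecated=True)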
|
||||
|
||||
def get_help(parser_mod_name: str) -> None:
|
||||
"""
|
||||
|
jc/parsers/datetime_iso.py (new file, 313 lines)
@ -0,0 +1,313 @@
|
||||
"""jc - JSON Convert ISO 8601 Datetime string parser
|
||||
|
||||
This parser supports standard ISO 8601 strings that include both date and
|
||||
time. If no timezone or offset information is available in the string, then
|
||||
UTC timezone is used.
|
||||
|
||||
Usage (cli):
|
||||
|
||||
$ echo "2022-07-20T14:52:45Z" | jc --iso-datetime
|
||||
|
||||
Usage (module):
|
||||
|
||||
import jc
|
||||
result = jc.parse('datetime_iso', iso_8601_string)
|
||||
|
||||
Schema:
|
||||
|
||||
{
|
||||
"year": integer,
|
||||
"month": string,
|
||||
"month_num": integer,
|
||||
"day": integer,
|
||||
"weekday": string,
|
||||
"weekday_num": integer,
|
||||
"hour": integer,
|
||||
"hour_24": integer,
|
||||
"minute": integer,
|
||||
"second": integer,
|
||||
"microsecond": integer,
|
||||
"period": string,
|
||||
"utc_offset": string,
|
||||
"day_of_year": integer,
|
||||
"week_of_year": integer,
|
||||
"iso": string,
|
||||
"timestamp": integer # [0]
|
||||
}
|
||||
|
||||
[0] timezone aware UNIX timestamp expressed in UTC
|
||||
|
||||
Examples:
|
||||
|
||||
$ echo "2022-07-20T14:52:45Z" | jc --iso-datetime -p
|
||||
{
|
||||
"year": 2022,
|
||||
"month": "Jul",
|
||||
"month_num": 7,
|
||||
"day": 20,
|
||||
"weekday": "Wed",
|
||||
"weekday_num": 3,
|
||||
"hour": 2,
|
||||
"hour_24": 14,
|
||||
"minute": 52,
|
||||
"second": 45,
|
||||
"microsecond": 0,
|
||||
"period": "PM",
|
||||
"utc_offset": "+0000",
|
||||
"day_of_year": 201,
|
||||
"week_of_year": 29,
|
||||
"iso": "2022-07-20T14:52:45+00:00",
|
||||
"timestamp": 1658328765
|
||||
}
|
||||
"""
|
||||
import datetime
|
||||
import re
|
||||
import typing
|
||||
from decimal import Decimal
|
||||
import jc.utils
|
||||
|
||||
|
||||
class info():
|
||||
"""Provides parser metadata (version, author, etc.)"""
|
||||
version = '1.0'
|
||||
description = 'ISO 8601 Datetime string parser'
|
||||
author = 'Kelly Brazil'
|
||||
author_email = 'kellyjonbrazil@gmail.com'
|
||||
details = 'Using the pyiso8601 library from https://github.com/micktwomey/pyiso8601/releases/tag/1.0.2'
|
||||
compatible = ['linux', 'aix', 'freebsd', 'darwin', 'win32', 'cygwin']
|
||||
|
||||
|
||||
__version__ = info.version
|
||||
|
||||
|
||||
####################################################
|
||||
"""
|
||||
pyiso8601 library from https://github.com/micktwomey/pyiso8601/releases/tag/1.0.2
|
||||
"""
|
||||
|
||||
"""
|
||||
Copyright (c) 2007 - 2022 Michael Twomey
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a
|
||||
copy of this software and associated documentation files (the
|
||||
"Software"), to deal in the Software without restriction, including
|
||||
without limitation the rights to use, copy, modify, merge, publish,
|
||||
distribute, sublicense, and/or sell copies of the Software, and to
|
||||
permit persons to whom the Software is furnished to do so, subject to
|
||||
the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included
|
||||
in all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
|
||||
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
|
||||
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
|
||||
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
|
||||
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
|
||||
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
"""
|
||||
|
||||
"""ISO 8601 date time string parsing
|
||||
Basic usage:
|
||||
>>> import iso8601
|
||||
>>> iso8601._parse_date("2007-01-25T12:00:00Z")
|
||||
datetime.datetime(2007, 1, 25, 12, 0, tzinfo=<iso8601.Utc ...>)
|
||||
>>>
|
||||
"""
|
||||
|
||||
# __all__ = ["_parse_date", "_ParseError", "UTC", "_FixedOffset"]
|
||||
|
||||
# Adapted from http://delete.me.uk/2005/03/iso8601.html
|
||||
ISO8601_REGEX = re.compile(
|
||||
r"""
|
||||
(?P<year>[0-9]{4})
|
||||
(
|
||||
(
|
||||
(-(?P<monthdash>[0-9]{1,2}))
|
||||
|
|
||||
(?P<month>[0-9]{2})
|
||||
(?!$) # Don't allow YYYYMM
|
||||
)
|
||||
(
|
||||
(
|
||||
(-(?P<daydash>[0-9]{1,2}))
|
||||
|
|
||||
(?P<day>[0-9]{2})
|
||||
)
|
||||
(
|
||||
(
|
||||
(?P<separator>[ T])
|
||||
(?P<hour>[0-9]{2})
|
||||
(:{0,1}(?P<minute>[0-9]{2})){0,1}
|
||||
(
|
||||
:{0,1}(?P<second>[0-9]{1,2})
|
||||
([.,](?P<second_fraction>[0-9]+)){0,1}
|
||||
){0,1}
|
||||
(?P<timezone>
|
||||
Z
|
||||
|
|
||||
(
|
||||
(?P<tz_sign>[-+])
|
||||
(?P<tz_hour>[0-9]{2})
|
||||
:{0,1}
|
||||
(?P<tz_minute>[0-9]{2}){0,1}
|
||||
)
|
||||
){0,1}
|
||||
){0,1}
|
||||
)
|
||||
){0,1} # YYYY-MM
|
||||
){0,1} # YYYY only
|
||||
$
|
||||
""",
|
||||
re.VERBOSE,
|
||||
)
|
||||
|
||||
|
||||
class _ParseError(ValueError):
|
||||
"""Raised when there is a problem parsing a date string"""
|
||||
|
||||
|
||||
UTC = datetime.timezone.utc
|
||||
|
||||
|
||||
def _FixedOffset(
|
||||
offset_hours: float, offset_minutes: float, name: str
|
||||
) -> datetime.timezone:
|
||||
return datetime.timezone(
|
||||
datetime.timedelta(hours=offset_hours, minutes=offset_minutes), name
|
||||
)
|
||||
|
||||
|
||||
def _parse_timezone(
|
||||
matches: typing.Dict[str, str],
|
||||
default_timezone: typing.Optional[datetime.timezone] = UTC,
|
||||
) -> typing.Optional[datetime.timezone]:
|
||||
"""Parses ISO 8601 time zone specs into tzinfo offsets"""
|
||||
tz = matches.get("timezone", None)
|
||||
if tz == "Z":
|
||||
return UTC
|
||||
# This isn't strictly correct, but it's common to encounter dates without
|
||||
# timezones so I'll assume the default (which defaults to UTC).
|
||||
# Addresses issue 4.
|
||||
if tz is None:
|
||||
return default_timezone
|
||||
sign = matches.get("tz_sign", None)
|
||||
hours = int(matches.get("tz_hour", 0))
|
||||
minutes = int(matches.get("tz_minute", 0))
|
||||
description = f"{sign}{hours:02d}:{minutes:02d}"
|
||||
if sign == "-":
|
||||
hours = -hours
|
||||
minutes = -minutes
|
||||
return _FixedOffset(hours, minutes, description)
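# Illustrative example (not in the vendored source): for a "+05:30" offset the
# regex produces matches like {'timezone': '+05:30', 'tz_sign': '+',
# 'tz_hour': '05', 'tz_minute': '30'}, and _parse_timezone() returns
# datetime.timezone(datetime.timedelta(hours=5, minutes=30), '+05:30').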
|
||||
|
||||
|
||||
def _parse_date(
|
||||
datestring: str, default_timezone: typing.Optional[datetime.timezone] = UTC
|
||||
) -> datetime.datetime:
|
||||
"""Parses ISO 8601 dates into datetime objects
|
||||
The timezone is parsed from the date string. However it is quite common to
|
||||
have dates without a timezone (not strictly correct). In this case the
|
||||
default timezone specified in default_timezone is used. This is UTC by
|
||||
default.
|
||||
:param datestring: The date to parse as a string
|
||||
:param default_timezone: A datetime tzinfo instance to use when no timezone
|
||||
is specified in the datestring. If this is set to
|
||||
None then a naive datetime object is returned.
|
||||
:returns: A datetime.datetime instance
|
||||
:raises: _ParseError when there is a problem parsing the date or
|
||||
constructing the datetime instance.
|
||||
"""
|
||||
try:
|
||||
m = ISO8601_REGEX.match(datestring)
|
||||
except Exception as e:
|
||||
raise _ParseError(e)
|
||||
|
||||
if not m:
|
||||
raise _ParseError(f"Unable to parse date string {datestring!r}")
|
||||
|
||||
# Drop any Nones from the regex matches
|
||||
# TODO: check if there's a way to omit results in regexes
|
||||
groups: typing.Dict[str, str] = {
|
||||
k: v for k, v in m.groupdict().items() if v is not None
|
||||
}
|
||||
|
||||
try:
|
||||
return datetime.datetime(
|
||||
year=int(groups.get("year", 0)),
|
||||
month=int(groups.get("month", groups.get("monthdash", 1))),
|
||||
day=int(groups.get("day", groups.get("daydash", 1))),
|
||||
hour=int(groups.get("hour", 0)),
|
||||
minute=int(groups.get("minute", 0)),
|
||||
second=int(groups.get("second", 0)),
|
||||
microsecond=int(
|
||||
Decimal(f"0.{groups.get('second_fraction', 0)}") * Decimal("1000000.0")
|
||||
),
|
||||
tzinfo=_parse_timezone(groups, default_timezone=default_timezone),
|
||||
)
|
||||
except Exception as e:
|
||||
raise _ParseError(e)
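# Quick illustrative check (not part of the vendored code):
#
#   _parse_date("2022-07-20T14:52:45Z")
#   # -> datetime.datetime(2022, 7, 20, 14, 52, 45, tzinfo=datetime.timezone.utc)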
|
||||
|
||||
####################################################
|
||||
|
||||
|
||||
def _process(proc_data):
|
||||
"""
|
||||
Final processing to conform to the schema.
|
||||
|
||||
Parameters:
|
||||
|
||||
proc_data: (Dictionary) raw structured data to process
|
||||
|
||||
Returns:
|
||||
|
||||
Dictionary. Structured data to conform to the schema.
|
||||
"""
|
||||
# no further processing
|
||||
return proc_data
|
||||
|
||||
|
||||
def parse(data, raw=False, quiet=False):
|
||||
"""
|
||||
Main text parsing function
|
||||
|
||||
Parameters:
|
||||
|
||||
data: (string) text data to parse
|
||||
raw: (boolean) unprocessed output if True
|
||||
quiet: (boolean) suppress warning messages if True
|
||||
|
||||
Returns:
|
||||
|
||||
Dictionary. Raw or processed structured data.
|
||||
"""
|
||||
jc.utils.compatibility(__name__, info.compatible, quiet)
|
||||
jc.utils.input_type_check(data)
|
||||
|
||||
raw_output = {}
|
||||
|
||||
if jc.utils.has_data(data):
|
||||
|
||||
dt = _parse_date(data)
|
||||
|
||||
raw_output = {
|
||||
'year': dt.year,
|
||||
'month': dt.strftime('%b'),
|
||||
'month_num': dt.month,
|
||||
'day': dt.day,
|
||||
'weekday': dt.strftime('%a'),
|
||||
'weekday_num': dt.isoweekday(),
|
||||
'hour': int(dt.strftime('%I')),
|
||||
'hour_24': dt.hour,
|
||||
'minute': dt.minute,
|
||||
'second': dt.second,
|
||||
'microsecond': dt.microsecond,
|
||||
'period': dt.strftime('%p').upper(),
|
||||
'utc_offset': dt.strftime('%z') or None,
|
||||
'day_of_year': int(dt.strftime('%j')),
|
||||
'week_of_year': int(dt.strftime('%W')),
|
||||
'iso': dt.isoformat(),
|
||||
'timestamp': int(dt.timestamp())
|
||||
}
|
||||
|
||||
return raw_output if raw else _process(raw_output)
|
@ -34,6 +34,7 @@ Examples:
|
||||
[]
|
||||
"""
|
||||
from typing import List, Dict
|
||||
from jc.jc_types import JSONDictType
|
||||
import jc.utils
|
||||
|
||||
|
||||
@ -53,7 +54,7 @@ class info():
|
||||
__version__ = info.version
|
||||
|
||||
|
||||
def _process(proc_data: List[Dict]) -> List[Dict]:
|
||||
def _process(proc_data: List[JSONDictType]) -> List[JSONDictType]:
|
||||
"""
|
||||
Final processing to conform to the schema.
|
||||
|
||||
@ -78,7 +79,7 @@ def parse(
|
||||
data: str,
|
||||
raw: bool = False,
|
||||
quiet: bool = False
|
||||
) -> List[Dict]:
|
||||
) -> List[JSONDictType]:
|
||||
"""
|
||||
Main text parsing function
|
||||
|
||||
@ -95,7 +96,7 @@ def parse(
|
||||
jc.utils.compatibility(__name__, info.compatible, quiet)
|
||||
jc.utils.input_type_check(data)
|
||||
|
||||
raw_output: List = []
|
||||
raw_output: List[Dict] = []
|
||||
|
||||
if jc.utils.has_data(data):
|
||||
|
||||
|
@ -40,11 +40,12 @@ Examples:
|
||||
{example output}
|
||||
...
|
||||
"""
|
||||
from typing import Dict, Iterable, Union
|
||||
from typing import Dict, Iterable
|
||||
import jc.utils
|
||||
from jc.streaming import (
|
||||
add_jc_meta, streaming_input_type_check, streaming_line_input_type_check, raise_or_yield
|
||||
)
|
||||
from jc.jc_types import JSONDictType, StreamingOutputType
|
||||
from jc.exceptions import ParseError
|
||||
|
||||
|
||||
@ -63,7 +64,7 @@ class info():
|
||||
__version__ = info.version
|
||||
|
||||
|
||||
def _process(proc_data: Dict) -> Dict:
|
||||
def _process(proc_data: JSONDictType) -> JSONDictType:
|
||||
"""
|
||||
Final processing to conform to the schema.
|
||||
|
||||
@ -90,7 +91,7 @@ def parse(
|
||||
raw: bool = False,
|
||||
quiet: bool = False,
|
||||
ignore_exceptions: bool = False
|
||||
) -> Union[Iterable[Dict], tuple]:
|
||||
) -> StreamingOutputType:
|
||||
"""
|
||||
Main text parsing generator function. Returns an iterable object.
|
||||
|
||||
|
@ -21,6 +21,9 @@ Schema:
|
||||
"ip": string,
|
||||
"ip_compressed": string,
|
||||
"ip_exploded": string,
|
||||
"ip_split": [
|
||||
string
|
||||
],
|
||||
"scope_id": string/null,
|
||||
"ipv4_mapped": string/null,
|
||||
"six_to_four": string/null,
|
||||
@ -78,6 +81,12 @@ Examples:
|
||||
"ip": "192.168.2.10",
|
||||
"ip_compressed": "192.168.2.10",
|
||||
"ip_exploded": "192.168.2.10",
|
||||
"ip_split": [
|
||||
"192",
|
||||
"168",
|
||||
"2",
|
||||
"10"
|
||||
],
|
||||
"scope_id": null,
|
||||
"ipv4_mapped": null,
|
||||
"six_to_four": null,
|
||||
@ -133,6 +142,12 @@ Examples:
|
||||
"ip": "192.168.2.10",
|
||||
"ip_compressed": "192.168.2.10",
|
||||
"ip_exploded": "192.168.2.10",
|
||||
"ip_split": [
|
||||
"192",
|
||||
"168",
|
||||
"2",
|
||||
"10"
|
||||
],
|
||||
"scope_id": null,
|
||||
"ipv4_mapped": null,
|
||||
"six_to_four": null,
|
||||
@ -186,14 +201,24 @@ Examples:
|
||||
"version": 6,
|
||||
"max_prefix_length": 128,
|
||||
"ip": "127:0:de::1",
|
||||
"ip_compressed": "127:0:de::1%128",
|
||||
"ip_compressed": "127:0:de::1",
|
||||
"ip_exploded": "0127:0000:00de:0000:0000:0000:0000:0001",
|
||||
"ip_split": [
|
||||
"0127",
|
||||
"0000",
|
||||
"00de",
|
||||
"0000",
|
||||
"0000",
|
||||
"0000",
|
||||
"0000",
|
||||
"0001"
|
||||
],
|
||||
"scope_id": "128",
|
||||
"ipv4_mapped": null,
|
||||
"six_to_four": null,
|
||||
"teredo_client": null,
|
||||
"teredo_server": null,
|
||||
"dns_ptr": "1.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.....0.7.2.1.0.ip6.arpa",
|
||||
"dns_ptr": "1.0.0.0.0.0...0.0.0.e.d.0.0.0.0.0.0.7.2.1.0.ip6.arpa",
|
||||
"network": "127:0:de::",
|
||||
"broadcast": "127:0:de::ffff:ffff",
|
||||
"hostmask": "::ffff:ffff",
|
||||
@ -226,13 +251,13 @@ Examples:
|
||||
"last_host": "01:27:00:00:00:de:00:00:00:00:00:00:ff:ff:ff:fe"
|
||||
},
|
||||
"bin": {
|
||||
"ip": "000000010010011100000000000000000000000011011110000000...",
|
||||
"network": "0000000100100111000000000000000000000000110111100...",
|
||||
"broadcast": "00000001001001110000000000000000000000001101111...",
|
||||
"hostmask": "000000000000000000000000000000000000000000000000...",
|
||||
"netmask": "1111111111111111111111111111111111111111111111111...",
|
||||
"first_host": "0000000100100111000000000000000000000000110111...",
|
||||
"last_host": "00000001001001110000000000000000000000001101111..."
|
||||
"ip": "0000000100100111000000000000000000000000110...000000000001",
|
||||
"network": "00000001001001110000000000000000000000...000000000000",
|
||||
"broadcast": "000000010010011100000000000000000000...111111111111",
|
||||
"hostmask": "0000000000000000000000000000000000000...111111111111",
|
||||
"netmask": "11111111111111111111111111111111111111...000000000000",
|
||||
"first_host": "00000001001001110000000000000000000...000000000001",
|
||||
"last_host": "000000010010011100000000000000000000...1111111111110"
|
||||
}
|
||||
}
|
||||
|
||||
@ -243,12 +268,22 @@ Examples:
|
||||
"ip": "127:0:de::1",
|
||||
"ip_compressed": "127:0:de::1",
|
||||
"ip_exploded": "0127:0000:00de:0000:0000:0000:0000:0001",
|
||||
"ip_split": [
|
||||
"0127",
|
||||
"0000",
|
||||
"00de",
|
||||
"0000",
|
||||
"0000",
|
||||
"0000",
|
||||
"0000",
|
||||
"0001"
|
||||
],
|
||||
"scope_id": null,
|
||||
"ipv4_mapped": null,
|
||||
"six_to_four": null,
|
||||
"teredo_client": null,
|
||||
"teredo_server": null,
|
||||
"dns_ptr": "1.0.0.0.0.0.0....0.0.0.e.d.0.0.0.0.0.0.7.2.1.0.ip6.arpa",
|
||||
"dns_ptr": "1.0.0.0.0.0....0.0.0.0.e.d.0.0.0.0.0.0.7.2.1.0.ip6.arpa",
|
||||
"network": "127:0:de::1",
|
||||
"broadcast": "127:0:de::1",
|
||||
"hostmask": "::",
|
||||
@ -281,13 +316,13 @@ Examples:
|
||||
"last_host": "01:27:00:00:00:de:00:00:00:00:00:00:00:00:00:01"
|
||||
},
|
||||
"bin": {
|
||||
"ip": "0000000100100111000000000000000000000000110111100000000...",
|
||||
"network": "00000001001001110000000000000000000000001101111000...",
|
||||
"broadcast": "000000010010011100000000000000000000000011011110...",
|
||||
"hostmask": "0000000000000000000000000000000000000000000000000...",
|
||||
"netmask": "11111111111111111111111111111111111111111111111111...",
|
||||
"first_host": "00000001001001110000000000000000000000001101111...",
|
||||
"last_host": "000000010010011100000000000000000000000011011110..."
|
||||
"ip": "0000000100100111000000000000000000000000110111100...000001",
|
||||
"network": "00000001001001110000000000000000000000001101...000001",
|
||||
"broadcast": "000000010010011100000000000000000000000011...000001",
|
||||
"hostmask": "0000000000000000000000000000000000000000000...000000",
|
||||
"netmask": "11111111111111111111111111111111111111111111...111111",
|
||||
"first_host": "00000001001001110000000000000000000000001...000001",
|
||||
"last_host": "000000010010011100000000000000000000000011...0000001"
|
||||
}
|
||||
}
|
||||
|
||||
@ -299,12 +334,22 @@ Examples:
|
||||
"ip": "::ffff:c0a8:123",
|
||||
"ip_compressed": "::ffff:c0a8:123",
|
||||
"ip_exploded": "0000:0000:0000:0000:0000:ffff:c0a8:0123",
|
||||
"ip_split": [
|
||||
"0000",
|
||||
"0000",
|
||||
"0000",
|
||||
"0000",
|
||||
"0000",
|
||||
"ffff",
|
||||
"c0a8",
|
||||
"0123"
|
||||
],
|
||||
"scope_id": null,
|
||||
"ipv4_mapped": "192.168.1.35",
|
||||
"six_to_four": null,
|
||||
"teredo_client": null,
|
||||
"teredo_server": null,
|
||||
"dns_ptr": "3.2.1.0.8.a.0.c.f.f.f.f.0.0.0....0.0.0.0.0.0.0.ip6.arpa",
|
||||
"dns_ptr": "3.2.1.0.8.a.0.c.f.f.f.f.0.0....0.0.0.0.0.0.ip6.arpa",
|
||||
"network": "::ffff:c0a8:123",
|
||||
"broadcast": "::ffff:c0a8:123",
|
||||
"hostmask": "::",
|
||||
@ -337,13 +382,13 @@ Examples:
|
||||
"last_host": "00:00:00:00:00:00:00:00:00:00:ff:ff:c0:a8:01:23"
|
||||
},
|
||||
"bin": {
|
||||
"ip": "0000000000000000000000000000000000000000000000000000000...",
|
||||
"network": "00000000000000000000000000000000000000000000000000...",
|
||||
"broadcast": "000000000000000000000000000000000000000000000000...",
|
||||
"hostmask": "0000000000000000000000000000000000000000000000000...",
|
||||
"netmask": "11111111111111111111111111111111111111111111111111...",
|
||||
"first_host": "00000000000000000000000000000000000000000000000...",
|
||||
"last_host": "000000000000000000000000000000000000000000000000..."
|
||||
"ip": "000000000000000000000000000000000000000000000...100100011",
|
||||
"network": "0000000000000000000000000000000000000000...000100011",
|
||||
"broadcast": "00000000000000000000000000000000000000...000100011",
|
||||
"hostmask": "000000000000000000000000000000000000000...000000000",
|
||||
"netmask": "1111111111111111111111111111111111111111...111111111",
|
||||
"first_host": "0000000000000000000000000000000000000...100100011",
|
||||
"last_host": "00000000000000000000000000000000000000...0100100011"
|
||||
}
|
||||
}
|
||||
|
||||
@ -355,12 +400,22 @@ Examples:
|
||||
"ip": "2002:c000:204::",
|
||||
"ip_compressed": "2002:c000:204::",
|
||||
"ip_exploded": "2002:c000:0204:0000:0000:0000:0000:0000",
|
||||
"ip_split": [
|
||||
"2002",
|
||||
"c000",
|
||||
"0204",
|
||||
"0000",
|
||||
"0000",
|
||||
"0000",
|
||||
"0000",
|
||||
"0000"
|
||||
],
|
||||
"scope_id": null,
|
||||
"ipv4_mapped": null,
|
||||
"six_to_four": "192.0.2.4",
|
||||
"teredo_client": null,
|
||||
"teredo_server": null,
|
||||
"dns_ptr": "0.0.0.0.0.0.0.0......0.4.0.2.0.0.0.0.c.2.0.0.2.ip6.arpa",
|
||||
"dns_ptr": "0.0.0.0.0.0.0...0.0.0.4.0.2.0.0.0.0.c.2.0.0.2.ip6.arpa",
|
||||
"network": "2002:c000:204::",
|
||||
"broadcast": "2002:c000:204:ffff:ffff:ffff:ffff:ffff",
|
||||
"hostmask": "::ffff:ffff:ffff:ffff:ffff",
|
||||
@ -393,13 +448,13 @@ Examples:
|
||||
"last_host": "20:02:c0:00:02:04:ff:ff:ff:ff:ff:ff:ff:ff:ff:fe"
|
||||
},
|
||||
"bin": {
|
||||
"ip": "0010000000000010110000000000000000000010000001000000000...",
|
||||
"network": "00100000000000101100000000000000000000100000010000...",
|
||||
"broadcast": "001000000000001011000000000000000000001000000100...",
|
||||
"hostmask": "0000000000000000000000000000000000000000000000001...",
|
||||
"netmask": "11111111111111111111111111111111111111111111111100...",
|
||||
"first_host": "00100000000000101100000000000000000000100000010...",
|
||||
"last_host": "001000000000001011000000000000000000001000000100..."
|
||||
"ip": "00100000000000101100000000000000000000100000010...00000000",
|
||||
"network": "001000000000001011000000000000000000001000...00000000",
|
||||
"broadcast": "0010000000000010110000000000000000000010...11111111",
|
||||
"hostmask": "00000000000000000000000000000000000000000...11111111",
|
||||
"netmask": "111111111111111111111111111111111111111111...00000000",
|
||||
"first_host": "001000000000001011000000000000000000001...00000001",
|
||||
"last_host": "0010000000000010110000000000000000000010...111111110"
|
||||
}
|
||||
}
|
||||
|
||||
@ -411,12 +466,22 @@ Examples:
|
||||
"ip": "2001:0:4136:e378:8000:63bf:3fff:fdd2",
|
||||
"ip_compressed": "2001:0:4136:e378:8000:63bf:3fff:fdd2",
|
||||
"ip_exploded": "2001:0000:4136:e378:8000:63bf:3fff:fdd2",
|
||||
"ip_split": [
|
||||
"2001",
|
||||
"0000",
|
||||
"4136",
|
||||
"e378",
|
||||
"8000",
|
||||
"63bf",
|
||||
"3fff",
|
||||
"fdd2"
|
||||
],
|
||||
"scope_id": null,
|
||||
"ipv4_mapped": null,
|
||||
"six_to_four": null,
|
||||
"teredo_client": "192.0.2.45",
|
||||
"teredo_server": "65.54.227.120",
|
||||
"dns_ptr": "2.d.d.f.f.f.f.3.f.b.3.6.0.0.0....0.0.0.1.0.0.2.ip6.arpa",
|
||||
"dns_ptr": "2.d.d.f.f.f.f.3.f.b.3.6.0.0.0.8.8....0.1.0.0.2.ip6.arpa",
|
||||
"network": "2001:0:4136:e378:8000:63bf:3fff:fdd2",
|
||||
"broadcast": "2001:0:4136:e378:8000:63bf:3fff:fdd2",
|
||||
"hostmask": "::",
|
||||
@ -449,13 +514,13 @@ Examples:
|
||||
"last_host": "20:01:00:00:41:36:e3:78:80:00:63:bf:3f:ff:fd:d2"
|
||||
},
|
||||
"bin": {
|
||||
"ip": "001000000000000100000000000000000100000100110110111000...",
|
||||
"network": "0010000000000001000000000000000001000001001101101...",
|
||||
"broadcast": "00100000000000010000000000000000010000010011011...",
|
||||
"hostmask": "000000000000000000000000000000000000000000000000...",
|
||||
"netmask": "1111111111111111111111111111111111111111111111111...",
|
||||
"first_host": "0010000000000001000000000000000001000001001101...",
|
||||
"last_host": "00100000000000010000000000000000010000010011011..."
|
||||
"ip": "0010000000000001000000000000000001000001001...110111010010",
|
||||
"network": "00100000000000010000000000000000010000...110111010010",
|
||||
"broadcast": "001000000000000100000000000000000100...110111010010",
|
||||
"hostmask": "0000000000000000000000000000000000000...000000000000",
|
||||
"netmask": "11111111111111111111111111111111111111...111111111111",
|
||||
"first_host": "00100000000000010000000000000000010...110111010010",
|
||||
"last_host": "001000000000000100000000000000000100...110111010010"
|
||||
}
|
||||
}
|
||||
"""
|
||||
@ -468,7 +533,7 @@ import jc.utils
|
||||
|
||||
class info():
|
||||
"""Provides parser metadata (version, author, etc.)"""
|
||||
version = '1.2'
|
||||
version = '1.3'
|
||||
description = 'IPv4 and IPv6 Address string parser'
|
||||
author = 'Kelly Brazil'
|
||||
author_email = 'kellyjonbrazil@gmail.com'
|
||||
@ -590,6 +655,13 @@ def parse(
|
||||
bare_ip_string = str(interface.ip)
|
||||
bare_ip = ipaddress.ip_address(bare_ip_string)
|
||||
ip_ptr = bare_ip.reverse_pointer
|
||||
ip_compressed = bare_ip.compressed
|
||||
ip_exploded = bare_ip.exploded
|
||||
|
||||
if interface.version == 4:
|
||||
ip_split = ip_exploded.split('.')
|
||||
else:
|
||||
ip_split = ip_exploded.split(':')
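# Illustrative values (not in the source): "192.168.2.10" yields
# ['192', '168', '2', '10'], while an exploded IPv6 address like
# "0127:0000:00de:0000:0000:0000:0000:0001" yields its eight zero-padded hextets.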
|
||||
|
||||
# fix for ipv6-only attributes
|
||||
scope_id = None
|
||||
@ -638,8 +710,9 @@ def parse(
|
||||
'version': interface.version,
|
||||
'max_prefix_length': interface.max_prefixlen,
|
||||
'ip': bare_ip_string,
|
||||
'ip_compressed': bare_ip.compressed,
|
||||
'ip_exploded': bare_ip.exploded,
|
||||
'ip_compressed': ip_compressed,
|
||||
'ip_exploded': ip_exploded,
|
||||
'ip_split': ip_split,
|
||||
'scope_id': scope_id,
|
||||
'ipv4_mapped': ipv4_mapped,
|
||||
'six_to_four': sixtofour,
|
||||
|
@ -1,275 +1,32 @@
|
||||
"""jc - JSON Convert ISO 8601 Datetime string parser
|
||||
|
||||
This parser supports standard ISO 8601 strings that include both date and
|
||||
time. If no timezone or offset information is available in the sring, then
|
||||
UTC timezone is used.
|
||||
This parser has been renamed to datetime-iso (cli) or datetime_iso (module).
|
||||
|
||||
Usage (cli):
|
||||
|
||||
$ echo "2022-07-20T14:52:45Z" | jc --iso-datetime
|
||||
|
||||
Usage (module):
|
||||
|
||||
import jc
|
||||
result = jc.parse('iso_datetime', iso_8601_string)
|
||||
|
||||
Schema:
|
||||
|
||||
{
|
||||
"year": integer,
|
||||
"month": string,
|
||||
"month_num": integer,
|
||||
"day": integer,
|
||||
"weekday": string,
|
||||
"weekday_num": integer,
|
||||
"hour": integer,
|
||||
"hour_24": integer,
|
||||
"minute": integer,
|
||||
"second": integer,
|
||||
"microsecond": integer,
|
||||
"period": string,
|
||||
"utc_offset": string,
|
||||
"day_of_year": integer,
|
||||
"week_of_year": integer,
|
||||
"iso": string,
|
||||
"timestamp": integer # [0]
|
||||
}
|
||||
|
||||
[0] timezone aware UNIX timestamp expressed in UTC
|
||||
|
||||
Examples:
|
||||
|
||||
$ echo "2022-07-20T14:52:45Z" | jc --iso-datetime -p
|
||||
{
|
||||
"year": 2022,
|
||||
"month": "Jul",
|
||||
"month_num": 7,
|
||||
"day": 20,
|
||||
"weekday": "Wed",
|
||||
"weekday_num": 3,
|
||||
"hour": 2,
|
||||
"hour_24": 14,
|
||||
"minute": 52,
|
||||
"second": 45,
|
||||
"microsecond": 0,
|
||||
"period": "PM",
|
||||
"utc_offset": "+0000",
|
||||
"day_of_year": 201,
|
||||
"week_of_year": 29,
|
||||
"iso": "2022-07-20T14:52:45+00:00",
|
||||
"timestamp": 1658328765
|
||||
}
|
||||
This parser will be removed in a future version, so please start using
|
||||
the new parser name.
|
||||
"""
|
||||
import datetime
|
||||
import re
|
||||
import typing
|
||||
from decimal import Decimal
|
||||
from jc.parsers import datetime_iso
|
||||
import jc.utils
|
||||
|
||||
|
||||
class info():
|
||||
"""Provides parser metadata (version, author, etc.)"""
|
||||
version = '1.0'
|
||||
description = 'ISO 8601 Datetime string parser'
|
||||
version = '1.1'
|
||||
description = 'Deprecated - please use datetime-iso'
|
||||
author = 'Kelly Brazil'
|
||||
author_email = 'kellyjonbrazil@gmail.com'
|
||||
details = 'Using the pyiso8601 library from https://github.com/micktwomey/pyiso8601/releases/tag/1.0.2'
|
||||
details = 'Deprecated - please use datetime-iso'
|
||||
compatible = ['linux', 'aix', 'freebsd', 'darwin', 'win32', 'cygwin']
|
||||
deprecated = True
|
||||
|
||||
|
||||
__version__ = info.version
|
||||
|
||||
|
||||
####################################################
|
||||
"""
|
||||
pyiso8601 library from https://github.com/micktwomey/pyiso8601/releases/tag/1.0.2
|
||||
"""
|
||||
|
||||
"""
|
||||
Copyright (c) 2007 - 2022 Michael Twomey
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a
|
||||
copy of this software and associated documentation files (the
|
||||
"Software"), to deal in the Software without restriction, including
|
||||
without limitation the rights to use, copy, modify, merge, publish,
|
||||
distribute, sublicense, and/or sell copies of the Software, and to
|
||||
permit persons to whom the Software is furnished to do so, subject to
|
||||
the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included
|
||||
in all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
|
||||
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
|
||||
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
|
||||
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
|
||||
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
|
||||
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
"""
|
||||
|
||||
"""ISO 8601 date time string parsing
|
||||
Basic usage:
|
||||
>>> import iso8601
|
||||
>>> iso8601._parse_date("2007-01-25T12:00:00Z")
|
||||
datetime.datetime(2007, 1, 25, 12, 0, tzinfo=<iso8601.Utc ...>)
|
||||
>>>
|
||||
"""
|
||||
|
||||
# __all__ = ["_parse_date", "_ParseError", "UTC", "_FixedOffset"]
|
||||
|
||||
# Adapted from http://delete.me.uk/2005/03/iso8601.html
|
||||
ISO8601_REGEX = re.compile(
|
||||
r"""
|
||||
(?P<year>[0-9]{4})
|
||||
(
|
||||
(
|
||||
(-(?P<monthdash>[0-9]{1,2}))
|
||||
|
|
||||
(?P<month>[0-9]{2})
|
||||
(?!$) # Don't allow YYYYMM
|
||||
)
|
||||
(
|
||||
(
|
||||
(-(?P<daydash>[0-9]{1,2}))
|
||||
|
|
||||
(?P<day>[0-9]{2})
|
||||
)
|
||||
(
|
||||
(
|
||||
(?P<separator>[ T])
|
||||
(?P<hour>[0-9]{2})
|
||||
(:{0,1}(?P<minute>[0-9]{2})){0,1}
|
||||
(
|
||||
:{0,1}(?P<second>[0-9]{1,2})
|
||||
([.,](?P<second_fraction>[0-9]+)){0,1}
|
||||
){0,1}
|
||||
(?P<timezone>
|
||||
Z
|
||||
|
|
||||
(
|
||||
(?P<tz_sign>[-+])
|
||||
(?P<tz_hour>[0-9]{2})
|
||||
:{0,1}
|
||||
(?P<tz_minute>[0-9]{2}){0,1}
|
||||
)
|
||||
){0,1}
|
||||
){0,1}
|
||||
)
|
||||
){0,1} # YYYY-MM
|
||||
){0,1} # YYYY only
|
||||
$
|
||||
""",
|
||||
re.VERBOSE,
|
||||
)
|
||||
|
||||
|
||||
class _ParseError(ValueError):
|
||||
"""Raised when there is a problem parsing a date string"""
|
||||
|
||||
|
||||
UTC = datetime.timezone.utc
|
||||
|
||||
|
||||
def _FixedOffset(
|
||||
offset_hours: float, offset_minutes: float, name: str
|
||||
) -> datetime.timezone:
|
||||
return datetime.timezone(
|
||||
datetime.timedelta(hours=offset_hours, minutes=offset_minutes), name
|
||||
)
|
||||
|
||||
|
||||
def _parse_timezone(
|
||||
matches: typing.Dict[str, str],
|
||||
default_timezone: typing.Optional[datetime.timezone] = UTC,
|
||||
) -> typing.Optional[datetime.timezone]:
|
||||
"""Parses ISO 8601 time zone specs into tzinfo offsets"""
|
||||
tz = matches.get("timezone", None)
|
||||
if tz == "Z":
|
||||
return UTC
|
||||
# This isn't strictly correct, but it's common to encounter dates without
|
||||
# timezones so I'll assume the default (which defaults to UTC).
|
||||
# Addresses issue 4.
|
||||
if tz is None:
|
||||
return default_timezone
|
||||
sign = matches.get("tz_sign", None)
|
||||
hours = int(matches.get("tz_hour", 0))
|
||||
minutes = int(matches.get("tz_minute", 0))
|
||||
description = f"{sign}{hours:02d}:{minutes:02d}"
|
||||
if sign == "-":
|
||||
hours = -hours
|
||||
minutes = -minutes
|
||||
return _FixedOffset(hours, minutes, description)
|
||||
|
||||
|
||||
def _parse_date(
|
||||
datestring: str, default_timezone: typing.Optional[datetime.timezone] = UTC
|
||||
) -> datetime.datetime:
|
||||
"""Parses ISO 8601 dates into datetime objects
|
||||
The timezone is parsed from the date string. However it is quite common to
|
||||
have dates without a timezone (not strictly correct). In this case the
|
||||
default timezone specified in default_timezone is used. This is UTC by
|
||||
default.
|
||||
:param datestring: The date to parse as a string
|
||||
:param default_timezone: A datetime tzinfo instance to use when no timezone
|
||||
is specified in the datestring. If this is set to
|
||||
None then a naive datetime object is returned.
|
||||
:returns: A datetime.datetime instance
|
||||
:raises: _ParseError when there is a problem parsing the date or
|
||||
constructing the datetime instance.
|
||||
"""
|
||||
try:
|
||||
m = ISO8601_REGEX.match(datestring)
|
||||
except Exception as e:
|
||||
raise _ParseError(e)
|
||||
|
||||
if not m:
|
||||
raise _ParseError(f"Unable to parse date string {datestring!r}")
|
||||
|
||||
# Drop any Nones from the regex matches
|
||||
# TODO: check if there's a way to omit results in regexes
|
||||
groups: typing.Dict[str, str] = {
|
||||
k: v for k, v in m.groupdict().items() if v is not None
|
||||
}
|
||||
|
||||
try:
|
||||
return datetime.datetime(
|
||||
year=int(groups.get("year", 0)),
|
||||
month=int(groups.get("month", groups.get("monthdash", 1))),
|
||||
day=int(groups.get("day", groups.get("daydash", 1))),
|
||||
hour=int(groups.get("hour", 0)),
|
||||
minute=int(groups.get("minute", 0)),
|
||||
second=int(groups.get("second", 0)),
|
||||
microsecond=int(
|
||||
Decimal(f"0.{groups.get('second_fraction', 0)}") * Decimal("1000000.0")
|
||||
),
|
||||
tzinfo=_parse_timezone(groups, default_timezone=default_timezone),
|
||||
)
|
||||
except Exception as e:
|
||||
raise _ParseError(e)
|
||||
|
||||
####################################################
|
||||
|
||||
|
||||
def _process(proc_data):
|
||||
"""
|
||||
Final processing to conform to the schema.
|
||||
|
||||
Parameters:
|
||||
|
||||
proc_data: (Dictionary) raw structured data to process
|
||||
|
||||
Returns:
|
||||
|
||||
Dictionary. Structured data to conform to the schema.
|
||||
"""
|
||||
# no further processing
|
||||
return proc_data
|
||||
|
||||
|
||||
def parse(data, raw=False, quiet=False):
|
||||
"""
|
||||
Main text parsing function
|
||||
This parser is deprecated and calls datetime_iso. Please use datetime_iso
|
||||
directly. This parser will be removed in the future.
|
||||
|
||||
Parameters:
|
||||
|
||||
@ -281,33 +38,8 @@ def parse(data, raw=False, quiet=False):
|
||||
|
||||
Dictionary. Raw or processed structured data.
|
||||
"""
|
||||
jc.utils.compatibility(__name__, info.compatible, quiet)
|
||||
jc.utils.input_type_check(data)
|
||||
jc.utils.warning_message([
|
||||
'iso-datetime parser is deprecated. Please use datetime-iso instead.'
|
||||
])
|
||||
|
||||
raw_output = {}
|
||||
|
||||
if jc.utils.has_data(data):
|
||||
|
||||
dt = _parse_date(data)
|
||||
|
||||
raw_output = {
|
||||
'year': dt.year,
|
||||
'month': dt.strftime('%b'),
|
||||
'month_num': dt.month,
|
||||
'day': dt.day,
|
||||
'weekday': dt.strftime('%a'),
|
||||
'weekday_num': dt.isoweekday(),
|
||||
'hour': int(dt.strftime('%I')),
|
||||
'hour_24': dt.hour,
|
||||
'minute': dt.minute,
|
||||
'second': dt.second,
|
||||
'microsecond': dt.microsecond,
|
||||
'period': dt.strftime('%p').upper(),
|
||||
'utc_offset': dt.strftime('%z') or None,
|
||||
'day_of_year': int(dt.strftime('%j')),
|
||||
'week_of_year': int(dt.strftime('%W')),
|
||||
'iso': dt.isoformat(),
|
||||
'timestamp': int(dt.timestamp())
|
||||
}
|
||||
|
||||
return raw_output if raw else _process(raw_output)
|
||||
return datetime_iso.parse(data, raw=raw, quiet=quiet)
|
||||
|
jc/parsers/lspci.py (new file, 241 lines)
@ -0,0 +1,241 @@
|
||||
"""jc - JSON Convert `lspci -mmv` command output parser
|
||||
|
||||
This parser supports the following `lspci` options:
|
||||
- `-mmv`
|
||||
- `-nmmv`
|
||||
- `-nnmmv`
|
||||
|
||||
Usage (cli):
|
||||
|
||||
$ lspci -nnmmv | jc --lspci
|
||||
|
||||
or
|
||||
|
||||
$ jc lspci -nnmmv
|
||||
|
||||
Usage (module):
|
||||
|
||||
import jc
|
||||
result = jc.parse('lspci', lspci_command_output)
|
||||
|
||||
Schema:
|
||||
|
||||
[
|
||||
{
|
||||
"slot": string,
|
||||
"domain": string,
|
||||
"domain_int": integer,
|
||||
"bus": string,
|
||||
"bus_int": integer,
|
||||
"dev": string,
|
||||
"dev_int": integer,
|
||||
"function": string,
|
||||
"function_int": integer,
|
||||
"class": string,
|
||||
"class_id": string,
|
||||
"class_id_int": integer,
|
||||
"vendor": string,
|
||||
"vendor_id": string,
|
||||
"vendor_id_int": integer,
|
||||
"device": string,
|
||||
"device_id": string,
|
||||
"device_id_int": integer,
|
||||
"svendor": string,
|
||||
"svendor_id": string,
|
||||
"svendor_id_int": integer,
|
||||
"sdevice": string,
|
||||
"sdevice_id": string,
|
||||
"sdevice_id_int": integer,
|
||||
"rev": string,
|
||||
"physlot": string,
|
||||
"physlot_int": integer,
|
||||
"progif": string,
|
||||
"progif_int": integer
|
||||
}
|
||||
]
|
||||
|
||||
Examples:
|
||||
|
||||
$ lspci -nnmmv | jc --lspci -p
|
||||
[
|
||||
{
|
||||
"slot": "ff:02:05.0",
|
||||
"domain": "ff",
|
||||
"domain_int": 255,
|
||||
"bus": "02",
|
||||
"bus_int": 2,
|
||||
"dev": "05",
|
||||
"dev_int": 5,
|
||||
"function": "0",
|
||||
"function_int": 0,
|
||||
"class": "SATA controller",
|
||||
"class_id": "0106",
|
||||
"class_id_int": 262,
|
||||
"vendor": "VMware",
|
||||
"vendor_id": "15ad",
|
||||
"vendor_id_int": 5549,
|
||||
"device": "SATA AHCI controller",
|
||||
"device_id": "07e0",
|
||||
"device_id_int": 2016,
|
||||
"svendor": "VMware",
|
||||
"svendor_id": "15ad",
|
||||
"svendor_id_int": 5549,
|
||||
"sdevice": "SATA AHCI controller",
|
||||
"sdevice_id": "07e0",
|
||||
"sdevice_id_int": 2016,
|
||||
"physlot": "37",
|
||||
"physlot_int": 55,
|
||||
"progif": "01",
|
||||
"progif_int": 1
|
||||
},
|
||||
...
|
||||
]
|
||||
|
||||
$ lspci -nnmmv | jc --lspci -p -r
|
||||
[
|
||||
{
|
||||
"slot": "ff:02:05.0",
|
||||
"domain": "ff",
|
||||
"bus": "02",
|
||||
"dev": "05",
|
||||
"function": "0",
|
||||
"class": "SATA controller",
|
||||
"class_id": "0106",
|
||||
"vendor": "VMware",
|
||||
"vendor_id": "15ad",
|
||||
"device": "SATA AHCI controller",
|
||||
"device_id": "07e0",
|
||||
"svendor": "VMware",
|
||||
"svendor_id": "15ad",
|
||||
"sdevice": "SATA AHCI controller",
|
||||
"sdevice_id": "07e0",
|
||||
"physlot": "37",
|
||||
"progif": "01"
|
||||
},
|
||||
...
|
||||
]
|
||||
"""
|
||||
import re
|
||||
from typing import List, Dict
|
||||
from jc.jc_types import JSONDictType
|
||||
import jc.utils
|
||||
|
||||
|
||||
class info():
|
||||
"""Provides parser metadata (version, author, etc.)"""
|
||||
version = '1.0'
|
||||
description = '`lspci -mmv` command parser'
|
||||
author = 'Kelly Brazil'
|
||||
author_email = 'kellyjonbrazil@gmail.com'
|
||||
compatible = ['linux']
|
||||
magic_commands = ['lspci']
|
||||
|
||||
|
||||
__version__ = info.version
|
||||
|
||||
|
||||
def _process(proc_data: List[JSONDictType]) -> List[JSONDictType]:
|
||||
"""
|
||||
Final processing to conform to the schema.
|
||||
|
||||
Parameters:
|
||||
|
||||
proc_data: (List of Dictionaries) raw structured data to process
|
||||
|
||||
Returns:
|
||||
|
||||
List of Dictionaries. Structured to conform to the schema.
|
||||
"""
|
||||
int_list: set[str] = {
|
||||
'domain', 'bus', 'dev', 'function', 'class_id', 'vendor_id', 'device_id',
|
||||
'svendor_id', 'sdevice_id', 'physlot', 'progif'
|
||||
}
|
||||
|
||||
new_list: List[JSONDictType] = []
|
||||
|
||||
for item in proc_data:
|
||||
output: Dict = {}
|
||||
for key, val in item.items():
|
||||
output[key] = val
|
||||
if key in int_list:
|
||||
output[key + '_int'] = int(val, 16) # type: ignore
|
||||
new_list.append(output)
|
||||
|
||||
return new_list
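# Example (illustrative): a raw entry containing {'class_id': '0106'} gains
# 'class_id_int': 262 here, since int('0106', 16) == 262.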
|
||||
|
||||
|
||||
def parse(
|
||||
data: str,
|
||||
raw: bool = False,
|
||||
quiet: bool = False
|
||||
) -> List[JSONDictType]:
|
||||
"""
|
||||
Main text parsing function
|
||||
|
||||
Parameters:
|
||||
|
||||
data: (string) text data to parse
|
||||
raw: (boolean) unprocessed output if True
|
||||
quiet: (boolean) suppress warning messages if True
|
||||
|
||||
Returns:
|
||||
|
||||
List of Dictionaries. Raw or processed structured data.
|
||||
"""
|
||||
jc.utils.compatibility(__name__, info.compatible, quiet)
|
||||
jc.utils.input_type_check(data)
|
||||
|
||||
raw_output: List = []
|
||||
device_output: Dict = {}
|
||||
|
||||
if jc.utils.has_data(data):
|
||||
item_id_p = re.compile(r'(?P<id>^[0-9a-f]{4}$)')
|
||||
item_id_bracket_p = re.compile(r' \[(?P<id>[0-9a-f]{4})\]$')
|
||||
|
||||
for line in filter(None, data.splitlines()):
|
||||
if line.startswith('Slot:'):
|
||||
if device_output:
|
||||
raw_output.append(device_output)
|
||||
device_output = {}
|
||||
|
||||
device_output['slot'] = line.split()[1]
|
||||
|
||||
slot_info = line.split()[1]
|
||||
*domain, bus, dev_fun = slot_info.split(':')
|
||||
|
||||
if domain:
|
||||
dom = domain[0]
|
||||
else:
|
||||
dom = "00"
|
||||
|
||||
dev, fun = dev_fun.split('.')
|
||||
device_output['domain'] = dom
|
||||
device_output['bus'] = bus
|
||||
device_output['dev'] = dev
|
||||
device_output['function'] = fun
|
||||
continue
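# Illustrative slot values (not in the source): "ff:02:05.0" splits into
# domain "ff", bus "02", dev "05", function "0"; a slot without a leading
# domain, e.g. "02:05.0", falls back to domain "00".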
|
||||
|
||||
key, val = line.split(maxsplit=1)
|
||||
key = key[:-1].lower()
|
||||
|
||||
# numeric only (-nmmv)
|
||||
if item_id_p.match(val):
|
||||
device_output[key + '_id'] = val
|
||||
continue
|
||||
|
||||
# string and numeric (-nnmmv)
|
||||
if item_id_bracket_p.search(val):
|
||||
string, idnum = val.rsplit(maxsplit=1)
|
||||
device_output[key] = string
|
||||
device_output[key + '_id'] = idnum[1:-1]
|
||||
continue
|
||||
|
||||
# string only (-mmv)
|
||||
device_output[key] = val
|
||||
continue
|
||||
|
||||
|
||||
if device_output:
|
||||
raw_output.append(device_output)
|
||||
|
||||
return raw_output if raw else _process(raw_output)
|
jc/parsers/pci_ids.py (new file, 231 lines)
@ -0,0 +1,231 @@
|
||||
"""jc - JSON Convert `pci.ids` file parser
|
||||
|
||||
This parser converts the pci.ids database file.
|
||||
|
||||
https://raw.githubusercontent.com/pciutils/pciids/master/pci.ids
|
||||
|
||||
A nested schema allows straightforward queries with tools like `jq`. Hex id
|
||||
numbers are prefixed with an underscore (`_`) so bracket notation is not
|
||||
necessary when referencing. For example:
|
||||
|
||||
$ cat pci.ids | jc --pci-ids | jq '.vendors._9005._0053._9005._ffff.subsystem_name'
|
||||
"AIC-7896 SCSI Controller mainboard implementation"
|
||||
|
||||
Here are the vendor and class mappings:
|
||||
|
||||
jq '.vendors._001c._0001._001c._0005.subsystem_name'
|
||||
| | | |
|
||||
| | | subdevice
|
||||
| | subvendor
|
||||
| device
|
||||
vendor
|
||||
|
||||
jq '.classes._0c._03._40'
|
||||
| | |
|
||||
| | prog_if
|
||||
| subclass
|
||||
class
|
||||
|
||||
Usage (cli):
|
||||
|
||||
$ cat pci.ids | jc --pci-ids
|
||||
|
||||
Usage (module):
|
||||
|
||||
import jc
|
||||
result = jc.parse('pci_ids', pci_ids_file_output)
|
||||
|
||||
Schema:
|
||||
|
||||
{
|
||||
"vendors": {
|
||||
"_<vendor_id>": {
|
||||
"vendor_name": string,
|
||||
"_<device_id>": {
|
||||
"device_name": string,
|
||||
"_<subvendor_id>": {
|
||||
"_<subdevice_id": string
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"classes": {
|
||||
"_<class_id>": {
|
||||
"class_name": string,
|
||||
"_<subclass_id>": {
|
||||
"subclass_name": string,
|
||||
"_<prog_if>": string
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Examples:
|
||||
|
||||
$ cat pci.ids | jc --pci-ids | jq '.vendors._001c._0001._001c._0005.subsystem_name'
|
||||
"2 Channel CAN Bus SJC1000 (Optically Isolated)"
|
||||
|
||||
$ cat pci.ids | jc --pci-ids | jq '.classes._0c._03._40'
|
||||
"USB4 Host Interface"
|
||||
"""
|
||||
import re
|
||||
from typing import Dict
|
||||
from jc.jc_types import JSONDictType
|
||||
import jc.utils
|
||||
|
||||
|
||||
class info():
|
||||
"""Provides parser metadata (version, author, etc.)"""
|
||||
version = '1.0'
|
||||
description = '`pci.ids` file parser'
|
||||
author = 'Kelly Brazil'
|
||||
author_email = 'kellyjonbrazil@gmail.com'
|
||||
compatible = ['linux', 'darwin', 'cygwin', 'win32', 'aix', 'freebsd']
|
||||
|
||||
|
||||
__version__ = info.version
|
||||
|
||||
|
||||
def _process(proc_data: JSONDictType) -> JSONDictType:
|
||||
"""
|
||||
Final processing to conform to the schema.
|
||||
|
||||
Parameters:
|
||||
|
||||
proc_data: (Dictionary) raw structured data to process
|
||||
|
||||
Returns:
|
||||
|
||||
Dictionary. Structured to conform to the schema.
|
||||
"""
|
||||
return proc_data
|
||||
|
||||
|
||||
def parse(
|
||||
data: str,
|
||||
raw: bool = False,
|
||||
quiet: bool = False
|
||||
) -> JSONDictType:
|
||||
"""
|
||||
Main text parsing function
|
||||
|
||||
Parameters:
|
||||
|
||||
data: (string) text data to parse
|
||||
raw: (boolean) unprocessed output if True
|
||||
quiet: (boolean) suppress warning messages if True
|
||||
|
||||
Returns:
|
||||
|
||||
Dictionary. Raw or processed structured data.
|
||||
"""
|
||||
jc.utils.compatibility(__name__, info.compatible, quiet)
|
||||
jc.utils.input_type_check(data)
|
||||
|
||||
raw_output: Dict = {}
|
||||
vdc_obj: Dict = {}
|
||||
vendor_id: str = ''
|
||||
device_id: str = ''
|
||||
|
||||
class_obj: Dict = {}
|
||||
class_id: str = ''
|
||||
subclass_id: str = ''
|
||||
|
||||
if jc.utils.has_data(data):
|
||||
|
||||
vdc_header_p = re.compile(r'^(?P<vendor_id>[0-9a-f]{4})\s+(?P<vendor_name>.+)')
|
||||
vdc_device_p = re.compile(r'^\t(?P<device_id>[0-9a-f]{4})\s+(?P<device_name>.+)')
|
||||
vdc_subvendor_p = re.compile(r'^\t\t(?P<subvendor>[0-9a-f]{4})\s+(?P<subdevice>[0-9a-f]{4})\s+(?P<subsystem_name>.+)')
|
||||
|
||||
class_header_p = re.compile(r'^C\s+(?P<class_id>[0-9a-f]{2})\s+(?P<class_name>.+)')
|
||||
class_sub_p = re.compile(r'^\t(?P<subclass_id>[0-9a-f]{2})\s+(?P<subclass_name>.+)')
|
||||
class_progif_p = re.compile(r'^\t\t(?P<prog_if_id>[0-9a-f]{2})\s+(?P<prog_if_name>.+)')
|
||||
|
||||
for line in filter(None, data.splitlines()):
|
||||
|
||||
vdc_header = vdc_header_p.match(line)
|
||||
vdc_device = vdc_device_p.match(line)
|
||||
vdc_subvendor = vdc_subvendor_p.match(line)
|
||||
|
||||
class_header = class_header_p.match(line)
|
||||
class_sub = class_sub_p.match(line)
|
||||
class_progif = class_progif_p.match(line)
|
||||
|
||||
# Vendors, devices and subsystems
|
||||
# Syntax:
|
||||
# vendor vendor_name
|
||||
# device device_name <-- single tab
|
||||
# subvendor subdevice subsystem_name <-- two tabs
|
||||
# Example:
|
||||
# 001c PEAK-System Technik GmbH
|
||||
# 0001 PCAN-PCI CAN-Bus controller
|
||||
# 001c 0004 2 Channel CAN Bus SJC1000
|
||||
if vdc_header:
|
||||
if vdc_obj:
|
||||
if 'vendors' not in raw_output:
|
||||
raw_output['vendors'] = {}
|
||||
raw_output['vendors'][vendor_id] = vdc_obj[vendor_id]
|
||||
vdc_obj = {}
|
||||
|
||||
vendor_id = '_' + vdc_header.groupdict()['vendor_id']
|
||||
vdc_obj[vendor_id] = {}
|
||||
vdc_obj[vendor_id]['vendor_name'] = vdc_header.groupdict()['vendor_name']
|
||||
continue
|
||||
|
||||
if vdc_device:
|
||||
device_id = '_' + vdc_device.groupdict()['device_id']
|
||||
vdc_obj[vendor_id][device_id] = {}
|
||||
vdc_obj[vendor_id][device_id]['device_name'] = vdc_device.groupdict()['device_name']
|
||||
continue
|
||||
|
||||
if vdc_subvendor:
|
||||
subvendor = '_' + vdc_subvendor.groupdict()['subvendor']
|
||||
subdevice = '_' + vdc_subvendor.groupdict()['subdevice']
|
||||
vdc_obj[vendor_id][device_id][subvendor] = {}
|
||||
vdc_obj[vendor_id][device_id][subvendor][subdevice] = {}
|
||||
vdc_obj[vendor_id][device_id][subvendor][subdevice]['subsystem_name'] = vdc_subvendor.groupdict()['subsystem_name']
|
||||
continue
|
||||
|
||||
# List of known device classes, subclasses and programming interfaces
|
||||
# Syntax:
|
||||
# C class class_name
|
||||
# subclass subclass_name <-- single tab
|
||||
# prog-if prog-if_name <-- two tabs
|
||||
# Example:
|
||||
# C 01 Mass storage controller
|
||||
# 01 IDE interface
|
||||
# 00 ISA Compatibility mode-only controller
|
||||
if class_header:
|
||||
if class_obj:
|
||||
if 'classes' not in raw_output:
|
||||
raw_output['classes'] = {}
|
||||
raw_output['classes'][class_id] = class_obj[class_id]
|
||||
class_obj = {}
|
||||
|
||||
class_id = '_' + class_header.groupdict()['class_id']
|
||||
class_obj[class_id] = {}
|
||||
class_obj[class_id]['class_name'] = class_header.groupdict()['class_name']
|
||||
continue
|
||||
|
||||
if class_sub:
|
||||
subclass_id = '_' + class_sub.groupdict()['subclass_id']
|
||||
class_obj[class_id][subclass_id] = {}
|
||||
class_obj[class_id][subclass_id]['subclass_name'] = class_sub.groupdict()['subclass_name']
|
||||
continue
|
||||
|
||||
if class_progif:
|
||||
prog_if_id = '_' + class_progif.groupdict()['prog_if_id']
|
||||
class_obj[class_id][subclass_id][prog_if_id] = class_progif.groupdict()['prog_if_name']
|
||||
continue
|
||||
|
||||
if vdc_obj:
|
||||
if 'vendors' not in raw_output:
|
||||
raw_output['vendors'] = {}
|
||||
raw_output['vendors'][vendor_id] = vdc_obj[vendor_id]
|
||||
|
||||
if class_obj:
|
||||
if 'classes' not in raw_output:
|
||||
raw_output['classes'] = {}
|
||||
raw_output['classes'][class_id] = class_obj[class_id]
|
||||
|
||||
return raw_output if raw else _process(raw_output)
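
The nested schema described in the docstring above can be navigated with plain dictionary access once the file has been parsed. A hedged usage sketch; the `pci.ids` path is illustrative and any local copy of the database works:

```python
# Usage sketch: querying the nested pci.ids structure from Python.
import jc

with open('/usr/share/misc/pci.ids') as f:   # illustrative path
    pci = jc.parse('pci_ids', f.read())

# vendor -> device -> subvendor -> subdevice, hex ids prefixed with "_"
print(pci['vendors']['_001c']['_0001']['_001c']['_0005']['subsystem_name'])

# class -> subclass -> prog-if
print(pci['classes']['_0c']['_03']['_40'])   # e.g. "USB4 Host Interface"
```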
|
@ -201,7 +201,7 @@ def parse(
|
||||
pid_mountinfo_p = re.compile(r'^\d+ \d+ \d+:\d+ /.+\n')
|
||||
pid_numa_maps_p = re.compile(r'^[a-f0-9]{12} default [^\n]+\n')
|
||||
pid_smaps_p = re.compile(r'^[0-9a-f]{12}-[0-9a-f]{12} [rwxsp\-]{4} [0-9a-f]{8} [0-9a-f]{2}:[0-9a-f]{2} \d+ [^\n]+\nSize:\s+\d+ \S\S\n')
|
||||
pid_stat_p = re.compile(r'^\d+ \(.{1,16}\) \w \d+ \d+ \d+ \d+ -?\d+ (?:\d+ ){43}\d+$')
|
||||
pid_stat_p = re.compile(r'^\d+ \(.{1,15}\) \S \d+ \d+ \d+ \d+ -?\d+ (?:\d+ ){43}\d+$', re.DOTALL)
|
||||
pid_statm_p = re.compile(r'^\d+ \d+ \d+\s\d+\s\d+\s\d+\s\d+$')
|
||||
pid_status_p = re.compile(r'^Name:\t.+\nUmask:\t\d+\nState:\t.+\nTgid:\t\d+\n')
|
||||
|
||||
|
@ -195,13 +195,14 @@ Examples:
|
||||
"exit_code": 0
|
||||
}
|
||||
"""
|
||||
import re
|
||||
from typing import Dict
|
||||
import jc.utils
|
||||
|
||||
|
||||
class info():
|
||||
"""Provides parser metadata (version, author, etc.)"""
|
||||
version = '1.0'
|
||||
version = '1.1'
|
||||
description = '`/proc/<pid>/stat` file parser'
|
||||
author = 'Kelly Brazil'
|
||||
author_email = 'kellyjonbrazil@gmail.com'
|
||||
@ -242,6 +243,12 @@ def _process(proc_data: Dict) -> Dict:
|
||||
if 'state' in proc_data:
|
||||
proc_data['state_pretty'] = state_map[proc_data['state']]
|
||||
|
||||
for key, val in proc_data.items():
|
||||
try:
|
||||
proc_data[key] = int(val)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
return proc_data
|
||||
|
||||
|
||||
@ -270,74 +277,65 @@ def parse(
|
||||
|
||||
if jc.utils.has_data(data):
|
||||
|
||||
split_line = data.split()
|
||||
raw_output = {
|
||||
'pid': int(split_line[0]),
|
||||
'comm': split_line[1].strip('()'),
|
||||
'state': split_line[2],
|
||||
'ppid': int(split_line[3]),
|
||||
'pgrp': int(split_line[4]),
|
||||
'session': int(split_line[5]),
|
||||
'tty_nr': int(split_line[6]),
|
||||
'tpg_id': int(split_line[7]),
|
||||
'flags': int(split_line[8]),
|
||||
'minflt': int(split_line[9]),
|
||||
'cminflt': int(split_line[10]),
|
||||
'majflt': int(split_line[11]),
|
||||
'cmajflt': int(split_line[12]),
|
||||
'utime': int(split_line[13]),
|
||||
'stime': int(split_line[14]),
|
||||
'cutime': int(split_line[15]),
|
||||
'cstime': int(split_line[16]),
|
||||
'priority': int(split_line[17]),
|
||||
'nice': int(split_line[18]),
|
||||
'num_threads': int(split_line[19]),
|
||||
'itrealvalue': int(split_line[20]),
|
||||
'starttime': int(split_line[21]),
|
||||
'vsize': int(split_line[22]),
|
||||
'rss': int(split_line[23]),
|
||||
'rsslim': int(split_line[24]),
|
||||
'startcode': int(split_line[25]),
|
||||
'endcode': int(split_line[26]),
|
||||
'startstack': int(split_line[27]),
|
||||
'kstkeep': int(split_line[28]),
|
||||
'kstkeip': int(split_line[29]),
|
||||
'signal': int(split_line[30]),
|
||||
'blocked': int(split_line[31]),
|
||||
'sigignore': int(split_line[32]),
|
||||
'sigcatch': int(split_line[33]),
|
||||
'wchan': int(split_line[34]),
|
||||
'nswap': int(split_line[35]),
|
||||
'cnswap': int(split_line[36])
|
||||
}
|
||||
line_re = re.compile(r'''
|
||||
^(?P<pid>\d+)\s
|
||||
\((?P<comm>.+)\)\s
|
||||
(?P<state>\S)\s
|
||||
(?P<ppid>\d+)\s
|
||||
(?P<pgrp>\d+)\s
|
||||
(?P<session>\d+)\s
|
||||
(?P<tty_nr>\d+)\s
|
||||
(?P<tpg_id>-?\d+)\s
|
||||
(?P<flags>\d+)\s
|
||||
(?P<minflt>\d+)\s
|
||||
(?P<cminflt>\d+)\s
|
||||
(?P<majflt>\d+)\s
|
||||
(?P<cmajflt>\d+)\s
|
||||
(?P<utime>\d+)\s
|
||||
(?P<stime>\d+)\s
|
||||
(?P<cutime>\d+)\s
|
||||
(?P<cstime>\d+)\s
|
||||
(?P<priority>\d+)\s
|
||||
(?P<nice>\d+)\s
|
||||
(?P<num_threads>\d+)\s
|
||||
(?P<itrealvalue>\d+)\s
|
||||
(?P<starttime>\d+)\s
|
||||
(?P<vsize>\d+)\s
|
||||
(?P<rss>\d+)\s
|
||||
(?P<rsslim>\d+)\s
|
||||
(?P<startcode>\d+)\s
|
||||
(?P<endcode>\d+)\s
|
||||
(?P<startstack>\d+)\s
|
||||
(?P<kstkeep>\d+)\s
|
||||
(?P<kstkeip>\d+)\s
|
||||
(?P<signal>\d+)\s
|
||||
(?P<blocked>\d+)\s
|
||||
(?P<sigignore>\d+)\s
|
||||
(?P<sigcatch>\d+)\s
|
||||
(?P<wchan>\d+)\s
|
||||
(?P<nswap>\d+)\s
|
||||
(?P<cnswap>\d+)\s
|
||||
(?P<exit_signal>\d+)\s
|
||||
(?P<processor>\d+)\s
|
||||
(?P<rt_priority>\d+)\s
|
||||
(?P<policy>\d+)\s
|
||||
(?P<delayacct_blkio_ticks>\d+)\s
|
||||
(?P<guest_time>\d+)\s
|
||||
(?P<cguest_time>\d+)\s
|
||||
(?P<start_data>\d+)\s
|
||||
(?P<end_data>\d+)\s
|
||||
(?P<start_brk>\d+)\s
|
||||
(?P<arg_start>\d+)\s
|
||||
(?P<arg_end>\d+)\s
|
||||
(?P<env_start>\d+)\s
|
||||
(?P<env_end>\d+)\s
|
||||
(?P<exit_code>\d+)
|
||||
''', re.VERBOSE | re.DOTALL
|
||||
)
|
||||
|
||||
if len(split_line) > 37:
|
||||
raw_output['exit_signal'] = int(split_line[37])
|
||||
line_match = line_re.search(data)
|
||||
|
||||
if len(split_line) > 38:
|
||||
raw_output['processor'] = int(split_line[38])
|
||||
|
||||
if len(split_line) > 39:
|
||||
raw_output['rt_priority'] = int(split_line[39])
|
||||
raw_output['policy'] = int(split_line[40])
|
||||
|
||||
if len(split_line) > 41:
|
||||
raw_output['delayacct_blkio_ticks'] = int(split_line[41])
|
||||
|
||||
if len(split_line) > 42:
|
||||
raw_output['guest_time'] = int(split_line[42])
|
||||
raw_output['cguest_time'] = int(split_line[43])
|
||||
|
||||
if len(split_line) > 44:
|
||||
raw_output['start_data'] = int(split_line[44])
|
||||
raw_output['end_data'] = int(split_line[45])
|
||||
raw_output['start_brk'] = int(split_line[46])
|
||||
|
||||
if len(split_line) > 47:
|
||||
raw_output['arg_start'] = int(split_line[47])
|
||||
raw_output['arg_end'] = int(split_line[48])
|
||||
raw_output['env_start'] = int(split_line[49])
|
||||
raw_output['env_end'] = int(split_line[50])
|
||||
raw_output['exit_code'] = int(split_line[51])
|
||||
if line_match:
|
||||
raw_output = line_match.groupdict()
|
||||
|
||||
return raw_output if raw else _process(raw_output)
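
The named-group regex above is compiled with `re.DOTALL` so the `comm` field, which the kernel wraps in parentheses and which may contain spaces and newlines, is captured as a single group. A reduced standalone sketch of that behavior (not the full jc parser):

```python
# Reduced sketch of why re.DOTALL is needed: the comm field in
# /proc/<pid>/stat may contain spaces and newlines.
import re

stat_line = '2001 (my file with\nsp) S 1888 2001 1888 34816 2001'
pattern = re.compile(
    r'^(?P<pid>\d+)\s\((?P<comm>.+)\)\s(?P<state>\S)\s(?P<ppid>\d+)',
    re.DOTALL,  # lets ".+" match the newline inside the parentheses
)
match = pattern.search(stat_line)
print(match.group('comm'))  # my file with\nsp (spans two lines)
print(match.group('ppid'))  # 1888
```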
|
||||
|
@ -77,12 +77,14 @@ import jc.utils
|
||||
from jc.streaming import (
|
||||
add_jc_meta, streaming_input_type_check, streaming_line_input_type_check, raise_or_yield
|
||||
)
|
||||
from typing import Dict, Iterable
|
||||
from jc.jc_types import JSONDictType, StreamingOutputType
|
||||
from jc.exceptions import ParseError
|
||||
|
||||
|
||||
class info():
|
||||
"""Provides parser metadata (version, author, etc.)"""
|
||||
version = '1.1'
|
||||
version = '1.2'
|
||||
description = '`stat` command streaming parser'
|
||||
author = 'Kelly Brazil'
|
||||
author_email = 'kellyjonbrazil@gmail.com'
|
||||
@ -93,7 +95,7 @@ class info():
|
||||
__version__ = info.version
|
||||
|
||||
|
||||
def _process(proc_data):
|
||||
def _process(proc_data: JSONDictType) -> JSONDictType:
|
||||
"""
|
||||
Final processing to conform to the schema.
|
||||
|
||||
@ -105,10 +107,10 @@ def _process(proc_data):
|
||||
|
||||
Dictionary. Structured data to conform to the schema.
|
||||
"""
|
||||
int_list = {'size', 'blocks', 'io_blocks', 'inode', 'links', 'uid', 'gid',
|
||||
int_list: set[str] = {'size', 'blocks', 'io_blocks', 'inode', 'links', 'uid', 'gid',
|
||||
'unix_device', 'rdev', 'block_size'}
|
||||
|
||||
null_list = {'access_time', 'modify_time', 'change_time', 'birth_time'}
|
||||
null_list: set[str] = {'access_time', 'modify_time', 'change_time', 'birth_time'}
|
||||
|
||||
for key in proc_data.copy():
|
||||
if key in int_list:
|
||||
@ -118,15 +120,23 @@ def _process(proc_data):
|
||||
if key in null_list:
|
||||
if proc_data[key] == '-':
|
||||
proc_data[key] = None
|
||||
ts = jc.utils.timestamp(proc_data[key], format_hint=(7100, 7200))
|
||||
proc_data[key + '_epoch'] = ts.naive
|
||||
proc_data[key + '_epoch_utc'] = ts.utc
|
||||
|
||||
ts_string = proc_data[key]
|
||||
if isinstance(ts_string, str) or ts_string is None:
|
||||
ts = jc.utils.timestamp(ts_string, format_hint=(7100, 7200))
|
||||
proc_data[key + '_epoch'] = ts.naive
|
||||
proc_data[key + '_epoch_utc'] = ts.utc
|
||||
|
||||
return proc_data
|
||||
|
||||
|
||||
@add_jc_meta
|
||||
def parse(data, raw=False, quiet=False, ignore_exceptions=False):
|
||||
def parse(
|
||||
data: Iterable[str],
|
||||
raw: bool = False,
|
||||
quiet: bool = False,
|
||||
ignore_exceptions: bool = False
|
||||
) -> StreamingOutputType:
|
||||
"""
|
||||
Main text parsing generator function. Returns an iterable object.
|
||||
|
||||
@ -146,7 +156,7 @@ def parse(data, raw=False, quiet=False, ignore_exceptions=False):
|
||||
jc.utils.compatibility(__name__, info.compatible, quiet)
|
||||
streaming_input_type_check(data)
|
||||
|
||||
output_line = {}
|
||||
output_line: Dict = {}
|
||||
os_type = ''
|
||||
|
||||
for line in data:
|
||||
|
203
jc/parsers/udevadm.py
Normal file
@ -0,0 +1,203 @@
|
||||
"""jc - JSON Convert `udevadm info` command output parser
|
||||
|
||||
Usage (cli):
|
||||
|
||||
$ udevadm info --query=all /dev/sda | jc --udevadm
|
||||
|
||||
or
|
||||
|
||||
$ jc udevadm info --query=all /dev/sda
|
||||
|
||||
Usage (module):
|
||||
|
||||
import jc
|
||||
result = jc.parse('udevadm', udevadm_command_output)
|
||||
|
||||
Schema:
|
||||
|
||||
{
|
||||
"P": string,
|
||||
"N": string,
|
||||
"L": integer,
|
||||
"S": [
|
||||
string
|
||||
],
|
||||
"E": {
|
||||
"<key>": string
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
Examples:
|
||||
|
||||
$ udevadm info --query=all /dev/sda | jc --udevadm -p
|
||||
{
|
||||
"P": "/devices/pci0000:00/0000:00:10.0/host32/target32:0:0/32:0:0:0/block/sda",
|
||||
"N": "sda",
|
||||
"L": 0,
|
||||
"S": [
|
||||
"disk/by-path/pci-0000:00:10.0-scsi-0:0:0:0"
|
||||
],
|
||||
"E": {
|
||||
"DEVPATH": "/devices/pci0000:00/0000:00:10.0/host32/target32:0:0/32:0:0:0/block/sda",
|
||||
"DEVNAME": "/dev/sda",
|
||||
"DEVTYPE": "disk",
|
||||
"MAJOR": "8",
|
||||
"MINOR": "0",
|
||||
"SUBSYSTEM": "block",
|
||||
"USEC_INITIALIZED": "6100111",
|
||||
"SCSI_TPGS": "0",
|
||||
"SCSI_TYPE": "disk",
|
||||
"SCSI_VENDOR": "VMware,",
|
||||
"SCSI_VENDOR_ENC": "VMware,\\x20",
|
||||
"SCSI_MODEL": "VMware_Virtual_S",
|
||||
"SCSI_MODEL_ENC": "VMware\\x20Virtual\\x20S",
|
||||
"SCSI_REVISION": "1.0",
|
||||
"ID_SCSI": "1",
|
||||
"ID_VENDOR": "VMware_",
|
||||
"ID_VENDOR_ENC": "VMware\\x2c\\x20",
|
||||
"ID_MODEL": "VMware_Virtual_S",
|
||||
"ID_MODEL_ENC": "VMware\\x20Virtual\\x20S",
|
||||
"ID_REVISION": "1.0",
|
||||
"ID_TYPE": "disk",
|
||||
"MPATH_SBIN_PATH": "/sbin",
|
||||
"ID_BUS": "scsi",
|
||||
"ID_PATH": "pci-0000:00:10.0-scsi-0:0:0:0",
|
||||
"ID_PATH_TAG": "pci-0000_00_10_0-scsi-0_0_0_0",
|
||||
"ID_PART_TABLE_UUID": "a5bd0c01-4210-46f2-b558-5c11c209a8f7",
|
||||
"ID_PART_TABLE_TYPE": "gpt",
|
||||
"DEVLINKS": "/dev/disk/by-path/pci-0000:00:10.0-scsi-0:0:0:0",
|
||||
"TAGS": ":systemd:"
|
||||
}
|
||||
}
|
||||
|
||||
$ udevadm info --query=all /dev/sda | jc --udevadm -p -r
|
||||
{
|
||||
"P": "/devices/pci0000:00/0000:00:10.0/host32/target32:0:0/32:0:0:0/block/sda",
|
||||
"N": "sda",
|
||||
"L": "0",
|
||||
"S": [
|
||||
"disk/by-path/pci-0000:00:10.0-scsi-0:0:0:0"
|
||||
],
|
||||
"E": {
|
||||
"DEVPATH": "/devices/pci0000:00/0000:00:10.0/host32/target32:0:0/32:0:0:0/block/sda",
|
||||
"DEVNAME": "/dev/sda",
|
||||
"DEVTYPE": "disk",
|
||||
"MAJOR": "8",
|
||||
"MINOR": "0",
|
||||
"SUBSYSTEM": "block",
|
||||
"USEC_INITIALIZED": "6100111",
|
||||
"SCSI_TPGS": "0",
|
||||
"SCSI_TYPE": "disk",
|
||||
"SCSI_VENDOR": "VMware,",
|
||||
"SCSI_VENDOR_ENC": "VMware,\\x20",
|
||||
"SCSI_MODEL": "VMware_Virtual_S",
|
||||
"SCSI_MODEL_ENC": "VMware\\x20Virtual\\x20S",
|
||||
"SCSI_REVISION": "1.0",
|
||||
"ID_SCSI": "1",
|
||||
"ID_VENDOR": "VMware_",
|
||||
"ID_VENDOR_ENC": "VMware\\x2c\\x20",
|
||||
"ID_MODEL": "VMware_Virtual_S",
|
||||
"ID_MODEL_ENC": "VMware\\x20Virtual\\x20S",
|
||||
"ID_REVISION": "1.0",
|
||||
"ID_TYPE": "disk",
|
||||
"MPATH_SBIN_PATH": "/sbin",
|
||||
"ID_BUS": "scsi",
|
||||
"ID_PATH": "pci-0000:00:10.0-scsi-0:0:0:0",
|
||||
"ID_PATH_TAG": "pci-0000_00_10_0-scsi-0_0_0_0",
|
||||
"ID_PART_TABLE_UUID": "a5bd0c01-4210-46f2-b558-5c11c209a8f7",
|
||||
"ID_PART_TABLE_TYPE": "gpt",
|
||||
"DEVLINKS": "/dev/disk/by-path/pci-0000:00:10.0-scsi-0:0:0:0",
|
||||
"TAGS": ":systemd:"
|
||||
}
|
||||
}
|
||||
"""
|
||||
from typing import List, Dict
|
||||
from jc.jc_types import JSONDictType
|
||||
import jc.utils
|
||||
|
||||
|
||||
class info():
|
||||
"""Provides parser metadata (version, author, etc.)"""
|
||||
version = '1.0'
|
||||
description = '`udevadm info` command parser'
|
||||
author = 'Kelly Brazil'
|
||||
author_email = 'kellyjonbrazil@gmail.com'
|
||||
compatible = ['linux']
|
||||
magic_commands = ['udevadm info']
|
||||
|
||||
|
||||
__version__ = info.version
|
||||
|
||||
|
||||
def _process(proc_data: JSONDictType) -> JSONDictType:
|
||||
"""
|
||||
Final processing to conform to the schema.
|
||||
|
||||
Parameters:
|
||||
|
||||
proc_data: (Dictionary) raw structured data to process
|
||||
|
||||
Returns:
|
||||
|
||||
Dictionary. Structured to conform to the schema.
|
||||
"""
|
||||
if 'L' in proc_data:
|
||||
proc_data['L'] = int(proc_data['L']) # type: ignore
|
||||
|
||||
return proc_data
|
||||
|
||||
|
||||
def parse(
|
||||
data: str,
|
||||
raw: bool = False,
|
||||
quiet: bool = False
|
||||
) -> JSONDictType:
|
||||
"""
|
||||
Main text parsing function
|
||||
|
||||
Parameters:
|
||||
|
||||
data: (string) text data to parse
|
||||
raw: (boolean) unprocessed output if True
|
||||
quiet: (boolean) suppress warning messages if True
|
||||
|
||||
Returns:
|
||||
|
||||
Dictionary. Raw or processed structured data.
|
||||
"""
|
||||
jc.utils.compatibility(__name__, info.compatible, quiet)
|
||||
jc.utils.input_type_check(data)
|
||||
|
||||
raw_output: Dict = {}
|
||||
s_list: List = []
|
||||
e_list: List = []
|
||||
|
||||
if jc.utils.has_data(data):
|
||||
for line in filter(None, data.splitlines()):
|
||||
prefix, value = line.split(maxsplit=1)
|
||||
|
||||
if prefix == 'P:':
|
||||
raw_output['P'] = value
|
||||
continue
|
||||
|
||||
if prefix == 'S:':
|
||||
s_list.append(value)
|
||||
continue
|
||||
|
||||
if prefix == 'E:':
|
||||
e_list.append(value)
|
||||
continue
|
||||
|
||||
raw_output[prefix[:-1]] = value
|
||||
|
||||
if s_list:
|
||||
raw_output['S'] = s_list
|
||||
|
||||
if e_list:
|
||||
raw_output['E'] = {}
|
||||
for item in e_list:
|
||||
k, v = item.split('=')
|
||||
raw_output['E'][k] = v
|
||||
|
||||
return raw_output if raw else _process(raw_output)
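
As a usage sketch, and assuming `udevadm` is available and `/dev/sda` exists, the parser can be driven from Python like this (the device path and the subprocess invocation are illustrative):

```python
# Usage sketch: parse `udevadm info` output from Python.
import subprocess
import jc

out = subprocess.run(
    ['udevadm', 'info', '--query=all', '/dev/sda'],  # illustrative device
    capture_output=True, text=True, check=True,
).stdout

result = jc.parse('udevadm', out)
print(result['E'].get('DEVTYPE'))  # e.g. "disk"
print(result['S'])                 # list of symlink targets, if any
```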
|
@ -219,9 +219,9 @@ except ImportError:
|
||||
|
||||
_screen_pattern = (
|
||||
r"Screen (?P<screen_number>\d+): "
|
||||
+ "minimum (?P<minimum_width>\d+) x (?P<minimum_height>\d+), "
|
||||
+ "current (?P<current_width>\d+) x (?P<current_height>\d+), "
|
||||
+ "maximum (?P<maximum_width>\d+) x (?P<maximum_height>\d+)"
|
||||
+ r"minimum (?P<minimum_width>\d+) x (?P<minimum_height>\d+), "
|
||||
+ r"current (?P<current_width>\d+) x (?P<current_height>\d+), "
|
||||
+ r"maximum (?P<maximum_width>\d+) x (?P<maximum_height>\d+)"
|
||||
)
|
||||
|
||||
|
||||
@ -250,13 +250,13 @@ def _parse_screen(next_lines: List[str]) -> Optional[Screen]:
|
||||
# regex101 demo link
|
||||
_device_pattern = (
|
||||
r"(?P<device_name>.+) "
|
||||
+ "(?P<is_connected>(connected|disconnected)) ?"
|
||||
+ "(?P<is_primary> primary)? ?"
|
||||
+ "((?P<resolution_width>\d+)x(?P<resolution_height>\d+)"
|
||||
+ "\+(?P<offset_width>\d+)\+(?P<offset_height>\d+))? "
|
||||
+ "(?P<rotation>(inverted|left|right))? ?"
|
||||
+ "\(normal left inverted right x axis y axis\)"
|
||||
+ "( ((?P<dimension_width>\d+)mm x (?P<dimension_height>\d+)mm)?)?"
|
||||
+ r"(?P<is_connected>(connected|disconnected)) ?"
|
||||
+ r"(?P<is_primary> primary)? ?"
|
||||
+ r"((?P<resolution_width>\d+)x(?P<resolution_height>\d+)"
|
||||
+ r"\+(?P<offset_width>\d+)\+(?P<offset_height>\d+))? "
|
||||
+ r"(?P<rotation>(inverted|left|right))? ?"
|
||||
+ r"\(normal left inverted right x axis y axis\)"
|
||||
+ r"( ((?P<dimension_width>\d+)mm x (?P<dimension_height>\d+)mm)?)?"
|
||||
)
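
The change above only adds the `r` prefix to the concatenated regex fragments: sequences such as `\d` and `\(` are not valid string escapes, so raw strings keep the backslashes intact for the regex engine and avoid `DeprecationWarning`/`SyntaxWarning` on newer Python versions. A small sketch using the same screen pattern style, applied to a hypothetical xrandr header line:

```python
# Sketch: raw-string regex fragments concatenated as above.
import re

screen_pattern = (
    r"Screen (?P<screen_number>\d+): "
    + r"minimum (?P<minimum_width>\d+) x (?P<minimum_height>\d+)"
)
m = re.match(screen_pattern, "Screen 0: minimum 8 x 8, current 1920 x 1080")
print(m.group('screen_number'), m.group('minimum_width'))  # 0 8
```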
|
||||
|
||||
|
||||
|
0
jc/py.typed
Normal file
@ -1,10 +1,14 @@
|
||||
"""jc - JSON Convert streaming utils"""
|
||||
|
||||
from functools import wraps
|
||||
from typing import Dict, Iterable
|
||||
from typing import Dict, Tuple, Union, Iterable, Callable, TypeVar, cast, Any
|
||||
from .jc_types import JSONDictType, MetadataType
|
||||
|
||||
|
||||
def streaming_input_type_check(data: Iterable) -> None:
|
||||
F = TypeVar('F', bound=Callable[..., Any])
|
||||
|
||||
|
||||
def streaming_input_type_check(data: Iterable[Union[str, bytes]]) -> None:
|
||||
"""
|
||||
Ensure input data is an iterable, but not a string or bytes. Raises
|
||||
`TypeError` if not.
|
||||
@ -19,7 +23,7 @@ def streaming_line_input_type_check(line: str) -> None:
|
||||
raise TypeError("Input line must be a 'str' object.")
|
||||
|
||||
|
||||
def stream_success(output_line: Dict, ignore_exceptions: bool) -> Dict:
|
||||
def stream_success(output_line: JSONDictType, ignore_exceptions: bool) -> JSONDictType:
|
||||
"""Add `_jc_meta` object to output line if `ignore_exceptions=True`"""
|
||||
if ignore_exceptions:
|
||||
output_line.update({'_jc_meta': {'success': True}})
|
||||
@ -27,7 +31,7 @@ def stream_success(output_line: Dict, ignore_exceptions: bool) -> Dict:
|
||||
return output_line
|
||||
|
||||
|
||||
def stream_error(e: BaseException, line: str) -> Dict:
|
||||
def stream_error(e: BaseException, line: str) -> Dict[str, MetadataType]:
|
||||
"""
|
||||
Return an error `_jc_meta` field.
|
||||
"""
|
||||
@ -41,7 +45,7 @@ def stream_error(e: BaseException, line: str) -> Dict:
|
||||
}
|
||||
|
||||
|
||||
def add_jc_meta(func):
|
||||
def add_jc_meta(func: F) -> F:
|
||||
"""
|
||||
Decorator for streaming parsers to add stream_success and stream_error
|
||||
objects. This simplifies the yield lines in the streaming parsers.
|
||||
@ -96,14 +100,14 @@ def add_jc_meta(func):
|
||||
line = value[1]
|
||||
yield stream_error(exception_obj, line)
|
||||
|
||||
return wrapper
|
||||
return cast(F, wrapper)
|
||||
|
||||
|
||||
def raise_or_yield(
|
||||
ignore_exceptions: bool,
|
||||
e: BaseException,
|
||||
line: str
|
||||
) -> tuple:
|
||||
) -> Tuple[BaseException, str]:
|
||||
"""
|
||||
Return the exception object and line string if ignore_exceptions is
|
||||
True. Otherwise, re-raise the exception from the exception object with
|
||||
|
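
The `add_jc_meta` change above types the decorator with a bound `TypeVar` and `cast` so the decorated function keeps its original signature for type checkers. A generic sketch of that pattern (the names are illustrative and not part of jc):

```python
# Generic sketch of the TypeVar/cast decorator-typing pattern used above.
from functools import wraps
from typing import Any, Callable, TypeVar, cast

F = TypeVar('F', bound=Callable[..., Any])

def logged(func: F) -> F:
    @wraps(func)
    def wrapper(*args: Any, **kwargs: Any) -> Any:
        print(f'calling {func.__name__}')
        return func(*args, **kwargs)
    # cast() tells the type checker the wrapper has the same type as func
    return cast(F, wrapper)

@logged
def add(a: int, b: int) -> int:
    return a + b

print(add(1, 2))  # prints "calling add", then 3
```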
@ -1,3 +1,5 @@
|
||||
# type: ignore
|
||||
|
||||
"""More comprehensive traceback formatting for Python scripts.
|
||||
To enable this module, do:
|
||||
import tracebackplus; tracebackplus.enable()
|
||||
@ -69,7 +71,6 @@ products or services of Licensee, or any third party.
|
||||
agrees to be bound by the terms and conditions of this License
|
||||
Agreement.
|
||||
'''
|
||||
|
||||
import inspect
|
||||
import keyword
|
||||
import linecache
|
||||
|
61
jc/utils.py
@ -6,7 +6,8 @@ import shutil
|
||||
from datetime import datetime, timezone
|
||||
from textwrap import TextWrapper
|
||||
from functools import lru_cache
|
||||
from typing import List, Iterable, Union, Optional
|
||||
from typing import List, Dict, Iterable, Union, Optional, TextIO
|
||||
from .jc_types import TimeStampFormatType
|
||||
|
||||
|
||||
def _asciify(string: str) -> str:
|
||||
@ -21,7 +22,13 @@ def _asciify(string: str) -> str:
|
||||
return string
|
||||
|
||||
|
||||
def _safe_print(string: str, sep=' ', end='\n', file=sys.stdout, flush=False) -> None:
|
||||
def _safe_print(
|
||||
string: str,
|
||||
sep: str = ' ',
|
||||
end: str = '\n',
|
||||
file: TextIO = sys.stdout,
|
||||
flush: bool = False
|
||||
) -> None:
|
||||
"""Output for both UTF-8 and ASCII encoding systems"""
|
||||
try:
|
||||
print(string, sep=sep, end=end, file=file, flush=flush)
|
||||
@ -106,7 +113,7 @@ def error_message(message_lines: List[str]) -> None:
|
||||
_safe_print(message, file=sys.stderr)
|
||||
|
||||
|
||||
def is_compatible(compatible: List) -> bool:
|
||||
def is_compatible(compatible: List[str]) -> bool:
|
||||
"""
|
||||
Returns True if the parser is compatible with the running OS platform.
|
||||
"""
|
||||
@ -120,7 +127,7 @@ def is_compatible(compatible: List) -> bool:
|
||||
return platform_found
|
||||
|
||||
|
||||
def compatibility(mod_name: str, compatible: List, quiet: bool = False) -> None:
|
||||
def compatibility(mod_name: str, compatible: List[str], quiet: bool = False) -> None:
|
||||
"""
|
||||
Checks for the parser's compatibility with the running OS platform and
|
||||
prints a warning message to `STDERR` if not compatible and
|
||||
@ -172,7 +179,7 @@ def has_data(data: Union[str, bytes]) -> bool:
|
||||
return bool(data)
|
||||
|
||||
|
||||
def convert_to_int(value: Union[str, float]) -> Optional[int]:
|
||||
def convert_to_int(value: object) -> Optional[int]:
|
||||
"""
|
||||
Converts string and float input to int. Strips all non-numeric
|
||||
characters from strings.
|
||||
@ -202,7 +209,7 @@ def convert_to_int(value: Union[str, float]) -> Optional[int]:
|
||||
return None
|
||||
|
||||
|
||||
def convert_to_float(value: Union[str, int]) -> Optional[float]:
|
||||
def convert_to_float(value: object) -> Optional[float]:
|
||||
"""
|
||||
Converts string and int input to float. Strips all non-numeric
|
||||
characters from strings.
|
||||
@ -228,7 +235,7 @@ def convert_to_float(value: Union[str, int]) -> Optional[float]:
|
||||
return None
|
||||
|
||||
|
||||
def convert_to_bool(value: Union[str, int, float]) -> bool:
|
||||
def convert_to_bool(value: object) -> bool:
|
||||
"""
|
||||
Converts string, integer, or float input to boolean by checking
|
||||
for 'truthy' values.
|
||||
@ -273,9 +280,11 @@ def input_type_check(data: str) -> None:
|
||||
|
||||
|
||||
class timestamp:
|
||||
__slots__ = ('string', 'format', 'naive', 'utc')
|
||||
|
||||
def __init__(self,
|
||||
datetime_string: str,
|
||||
format_hint: Optional[Iterable] = None
|
||||
datetime_string: Optional[str],
|
||||
format_hint: Optional[Iterable[int]] = None
|
||||
) -> None:
|
||||
"""
|
||||
Input a datetime text string of several formats and convert to a
|
||||
@ -318,12 +327,15 @@ class timestamp:
|
||||
self.naive = dt['timestamp_naive']
|
||||
self.utc = dt['timestamp_utc']
|
||||
|
||||
def __repr__(self):
|
||||
def __repr__(self) -> str:
|
||||
return f'timestamp(string={self.string!r}, format={self.format}, naive={self.naive}, utc={self.utc})'
|
||||
|
||||
@staticmethod
|
||||
@lru_cache(maxsize=512)
|
||||
def _parse_dt(dt_string, format_hint=None):
|
||||
def _parse_dt(
|
||||
dt_string: Optional[str],
|
||||
format_hint: Optional[Iterable[int]] = None
|
||||
) -> Dict[str, Optional[int]]:
|
||||
"""
|
||||
Input a datetime text string of several formats and convert to
|
||||
a naive or timezone-aware epoch timestamp in UTC.
|
||||
@ -370,7 +382,7 @@ class timestamp:
|
||||
|
||||
If the conversion completely fails, all fields will be None.
|
||||
"""
|
||||
formats = (
|
||||
formats: tuple[TimeStampFormatType, ...] = (
|
||||
{'id': 1000, 'format': '%a %b %d %H:%M:%S %Y', 'locale': None}, # manual C locale format conversion: Tue Mar 23 16:12:11 2021 or Tue Mar 23 16:12:11 IST 2021
|
||||
{'id': 1100, 'format': '%a %b %d %H:%M:%S %Y %z', 'locale': None}, # git date output: Thu Mar 5 09:17:40 2020 -0800
|
||||
{'id': 1300, 'format': '%Y-%m-%dT%H:%M:%S.%f%Z', 'locale': None}, # ISO Format with UTC (found in syslog 5424): 2003-10-11T22:14:15.003Z
|
||||
@ -405,7 +417,7 @@ class timestamp:
|
||||
|
||||
# from https://www.timeanddate.com/time/zones/
|
||||
# only removed UTC timezone and added known non-UTC offsets
|
||||
tz_abbr = {
|
||||
tz_abbr: set[str] = {
|
||||
'A', 'ACDT', 'ACST', 'ACT', 'ACWST', 'ADT', 'AEDT', 'AEST', 'AET', 'AFT', 'AKDT',
|
||||
'AKST', 'ALMT', 'AMST', 'AMT', 'ANAST', 'ANAT', 'AQTT', 'ART', 'AST', 'AT', 'AWDT',
|
||||
'AWST', 'AZOST', 'AZOT', 'AZST', 'AZT', 'AoE', 'B', 'BNT', 'BOT', 'BRST', 'BRT', 'BST',
|
||||
@ -433,7 +445,7 @@ class timestamp:
|
||||
'UTC+1345', 'UTC+1400'
|
||||
}
|
||||
|
||||
offset_suffixes = (
|
||||
offset_suffixes: tuple[str, ...] = (
|
||||
'-12:00', '-11:00', '-10:00', '-09:30', '-09:00',
|
||||
'-08:00', '-07:00', '-06:00', '-05:00', '-04:00', '-03:00', '-02:30',
|
||||
'-02:00', '-01:00', '+01:00', '+02:00', '+03:00', '+04:00', '+04:30',
|
||||
@ -442,19 +454,18 @@ class timestamp:
|
||||
'+13:45', '+14:00'
|
||||
)
|
||||
|
||||
data = dt_string or ''
|
||||
normalized_datetime = ''
|
||||
utc_tz = False
|
||||
dt = None
|
||||
dt_utc = None
|
||||
timestamp_naive = None
|
||||
timestamp_utc = None
|
||||
timestamp_obj = {
|
||||
data: str = dt_string or ''
|
||||
normalized_datetime: str = ''
|
||||
utc_tz: bool = False
|
||||
dt: Optional[datetime] = None
|
||||
dt_utc: Optional[datetime] = None
|
||||
timestamp_naive: Optional[int] = None
|
||||
timestamp_utc: Optional[int] = None
|
||||
timestamp_obj: Dict[str, Optional[int]] = {
|
||||
'format': None,
|
||||
'timestamp_naive': None,
|
||||
'timestamp_utc': None
|
||||
}
|
||||
utc_tz = False
|
||||
|
||||
# convert format_hint to a tuple so it is hashable (for lru_cache)
|
||||
if not format_hint:
|
||||
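
The comment above explains why `format_hint` is normalized to a tuple: `functools.lru_cache` keys its cache on the call arguments, so every argument must be hashable. A minimal sketch of the failure mode this avoids (the function name is illustrative):

```python
# Sketch: lru_cache requires hashable arguments, so a list format_hint
# would raise while a tuple works. Illustrative only.
from functools import lru_cache

@lru_cache(maxsize=512)
def parse_with_hint(dt_string, format_hint=None):
    return (dt_string, format_hint)

parse_with_hint('2022-10-24', (7100, 7200))      # OK: tuple is hashable
try:
    parse_with_hint('2022-10-24', [7100, 7200])  # TypeError: unhashable type
except TypeError as exc:
    print(exc)
```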
@ -478,7 +489,7 @@ class timestamp:
|
||||
|
||||
# normalize the timezone by taking out any timezone reference, except UTC
|
||||
cleandata = data.replace('(', '').replace(')', '')
|
||||
normalized_datetime_list = []
|
||||
normalized_datetime_list: List[str] = []
|
||||
for term in cleandata.split():
|
||||
if term not in tz_abbr:
|
||||
normalized_datetime_list.append(term)
|
||||
@ -496,7 +507,7 @@ class timestamp:
|
||||
normalized_datetime = p.sub(r'\g<1> ', normalized_datetime)
|
||||
|
||||
# try format hints first, then fall back to brute-force method
|
||||
hint_obj_list = []
|
||||
hint_obj_list: List[TimeStampFormatType] = []
|
||||
for fmt_id in format_hint:
|
||||
for fmt in formats:
|
||||
if fmt_id == fmt['id']:
|
||||
|
24
man/jc.1
@ -1,4 +1,4 @@
|
||||
.TH jc 1 2022-09-26 1.22.0 "JSON Convert"
|
||||
.TH jc 1 2022-10-24 1.22.1 "JSON Convert"
|
||||
.SH NAME
|
||||
\fBjc\fP \- JSON Convert JSONifies the output of many CLI tools, file-types, and strings
|
||||
.SH SYNOPSIS
|
||||
@ -110,6 +110,11 @@ CSV file streaming parser
|
||||
\fB--date\fP
|
||||
`date` command parser
|
||||
|
||||
.TP
|
||||
.B
|
||||
\fB--datetime-iso\fP
|
||||
ISO 8601 Datetime string parser
|
||||
|
||||
.TP
|
||||
.B
|
||||
\fB--df\fP
|
||||
@ -258,7 +263,7 @@ IPv4 and IPv6 Address string parser
|
||||
.TP
|
||||
.B
|
||||
\fB--iso-datetime\fP
|
||||
ISO 8601 Datetime string parser
|
||||
Deprecated - please use datetime-iso
|
||||
|
||||
.TP
|
||||
.B
|
||||
@ -315,6 +320,11 @@ Key/Value file parser
|
||||
\fB--lsof\fP
|
||||
`lsof` command parser
|
||||
|
||||
.TP
|
||||
.B
|
||||
\fB--lspci\fP
|
||||
`lspci -mmv` command parser
|
||||
|
||||
.TP
|
||||
.B
|
||||
\fB--lsusb\fP
|
||||
@ -365,6 +375,11 @@ M3U and M3U8 file parser
|
||||
\fB--passwd\fP
|
||||
`/etc/passwd` file parser
|
||||
|
||||
.TP
|
||||
.B
|
||||
\fB--pci-ids\fP
|
||||
`pci.ids` file parser
|
||||
|
||||
.TP
|
||||
.B
|
||||
\fB--pidstat\fP
|
||||
@ -790,6 +805,11 @@ Unix Epoch Timestamp string parser
|
||||
\fB--traceroute\fP
|
||||
`traceroute` and `traceroute6` command parser
|
||||
|
||||
.TP
|
||||
.B
|
||||
\fB--udevadm\fP
|
||||
`udevadm info` command parser
|
||||
|
||||
.TP
|
||||
.B
|
||||
\fB--ufw\fP
|
||||
|
@ -8,7 +8,7 @@ file_loader = FileSystemLoader('templates')
|
||||
env = Environment(loader=file_loader)
|
||||
template = env.get_template('manpage_template')
|
||||
output = template.render(today=date.today(),
|
||||
jc=jc.cli.about_jc())
|
||||
jc=jc.cli.JcCli.about_jc())
|
||||
|
||||
with open('man/jc.1', 'w') as f:
|
||||
f.write(output)
|
||||
|
@ -9,7 +9,7 @@ env = Environment(loader=file_loader)
|
||||
template = env.get_template('readme_template')
|
||||
# output = template.render(jc=jc.cli.about_jc())
|
||||
output = template.render(parsers=jc.lib.all_parser_info(),
|
||||
info=jc.cli.about_jc())
|
||||
info=jc.cli.JcCli.about_jc())
|
||||
|
||||
with open('README.md', 'w') as f:
|
||||
f.write(output)
|
||||
|
2
setup.py
@ -5,7 +5,7 @@ with open('README.md', 'r') as f:
|
||||
|
||||
setuptools.setup(
|
||||
name='jc',
|
||||
version='1.22.0',
|
||||
version='1.22.1',
|
||||
author='Kelly Brazil',
|
||||
author_email='kellyjonbrazil@gmail.com',
|
||||
description='Converts the output of popular command-line tools and file-types to JSON.',
|
||||
|
@ -3,7 +3,7 @@
|
||||
|
||||
> Check out the `jc` Python [package documentation](https://github.com/kellyjonbrazil/jc/tree/master/docs) for developers
|
||||
|
||||
> Try the `jc` [web demo](https://jc-web-demo.herokuapp.com/)
|
||||
> Try the `jc` [web demo](https://jc-web.onrender.com/)
|
||||
|
||||
> JC is [now available](https://galaxy.ansible.com/community/general) as an
|
||||
Ansible filter plugin in the `community.general` collection. See this
|
||||
|
35589
tests/fixtures/generic/pci.ids
vendored
Normal file
File diff suppressed because it is too large
1
tests/fixtures/generic/pci.ids.json
vendored
Normal file
File diff suppressed because one or more lines are too long
1
tests/fixtures/generic/udevadm.json
vendored
Normal file
@ -0,0 +1 @@
|
||||
{"P":"/devices/pci0000:00/0000:00:10.0/host32/target32:0:0/32:0:0:0/block/sda","N":"sda","L":0,"S":["disk/by-path/pci-0000:00:10.0-scsi-0:0:0:0"],"E":{"DEVPATH":"/devices/pci0000:00/0000:00:10.0/host32/target32:0:0/32:0:0:0/block/sda","DEVNAME":"/dev/sda","DEVTYPE":"disk","MAJOR":"8","MINOR":"0","SUBSYSTEM":"block","USEC_INITIALIZED":"6100111","SCSI_TPGS":"0","SCSI_TYPE":"disk","SCSI_VENDOR":"VMware,","SCSI_VENDOR_ENC":"VMware,\\x20","SCSI_MODEL":"VMware_Virtual_S","SCSI_MODEL_ENC":"VMware\\x20Virtual\\x20S","SCSI_REVISION":"1.0","ID_SCSI":"1","ID_VENDOR":"VMware_","ID_VENDOR_ENC":"VMware\\x2c\\x20","ID_MODEL":"VMware_Virtual_S","ID_MODEL_ENC":"VMware\\x20Virtual\\x20S","ID_REVISION":"1.0","ID_TYPE":"disk","MPATH_SBIN_PATH":"/sbin","ID_BUS":"scsi","ID_PATH":"pci-0000:00:10.0-scsi-0:0:0:0","ID_PATH_TAG":"pci-0000_00_10_0-scsi-0_0_0_0","ID_PART_TABLE_UUID":"a5bd0c01-4210-46f2-b558-5c11c209a8f7","ID_PART_TABLE_TYPE":"gpt","DEVLINKS":"/dev/disk/by-path/pci-0000:00:10.0-scsi-0:0:0:0","TAGS":":systemd:"}}
|
33
tests/fixtures/generic/udevadm.out
vendored
Normal file
@ -0,0 +1,33 @@
|
||||
P: /devices/pci0000:00/0000:00:10.0/host32/target32:0:0/32:0:0:0/block/sda
|
||||
N: sda
|
||||
L: 0
|
||||
S: disk/by-path/pci-0000:00:10.0-scsi-0:0:0:0
|
||||
E: DEVPATH=/devices/pci0000:00/0000:00:10.0/host32/target32:0:0/32:0:0:0/block/sda
|
||||
E: DEVNAME=/dev/sda
|
||||
E: DEVTYPE=disk
|
||||
E: MAJOR=8
|
||||
E: MINOR=0
|
||||
E: SUBSYSTEM=block
|
||||
E: USEC_INITIALIZED=6100111
|
||||
E: SCSI_TPGS=0
|
||||
E: SCSI_TYPE=disk
|
||||
E: SCSI_VENDOR=VMware,
|
||||
E: SCSI_VENDOR_ENC=VMware,\x20
|
||||
E: SCSI_MODEL=VMware_Virtual_S
|
||||
E: SCSI_MODEL_ENC=VMware\x20Virtual\x20S
|
||||
E: SCSI_REVISION=1.0
|
||||
E: ID_SCSI=1
|
||||
E: ID_VENDOR=VMware_
|
||||
E: ID_VENDOR_ENC=VMware\x2c\x20
|
||||
E: ID_MODEL=VMware_Virtual_S
|
||||
E: ID_MODEL_ENC=VMware\x20Virtual\x20S
|
||||
E: ID_REVISION=1.0
|
||||
E: ID_TYPE=disk
|
||||
E: MPATH_SBIN_PATH=/sbin
|
||||
E: ID_BUS=scsi
|
||||
E: ID_PATH=pci-0000:00:10.0-scsi-0:0:0:0
|
||||
E: ID_PATH_TAG=pci-0000_00_10_0-scsi-0_0_0_0
|
||||
E: ID_PART_TABLE_UUID=a5bd0c01-4210-46f2-b558-5c11c209a8f7
|
||||
E: ID_PART_TABLE_TYPE=gpt
|
||||
E: DEVLINKS=/dev/disk/by-path/pci-0000:00:10.0-scsi-0:0:0:0
|
||||
E: TAGS=:systemd:
|
2
tests/fixtures/linux-proc/pid_stat_w_space_and_nl_in_comm
vendored
Normal file
@ -0,0 +1,2 @@
|
||||
2001 (my file with
|
||||
sp) S 1888 2001 1888 34816 2001 4202496 428 0 0 0 0 0 0 0 20 0 1 0 75513 115900416 297 18446744073709551615 4194304 5100612 140737020052256 140737020050904 140096699233308 0 65536 4 65538 18446744072034584486 0 0 17 0 0 0 0 0 0 7200240 7236240 35389440 140737020057179 140737020057223 140737020057223 140737020059606 0
|
1
tests/fixtures/linux-proc/pid_stat_w_space_and_nl_in_comm.json
vendored
Normal file
@ -0,0 +1 @@
|
||||
{"pid":2001,"comm":"my file with\nsp","state":"S","ppid":1888,"pgrp":2001,"session":1888,"tty_nr":34816,"tpg_id":2001,"flags":4202496,"minflt":428,"cminflt":0,"majflt":0,"cmajflt":0,"utime":0,"stime":0,"cutime":0,"cstime":0,"priority":20,"nice":0,"num_threads":1,"itrealvalue":0,"starttime":75513,"vsize":115900416,"rss":297,"rsslim":18446744073709551615,"startcode":4194304,"endcode":5100612,"startstack":140737020052256,"kstkeep":140737020050904,"kstkeip":140096699233308,"signal":0,"blocked":65536,"sigignore":4,"sigcatch":65538,"wchan":18446744072034584486,"nswap":0,"cnswap":0,"exit_signal":17,"processor":0,"rt_priority":0,"policy":0,"delayacct_blkio_ticks":0,"guest_time":0,"cguest_time":0,"start_data":7200240,"end_data":7236240,"start_brk":35389440,"arg_start":140737020057179,"arg_end":140737020057223,"env_start":140737020057223,"env_end":140737020059606,"exit_code":0,"state_pretty":"Sleeping in an interruptible wait"}
|
1
tests/fixtures/ubuntu-20.10/lspci-mmv.json
vendored
Normal file
File diff suppressed because one or more lines are too long
305
tests/fixtures/ubuntu-20.10/lspci-mmv.out
vendored
Normal file
@ -0,0 +1,305 @@
|
||||
Slot: 00:00.0
|
||||
Class: Host bridge
|
||||
Vendor: Intel Corporation
|
||||
Device: 440BX/ZX/DX - 82443BX/ZX/DX Host bridge
|
||||
SVendor: VMware
|
||||
SDevice: Virtual Machine Chipset
|
||||
Rev: 01
|
||||
|
||||
Slot: 00:01.0
|
||||
Class: PCI bridge
|
||||
Vendor: Intel Corporation
|
||||
Device: 440BX/ZX/DX - 82443BX/ZX/DX AGP bridge
|
||||
Rev: 01
|
||||
|
||||
Slot: 00:07.0
|
||||
Class: ISA bridge
|
||||
Vendor: Intel Corporation
|
||||
Device: 82371AB/EB/MB PIIX4 ISA
|
||||
SVendor: VMware
|
||||
SDevice: Virtual Machine Chipset
|
||||
Rev: 08
|
||||
|
||||
Slot: 00:07.1
|
||||
Class: IDE interface
|
||||
Vendor: Intel Corporation
|
||||
Device: 82371AB/EB/MB PIIX4 IDE
|
||||
SVendor: VMware
|
||||
SDevice: Virtual Machine Chipset
|
||||
Rev: 01
|
||||
ProgIf: 8a
|
||||
|
||||
Slot: 00:07.3
|
||||
Class: Bridge
|
||||
Vendor: Intel Corporation
|
||||
Device: 82371AB/EB/MB PIIX4 ACPI
|
||||
SVendor: VMware
|
||||
SDevice: Virtual Machine Chipset
|
||||
Rev: 08
|
||||
|
||||
Slot: 00:07.7
|
||||
Class: System peripheral
|
||||
Vendor: VMware
|
||||
Device: Virtual Machine Communication Interface
|
||||
SVendor: VMware
|
||||
SDevice: Virtual Machine Communication Interface
|
||||
Rev: 10
|
||||
|
||||
Slot: 00:0f.0
|
||||
Class: VGA compatible controller
|
||||
Vendor: VMware
|
||||
Device: SVGA II Adapter
|
||||
SVendor: VMware
|
||||
SDevice: SVGA II Adapter
|
||||
|
||||
Slot: 00:10.0
|
||||
Class: SCSI storage controller
|
||||
Vendor: Broadcom / LSI
|
||||
Device: 53c1030 PCI-X Fusion-MPT Dual Ultra320 SCSI
|
||||
SVendor: VMware
|
||||
SDevice: LSI Logic Parallel SCSI Controller
|
||||
Rev: 01
|
||||
|
||||
Slot: 00:11.0
|
||||
Class: PCI bridge
|
||||
Vendor: VMware
|
||||
Device: PCI bridge
|
||||
Rev: 02
|
||||
ProgIf: 01
|
||||
|
||||
Slot: 00:15.0
|
||||
Class: PCI bridge
|
||||
Vendor: VMware
|
||||
Device: PCI Express Root Port
|
||||
Rev: 01
|
||||
|
||||
Slot: 00:15.1
|
||||
Class: PCI bridge
|
||||
Vendor: VMware
|
||||
Device: PCI Express Root Port
|
||||
Rev: 01
|
||||
|
||||
Slot: 00:15.2
|
||||
Class: PCI bridge
|
||||
Vendor: VMware
|
||||
Device: PCI Express Root Port
|
||||
Rev: 01
|
||||
|
||||
Slot: 00:15.3
|
||||
Class: PCI bridge
|
||||
Vendor: VMware
|
||||
Device: PCI Express Root Port
|
||||
Rev: 01
|
||||
|
||||
Slot: 00:15.4
|
||||
Class: PCI bridge
|
||||
Vendor: VMware
|
||||
Device: PCI Express Root Port
|
||||
Rev: 01
|
||||
|
||||
Slot: 00:15.5
|
||||
Class: PCI bridge
|
||||
Vendor: VMware
|
||||
Device: PCI Express Root Port
|
||||
Rev: 01
|
||||
|
||||
Slot: 00:15.6
|
||||
Class: PCI bridge
|
||||
Vendor: VMware
|
||||
Device: PCI Express Root Port
|
||||
Rev: 01
|
||||
|
||||
Slot: 00:15.7
|
||||
Class: PCI bridge
|
||||
Vendor: VMware
|
||||
Device: PCI Express Root Port
|
||||
Rev: 01
|
||||
|
||||
Slot: 00:16.0
|
||||
Class: PCI bridge
|
||||
Vendor: VMware
|
||||
Device: PCI Express Root Port
|
||||
Rev: 01
|
||||
|
||||
Slot: 00:16.1
|
||||
Class: PCI bridge
|
||||
Vendor: VMware
|
||||
Device: PCI Express Root Port
|
||||
Rev: 01
|
||||
|
||||
Slot: 00:16.2
|
||||
Class: PCI bridge
|
||||
Vendor: VMware
|
||||
Device: PCI Express Root Port
|
||||
Rev: 01
|
||||
|
||||
Slot: 00:16.3
|
||||
Class: PCI bridge
|
||||
Vendor: VMware
|
||||
Device: PCI Express Root Port
|
||||
Rev: 01
|
||||
|
||||
Slot: 00:16.4
|
||||
Class: PCI bridge
|
||||
Vendor: VMware
|
||||
Device: PCI Express Root Port
|
||||
Rev: 01
|
||||
|
||||
Slot: 00:16.5
|
||||
Class: PCI bridge
|
||||
Vendor: VMware
|
||||
Device: PCI Express Root Port
|
||||
Rev: 01
|
||||
|
||||
Slot: 00:16.6
|
||||
Class: PCI bridge
|
||||
Vendor: VMware
|
||||
Device: PCI Express Root Port
|
||||
Rev: 01
|
||||
|
||||
Slot: 00:16.7
|
||||
Class: PCI bridge
|
||||
Vendor: VMware
|
||||
Device: PCI Express Root Port
|
||||
Rev: 01
|
||||
|
||||
Slot: 00:17.0
|
||||
Class: PCI bridge
|
||||
Vendor: VMware
|
||||
Device: PCI Express Root Port
|
||||
Rev: 01
|
||||
|
||||
Slot: 00:17.1
|
||||
Class: PCI bridge
|
||||
Vendor: VMware
|
||||
Device: PCI Express Root Port
|
||||
Rev: 01
|
||||
|
||||
Slot: 00:17.2
|
||||
Class: PCI bridge
|
||||
Vendor: VMware
|
||||
Device: PCI Express Root Port
|
||||
Rev: 01
|
||||
|
||||
Slot: 00:17.3
|
||||
Class: PCI bridge
|
||||
Vendor: VMware
|
||||
Device: PCI Express Root Port
|
||||
Rev: 01
|
||||
|
||||
Slot: 00:17.4
|
||||
Class: PCI bridge
|
||||
Vendor: VMware
|
||||
Device: PCI Express Root Port
|
||||
Rev: 01
|
||||
|
||||
Slot: 00:17.5
|
||||
Class: PCI bridge
|
||||
Vendor: VMware
|
||||
Device: PCI Express Root Port
|
||||
Rev: 01
|
||||
|
||||
Slot: 00:17.6
|
||||
Class: PCI bridge
|
||||
Vendor: VMware
|
||||
Device: PCI Express Root Port
|
||||
Rev: 01
|
||||
|
||||
Slot: 00:17.7
|
||||
Class: PCI bridge
|
||||
Vendor: VMware
|
||||
Device: PCI Express Root Port
|
||||
Rev: 01
|
||||
|
||||
Slot: 00:18.0
|
||||
Class: PCI bridge
|
||||
Vendor: VMware
|
||||
Device: PCI Express Root Port
|
||||
Rev: 01
|
||||
|
||||
Slot: 00:18.1
|
||||
Class: PCI bridge
|
||||
Vendor: VMware
|
||||
Device: PCI Express Root Port
|
||||
Rev: 01
|
||||
|
||||
Slot: 00:18.2
|
||||
Class: PCI bridge
|
||||
Vendor: VMware
|
||||
Device: PCI Express Root Port
|
||||
Rev: 01
|
||||
|
||||
Slot: 00:18.3
|
||||
Class: PCI bridge
|
||||
Vendor: VMware
|
||||
Device: PCI Express Root Port
|
||||
Rev: 01
|
||||
|
||||
Slot: 00:18.4
|
||||
Class: PCI bridge
|
||||
Vendor: VMware
|
||||
Device: PCI Express Root Port
|
||||
Rev: 01
|
||||
|
||||
Slot: 00:18.5
|
||||
Class: PCI bridge
|
||||
Vendor: VMware
|
||||
Device: PCI Express Root Port
|
||||
Rev: 01
|
||||
|
||||
Slot: 00:18.6
|
||||
Class: PCI bridge
|
||||
Vendor: VMware
|
||||
Device: PCI Express Root Port
|
||||
Rev: 01
|
||||
|
||||
Slot: 00:18.7
|
||||
Class: PCI bridge
|
||||
Vendor: VMware
|
||||
Device: PCI Express Root Port
|
||||
Rev: 01
|
||||
|
||||
Slot: 02:00.0
|
||||
Class: USB controller
|
||||
Vendor: VMware
|
||||
Device: USB1.1 UHCI Controller
|
||||
SVendor: VMware
|
||||
SDevice: USB1.1 UHCI Controller
|
||||
PhySlot: 32
|
||||
|
||||
Slot: 02:01.0
|
||||
Class: Ethernet controller
|
||||
Vendor: Intel Corporation
|
||||
Device: 82545EM Gigabit Ethernet Controller (Copper)
|
||||
SVendor: VMware
|
||||
SDevice: PRO/1000 MT Single Port Adapter
|
||||
PhySlot: 33
|
||||
Rev: 01
|
||||
|
||||
Slot: 02:02.0
|
||||
Class: Multimedia audio controller
|
||||
Vendor: Ensoniq
|
||||
Device: ES1371/ES1373 / Creative Labs CT2518
|
||||
SVendor: Ensoniq
|
||||
SDevice: Audio PCI 64V/128/5200 / Creative CT4810/CT5803/CT5806 [Sound Blaster PCI]
|
||||
PhySlot: 34
|
||||
Rev: 02
|
||||
|
||||
Slot: 02:03.0
|
||||
Class: USB controller
|
||||
Vendor: VMware
|
||||
Device: USB2 EHCI Controller
|
||||
SVendor: VMware
|
||||
SDevice: USB2 EHCI Controller
|
||||
PhySlot: 35
|
||||
ProgIf: 20
|
||||
|
||||
Slot: 02:05.0
|
||||
Class: SATA controller
|
||||
Vendor: VMware
|
||||
Device: SATA AHCI controller
|
||||
SVendor: VMware
|
||||
SDevice: SATA AHCI controller
|
||||
PhySlot: 37
|
||||
ProgIf: 01
|
||||
|
1
tests/fixtures/ubuntu-20.10/lspci-nmmv.json
vendored
Normal file
File diff suppressed because one or more lines are too long
305
tests/fixtures/ubuntu-20.10/lspci-nmmv.out
vendored
Normal file
@ -0,0 +1,305 @@
|
||||
Slot: 00:00.0
|
||||
Class: 0600
|
||||
Vendor: 8086
|
||||
Device: 7190
|
||||
SVendor: 15ad
|
||||
SDevice: 1976
|
||||
Rev: 01
|
||||
|
||||
Slot: 00:01.0
|
||||
Class: 0604
|
||||
Vendor: 8086
|
||||
Device: 7191
|
||||
Rev: 01
|
||||
|
||||
Slot: 00:07.0
|
||||
Class: 0601
|
||||
Vendor: 8086
|
||||
Device: 7110
|
||||
SVendor: 15ad
|
||||
SDevice: 1976
|
||||
Rev: 08
|
||||
|
||||
Slot: 00:07.1
|
||||
Class: 0101
|
||||
Vendor: 8086
|
||||
Device: 7111
|
||||
SVendor: 15ad
|
||||
SDevice: 1976
|
||||
Rev: 01
|
||||
ProgIf: 8a
|
||||
|
||||
Slot: 00:07.3
|
||||
Class: 0680
|
||||
Vendor: 8086
|
||||
Device: 7113
|
||||
SVendor: 15ad
|
||||
SDevice: 1976
|
||||
Rev: 08
|
||||
|
||||
Slot: 00:07.7
|
||||
Class: 0880
|
||||
Vendor: 15ad
|
||||
Device: 0740
|
||||
SVendor: 15ad
|
||||
SDevice: 0740
|
||||
Rev: 10
|
||||
|
||||
Slot: 00:0f.0
|
||||
Class: 0300
|
||||
Vendor: 15ad
|
||||
Device: 0405
|
||||
SVendor: 15ad
|
||||
SDevice: 0405
|
||||
|
||||
Slot: 00:10.0
|
||||
Class: 0100
|
||||
Vendor: 1000
|
||||
Device: 0030
|
||||
SVendor: 15ad
|
||||
SDevice: 1976
|
||||
Rev: 01
|
||||
|
||||
Slot: 00:11.0
|
||||
Class: 0604
|
||||
Vendor: 15ad
|
||||
Device: 0790
|
||||
Rev: 02
|
||||
ProgIf: 01
|
||||
|
||||
Slot: 00:15.0
|
||||
Class: 0604
|
||||
Vendor: 15ad
|
||||
Device: 07a0
|
||||
Rev: 01
|
||||
|
||||
Slot: 00:15.1
|
||||
Class: 0604
|
||||
Vendor: 15ad
|
||||
Device: 07a0
|
||||
Rev: 01
|
||||
|
||||
Slot: 00:15.2
|
||||
Class: 0604
|
||||
Vendor: 15ad
|
||||
Device: 07a0
|
||||
Rev: 01
|
||||
|
||||
Slot: 00:15.3
|
||||
Class: 0604
|
||||
Vendor: 15ad
|
||||
Device: 07a0
|
||||
Rev: 01
|
||||
|
||||
Slot: 00:15.4
|
||||
Class: 0604
|
||||
Vendor: 15ad
|
||||
Device: 07a0
|
||||
Rev: 01
|
||||
|
||||
Slot: 00:15.5
|
||||
Class: 0604
|
||||
Vendor: 15ad
|
||||
Device: 07a0
|
||||
Rev: 01
|
||||
|
||||
Slot: 00:15.6
|
||||
Class: 0604
|
||||
Vendor: 15ad
|
||||
Device: 07a0
|
||||
Rev: 01
|
||||
|
||||
Slot: 00:15.7
|
||||
Class: 0604
|
||||
Vendor: 15ad
|
||||
Device: 07a0
|
||||
Rev: 01
|
||||
|
||||
Slot: 00:16.0
|
||||
Class: 0604
|
||||
Vendor: 15ad
|
||||
Device: 07a0
|
||||
Rev: 01
|
||||
|
||||
Slot: 00:16.1
|
||||
Class: 0604
|
||||
Vendor: 15ad
|
||||
Device: 07a0
|
||||
Rev: 01
|
||||
|
||||
Slot: 00:16.2
|
||||
Class: 0604
|
||||
Vendor: 15ad
|
||||
Device: 07a0
|
||||
Rev: 01
|
||||
|
||||
Slot: 00:16.3
|
||||
Class: 0604
|
||||
Vendor: 15ad
|
||||
Device: 07a0
|
||||
Rev: 01
|
||||
|
||||
Slot: 00:16.4
|
||||
Class: 0604
|
||||
Vendor: 15ad
|
||||
Device: 07a0
|
||||
Rev: 01
|
||||
|
||||
Slot: 00:16.5
|
||||
Class: 0604
|
||||
Vendor: 15ad
|
||||
Device: 07a0
|
||||
Rev: 01
|
||||
|
||||
Slot: 00:16.6
|
||||
Class: 0604
|
||||
Vendor: 15ad
|
||||
Device: 07a0
|
||||
Rev: 01
|
||||
|
||||
Slot: 00:16.7
|
||||
Class: 0604
|
||||
Vendor: 15ad
|
||||
Device: 07a0
|
||||
Rev: 01
|
||||
|
||||
Slot: 00:17.0
|
||||
Class: 0604
|
||||
Vendor: 15ad
|
||||
Device: 07a0
|
||||
Rev: 01
|
||||
|
||||
Slot: 00:17.1
|
||||
Class: 0604
|
||||
Vendor: 15ad
|
||||
Device: 07a0
|
||||
Rev: 01
|
||||
|
||||
Slot: 00:17.2
|
||||
Class: 0604
|
||||
Vendor: 15ad
|
||||
Device: 07a0
|
||||
Rev: 01
|
||||
|
||||
Slot: 00:17.3
|
||||
Class: 0604
|
||||
Vendor: 15ad
|
||||
Device: 07a0
|
||||
Rev: 01
|
||||
|
||||
Slot: 00:17.4
|
||||
Class: 0604
|
||||
Vendor: 15ad
|
||||
Device: 07a0
|
||||
Rev: 01
|
||||
|
||||
Slot: 00:17.5
|
||||
Class: 0604
|
||||
Vendor: 15ad
|
||||
Device: 07a0
|
||||
Rev: 01
|
||||
|
||||
Slot: 00:17.6
|
||||
Class: 0604
|
||||
Vendor: 15ad
|
||||
Device: 07a0
|
||||
Rev: 01
|
||||
|
||||
Slot: 00:17.7
|
||||
Class: 0604
|
||||
Vendor: 15ad
|
||||
Device: 07a0
|
||||
Rev: 01
|
||||
|
||||
Slot: 00:18.0
|
||||
Class: 0604
|
||||
Vendor: 15ad
|
||||
Device: 07a0
|
||||
Rev: 01
|
||||
|
||||
Slot: 00:18.1
|
||||
Class: 0604
|
||||
Vendor: 15ad
|
||||
Device: 07a0
|
||||
Rev: 01
|
||||
|
||||
Slot: 00:18.2
|
||||
Class: 0604
|
||||
Vendor: 15ad
|
||||
Device: 07a0
|
||||
Rev: 01
|
||||
|
||||
Slot: 00:18.3
|
||||
Class: 0604
|
||||
Vendor: 15ad
|
||||
Device: 07a0
|
||||
Rev: 01
|
||||
|
||||
Slot: 00:18.4
|
||||
Class: 0604
|
||||
Vendor: 15ad
|
||||
Device: 07a0
|
||||
Rev: 01
|
||||
|
||||
Slot: 00:18.5
|
||||
Class: 0604
|
||||
Vendor: 15ad
|
||||
Device: 07a0
|
||||
Rev: 01
|
||||
|
||||
Slot: 00:18.6
|
||||
Class: 0604
|
||||
Vendor: 15ad
|
||||
Device: 07a0
|
||||
Rev: 01
|
||||
|
||||
Slot: 00:18.7
|
||||
Class: 0604
|
||||
Vendor: 15ad
|
||||
Device: 07a0
|
||||
Rev: 01
|
||||
|
||||
Slot: 02:00.0
|
||||
Class: 0c03
|
||||
Vendor: 15ad
|
||||
Device: 0774
|
||||
SVendor: 15ad
|
||||
SDevice: 1976
|
||||
PhySlot: 32
|
||||
|
||||
Slot: 02:01.0
|
||||
Class: 0200
|
||||
Vendor: 8086
|
||||
Device: 100f
|
||||
SVendor: 15ad
|
||||
SDevice: 0750
|
||||
PhySlot: 33
|
||||
Rev: 01
|
||||
|
||||
Slot: 02:02.0
|
||||
Class: 0401
|
||||
Vendor: 1274
|
||||
Device: 1371
|
||||
SVendor: 1274
|
||||
SDevice: 1371
|
||||
PhySlot: 34
|
||||
Rev: 02
|
||||
|
||||
Slot: 02:03.0
|
||||
Class: 0c03
|
||||
Vendor: 15ad
|
||||
Device: 0770
|
||||
SVendor: 15ad
|
||||
SDevice: 0770
|
||||
PhySlot: 35
|
||||
ProgIf: 20
|
||||
|
||||
Slot: 02:05.0
|
||||
Class: 0106
|
||||
Vendor: 15ad
|
||||
Device: 07e0
|
||||
SVendor: 15ad
|
||||
SDevice: 07e0
|
||||
PhySlot: 37
|
||||
ProgIf: 01
|
||||
|
1
tests/fixtures/ubuntu-20.10/lspci-nnmmv.json
vendored
Normal file
File diff suppressed because one or more lines are too long
314
tests/fixtures/ubuntu-20.10/lspci-nnmmv.out
vendored
Normal file
@ -0,0 +1,314 @@
|
||||
Slot: 00:00.0
|
||||
Class: Host bridge [0600]
|
||||
Vendor: Intel Corporation [8086]
|
||||
Device: 440BX/ZX/DX - 82443BX/ZX/DX Host bridge [7190]
|
||||
SVendor: VMware [15ad]
|
||||
SDevice: Virtual Machine Chipset [1976]
|
||||
Rev: 01
|
||||
|
||||
Slot: 00:01.0
|
||||
Class: PCI bridge [0604]
|
||||
Vendor: Intel Corporation [8086]
|
||||
Device: 440BX/ZX/DX - 82443BX/ZX/DX AGP bridge [7191]
|
||||
Rev: 01
|
||||
|
||||
Slot: 00:07.0
|
||||
Class: ISA bridge [0601]
|
||||
Vendor: Intel Corporation [8086]
|
||||
Device: 82371AB/EB/MB PIIX4 ISA [7110]
|
||||
SVendor: VMware [15ad]
|
||||
SDevice: Virtual Machine Chipset [1976]
|
||||
Rev: 08
|
||||
|
||||
Slot: 00:07.1
|
||||
Class: IDE interface [0101]
|
||||
Vendor: Intel Corporation [8086]
|
||||
Device: 82371AB/EB/MB PIIX4 IDE [7111]
|
||||
SVendor: VMware [15ad]
|
||||
SDevice: Virtual Machine Chipset [1976]
|
||||
Rev: 01
|
||||
ProgIf: 8a
|
||||
|
||||
Slot: 00:07.3
|
||||
Class: Bridge [0680]
|
||||
Vendor: Intel Corporation [8086]
|
||||
Device: 82371AB/EB/MB PIIX4 ACPI [7113]
|
||||
SVendor: VMware [15ad]
|
||||
SDevice: Virtual Machine Chipset [1976]
|
||||
Rev: 08
|
||||
|
||||
Slot: 00:07.7
|
||||
Class: System peripheral [0880]
|
||||
Vendor: VMware [15ad]
|
||||
Device: Virtual Machine Communication Interface [0740]
|
||||
SVendor: VMware [15ad]
|
||||
SDevice: Virtual Machine Communication Interface [0740]
|
||||
Rev: 10
|
||||
|
||||
Slot: 00:0f.0
|
||||
Class: VGA compatible controller [0300]
|
||||
Vendor: VMware [15ad]
|
||||
Device: SVGA II Adapter [0405]
|
||||
SVendor: VMware [15ad]
|
||||
SDevice: SVGA II Adapter [0405]
|
||||
|
||||
Slot: 00:10.0
|
||||
Class: SCSI storage controller [0100]
|
||||
Vendor: Broadcom / LSI [1000]
|
||||
Device: 53c1030 PCI-X Fusion-MPT Dual Ultra320 SCSI [0030]
|
||||
SVendor: VMware [15ad]
|
||||
SDevice: LSI Logic Parallel SCSI Controller [1976]
|
||||
Rev: 01
|
||||
|
||||
Slot: 00:11.0
|
||||
Class: PCI bridge [0604]
|
||||
Vendor: VMware [15ad]
|
||||
Device: PCI bridge [0790]
|
||||
Rev: 02
|
||||
ProgIf: 01
|
||||
|
||||
Slot: 00:15.0
|
||||
Class: PCI bridge [0604]
|
||||
Vendor: VMware [15ad]
|
||||
Device: PCI Express Root Port [07a0]
|
||||
Rev: 01
|
||||
|
||||
Slot: 00:15.1
|
||||
Class: PCI bridge [0604]
|
||||
Vendor: VMware [15ad]
|
||||
Device: PCI Express Root Port [07a0]
|
||||
Rev: 01
|
||||
|
||||
Slot: 00:15.2
|
||||
Class: PCI bridge [0604]
|
||||
Vendor: VMware [15ad]
|
||||
Device: PCI Express Root Port [07a0]
|
||||
Rev: 01
|
||||
|
||||
Slot: 00:15.3
|
||||
Class: PCI bridge [0604]
|
||||
Vendor: VMware [15ad]
|
||||
Device: PCI Express Root Port [07a0]
|
||||
Rev: 01
|
||||
|
||||
Slot: 00:15.4
|
||||
Class: PCI bridge [0604]
|
||||
Vendor: VMware [15ad]
|
||||
Device: PCI Express Root Port [07a0]
|
||||
Rev: 01
|
||||
|
||||
Slot: 00:15.5
|
||||
Class: PCI bridge [0604]
|
||||
Vendor: VMware [15ad]
|
||||
Device: PCI Express Root Port [07a0]
|
||||
Rev: 01
|
||||
|
||||
Slot: 00:15.6
|
||||
Class: PCI bridge [0604]
|
||||
Vendor: VMware [15ad]
|
||||
Device: PCI Express Root Port [07a0]
|
||||
Rev: 01
|
||||
|
||||
Slot: 00:15.7
|
||||
Class: PCI bridge [0604]
|
||||
Vendor: VMware [15ad]
|
||||
Device: PCI Express Root Port [07a0]
|
||||
Rev: 01
|
||||
|
||||
Slot: 00:16.0
|
||||
Class: PCI bridge [0604]
|
||||
Vendor: VMware [15ad]
|
||||
Device: PCI Express Root Port [07a0]
|
||||
Rev: 01
|
||||
|
||||
Slot: 00:16.1
|
||||
Class: PCI bridge [0604]
|
||||
Vendor: VMware [15ad]
|
||||
Device: PCI Express Root Port [07a0]
|
||||
Rev: 01
|
||||
|
||||
Slot: 00:16.2
|
||||
Class: PCI bridge [0604]
|
||||
Vendor: VMware [15ad]
|
||||
Device: PCI Express Root Port [07a0]
|
||||
Rev: 01
|
||||
|
||||
Slot: 00:16.3
|
||||
Class: PCI bridge [0604]
|
||||
Vendor: VMware [15ad]
|
||||
Device: PCI Express Root Port [07a0]
|
||||
Rev: 01
|
||||
|
||||
Slot: 00:16.4
|
||||
Class: PCI bridge [0604]
|
||||
Vendor: VMware [15ad]
|
||||
Device: PCI Express Root Port [07a0]
|
||||
Rev: 01
|
||||
|
||||
Slot: 00:16.5
|
||||
Class: PCI bridge [0604]
|
||||
Vendor: VMware [15ad]
|
||||
Device: PCI Express Root Port [07a0]
|
||||
Rev: 01
|
||||
|
||||
Slot: 00:16.6
|
||||
Class: PCI bridge [0604]
|
||||
Vendor: VMware [15ad]
|
||||
Device: PCI Express Root Port [07a0]
|
||||
Rev: 01
|
||||
|
||||
Slot: 00:16.7
|
||||
Class: PCI bridge [0604]
|
||||
Vendor: VMware [15ad]
|
||||
Device: PCI Express Root Port [07a0]
|
||||
Rev: 01
|
||||
|
||||
Slot: 00:17.0
|
||||
Class: PCI bridge [0604]
|
||||
Vendor: VMware [15ad]
|
||||
Device: PCI Express Root Port [07a0]
|
||||
Rev: 01
|
||||
|
||||
Slot: 00:17.1
|
||||
Class: PCI bridge [0604]
|
||||
Vendor: VMware [15ad]
|
||||
Device: PCI Express Root Port [07a0]
|
||||
Rev: 01
|
||||
|
||||
Slot: 00:17.2
|
||||
Class: PCI bridge [0604]
|
||||
Vendor: VMware [15ad]
|
||||
Device: PCI Express Root Port [07a0]
|
||||
Rev: 01
|
||||
|
||||
Slot: 00:17.3
|
||||
Class: PCI bridge [0604]
|
||||
Vendor: VMware [15ad]
|
||||
Device: PCI Express Root Port [07a0]
|
||||
Rev: 01
|
||||
|
||||
Slot: 00:17.4
|
||||
Class: PCI bridge [0604]
|
||||
Vendor: VMware [15ad]
|
||||
Device: PCI Express Root Port [07a0]
|
||||
Rev: 01
|
||||
|
||||
Slot: 00:17.5
|
||||
Class: PCI bridge [0604]
|
||||
Vendor: VMware [15ad]
|
||||
Device: PCI Express Root Port [07a0]
|
||||
Rev: 01
|
||||
|
||||
Slot: 00:17.6
|
||||
Class: PCI bridge [0604]
|
||||
Vendor: VMware [15ad]
|
||||
Device: PCI Express Root Port [07a0]
|
||||
Rev: 01
|
||||
|
||||
Slot: 00:17.7
|
||||
Class: PCI bridge [0604]
|
||||
Vendor: VMware [15ad]
|
||||
Device: PCI Express Root Port [07a0]
|
||||
Rev: 01
|
||||
|
||||
Slot: 00:18.0
|
||||
Class: PCI bridge [0604]
|
||||
Vendor: VMware [15ad]
|
||||
Device: PCI Express Root Port [07a0]
|
||||
Rev: 01
|
||||
|
||||
Slot: 00:18.1
|
||||
Class: PCI bridge [0604]
|
||||
Vendor: VMware [15ad]
|
||||
Device: PCI Express Root Port [07a0]
|
||||
Rev: 01
|
||||
|
||||
Slot: 00:18.2
|
||||
Class: PCI bridge [0604]
|
||||
Vendor: VMware [15ad]
|
||||
Device: PCI Express Root Port [07a0]
|
||||
Rev: 01
|
||||
|
||||
Slot: 00:18.3
|
||||
Class: PCI bridge [0604]
|
||||
Vendor: VMware [15ad]
|
||||
Device: PCI Express Root Port [07a0]
|
||||
Rev: 01
|
||||
|
||||
Slot: 00:18.4
|
||||
Class: PCI bridge [0604]
|
||||
Vendor: VMware [15ad]
|
||||
Device: PCI Express Root Port [07a0]
|
||||
Rev: 01
|
||||
|
||||
Slot: 00:18.5
|
||||
Class: PCI bridge [0604]
|
||||
Vendor: VMware [15ad]
|
||||
Device: PCI Express Root Port [07a0]
|
||||
Rev: 01
|
||||
|
||||
Slot: 00:18.6
|
||||
Class: PCI bridge [0604]
|
||||
Vendor: VMware [15ad]
|
||||
Device: PCI Express Root Port [07a0]
|
||||
Rev: 01
|
||||
|
||||
Slot: 00:18.7
|
||||
Class: PCI bridge [0604]
|
||||
Vendor: VMware [15ad]
|
||||
Device: PCI Express Root Port [07a0]
|
||||
Rev: 01
|
||||
|
||||
Slot: 02:00.0
|
||||
Class: USB controller [0c03]
|
||||
Vendor: VMware [15ad]
|
||||
Device: USB1.1 UHCI Controller [0774]
|
||||
SVendor: VMware [15ad]
|
||||
SDevice: USB1.1 UHCI Controller [1976]
|
||||
PhySlot: 32
|
||||
|
||||
Slot: 02:01.0
|
||||
Class: Ethernet controller [0200]
|
||||
Vendor: Intel Corporation [8086]
|
||||
Device: 82545EM Gigabit Ethernet Controller (Copper) [100f]
|
||||
SVendor: VMware [15ad]
|
||||
SDevice: PRO/1000 MT Single Port Adapter [0750]
|
||||
PhySlot: 33
|
||||
Rev: 01
|
||||
|
||||
Slot: 02:02.0
|
||||
Class: Multimedia audio controller [0401]
|
||||
Vendor: Ensoniq [1274]
|
||||
Device: ES1371/ES1373 / Creative Labs CT2518 [1371]
|
||||
SVendor: Ensoniq [1274]
|
||||
SDevice: Audio PCI 64V/128/5200 / Creative CT4810/CT5803/CT5806 [Sound Blaster PCI] [1371]
|
||||
PhySlot: 34
|
||||
Rev: 02
|
||||
|
||||
Slot: 02:03.0
|
||||
Class: USB controller [0c03]
|
||||
Vendor: VMware [15ad]
|
||||
Device: USB2 EHCI Controller [0770]
|
||||
SVendor: VMware [15ad]
|
||||
SDevice: USB2 EHCI Controller [0770]
|
||||
PhySlot: 35
|
||||
ProgIf: 20
|
||||
|
||||
Slot: 02:05.0
|
||||
Class: SATA controller [0106]
|
||||
Vendor: VMware [15ad]
|
||||
Device: SATA AHCI controller [07e0]
|
||||
SVendor: VMware [15ad]
|
||||
SDevice: SATA AHCI controller [07e0]
|
||||
PhySlot: 37
|
||||
ProgIf: 01
|
||||
|
||||
Slot: ff:02:05.0
|
||||
Class: SATA controller [0106]
|
||||
Vendor: VMware [15ad]
|
||||
Device: SATA AHCI controller [07e0]
|
||||
SVendor: VMware [15ad]
|
||||
SDevice: SATA AHCI controller [07e0]
|
||||
PhySlot: 37
|
||||
ProgIf: 01
|
||||
|
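The fixture above is `lspci -nnmmv`-style output used to exercise the new `lspci` parser added in this release. Below is a minimal sketch of feeding that kind of text to the parser programmatically, assuming the module is exposed as `jc.parsers.lspci` with the same `parse(data, quiet=True)` interface the other parser tests in this changeset use; the sample string and its exact spacing are illustrative, copied from the fixture above.

```python
import json
import jc.parsers.lspci  # assumed module name for the new lspci parser

# Illustrative snippet of `lspci -nnmmv`-style text, copied from the fixture above
lspci_output = '''Slot: 02:01.0
Class: Ethernet controller [0200]
Vendor: Intel Corporation [8086]
Device: 82545EM Gigabit Ethernet Controller (Copper) [100f]
SVendor: VMware [15ad]
SDevice: PRO/1000 MT Single Port Adapter [0750]
PhySlot: 33
Rev: 01
'''

result = jc.parsers.lspci.parse(lspci_output, quiet=True)
print(json.dumps(result, indent=2))
```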
60
tests/test_datetime_iso.py
Normal file
@ -0,0 +1,60 @@
import unittest
import json
import jc.parsers.datetime_iso


class MyTests(unittest.TestCase):
    def test_datetime_iso_nodata(self):
        """
        Test 'datetime_iso' with no data
        """
        self.assertEqual(jc.parsers.datetime_iso.parse('', quiet=True), {})


    def test_datetime_iso_z(self):
        """
        Test ISO datetime string with Z timezone
        """
        data = r'2007-04-05T14:30Z'
        expected = json.loads(r'''{"year":2007,"month":"Apr","month_num":4,"day":5,"weekday":"Thu","weekday_num":4,"hour":2,"hour_24":14,"minute":30,"second":0,"microsecond":0,"period":"PM","utc_offset":"+0000","day_of_year":95,"week_of_year":14,"iso":"2007-04-05T14:30:00+00:00","timestamp":1175783400}''')
        self.assertEqual(jc.parsers.datetime_iso.parse(data, quiet=True), expected)


    def test_datetime_iso_microseconds(self):
        """
        Test ISO datetime string with microseconds and Z timezone
        """
        data = r'2007-04-05T14:30.555Z'
        expected = json.loads(r'''{"year":2007,"month":"Apr","month_num":4,"day":5,"weekday":"Thu","weekday_num":4,"hour":2,"hour_24":14,"minute":0,"second":30,"microsecond":555000,"period":"PM","utc_offset":"+0000","day_of_year":95,"week_of_year":14,"iso":"2007-04-05T14:00:30.555000+00:00","timestamp":1175781630}''')
        self.assertEqual(jc.parsers.datetime_iso.parse(data, quiet=True), expected)


    def test_datetime_iso_plus_offset(self):
        """
        Test ISO datetime string with + offset
        """
        data = r'2007-04-05T14:30+03:30'
        expected = json.loads(r'''{"year":2007,"month":"Apr","month_num":4,"day":5,"weekday":"Thu","weekday_num":4,"hour":2,"hour_24":14,"minute":30,"second":0,"microsecond":0,"period":"PM","utc_offset":"+0330","day_of_year":95,"week_of_year":14,"iso":"2007-04-05T14:30:00+03:30","timestamp":1175770800}''')
        self.assertEqual(jc.parsers.datetime_iso.parse(data, quiet=True), expected)


    def test_datetime_iso_negative_offset(self):
        """
        Test ISO datetime string with - offset
        """
        data = r'2007-04-05T14:30-03:30'
        expected = json.loads(r'''{"year":2007,"month":"Apr","month_num":4,"day":5,"weekday":"Thu","weekday_num":4,"hour":2,"hour_24":14,"minute":30,"second":0,"microsecond":0,"period":"PM","utc_offset":"-0330","day_of_year":95,"week_of_year":14,"iso":"2007-04-05T14:30:00-03:30","timestamp":1175796000}''')
        self.assertEqual(jc.parsers.datetime_iso.parse(data, quiet=True), expected)


    def test_datetime_iso_nocolon_offset(self):
        """
        Test ISO datetime string with no colon offset
        """
        data = r'2007-04-05T14:30+0300'
        expected = json.loads(r'''{"year":2007,"month":"Apr","month_num":4,"day":5,"weekday":"Thu","weekday_num":4,"hour":2,"hour_24":14,"minute":30,"second":0,"microsecond":0,"period":"PM","utc_offset":"+0300","day_of_year":95,"week_of_year":14,"iso":"2007-04-05T14:30:00+03:00","timestamp":1175772600}''')
        self.assertEqual(jc.parsers.datetime_iso.parse(data, quiet=True), expected)


if __name__ == '__main__':
    unittest.main()
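The new test file above exercises the `datetime-iso` parser (the renamed `iso-datetime` parser from this release). A short usage sketch following the same call pattern the tests use; the printed values mirror the plus-offset test case above:

```python
import jc.parsers.datetime_iso

# Parse an ISO 8601 string with a +03:30 offset into a dict of calculated fields
result = jc.parsers.datetime_iso.parse('2007-04-05T14:30+03:30', quiet=True)

print(result['iso'])         # 2007-04-05T14:30:00+03:30
print(result['utc_offset'])  # +0330
print(result['timestamp'])   # 1175770800
```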
@ -17,7 +17,7 @@ class MyTests(unittest.TestCase):
|
||||
Test ipv4 address string
|
||||
"""
|
||||
data = r'192.168.1.35'
|
||||
expected = json.loads(r'''{"version":4,"max_prefix_length":32,"ip":"192.168.1.35","ip_compressed":"192.168.1.35","ip_exploded":"192.168.1.35","scope_id":null,"ipv4_mapped":null,"six_to_four":null,"teredo_client":null,"teredo_server":null,"dns_ptr":"35.1.168.192.in-addr.arpa","network":"192.168.1.35","broadcast":"192.168.1.35","hostmask":"0.0.0.0","netmask":"255.255.255.255","cidr_netmask":32,"hosts":1,"first_host":"192.168.1.35","last_host":"192.168.1.35","is_multicast":false,"is_private":true,"is_global":false,"is_link_local":false,"is_loopback":false,"is_reserved":false,"is_unspecified":false,"int":{"ip":3232235811,"network":3232235811,"broadcast":3232235811,"first_host":3232235811,"last_host":3232235811},"hex":{"ip":"c0:a8:01:23","network":"c0:a8:01:23","broadcast":"c0:a8:01:23","hostmask":"00:00:00:00","netmask":"ff:ff:ff:ff","first_host":"c0:a8:01:23","last_host":"c0:a8:01:23"},"bin":{"ip":"11000000101010000000000100100011","network":"11000000101010000000000100100011","broadcast":"11000000101010000000000100100011","hostmask":"00000000000000000000000000000000","netmask":"11111111111111111111111111111111","first_host":"11000000101010000000000100100011","last_host":"11000000101010000000000100100011"}}''')
|
||||
expected = json.loads(r'''{"version":4,"max_prefix_length":32,"ip":"192.168.1.35","ip_compressed":"192.168.1.35","ip_exploded":"192.168.1.35","ip_split":["192","168","1","35"],"scope_id":null,"ipv4_mapped":null,"six_to_four":null,"teredo_client":null,"teredo_server":null,"dns_ptr":"35.1.168.192.in-addr.arpa","network":"192.168.1.35","broadcast":"192.168.1.35","hostmask":"0.0.0.0","netmask":"255.255.255.255","cidr_netmask":32,"hosts":1,"first_host":"192.168.1.35","last_host":"192.168.1.35","is_multicast":false,"is_private":true,"is_global":false,"is_link_local":false,"is_loopback":false,"is_reserved":false,"is_unspecified":false,"int":{"ip":3232235811,"network":3232235811,"broadcast":3232235811,"first_host":3232235811,"last_host":3232235811},"hex":{"ip":"c0:a8:01:23","network":"c0:a8:01:23","broadcast":"c0:a8:01:23","hostmask":"00:00:00:00","netmask":"ff:ff:ff:ff","first_host":"c0:a8:01:23","last_host":"c0:a8:01:23"},"bin":{"ip":"11000000101010000000000100100011","network":"11000000101010000000000100100011","broadcast":"11000000101010000000000100100011","hostmask":"00000000000000000000000000000000","netmask":"11111111111111111111111111111111","first_host":"11000000101010000000000100100011","last_host":"11000000101010000000000100100011"}}''')
|
||||
self.assertEqual(jc.parsers.ip_address.parse(data, quiet=True), expected)
|
||||
|
||||
|
||||
@ -26,7 +26,7 @@ class MyTests(unittest.TestCase):
|
||||
Test CIDR ipv4 address string
|
||||
"""
|
||||
data = r'192.168.2.10/24'
|
||||
expected = json.loads(r'''{"version":4,"max_prefix_length":32,"ip":"192.168.2.10","ip_compressed":"192.168.2.10","ip_exploded":"192.168.2.10","scope_id":null,"ipv4_mapped":null,"six_to_four":null,"teredo_client":null,"teredo_server":null,"dns_ptr":"10.2.168.192.in-addr.arpa","network":"192.168.2.0","broadcast":"192.168.2.255","hostmask":"0.0.0.255","netmask":"255.255.255.0","cidr_netmask":24,"hosts":254,"first_host":"192.168.2.1","last_host":"192.168.2.254","is_multicast":false,"is_private":true,"is_global":false,"is_link_local":false,"is_loopback":false,"is_reserved":false,"is_unspecified":false,"int":{"ip":3232236042,"network":3232236032,"broadcast":3232236287,"first_host":3232236033,"last_host":3232236286},"hex":{"ip":"c0:a8:02:0a","network":"c0:a8:02:00","broadcast":"c0:a8:02:ff","hostmask":"00:00:00:ff","netmask":"ff:ff:ff:00","first_host":"c0:a8:02:01","last_host":"c0:a8:02:fe"},"bin":{"ip":"11000000101010000000001000001010","network":"11000000101010000000001000000000","broadcast":"11000000101010000000001011111111","hostmask":"00000000000000000000000011111111","netmask":"11111111111111111111111100000000","first_host":"11000000101010000000001000000001","last_host":"11000000101010000000001011111110"}}''')
|
||||
expected = json.loads(r'''{"version":4,"max_prefix_length":32,"ip":"192.168.2.10","ip_compressed":"192.168.2.10","ip_exploded":"192.168.2.10","ip_split":["192","168","2","10"],"scope_id":null,"ipv4_mapped":null,"six_to_four":null,"teredo_client":null,"teredo_server":null,"dns_ptr":"10.2.168.192.in-addr.arpa","network":"192.168.2.0","broadcast":"192.168.2.255","hostmask":"0.0.0.255","netmask":"255.255.255.0","cidr_netmask":24,"hosts":254,"first_host":"192.168.2.1","last_host":"192.168.2.254","is_multicast":false,"is_private":true,"is_global":false,"is_link_local":false,"is_loopback":false,"is_reserved":false,"is_unspecified":false,"int":{"ip":3232236042,"network":3232236032,"broadcast":3232236287,"first_host":3232236033,"last_host":3232236286},"hex":{"ip":"c0:a8:02:0a","network":"c0:a8:02:00","broadcast":"c0:a8:02:ff","hostmask":"00:00:00:ff","netmask":"ff:ff:ff:00","first_host":"c0:a8:02:01","last_host":"c0:a8:02:fe"},"bin":{"ip":"11000000101010000000001000001010","network":"11000000101010000000001000000000","broadcast":"11000000101010000000001011111111","hostmask":"00000000000000000000000011111111","netmask":"11111111111111111111111100000000","first_host":"11000000101010000000001000000001","last_host":"11000000101010000000001011111110"}}''')
|
||||
self.assertEqual(jc.parsers.ip_address.parse(data, quiet=True), expected)
|
||||
|
||||
|
||||
@ -35,7 +35,7 @@ class MyTests(unittest.TestCase):
|
||||
Test ipv4 address with a dotted netmask string
|
||||
"""
|
||||
data = r'192.168.0.1/255.255.128.0'
|
||||
expected = json.loads(r'''{"version":4,"max_prefix_length":32,"ip":"192.168.0.1","ip_compressed":"192.168.0.1","ip_exploded":"192.168.0.1","scope_id":null,"ipv4_mapped":null,"six_to_four":null,"teredo_client":null,"teredo_server":null,"dns_ptr":"1.0.168.192.in-addr.arpa","network":"192.168.0.0","broadcast":"192.168.127.255","hostmask":"0.0.127.255","netmask":"255.255.128.0","cidr_netmask":17,"hosts":32766,"first_host":"192.168.0.1","last_host":"192.168.127.254","is_multicast":false,"is_private":true,"is_global":false,"is_link_local":false,"is_loopback":false,"is_reserved":false,"is_unspecified":false,"int":{"ip":3232235521,"network":3232235520,"broadcast":3232268287,"first_host":3232235521,"last_host":3232268286},"hex":{"ip":"c0:a8:00:01","network":"c0:a8:00:00","broadcast":"c0:a8:7f:ff","hostmask":"00:00:7f:ff","netmask":"ff:ff:80:00","first_host":"c0:a8:00:01","last_host":"c0:a8:7f:fe"},"bin":{"ip":"11000000101010000000000000000001","network":"11000000101010000000000000000000","broadcast":"11000000101010000111111111111111","hostmask":"00000000000000000111111111111111","netmask":"11111111111111111000000000000000","first_host":"11000000101010000000000000000001","last_host":"11000000101010000111111111111110"}}''')
|
||||
expected = json.loads(r'''{"version":4,"max_prefix_length":32,"ip":"192.168.0.1","ip_compressed":"192.168.0.1","ip_exploded":"192.168.0.1","ip_split":["192","168","0","1"],"scope_id":null,"ipv4_mapped":null,"six_to_four":null,"teredo_client":null,"teredo_server":null,"dns_ptr":"1.0.168.192.in-addr.arpa","network":"192.168.0.0","broadcast":"192.168.127.255","hostmask":"0.0.127.255","netmask":"255.255.128.0","cidr_netmask":17,"hosts":32766,"first_host":"192.168.0.1","last_host":"192.168.127.254","is_multicast":false,"is_private":true,"is_global":false,"is_link_local":false,"is_loopback":false,"is_reserved":false,"is_unspecified":false,"int":{"ip":3232235521,"network":3232235520,"broadcast":3232268287,"first_host":3232235521,"last_host":3232268286},"hex":{"ip":"c0:a8:00:01","network":"c0:a8:00:00","broadcast":"c0:a8:7f:ff","hostmask":"00:00:7f:ff","netmask":"ff:ff:80:00","first_host":"c0:a8:00:01","last_host":"c0:a8:7f:fe"},"bin":{"ip":"11000000101010000000000000000001","network":"11000000101010000000000000000000","broadcast":"11000000101010000111111111111111","hostmask":"00000000000000000111111111111111","netmask":"11111111111111111000000000000000","first_host":"11000000101010000000000000000001","last_host":"11000000101010000111111111111110"}}''')
|
||||
self.assertEqual(jc.parsers.ip_address.parse(data, quiet=True), expected)
|
||||
|
||||
|
||||
@ -44,7 +44,7 @@ class MyTests(unittest.TestCase):
|
||||
Test ipv4 address integer string
|
||||
"""
|
||||
data = r'3232236042'
|
||||
expected = json.loads(r'''{"version":4,"max_prefix_length":32,"ip":"192.168.2.10","ip_compressed":"192.168.2.10","ip_exploded":"192.168.2.10","scope_id":null,"ipv4_mapped":null,"six_to_four":null,"teredo_client":null,"teredo_server":null,"dns_ptr":"10.2.168.192.in-addr.arpa","network":"192.168.2.10","broadcast":"192.168.2.10","hostmask":"0.0.0.0","netmask":"255.255.255.255","cidr_netmask":32,"hosts":1,"first_host":"192.168.2.10","last_host":"192.168.2.10","is_multicast":false,"is_private":true,"is_global":false,"is_link_local":false,"is_loopback":false,"is_reserved":false,"is_unspecified":false,"int":{"ip":3232236042,"network":3232236042,"broadcast":3232236042,"first_host":3232236042,"last_host":3232236042},"hex":{"ip":"c0:a8:02:0a","network":"c0:a8:02:0a","broadcast":"c0:a8:02:0a","hostmask":"00:00:00:00","netmask":"ff:ff:ff:ff","first_host":"c0:a8:02:0a","last_host":"c0:a8:02:0a"},"bin":{"ip":"11000000101010000000001000001010","network":"11000000101010000000001000001010","broadcast":"11000000101010000000001000001010","hostmask":"00000000000000000000000000000000","netmask":"11111111111111111111111111111111","first_host":"11000000101010000000001000001010","last_host":"11000000101010000000001000001010"}}''')
|
||||
expected = json.loads(r'''{"version":4,"max_prefix_length":32,"ip":"192.168.2.10","ip_compressed":"192.168.2.10","ip_exploded":"192.168.2.10","ip_split":["192","168","2","10"],"scope_id":null,"ipv4_mapped":null,"six_to_four":null,"teredo_client":null,"teredo_server":null,"dns_ptr":"10.2.168.192.in-addr.arpa","network":"192.168.2.10","broadcast":"192.168.2.10","hostmask":"0.0.0.0","netmask":"255.255.255.255","cidr_netmask":32,"hosts":1,"first_host":"192.168.2.10","last_host":"192.168.2.10","is_multicast":false,"is_private":true,"is_global":false,"is_link_local":false,"is_loopback":false,"is_reserved":false,"is_unspecified":false,"int":{"ip":3232236042,"network":3232236042,"broadcast":3232236042,"first_host":3232236042,"last_host":3232236042},"hex":{"ip":"c0:a8:02:0a","network":"c0:a8:02:0a","broadcast":"c0:a8:02:0a","hostmask":"00:00:00:00","netmask":"ff:ff:ff:ff","first_host":"c0:a8:02:0a","last_host":"c0:a8:02:0a"},"bin":{"ip":"11000000101010000000001000001010","network":"11000000101010000000001000001010","broadcast":"11000000101010000000001000001010","hostmask":"00000000000000000000000000000000","netmask":"11111111111111111111111111111111","first_host":"11000000101010000000001000001010","last_host":"11000000101010000000001000001010"}}''')
|
||||
self.assertEqual(jc.parsers.ip_address.parse(data, quiet=True), expected)
|
||||
|
||||
|
||||
@ -53,7 +53,7 @@ class MyTests(unittest.TestCase):
|
||||
Test ipv6 address string
|
||||
"""
|
||||
data = r'127:0:de::1'
|
||||
expected = json.loads(r'''{"version":6,"max_prefix_length":128,"ip":"127:0:de::1","ip_compressed":"127:0:de::1","ip_exploded":"0127:0000:00de:0000:0000:0000:0000:0001","scope_id":null,"ipv4_mapped":null,"six_to_four":null,"teredo_client":null,"teredo_server":null,"dns_ptr":"1.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.e.d.0.0.0.0.0.0.7.2.1.0.ip6.arpa","network":"127:0:de::1","broadcast":"127:0:de::1","hostmask":"::","netmask":"ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff","cidr_netmask":128,"hosts":1,"first_host":"127:0:de::1","last_host":"127:0:de::1","is_multicast":false,"is_private":false,"is_global":true,"is_link_local":false,"is_loopback":false,"is_reserved":true,"is_unspecified":false,"int":{"ip":1531727573536155682370944093904699393,"network":1531727573536155682370944093904699393,"broadcast":1531727573536155682370944093904699393,"first_host":1531727573536155682370944093904699393,"last_host":1531727573536155682370944093904699393},"hex":{"ip":"01:27:00:00:00:de:00:00:00:00:00:00:00:00:00:01","network":"01:27:00:00:00:de:00:00:00:00:00:00:00:00:00:01","broadcast":"01:27:00:00:00:de:00:00:00:00:00:00:00:00:00:01","hostmask":"00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00","netmask":"ff:ff:ff:ff:ff:ff:ff:ff:ff:ff:ff:ff:ff:ff:ff:ff","first_host":"01:27:00:00:00:de:00:00:00:00:00:00:00:00:00:01","last_host":"01:27:00:00:00:de:00:00:00:00:00:00:00:00:00:01"},"bin":{"ip":"00000001001001110000000000000000000000001101111000000000000000000000000000000000000000000000000000000000000000000000000000000001","network":"00000001001001110000000000000000000000001101111000000000000000000000000000000000000000000000000000000000000000000000000000000001","broadcast":"00000001001001110000000000000000000000001101111000000000000000000000000000000000000000000000000000000000000000000000000000000001","hostmask":"00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000","netmask":"11111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111","first_host":"00000001001001110000000000000000000000001101111000000000000000000000000000000000000000000000000000000000000000000000000000000001","last_host":"00000001001001110000000000000000000000001101111000000000000000000000000000000000000000000000000000000000000000000000000000000001"}}''')
|
||||
expected = json.loads(r'''{"version":6,"max_prefix_length":128,"ip":"127:0:de::1","ip_compressed":"127:0:de::1","ip_exploded":"0127:0000:00de:0000:0000:0000:0000:0001","ip_split":["0127","0000","00de","0000","0000","0000","0000","0001"],"scope_id":null,"ipv4_mapped":null,"six_to_four":null,"teredo_client":null,"teredo_server":null,"dns_ptr":"1.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.e.d.0.0.0.0.0.0.7.2.1.0.ip6.arpa","network":"127:0:de::1","broadcast":"127:0:de::1","hostmask":"::","netmask":"ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff","cidr_netmask":128,"hosts":1,"first_host":"127:0:de::1","last_host":"127:0:de::1","is_multicast":false,"is_private":false,"is_global":true,"is_link_local":false,"is_loopback":false,"is_reserved":true,"is_unspecified":false,"int":{"ip":1531727573536155682370944093904699393,"network":1531727573536155682370944093904699393,"broadcast":1531727573536155682370944093904699393,"first_host":1531727573536155682370944093904699393,"last_host":1531727573536155682370944093904699393},"hex":{"ip":"01:27:00:00:00:de:00:00:00:00:00:00:00:00:00:01","network":"01:27:00:00:00:de:00:00:00:00:00:00:00:00:00:01","broadcast":"01:27:00:00:00:de:00:00:00:00:00:00:00:00:00:01","hostmask":"00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00","netmask":"ff:ff:ff:ff:ff:ff:ff:ff:ff:ff:ff:ff:ff:ff:ff:ff","first_host":"01:27:00:00:00:de:00:00:00:00:00:00:00:00:00:01","last_host":"01:27:00:00:00:de:00:00:00:00:00:00:00:00:00:01"},"bin":{"ip":"00000001001001110000000000000000000000001101111000000000000000000000000000000000000000000000000000000000000000000000000000000001","network":"00000001001001110000000000000000000000001101111000000000000000000000000000000000000000000000000000000000000000000000000000000001","broadcast":"00000001001001110000000000000000000000001101111000000000000000000000000000000000000000000000000000000000000000000000000000000001","hostmask":"00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000","netmask":"11111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111","first_host":"00000001001001110000000000000000000000001101111000000000000000000000000000000000000000000000000000000000000000000000000000000001","last_host":"00000001001001110000000000000000000000001101111000000000000000000000000000000000000000000000000000000000000000000000000000000001"}}''')
|
||||
self.assertEqual(jc.parsers.ip_address.parse(data, quiet=True), expected)
|
||||
|
||||
|
||||
@ -62,7 +62,7 @@ class MyTests(unittest.TestCase):
|
||||
Test CIDR ipv6 address string
|
||||
"""
|
||||
data = r'127:0:de::1/96'
|
||||
expected = json.loads(r'''{"version":6,"max_prefix_length":128,"ip":"127:0:de::1","ip_compressed":"127:0:de::1","ip_exploded":"0127:0000:00de:0000:0000:0000:0000:0001","scope_id":null,"ipv4_mapped":null,"six_to_four":null,"teredo_client":null,"teredo_server":null,"dns_ptr":"1.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.e.d.0.0.0.0.0.0.7.2.1.0.ip6.arpa","network":"127:0:de::","broadcast":"127:0:de::ffff:ffff","hostmask":"::ffff:ffff","netmask":"ffff:ffff:ffff:ffff:ffff:ffff::","cidr_netmask":96,"hosts":4294967294,"first_host":"127:0:de::1","last_host":"127:0:de::ffff:fffe","is_multicast":false,"is_private":false,"is_global":true,"is_link_local":false,"is_loopback":false,"is_reserved":true,"is_unspecified":false,"int":{"ip":1531727573536155682370944093904699393,"network":1531727573536155682370944093904699392,"broadcast":1531727573536155682370944098199666687,"first_host":1531727573536155682370944093904699393,"last_host":1531727573536155682370944098199666686},"hex":{"ip":"01:27:00:00:00:de:00:00:00:00:00:00:00:00:00:01","network":"01:27:00:00:00:de:00:00:00:00:00:00:00:00:00:00","broadcast":"01:27:00:00:00:de:00:00:00:00:00:00:ff:ff:ff:ff","hostmask":"00:00:00:00:00:00:00:00:00:00:00:00:ff:ff:ff:ff","netmask":"ff:ff:ff:ff:ff:ff:ff:ff:ff:ff:ff:ff:00:00:00:00","first_host":"01:27:00:00:00:de:00:00:00:00:00:00:00:00:00:01","last_host":"01:27:00:00:00:de:00:00:00:00:00:00:ff:ff:ff:fe"},"bin":{"ip":"00000001001001110000000000000000000000001101111000000000000000000000000000000000000000000000000000000000000000000000000000000001","network":"00000001001001110000000000000000000000001101111000000000000000000000000000000000000000000000000000000000000000000000000000000000","broadcast":"00000001001001110000000000000000000000001101111000000000000000000000000000000000000000000000000011111111111111111111111111111111","hostmask":"00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000011111111111111111111111111111111","netmask":"11111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111100000000000000000000000000000000","first_host":"00000001001001110000000000000000000000001101111000000000000000000000000000000000000000000000000000000000000000000000000000000001","last_host":"00000001001001110000000000000000000000001101111000000000000000000000000000000000000000000000000011111111111111111111111111111110"}}''')
|
||||
expected = json.loads(r'''{"version":6,"max_prefix_length":128,"ip":"127:0:de::1","ip_compressed":"127:0:de::1","ip_exploded":"0127:0000:00de:0000:0000:0000:0000:0001","ip_split":["0127","0000","00de","0000","0000","0000","0000","0001"],"scope_id":null,"ipv4_mapped":null,"six_to_four":null,"teredo_client":null,"teredo_server":null,"dns_ptr":"1.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.e.d.0.0.0.0.0.0.7.2.1.0.ip6.arpa","network":"127:0:de::","broadcast":"127:0:de::ffff:ffff","hostmask":"::ffff:ffff","netmask":"ffff:ffff:ffff:ffff:ffff:ffff::","cidr_netmask":96,"hosts":4294967294,"first_host":"127:0:de::1","last_host":"127:0:de::ffff:fffe","is_multicast":false,"is_private":false,"is_global":true,"is_link_local":false,"is_loopback":false,"is_reserved":true,"is_unspecified":false,"int":{"ip":1531727573536155682370944093904699393,"network":1531727573536155682370944093904699392,"broadcast":1531727573536155682370944098199666687,"first_host":1531727573536155682370944093904699393,"last_host":1531727573536155682370944098199666686},"hex":{"ip":"01:27:00:00:00:de:00:00:00:00:00:00:00:00:00:01","network":"01:27:00:00:00:de:00:00:00:00:00:00:00:00:00:00","broadcast":"01:27:00:00:00:de:00:00:00:00:00:00:ff:ff:ff:ff","hostmask":"00:00:00:00:00:00:00:00:00:00:00:00:ff:ff:ff:ff","netmask":"ff:ff:ff:ff:ff:ff:ff:ff:ff:ff:ff:ff:00:00:00:00","first_host":"01:27:00:00:00:de:00:00:00:00:00:00:00:00:00:01","last_host":"01:27:00:00:00:de:00:00:00:00:00:00:ff:ff:ff:fe"},"bin":{"ip":"00000001001001110000000000000000000000001101111000000000000000000000000000000000000000000000000000000000000000000000000000000001","network":"00000001001001110000000000000000000000001101111000000000000000000000000000000000000000000000000000000000000000000000000000000000","broadcast":"00000001001001110000000000000000000000001101111000000000000000000000000000000000000000000000000011111111111111111111111111111111","hostmask":"00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000011111111111111111111111111111111","netmask":"11111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111100000000000000000000000000000000","first_host":"00000001001001110000000000000000000000001101111000000000000000000000000000000000000000000000000000000000000000000000000000000001","last_host":"00000001001001110000000000000000000000001101111000000000000000000000000000000000000000000000000011111111111111111111111111111110"}}''')
|
||||
self.assertEqual(jc.parsers.ip_address.parse(data, quiet=True), expected)
|
||||
|
||||
|
||||
@ -71,7 +71,7 @@ class MyTests(unittest.TestCase):
|
||||
Test CIDR ipv6 address with scope string
|
||||
"""
|
||||
data = r'127:0:de::1%128aBc123/96'
|
||||
expected = json.loads(r'''{"version":6,"max_prefix_length":128,"ip":"127:0:de::1","ip_compressed":"127:0:de::1","ip_exploded":"0127:0000:00de:0000:0000:0000:0000:0001","scope_id":"128aBc123","ipv4_mapped":null,"six_to_four":null,"teredo_client":null,"teredo_server":null,"dns_ptr":"1.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.e.d.0.0.0.0.0.0.7.2.1.0.ip6.arpa","network":"127:0:de::","broadcast":"127:0:de::ffff:ffff","hostmask":"::ffff:ffff","netmask":"ffff:ffff:ffff:ffff:ffff:ffff::","cidr_netmask":96,"hosts":4294967294,"first_host":"127:0:de::1","last_host":"127:0:de::ffff:fffe","is_multicast":false,"is_private":false,"is_global":true,"is_link_local":false,"is_loopback":false,"is_reserved":true,"is_unspecified":false,"int":{"ip":1531727573536155682370944093904699393,"network":1531727573536155682370944093904699392,"broadcast":1531727573536155682370944098199666687,"first_host":1531727573536155682370944093904699393,"last_host":1531727573536155682370944098199666686},"hex":{"ip":"01:27:00:00:00:de:00:00:00:00:00:00:00:00:00:01","network":"01:27:00:00:00:de:00:00:00:00:00:00:00:00:00:00","broadcast":"01:27:00:00:00:de:00:00:00:00:00:00:ff:ff:ff:ff","hostmask":"00:00:00:00:00:00:00:00:00:00:00:00:ff:ff:ff:ff","netmask":"ff:ff:ff:ff:ff:ff:ff:ff:ff:ff:ff:ff:00:00:00:00","first_host":"01:27:00:00:00:de:00:00:00:00:00:00:00:00:00:01","last_host":"01:27:00:00:00:de:00:00:00:00:00:00:ff:ff:ff:fe"},"bin":{"ip":"00000001001001110000000000000000000000001101111000000000000000000000000000000000000000000000000000000000000000000000000000000001","network":"00000001001001110000000000000000000000001101111000000000000000000000000000000000000000000000000000000000000000000000000000000000","broadcast":"00000001001001110000000000000000000000001101111000000000000000000000000000000000000000000000000011111111111111111111111111111111","hostmask":"00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000011111111111111111111111111111111","netmask":"11111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111100000000000000000000000000000000","first_host":"00000001001001110000000000000000000000001101111000000000000000000000000000000000000000000000000000000000000000000000000000000001","last_host":"00000001001001110000000000000000000000001101111000000000000000000000000000000000000000000000000011111111111111111111111111111110"}}''')
|
||||
expected = json.loads(r'''{"version":6,"max_prefix_length":128,"ip":"127:0:de::1","ip_compressed":"127:0:de::1","ip_exploded":"0127:0000:00de:0000:0000:0000:0000:0001","ip_split":["0127","0000","00de","0000","0000","0000","0000","0001"],"scope_id":"128aBc123","ipv4_mapped":null,"six_to_four":null,"teredo_client":null,"teredo_server":null,"dns_ptr":"1.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.e.d.0.0.0.0.0.0.7.2.1.0.ip6.arpa","network":"127:0:de::","broadcast":"127:0:de::ffff:ffff","hostmask":"::ffff:ffff","netmask":"ffff:ffff:ffff:ffff:ffff:ffff::","cidr_netmask":96,"hosts":4294967294,"first_host":"127:0:de::1","last_host":"127:0:de::ffff:fffe","is_multicast":false,"is_private":false,"is_global":true,"is_link_local":false,"is_loopback":false,"is_reserved":true,"is_unspecified":false,"int":{"ip":1531727573536155682370944093904699393,"network":1531727573536155682370944093904699392,"broadcast":1531727573536155682370944098199666687,"first_host":1531727573536155682370944093904699393,"last_host":1531727573536155682370944098199666686},"hex":{"ip":"01:27:00:00:00:de:00:00:00:00:00:00:00:00:00:01","network":"01:27:00:00:00:de:00:00:00:00:00:00:00:00:00:00","broadcast":"01:27:00:00:00:de:00:00:00:00:00:00:ff:ff:ff:ff","hostmask":"00:00:00:00:00:00:00:00:00:00:00:00:ff:ff:ff:ff","netmask":"ff:ff:ff:ff:ff:ff:ff:ff:ff:ff:ff:ff:00:00:00:00","first_host":"01:27:00:00:00:de:00:00:00:00:00:00:00:00:00:01","last_host":"01:27:00:00:00:de:00:00:00:00:00:00:ff:ff:ff:fe"},"bin":{"ip":"00000001001001110000000000000000000000001101111000000000000000000000000000000000000000000000000000000000000000000000000000000001","network":"00000001001001110000000000000000000000001101111000000000000000000000000000000000000000000000000000000000000000000000000000000000","broadcast":"00000001001001110000000000000000000000001101111000000000000000000000000000000000000000000000000011111111111111111111111111111111","hostmask":"00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000011111111111111111111111111111111","netmask":"11111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111100000000000000000000000000000000","first_host":"00000001001001110000000000000000000000001101111000000000000000000000000000000000000000000000000000000000000000000000000000000001","last_host":"00000001001001110000000000000000000000001101111000000000000000000000000000000000000000000000000011111111111111111111111111111110"}}''')
|
||||
self.assertEqual(jc.parsers.ip_address.parse(data, quiet=True), expected)
|
||||
|
||||
|
||||
@ -80,7 +80,7 @@ class MyTests(unittest.TestCase):
|
||||
Test ipv6 address integer string
|
||||
"""
|
||||
data = r'1531727573536155682370944093904699393'
|
||||
expected = json.loads(r'''{"version":6,"max_prefix_length":128,"ip":"127:0:de::1","ip_compressed":"127:0:de::1","ip_exploded":"0127:0000:00de:0000:0000:0000:0000:0001","scope_id":null,"ipv4_mapped":null,"six_to_four":null,"teredo_client":null,"teredo_server":null,"dns_ptr":"1.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.e.d.0.0.0.0.0.0.7.2.1.0.ip6.arpa","network":"127:0:de::1","broadcast":"127:0:de::1","hostmask":"::","netmask":"ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff","cidr_netmask":128,"hosts":1,"first_host":"127:0:de::1","last_host":"127:0:de::1","is_multicast":false,"is_private":false,"is_global":true,"is_link_local":false,"is_loopback":false,"is_reserved":true,"is_unspecified":false,"int":{"ip":1531727573536155682370944093904699393,"network":1531727573536155682370944093904699393,"broadcast":1531727573536155682370944093904699393,"first_host":1531727573536155682370944093904699393,"last_host":1531727573536155682370944093904699393},"hex":{"ip":"01:27:00:00:00:de:00:00:00:00:00:00:00:00:00:01","network":"01:27:00:00:00:de:00:00:00:00:00:00:00:00:00:01","broadcast":"01:27:00:00:00:de:00:00:00:00:00:00:00:00:00:01","hostmask":"00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00","netmask":"ff:ff:ff:ff:ff:ff:ff:ff:ff:ff:ff:ff:ff:ff:ff:ff","first_host":"01:27:00:00:00:de:00:00:00:00:00:00:00:00:00:01","last_host":"01:27:00:00:00:de:00:00:00:00:00:00:00:00:00:01"},"bin":{"ip":"00000001001001110000000000000000000000001101111000000000000000000000000000000000000000000000000000000000000000000000000000000001","network":"00000001001001110000000000000000000000001101111000000000000000000000000000000000000000000000000000000000000000000000000000000001","broadcast":"00000001001001110000000000000000000000001101111000000000000000000000000000000000000000000000000000000000000000000000000000000001","hostmask":"00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000","netmask":"11111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111","first_host":"00000001001001110000000000000000000000001101111000000000000000000000000000000000000000000000000000000000000000000000000000000001","last_host":"00000001001001110000000000000000000000001101111000000000000000000000000000000000000000000000000000000000000000000000000000000001"}}''')
|
||||
expected = json.loads(r'''{"version":6,"max_prefix_length":128,"ip":"127:0:de::1","ip_compressed":"127:0:de::1","ip_exploded":"0127:0000:00de:0000:0000:0000:0000:0001","ip_split":["0127","0000","00de","0000","0000","0000","0000","0001"],"scope_id":null,"ipv4_mapped":null,"six_to_four":null,"teredo_client":null,"teredo_server":null,"dns_ptr":"1.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.e.d.0.0.0.0.0.0.7.2.1.0.ip6.arpa","network":"127:0:de::1","broadcast":"127:0:de::1","hostmask":"::","netmask":"ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff","cidr_netmask":128,"hosts":1,"first_host":"127:0:de::1","last_host":"127:0:de::1","is_multicast":false,"is_private":false,"is_global":true,"is_link_local":false,"is_loopback":false,"is_reserved":true,"is_unspecified":false,"int":{"ip":1531727573536155682370944093904699393,"network":1531727573536155682370944093904699393,"broadcast":1531727573536155682370944093904699393,"first_host":1531727573536155682370944093904699393,"last_host":1531727573536155682370944093904699393},"hex":{"ip":"01:27:00:00:00:de:00:00:00:00:00:00:00:00:00:01","network":"01:27:00:00:00:de:00:00:00:00:00:00:00:00:00:01","broadcast":"01:27:00:00:00:de:00:00:00:00:00:00:00:00:00:01","hostmask":"00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00","netmask":"ff:ff:ff:ff:ff:ff:ff:ff:ff:ff:ff:ff:ff:ff:ff:ff","first_host":"01:27:00:00:00:de:00:00:00:00:00:00:00:00:00:01","last_host":"01:27:00:00:00:de:00:00:00:00:00:00:00:00:00:01"},"bin":{"ip":"00000001001001110000000000000000000000001101111000000000000000000000000000000000000000000000000000000000000000000000000000000001","network":"00000001001001110000000000000000000000001101111000000000000000000000000000000000000000000000000000000000000000000000000000000001","broadcast":"00000001001001110000000000000000000000001101111000000000000000000000000000000000000000000000000000000000000000000000000000000001","hostmask":"00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000","netmask":"11111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111","first_host":"00000001001001110000000000000000000000001101111000000000000000000000000000000000000000000000000000000000000000000000000000000001","last_host":"00000001001001110000000000000000000000001101111000000000000000000000000000000000000000000000000000000000000000000000000000000001"}}''')
|
||||
self.assertEqual(jc.parsers.ip_address.parse(data, quiet=True), expected)
|
||||
|
||||
|
||||
@ -89,7 +89,7 @@ class MyTests(unittest.TestCase):
|
||||
Test ipv6 address with ipv4 mapped string
|
||||
"""
|
||||
data = r'::FFFF:192.168.1.35'
|
||||
expected = json.loads(r'''{"version":6,"max_prefix_length":128,"ip":"::ffff:c0a8:123","ip_compressed":"::ffff:c0a8:123","ip_exploded":"0000:0000:0000:0000:0000:ffff:c0a8:0123","scope_id":null,"ipv4_mapped":"192.168.1.35","six_to_four":null,"teredo_client":null,"teredo_server":null,"dns_ptr":"3.2.1.0.8.a.0.c.f.f.f.f.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.ip6.arpa","network":"::ffff:c0a8:123","broadcast":"::ffff:c0a8:123","hostmask":"::","netmask":"ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff","cidr_netmask":128,"hosts":1,"first_host":"::ffff:c0a8:123","last_host":"::ffff:c0a8:123","is_multicast":false,"is_private":true,"is_global":false,"is_link_local":false,"is_loopback":false,"is_reserved":true,"is_unspecified":false,"int":{"ip":281473913979171,"network":281473913979171,"broadcast":281473913979171,"first_host":281473913979171,"last_host":281473913979171},"hex":{"ip":"00:00:00:00:00:00:00:00:00:00:ff:ff:c0:a8:01:23","network":"00:00:00:00:00:00:00:00:00:00:ff:ff:c0:a8:01:23","broadcast":"00:00:00:00:00:00:00:00:00:00:ff:ff:c0:a8:01:23","hostmask":"00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00","netmask":"ff:ff:ff:ff:ff:ff:ff:ff:ff:ff:ff:ff:ff:ff:ff:ff","first_host":"00:00:00:00:00:00:00:00:00:00:ff:ff:c0:a8:01:23","last_host":"00:00:00:00:00:00:00:00:00:00:ff:ff:c0:a8:01:23"},"bin":{"ip":"00000000000000000000000000000000000000000000000000000000000000000000000000000000111111111111111111000000101010000000000100100011","network":"00000000000000000000000000000000000000000000000000000000000000000000000000000000111111111111111111000000101010000000000100100011","broadcast":"00000000000000000000000000000000000000000000000000000000000000000000000000000000111111111111111111000000101010000000000100100011","hostmask":"00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000","netmask":"11111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111","first_host":"00000000000000000000000000000000000000000000000000000000000000000000000000000000111111111111111111000000101010000000000100100011","last_host":"00000000000000000000000000000000000000000000000000000000000000000000000000000000111111111111111111000000101010000000000100100011"}}''')
|
||||
expected = json.loads(r'''{"version":6,"max_prefix_length":128,"ip":"::ffff:c0a8:123","ip_compressed":"::ffff:c0a8:123","ip_exploded":"0000:0000:0000:0000:0000:ffff:c0a8:0123","ip_split":["0000","0000","0000","0000","0000","ffff","c0a8","0123"],"scope_id":null,"ipv4_mapped":"192.168.1.35","six_to_four":null,"teredo_client":null,"teredo_server":null,"dns_ptr":"3.2.1.0.8.a.0.c.f.f.f.f.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.ip6.arpa","network":"::ffff:c0a8:123","broadcast":"::ffff:c0a8:123","hostmask":"::","netmask":"ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff","cidr_netmask":128,"hosts":1,"first_host":"::ffff:c0a8:123","last_host":"::ffff:c0a8:123","is_multicast":false,"is_private":true,"is_global":false,"is_link_local":false,"is_loopback":false,"is_reserved":true,"is_unspecified":false,"int":{"ip":281473913979171,"network":281473913979171,"broadcast":281473913979171,"first_host":281473913979171,"last_host":281473913979171},"hex":{"ip":"00:00:00:00:00:00:00:00:00:00:ff:ff:c0:a8:01:23","network":"00:00:00:00:00:00:00:00:00:00:ff:ff:c0:a8:01:23","broadcast":"00:00:00:00:00:00:00:00:00:00:ff:ff:c0:a8:01:23","hostmask":"00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00","netmask":"ff:ff:ff:ff:ff:ff:ff:ff:ff:ff:ff:ff:ff:ff:ff:ff","first_host":"00:00:00:00:00:00:00:00:00:00:ff:ff:c0:a8:01:23","last_host":"00:00:00:00:00:00:00:00:00:00:ff:ff:c0:a8:01:23"},"bin":{"ip":"00000000000000000000000000000000000000000000000000000000000000000000000000000000111111111111111111000000101010000000000100100011","network":"00000000000000000000000000000000000000000000000000000000000000000000000000000000111111111111111111000000101010000000000100100011","broadcast":"00000000000000000000000000000000000000000000000000000000000000000000000000000000111111111111111111000000101010000000000100100011","hostmask":"00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000","netmask":"11111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111","first_host":"00000000000000000000000000000000000000000000000000000000000000000000000000000000111111111111111111000000101010000000000100100011","last_host":"00000000000000000000000000000000000000000000000000000000000000000000000000000000111111111111111111000000101010000000000100100011"}}''')
|
||||
self.assertEqual(jc.parsers.ip_address.parse(data, quiet=True), expected)
|
||||
|
||||
|
||||
@ -98,7 +98,7 @@ class MyTests(unittest.TestCase):
|
||||
Test ipv6 6to4 address string
|
||||
"""
|
||||
data = r'2002:c000:204::/48'
|
||||
expected = json.loads(r'''{"version":6,"max_prefix_length":128,"ip":"2002:c000:204::","ip_compressed":"2002:c000:204::","ip_exploded":"2002:c000:0204:0000:0000:0000:0000:0000","scope_id":null,"ipv4_mapped":null,"six_to_four":"192.0.2.4","teredo_client":null,"teredo_server":null,"dns_ptr":"0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.4.0.2.0.0.0.0.c.2.0.0.2.ip6.arpa","network":"2002:c000:204::","broadcast":"2002:c000:204:ffff:ffff:ffff:ffff:ffff","hostmask":"::ffff:ffff:ffff:ffff:ffff","netmask":"ffff:ffff:ffff::","cidr_netmask":48,"hosts":1208925819614629174706174,"first_host":"2002:c000:204::1","last_host":"2002:c000:204:ffff:ffff:ffff:ffff:fffe","is_multicast":false,"is_private":false,"is_global":true,"is_link_local":false,"is_loopback":false,"is_reserved":false,"is_unspecified":false,"int":{"ip":42549574682102084431821433448024768512,"network":42549574682102084431821433448024768512,"broadcast":42549574682103293357641048077199474687,"first_host":42549574682102084431821433448024768513,"last_host":42549574682103293357641048077199474686},"hex":{"ip":"20:02:c0:00:02:04:00:00:00:00:00:00:00:00:00:00","network":"20:02:c0:00:02:04:00:00:00:00:00:00:00:00:00:00","broadcast":"20:02:c0:00:02:04:ff:ff:ff:ff:ff:ff:ff:ff:ff:ff","hostmask":"00:00:00:00:00:00:ff:ff:ff:ff:ff:ff:ff:ff:ff:ff","netmask":"ff:ff:ff:ff:ff:ff:00:00:00:00:00:00:00:00:00:00","first_host":"20:02:c0:00:02:04:00:00:00:00:00:00:00:00:00:01","last_host":"20:02:c0:00:02:04:ff:ff:ff:ff:ff:ff:ff:ff:ff:fe"},"bin":{"ip":"00100000000000101100000000000000000000100000010000000000000000000000000000000000000000000000000000000000000000000000000000000000","network":"00100000000000101100000000000000000000100000010000000000000000000000000000000000000000000000000000000000000000000000000000000000","broadcast":"00100000000000101100000000000000000000100000010011111111111111111111111111111111111111111111111111111111111111111111111111111111","hostmask":"00000000000000000000000000000000000000000000000011111111111111111111111111111111111111111111111111111111111111111111111111111111","netmask":"11111111111111111111111111111111111111111111111100000000000000000000000000000000000000000000000000000000000000000000000000000000","first_host":"00100000000000101100000000000000000000100000010000000000000000000000000000000000000000000000000000000000000000000000000000000001","last_host":"00100000000000101100000000000000000000100000010011111111111111111111111111111111111111111111111111111111111111111111111111111110"}}''')
|
||||
expected = json.loads(r'''{"version":6,"max_prefix_length":128,"ip":"2002:c000:204::","ip_compressed":"2002:c000:204::","ip_exploded":"2002:c000:0204:0000:0000:0000:0000:0000","ip_split":["2002","c000","0204","0000","0000","0000","0000","0000"],"scope_id":null,"ipv4_mapped":null,"six_to_four":"192.0.2.4","teredo_client":null,"teredo_server":null,"dns_ptr":"0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.4.0.2.0.0.0.0.c.2.0.0.2.ip6.arpa","network":"2002:c000:204::","broadcast":"2002:c000:204:ffff:ffff:ffff:ffff:ffff","hostmask":"::ffff:ffff:ffff:ffff:ffff","netmask":"ffff:ffff:ffff::","cidr_netmask":48,"hosts":1208925819614629174706174,"first_host":"2002:c000:204::1","last_host":"2002:c000:204:ffff:ffff:ffff:ffff:fffe","is_multicast":false,"is_private":false,"is_global":true,"is_link_local":false,"is_loopback":false,"is_reserved":false,"is_unspecified":false,"int":{"ip":42549574682102084431821433448024768512,"network":42549574682102084431821433448024768512,"broadcast":42549574682103293357641048077199474687,"first_host":42549574682102084431821433448024768513,"last_host":42549574682103293357641048077199474686},"hex":{"ip":"20:02:c0:00:02:04:00:00:00:00:00:00:00:00:00:00","network":"20:02:c0:00:02:04:00:00:00:00:00:00:00:00:00:00","broadcast":"20:02:c0:00:02:04:ff:ff:ff:ff:ff:ff:ff:ff:ff:ff","hostmask":"00:00:00:00:00:00:ff:ff:ff:ff:ff:ff:ff:ff:ff:ff","netmask":"ff:ff:ff:ff:ff:ff:00:00:00:00:00:00:00:00:00:00","first_host":"20:02:c0:00:02:04:00:00:00:00:00:00:00:00:00:01","last_host":"20:02:c0:00:02:04:ff:ff:ff:ff:ff:ff:ff:ff:ff:fe"},"bin":{"ip":"00100000000000101100000000000000000000100000010000000000000000000000000000000000000000000000000000000000000000000000000000000000","network":"00100000000000101100000000000000000000100000010000000000000000000000000000000000000000000000000000000000000000000000000000000000","broadcast":"00100000000000101100000000000000000000100000010011111111111111111111111111111111111111111111111111111111111111111111111111111111","hostmask":"00000000000000000000000000000000000000000000000011111111111111111111111111111111111111111111111111111111111111111111111111111111","netmask":"11111111111111111111111111111111111111111111111100000000000000000000000000000000000000000000000000000000000000000000000000000000","first_host":"00100000000000101100000000000000000000100000010000000000000000000000000000000000000000000000000000000000000000000000000000000001","last_host":"00100000000000101100000000000000000000100000010011111111111111111111111111111111111111111111111111111111111111111111111111111110"}}''')
|
||||
self.assertEqual(jc.parsers.ip_address.parse(data, quiet=True), expected)
|
||||
|
||||
|
||||
@ -107,7 +107,7 @@ class MyTests(unittest.TestCase):
|
||||
Test ipv6 teredo address string
|
||||
"""
|
||||
data = r'2001:0000:4136:e378:8000:63bf:3fff:fdd2'
|
||||
expected = json.loads(r'''{"version":6,"max_prefix_length":128,"ip":"2001:0:4136:e378:8000:63bf:3fff:fdd2","ip_compressed":"2001:0:4136:e378:8000:63bf:3fff:fdd2","ip_exploded":"2001:0000:4136:e378:8000:63bf:3fff:fdd2","scope_id":null,"ipv4_mapped":null,"six_to_four":null,"teredo_client":"192.0.2.45","teredo_server":"65.54.227.120","dns_ptr":"2.d.d.f.f.f.f.3.f.b.3.6.0.0.0.8.8.7.3.e.6.3.1.4.0.0.0.0.1.0.0.2.ip6.arpa","network":"2001:0:4136:e378:8000:63bf:3fff:fdd2","broadcast":"2001:0:4136:e378:8000:63bf:3fff:fdd2","hostmask":"::","netmask":"ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff","cidr_netmask":128,"hosts":1,"first_host":"2001:0:4136:e378:8000:63bf:3fff:fdd2","last_host":"2001:0:4136:e378:8000:63bf:3fff:fdd2","is_multicast":false,"is_private":true,"is_global":false,"is_link_local":false,"is_loopback":false,"is_reserved":false,"is_unspecified":false,"int":{"ip":42540488182158724593221357832373272018,"network":42540488182158724593221357832373272018,"broadcast":42540488182158724593221357832373272018,"first_host":42540488182158724593221357832373272018,"last_host":42540488182158724593221357832373272018},"hex":{"ip":"20:01:00:00:41:36:e3:78:80:00:63:bf:3f:ff:fd:d2","network":"20:01:00:00:41:36:e3:78:80:00:63:bf:3f:ff:fd:d2","broadcast":"20:01:00:00:41:36:e3:78:80:00:63:bf:3f:ff:fd:d2","hostmask":"00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00","netmask":"ff:ff:ff:ff:ff:ff:ff:ff:ff:ff:ff:ff:ff:ff:ff:ff","first_host":"20:01:00:00:41:36:e3:78:80:00:63:bf:3f:ff:fd:d2","last_host":"20:01:00:00:41:36:e3:78:80:00:63:bf:3f:ff:fd:d2"},"bin":{"ip":"00100000000000010000000000000000010000010011011011100011011110001000000000000000011000111011111100111111111111111111110111010010","network":"00100000000000010000000000000000010000010011011011100011011110001000000000000000011000111011111100111111111111111111110111010010","broadcast":"00100000000000010000000000000000010000010011011011100011011110001000000000000000011000111011111100111111111111111111110111010010","hostmask":"00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000","netmask":"11111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111","first_host":"00100000000000010000000000000000010000010011011011100011011110001000000000000000011000111011111100111111111111111111110111010010","last_host":"00100000000000010000000000000000010000010011011011100011011110001000000000000000011000111011111100111111111111111111110111010010"}}''')
|
||||
expected = json.loads(r'''{"version":6,"max_prefix_length":128,"ip":"2001:0:4136:e378:8000:63bf:3fff:fdd2","ip_compressed":"2001:0:4136:e378:8000:63bf:3fff:fdd2","ip_exploded":"2001:0000:4136:e378:8000:63bf:3fff:fdd2","ip_split":["2001","0000","4136","e378","8000","63bf","3fff","fdd2"],"scope_id":null,"ipv4_mapped":null,"six_to_four":null,"teredo_client":"192.0.2.45","teredo_server":"65.54.227.120","dns_ptr":"2.d.d.f.f.f.f.3.f.b.3.6.0.0.0.8.8.7.3.e.6.3.1.4.0.0.0.0.1.0.0.2.ip6.arpa","network":"2001:0:4136:e378:8000:63bf:3fff:fdd2","broadcast":"2001:0:4136:e378:8000:63bf:3fff:fdd2","hostmask":"::","netmask":"ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff","cidr_netmask":128,"hosts":1,"first_host":"2001:0:4136:e378:8000:63bf:3fff:fdd2","last_host":"2001:0:4136:e378:8000:63bf:3fff:fdd2","is_multicast":false,"is_private":true,"is_global":false,"is_link_local":false,"is_loopback":false,"is_reserved":false,"is_unspecified":false,"int":{"ip":42540488182158724593221357832373272018,"network":42540488182158724593221357832373272018,"broadcast":42540488182158724593221357832373272018,"first_host":42540488182158724593221357832373272018,"last_host":42540488182158724593221357832373272018},"hex":{"ip":"20:01:00:00:41:36:e3:78:80:00:63:bf:3f:ff:fd:d2","network":"20:01:00:00:41:36:e3:78:80:00:63:bf:3f:ff:fd:d2","broadcast":"20:01:00:00:41:36:e3:78:80:00:63:bf:3f:ff:fd:d2","hostmask":"00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00","netmask":"ff:ff:ff:ff:ff:ff:ff:ff:ff:ff:ff:ff:ff:ff:ff:ff","first_host":"20:01:00:00:41:36:e3:78:80:00:63:bf:3f:ff:fd:d2","last_host":"20:01:00:00:41:36:e3:78:80:00:63:bf:3f:ff:fd:d2"},"bin":{"ip":"00100000000000010000000000000000010000010011011011100011011110001000000000000000011000111011111100111111111111111111110111010010","network":"00100000000000010000000000000000010000010011011011100011011110001000000000000000011000111011111100111111111111111111110111010010","broadcast":"00100000000000010000000000000000010000010011011011100011011110001000000000000000011000111011111100111111111111111111110111010010","hostmask":"00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000","netmask":"11111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111","first_host":"00100000000000010000000000000000010000010011011011100011011110001000000000000000011000111011111100111111111111111111110111010010","last_host":"00100000000000010000000000000000010000010011011011100011011110001000000000000000011000111011111100111111111111111111110111010010"}}''')
|
||||
self.assertEqual(jc.parsers.ip_address.parse(data, quiet=True), expected)
|
||||
|
||||
|
||||
|
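The hunks above update the expected output of the IP address string parser tests for the new `ip_split` field, which holds the dotted-quad octets for IPv4 addresses and the exploded hextets for IPv6 addresses. A small sketch of how such a field can be derived from the `ip_exploded` value already present in the output; this is an assumption for illustration, not necessarily how the parser implements it:

```python
def ip_split(ip_exploded: str) -> list:
    """Split an exploded IP string into its octets (IPv4) or hextets (IPv6)."""
    # IPv4 uses dots; an exploded IPv6 address uses colons with no '::' shorthand
    separator = ':' if ':' in ip_exploded else '.'
    return ip_exploded.split(separator)

print(ip_split('192.168.2.10'))
# ['192', '168', '2', '10']
print(ip_split('0127:0000:00de:0000:0000:0000:0000:0001'))
# ['0127', '0000', '00de', '0000', '0000', '0000', '0000', '0001']
```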
@ -1,39 +1,45 @@
import os
import unittest
from datetime import datetime, timezone
import pygments
from pygments.token import (Name, Number, String, Keyword)
import jc.cli
from jc.cli import JcCli


class MyTests(unittest.TestCase):
    def test_cli_magic_parser(self):
        commands = {
            'jc -p systemctl list-sockets': (True, ['systemctl', 'list-sockets'], '--systemctl-ls', ['p']),
            'jc -p systemctl list-unit-files': (True, ['systemctl', 'list-unit-files'], '--systemctl-luf', ['p']),
            'jc -p pip list': (True, ['pip', 'list'], '--pip-list', ['p']),
            'jc -p pip3 list': (True, ['pip3', 'list'], '--pip-list', ['p']),
            'jc -p pip show jc': (True, ['pip', 'show', 'jc'], '--pip-show', ['p']),
            'jc -p pip3 show jc': (True, ['pip3', 'show', 'jc'], '--pip-show', ['p']),
            'jc -prd last': (True, ['last'], '--last', ['p', 'r', 'd']),
            'jc -prdd lastb': (True, ['lastb'], '--last', ['p', 'r', 'd', 'd']),
            'jc -p airport -I': (True, ['airport', '-I'], '--airport', ['p']),
            'jc -p -r airport -I': (True, ['airport', '-I'], '--airport', ['p', 'r']),
            'jc -prd airport -I': (True, ['airport', '-I'], '--airport', ['p', 'r', 'd']),
            'jc -p nonexistent command': (False, ['nonexistent', 'command'], None, ['p']),
            'jc -ap': (False, None, None, []),
            'jc -a arp -a': (False, None, None, []),
            'jc -v': (False, None, None, []),
            'jc -h': (False, None, None, []),
            'jc -h --arp': (False, None, None, []),
            'jc -h arp': (False, None, None, []),
            'jc -h arp -a': (False, None, None, []),
            'jc --pretty dig': (True, ['dig'], '--dig', ['p']),
            'jc --pretty --monochrome --quiet --raw dig': (True, ['dig'], '--dig', ['p', 'm', 'q', 'r']),
            'jc --about --yaml-out': (False, None, None, [])
            'jc -p systemctl list-sockets': ('--systemctl-ls', ['p'], ['systemctl', 'list-sockets']),
            'jc -p systemctl list-unit-files': ('--systemctl-luf', ['p'], ['systemctl', 'list-unit-files']),
            'jc -p pip list': ('--pip-list', ['p'], ['pip', 'list']),
            'jc -p pip3 list': ('--pip-list', ['p'], ['pip3', 'list']),
            'jc -p pip show jc': ('--pip-show', ['p'], ['pip', 'show', 'jc']),
            'jc -p pip3 show jc': ('--pip-show', ['p'], ['pip3', 'show', 'jc']),
            'jc -prd last': ('--last', ['p', 'r', 'd'], ['last']),
            'jc -prdd lastb': ('--last', ['p', 'r', 'd', 'd'], ['lastb']),
            'jc -p airport -I': ('--airport', ['p'], ['airport', '-I']),
            'jc -p -r airport -I': ('--airport', ['p', 'r'], ['airport', '-I']),
            'jc -prd airport -I': ('--airport', ['p', 'r', 'd'], ['airport', '-I']),
            'jc -p nonexistent command': (None, ['p'], ['nonexistent', 'command']),
            'jc -ap': (None, [], None),
            'jc -a arp -a': ('--arp', ['a'], ['arp', '-a']),
            'jc -v': (None, [], None),
            'jc -h': (None, [], None),
            'jc -h --arp': (None, [], None),
            'jc -h arp': ('--arp', ['h'], ['arp']),
            'jc -h arp -a': ('--arp', ['h'], ['arp', '-a']),
            'jc -v arp -a': ('--arp', ['v'], ['arp', '-a']),
            'jc --pretty dig': ('--dig', ['p'], ['dig']),
            'jc --pretty --monochrome --quiet --raw dig': ('--dig', ['p', 'm', 'q', 'r'], ['dig']),
            'jc --about --yaml-out': (None, [], None)
        }

        for command, expected_command in commands.items():
            self.assertEqual(jc.cli.magic_parser(command.split(' ')), expected_command)
        for command, expected in commands.items():
            cli = JcCli()
            cli.args = command.split()
            cli.magic_parser()
            resulting_attributes = (cli.magic_found_parser, cli.magic_options, cli.magic_run_command)
            self.assertEqual(expected, resulting_attributes)

    def test_cli_set_env_colors(self):
        if pygments.__version__.startswith('2.3.'):
@ -128,7 +134,10 @@ class MyTests(unittest.TestCase):
        }

        for jc_colors, expected_colors in env.items():
            self.assertEqual(jc.cli.set_env_colors(jc_colors), expected_colors)
            cli = JcCli()
            os.environ["JC_COLORS"] = jc_colors
            cli.set_custom_colors()
            self.assertEqual(cli.custom_colors, expected_colors)

    def test_cli_json_out(self):
        test_input = [
@ -157,7 +166,11 @@ class MyTests(unittest.TestCase):
        ]

        for test_dict, expected_json in zip(test_input, expected_output):
            self.assertEqual(jc.cli.json_out(test_dict), expected_json)
            cli = JcCli()
            os.environ["JC_COLORS"] = "default,default,default,default"
            cli.set_custom_colors()
            cli.data_out = test_dict
            self.assertEqual(cli.json_out(), expected_json)

    def test_cli_json_out_mono(self):
        test_input = [
@ -177,7 +190,11 @@ class MyTests(unittest.TestCase):
        ]

        for test_dict, expected_json in zip(test_input, expected_output):
            self.assertEqual(jc.cli.json_out(test_dict, mono=True), expected_json)
            cli = JcCli()
            cli.set_custom_colors()
            cli.mono = True
            cli.data_out = test_dict
            self.assertEqual(cli.json_out(), expected_json)

    def test_cli_json_out_pretty(self):
        test_input = [
@ -197,7 +214,11 @@ class MyTests(unittest.TestCase):
        ]

        for test_dict, expected_json in zip(test_input, expected_output):
            self.assertEqual(jc.cli.json_out(test_dict, pretty=True), expected_json)
            cli = JcCli()
            cli.pretty = True
            cli.set_custom_colors()
            cli.data_out = test_dict
            self.assertEqual(cli.json_out(), expected_json)

    def test_cli_yaml_out(self):
        test_input = [
@ -226,7 +247,11 @@ class MyTests(unittest.TestCase):
        ]

        for test_dict, expected_json in zip(test_input, expected_output):
            self.assertEqual(jc.cli.yaml_out(test_dict), expected_json)
            cli = JcCli()
            os.environ["JC_COLORS"] = "default,default,default,default"
            cli.set_custom_colors()
            cli.data_out = test_dict
            self.assertEqual(cli.yaml_out(), expected_json)

    def test_cli_yaml_out_mono(self):
        test_input = [
@ -248,56 +273,61 @@ class MyTests(unittest.TestCase):
        ]

        for test_dict, expected_json in zip(test_input, expected_output):
            self.assertEqual(jc.cli.yaml_out(test_dict, mono=True), expected_json)
            cli = JcCli()
            cli.set_custom_colors()
            cli.mono = True
            cli.data_out = test_dict
            self.assertEqual(cli.yaml_out(), expected_json)

    def test_cli_about_jc(self):
        self.assertEqual(jc.cli.about_jc()['name'], 'jc')
        self.assertGreaterEqual(jc.cli.about_jc()['parser_count'], 55)
        self.assertEqual(jc.cli.about_jc()['parser_count'], len(jc.cli.about_jc()['parsers']))
        cli = JcCli()
        self.assertEqual(cli.about_jc()['name'], 'jc')
        self.assertGreaterEqual(cli.about_jc()['parser_count'], 55)
        self.assertEqual(cli.about_jc()['parser_count'], len(cli.about_jc()['parsers']))

    def test_add_meta_to_simple_dict(self):
        list_or_dict = {'a': 1, 'b': 2}
        runtime = datetime(2022, 8, 5, 0, 37, 9, 273349, tzinfo=timezone.utc)
        magic_exit_code = 2
        run_command = ['ping', '-c3', '192.168.1.123']
        parser_name = 'ping'
        cli = JcCli()
        cli.data_out = {'a': 1, 'b': 2}
        cli.run_timestamp = datetime(2022, 8, 5, 0, 37, 9, 273349, tzinfo=timezone.utc)
        cli.magic_returncode = 2
        cli.magic_run_command = ['ping', '-c3', '192.168.1.123']
        cli.parser_name = 'ping'
        expected = {'a': 1, 'b': 2, '_jc_meta': {'parser': 'ping', 'magic_command': ['ping', '-c3', '192.168.1.123'], 'magic_command_exit': 2, 'timestamp': 1659659829.273349}}
        jc.cli.add_metadata_to(list_or_dict, runtime, run_command, magic_exit_code, parser_name)

        self.assertEqual(list_or_dict, expected)
        cli.add_metadata_to_output()
        self.assertEqual(cli.data_out, expected)

    def test_add_meta_to_simple_list(self):
        list_or_dict = [{'a': 1, 'b': 2},{'a': 3, 'b': 4}]
        runtime = datetime(2022, 8, 5, 0, 37, 9, 273349, tzinfo=timezone.utc)
        magic_exit_code = 2
        run_command = ['ping', '-c3', '192.168.1.123']
        parser_name = 'ping'
        cli = JcCli()
        cli.data_out = [{'a': 1, 'b': 2},{'a': 3, 'b': 4}]
        cli.run_timestamp = datetime(2022, 8, 5, 0, 37, 9, 273349, tzinfo=timezone.utc)
        cli.magic_returncode = 2
        cli.magic_run_command = ['ping', '-c3', '192.168.1.123']
        cli.parser_name = 'ping'
        expected = [{'a': 1, 'b': 2, '_jc_meta': {'parser': 'ping', 'magic_command': ['ping', '-c3', '192.168.1.123'], 'magic_command_exit': 2, 'timestamp': 1659659829.273349}}, {'a': 3, 'b': 4, '_jc_meta': {'parser': 'ping', 'magic_command': ['ping', '-c3', '192.168.1.123'], 'magic_command_exit': 2, 'timestamp': 1659659829.273349}}]
        jc.cli.add_metadata_to(list_or_dict, runtime, run_command, magic_exit_code, parser_name)

        self.assertEqual(list_or_dict, expected)
        cli.add_metadata_to_output()
        self.assertEqual(cli.data_out, expected)

    def test_add_meta_to_dict_existing_meta(self):
        list_or_dict = {'a': 1, 'b': 2, '_jc_meta': {'foo': 'bar'}}
        runtime = datetime(2022, 8, 5, 0, 37, 9, 273349, tzinfo=timezone.utc)
        magic_exit_code = 2
        run_command = ['ping', '-c3', '192.168.1.123']
        parser_name = 'ping'
        cli = JcCli()
        cli.magic_run_command = ['ping', '-c3', '192.168.1.123']
        cli.magic_returncode = 2
        cli.data_out = {'a': 1, 'b': 2, '_jc_meta': {'foo': 'bar'}}
        cli.run_timestamp = datetime(2022, 8, 5, 0, 37, 9, 273349, tzinfo=timezone.utc)
        cli.parser_name = 'ping'
        expected = {'a': 1, 'b': 2, '_jc_meta': {'foo': 'bar', 'parser': 'ping', 'magic_command': ['ping', '-c3', '192.168.1.123'], 'magic_command_exit': 2, 'timestamp': 1659659829.273349}}
        jc.cli.add_metadata_to(list_or_dict, runtime, run_command, magic_exit_code, parser_name)

        self.assertEqual(list_or_dict, expected)
        cli.add_metadata_to_output()
        self.assertEqual(cli.data_out, expected)

    def test_add_meta_to_list_existing_meta(self):
        list_or_dict = [{'a': 1, 'b': 2, '_jc_meta': {'foo': 'bar'}},{'a': 3, 'b': 4, '_jc_meta': {'foo': 'bar'}}]
        runtime = datetime(2022, 8, 5, 0, 37, 9, 273349, tzinfo=timezone.utc)
        magic_exit_code = 2
        run_command = ['ping', '-c3', '192.168.1.123']
        parser_name = 'ping'
        cli = JcCli()
        cli.data_out = [{'a': 1, 'b': 2, '_jc_meta': {'foo': 'bar'}},{'a': 3, 'b': 4, '_jc_meta': {'foo': 'bar'}}]
        cli.run_timestamp = datetime(2022, 8, 5, 0, 37, 9, 273349, tzinfo=timezone.utc)
        cli.magic_returncode = 2
        cli.magic_run_command = ['ping', '-c3', '192.168.1.123']
        cli.parser_name = 'ping'
        expected = [{'a': 1, 'b': 2, '_jc_meta': {'foo': 'bar', 'parser': 'ping', 'magic_command': ['ping', '-c3', '192.168.1.123'], 'magic_command_exit': 2, 'timestamp': 1659659829.273349}}, {'a': 3, 'b': 4, '_jc_meta': {'foo': 'bar', 'parser': 'ping', 'magic_command': ['ping', '-c3', '192.168.1.123'], 'magic_command_exit': 2, 'timestamp': 1659659829.273349}}]
        jc.cli.add_metadata_to(list_or_dict, runtime, run_command, magic_exit_code, parser_name)

        self.assertEqual(list_or_dict, expected)
        cli.add_metadata_to_output()
        self.assertEqual(cli.data_out, expected)

if __name__ == '__main__':
    unittest.main()
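The updated tests above replace module-level helpers such as `jc.cli.magic_parser()` and `jc.cli.json_out()` with methods and attributes on a `JcCli` instance. A minimal sketch of that attribute-driven pattern, assuming a checkout at this version; the `dig` command line and the printed values are illustrative, not taken from the fixtures:

```python
# Sketch only: mirrors the pattern the updated tests exercise.
from jc.cli import JcCli

cli = JcCli()
cli.args = 'jc -p dig example.com'.split()  # simulated argv (illustrative)
cli.magic_parser()                          # sets the magic_* attributes; does not run the command

# With the mappings used in the tests, a hit looks like ('--dig', ['p'], ['dig', 'example.com'])
print((cli.magic_found_parser, cli.magic_options, cli.magic_run_command))
```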
@ -1,3 +1,4 @@
from copy import deepcopy
import unittest
from typing import Generator
import jc.lib
@ -74,5 +75,62 @@ class MyTests(unittest.TestCase):
    def test_lib_modname_to_cliname(self):
        self.assertEqual(jc.lib._modname_to_cliname('module_name'), 'module-name')

    def test_lib_all_parser_info_show_deprecated(self):
        # save old state
        old_parsers = deepcopy(jc.lib.parsers)
        old_get_parser = deepcopy(jc.lib._get_parser)

        # mock data
        class mock_parser_info:
            version = "1.1"
            description = "`deprecated` command parser"
            author = "nobody"
            author_email = "nobody@gmail.com"
            compatible = ["linux", "darwin"]
            magic_commands = ["deprecated"]
            deprecated = True

        class mock_parser:
            info = mock_parser_info

        jc.lib.parsers = ['deprecated']
        jc.lib._get_parser = lambda x: mock_parser # type: ignore
        result = jc.lib.all_parser_info(show_deprecated=True)

        # reset
        jc.lib.parsers = old_parsers
        jc.lib._get_parser = old_get_parser

        self.assertEqual(len(result), 1)

    def test_lib_all_parser_info_show_hidden(self):
        # save old state
        old_parsers = deepcopy(jc.lib.parsers)
        old_get_parser = deepcopy(jc.lib._get_parser)

        # mock data
        class mock_parser_info:
            version = "1.1"
            description = "`deprecated` command parser"
            author = "nobody"
            author_email = "nobody@gmail.com"
            compatible = ["linux", "darwin"]
            magic_commands = ["deprecated"]
            hidden = True

        class mock_parser:
            info = mock_parser_info

        jc.lib.parsers = ['deprecated']
        jc.lib._get_parser = lambda x: mock_parser # type: ignore
        result = jc.lib.all_parser_info(show_hidden=True)

        # reset
        jc.lib.parsers = old_parsers
        jc.lib._get_parser = old_get_parser

        self.assertEqual(len(result), 1)


if __name__ == '__main__':
    unittest.main()
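The new `jc.lib` cases above monkey-patch `jc.lib.parsers` and `jc.lib._get_parser` to verify the deprecated- and hidden-parser handling in `all_parser_info()`. A rough sketch of the parser-module `info` attributes those mocks imitate; the `example` names here are made up for illustration and are not a real jc parser:

```python
import jc.lib

# Sketch of a parser module's info class, as imitated by the mocks above.
class info:
    version = '1.0'
    description = '`example` command parser'
    author = 'nobody'
    author_email = 'nobody@example.com'
    compatible = ['linux', 'darwin']
    magic_commands = ['example']
    deprecated = True   # or: hidden = True

# The tests imply such parsers are only listed when explicitly requested:
with_deprecated = jc.lib.all_parser_info(show_deprecated=True)
with_hidden = jc.lib.all_parser_info(show_hidden=True)
```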
@ -73,7 +73,15 @@ class MyTests(unittest.TestCase):
        }

        for input_string, expected_output in datetime_map.items():
            self.assertEqual(jc.utils.timestamp(input_string).__dict__, expected_output)
            ts = jc.utils.timestamp(input_string)
            ts_dict = {
                'string': ts.string,
                'format': ts.format,
                'naive': ts.naive,
                'utc': ts.utc
            }

            self.assertEqual(ts_dict, expected_output)

    def test_utils_convert_to_int(self):
        io_map = {
tests/test_lspci.py (new file, 64 lines)
@ -0,0 +1,64 @@
import os
import unittest
import json
from typing import Dict
import jc.parsers.lspci

THIS_DIR = os.path.dirname(os.path.abspath(__file__))


class MyTests(unittest.TestCase):
    f_in: Dict = {}
    f_json: Dict = {}

    @classmethod
    def setUpClass(cls):
        fixtures = {
            'ubuntu_20_10_lspci_mmv': (
                'fixtures/ubuntu-20.10/lspci-mmv.out',
                'fixtures/ubuntu-20.10/lspci-mmv.json'),
            'ubuntu_20_10_lspci_nmmv': (
                'fixtures/ubuntu-20.10/lspci-nmmv.out',
                'fixtures/ubuntu-20.10/lspci-nmmv.json'),
            'ubuntu_20_10_lspci_nnmmv': (
                'fixtures/ubuntu-20.10/lspci-nnmmv.out',
                'fixtures/ubuntu-20.10/lspci-nnmmv.json')
        }

        for file, filepaths in fixtures.items():
            with open(os.path.join(THIS_DIR, filepaths[0]), 'r', encoding='utf-8') as a, \
                 open(os.path.join(THIS_DIR, filepaths[1]), 'r', encoding='utf-8') as b:
                cls.f_in[file] = a.read()
                cls.f_json[file] = json.loads(b.read())


    def test_lspci_nodata(self):
        """
        Test 'lspci' with no data
        """
        self.assertEqual(jc.parsers.lspci.parse('', quiet=True), [])

    def test_lspci_mmv_ubuntu_20_10(self):
        """
        Test 'lspci -mmv' on Ubuntu 20.10
        """
        self.assertEqual(jc.parsers.lspci.parse(self.f_in['ubuntu_20_10_lspci_mmv'], quiet=True),
                         self.f_json['ubuntu_20_10_lspci_mmv'])

    def test_lspci_nmmv_ubuntu_20_10(self):
        """
        Test 'lspci -nmmv' on Ubuntu 20.10
        """
        self.assertEqual(jc.parsers.lspci.parse(self.f_in['ubuntu_20_10_lspci_nmmv'], quiet=True),
                         self.f_json['ubuntu_20_10_lspci_nmmv'])

    def test_lspci_nnmmv_ubuntu_20_10(self):
        """
        Test 'lspci -nnmmv' on Ubuntu 20.10
        """
        self.assertEqual(jc.parsers.lspci.parse(self.f_in['ubuntu_20_10_lspci_nnmmv'], quiet=True),
                         self.f_json['ubuntu_20_10_lspci_nnmmv'])


if __name__ == '__main__':
    unittest.main()
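The new `lspci` parser under test can also be driven directly from Python. A small sketch, assuming `lspci` is installed on the host; the `-mmv` flag matches one of the fixture variants above and the `subprocess` capture is only for illustration:

```python
# Sketch: feed real `lspci -mmv` output to the new parser, as the tests do with fixtures.
import json
import subprocess

import jc.parsers.lspci

lspci_output = subprocess.run(
    ['lspci', '-mmv'], capture_output=True, text=True, check=True
).stdout

devices = jc.parsers.lspci.parse(lspci_output, quiet=True)  # a list of device dicts
print(json.dumps(devices, indent=2))
```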
tests/test_pci_ids.py (new file, 44 lines)
@ -0,0 +1,44 @@
import os
import unittest
import json
from typing import Dict
import jc.parsers.pci_ids

THIS_DIR = os.path.dirname(os.path.abspath(__file__))


class MyTests(unittest.TestCase):
    f_in: Dict = {}
    f_json: Dict = {}

    @classmethod
    def setUpClass(cls):
        fixtures = {
            'pci_ids': (
                'fixtures/generic/pci.ids',
                'fixtures/generic/pci.ids.json')
        }

        for file, filepaths in fixtures.items():
            with open(os.path.join(THIS_DIR, filepaths[0]), 'r', encoding='utf-8') as a, \
                 open(os.path.join(THIS_DIR, filepaths[1]), 'r', encoding='utf-8') as b:
                cls.f_in[file] = a.read()
                cls.f_json[file] = json.loads(b.read())


    def test_pci_ids_nodata(self):
        """
        Test 'pci_ids' with no data
        """
        self.assertEqual(jc.parsers.pci_ids.parse('', quiet=True), {})

    def test_pci_ids(self):
        """
        Test 'pci_ids'
        """
        self.assertEqual(jc.parsers.pci_ids.parse(self.f_in['pci_ids'], quiet=True),
                         self.f_json['pci_ids'])


if __name__ == '__main__':
    unittest.main()
@ -203,6 +203,9 @@ class MyTests(unittest.TestCase):
            'proc_pid_stat': (
                'fixtures/linux-proc/pid_stat',
                'fixtures/linux-proc/pid_stat.json'),
            'pid_stat_w_space_and_nl_in_comm': (
                'fixtures/linux-proc/pid_stat_w_space_and_nl_in_comm',
                'fixtures/linux-proc/pid_stat_w_space_and_nl_in_comm.json'),
            'proc_pid_statm': (
                'fixtures/linux-proc/pid_statm',
                'fixtures/linux-proc/pid_statm.json'),
@ -16,7 +16,10 @@ class MyTests(unittest.TestCase):
        fixtures = {
            'proc_pid_stat': (
                'fixtures/linux-proc/pid_stat',
                'fixtures/linux-proc/pid_stat.json')
                'fixtures/linux-proc/pid_stat.json'),
            'pid_stat_w_space_and_nl_in_comm': (
                'fixtures/linux-proc/pid_stat_w_space_and_nl_in_comm',
                'fixtures/linux-proc/pid_stat_w_space_and_nl_in_comm.json')
        }

        for file, filepaths in fixtures.items():
@ -39,6 +42,13 @@ class MyTests(unittest.TestCase):
        self.assertEqual(jc.parsers.proc_pid_stat.parse(self.f_in['proc_pid_stat'], quiet=True),
                         self.f_json['proc_pid_stat'])

    def test_proc_pid_stat_w_space_and_nl(self):
        """
        Test '/proc/<pid>/stat' with command with spaces and newline
        """
        self.assertEqual(jc.parsers.proc_pid_stat.parse(self.f_in['pid_stat_w_space_and_nl_in_comm'], quiet=True),
                         self.f_json['pid_stat_w_space_and_nl_in_comm'])


if __name__ == '__main__':
    unittest.main()
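The extra fixture above covers `/proc/<pid>/stat` content whose command name contains spaces and a newline. A quick sketch of running the same parser against a live file, assuming a Linux host; the pid is illustrative:

```python
# Sketch: parse a single /proc/<pid>/stat file with the parser the tests above use.
import jc.parsers.proc_pid_stat

with open('/proc/1/stat', 'r', encoding='utf-8') as f:
    stat = jc.parsers.proc_pid_stat.parse(f.read(), quiet=True)

print(stat)  # parsed fields from the stat line
```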
tests/test_udevadm.py (new file, 44 lines)
@ -0,0 +1,44 @@
import os
import unittest
import json
from typing import Dict
import jc.parsers.udevadm

THIS_DIR = os.path.dirname(os.path.abspath(__file__))


class MyTests(unittest.TestCase):
    f_in: Dict = {}
    f_json: Dict = {}

    @classmethod
    def setUpClass(cls):
        fixtures = {
            'udevadm': (
                'fixtures/generic/udevadm.out',
                'fixtures/generic/udevadm.json')
        }

        for file, filepaths in fixtures.items():
            with open(os.path.join(THIS_DIR, filepaths[0]), 'r', encoding='utf-8') as a, \
                 open(os.path.join(THIS_DIR, filepaths[1]), 'r', encoding='utf-8') as b:
                cls.f_in[file] = a.read()
                cls.f_json[file] = json.loads(b.read())


    def test_udevadm_nodata(self):
        """
        Test 'udevadm' with no data
        """
        self.assertEqual(jc.parsers.udevadm.parse('', quiet=True), {})

    def test_udevadm(self):
        """
        Test 'udevadm'
        """
        self.assertEqual(jc.parsers.udevadm.parse(self.f_in['udevadm'], quiet=True),
                         self.f_json['udevadm'])


if __name__ == '__main__':
    unittest.main()