1
0
mirror of https://github.com/kellyjonbrazil/jc.git synced 2026-04-03 17:44:07 +02:00

Compare commits

...

62 Commits

Author SHA1 Message Date
Kelly Brazil
70cb4453be Merge pull request #195 from kellyjonbrazil/dev
Dev v1.17.7
2022-01-14 11:54:29 -08:00
Kelly Brazil
830674cc6f version bump 2022-01-14 11:47:25 -08:00
Kelly Brazil
fb406b58a1 formatting 2022-01-08 20:22:53 -08:00
Kelly Brazil
55b272e412 tighten stat data detection 2022-01-06 11:13:14 -08:00
Kelly Brazil
94f62a9bf3 formatting 2022-01-06 11:03:49 -08:00
Kelly Brazil
8d19e4cb7b doc update 2022-01-06 11:00:53 -08:00
Kelly Brazil
7e510d48e0 simplify non-stat parse error logic 2022-01-06 10:36:33 -08:00
Kelly Brazil
7b20cffb14 simplify last item logic 2022-01-05 11:44:00 -08:00
Kelly Brazil
5c934c43c1 add continue to simplify logic 2022-01-05 11:39:00 -08:00
Kelly Brazil
8609298449 update docstring 2022-01-05 11:25:58 -08:00
Kelly Brazil
72cb0dc10b fixup for first and last items 2022-01-05 11:22:01 -08:00
Kelly Brazil
7b22fa81de raise for non-stat data 2022-01-05 07:46:58 -08:00
Kelly Brazil
0b6a130779 ignore blank lines 2022-01-04 16:43:38 -08:00
Kelly Brazil
fdcf4338e0 add examples to docstring 2022-01-04 15:31:00 -08:00
Kelly Brazil
ee43037f48 remove unused continue lines 2022-01-04 15:13:42 -08:00
Kelly Brazil
26e365563c add schema and _process logic 2022-01-04 15:07:45 -08:00
Kelly Brazil
1b39586bb1 add stat streaming parser 2022-01-04 15:01:42 -08:00
Kelly Brazil
8bb3a6bea3 Merge pull request #194 from kellyjonbrazil/master
sync to dev
2022-01-04 13:33:35 -08:00
Kelly Brazil
e6900e2000 add jar-manifest 2022-01-03 09:41:16 -08:00
Kelly Brazil
0ee244756b spelling 2022-01-03 09:22:35 -08:00
Kelly Brazil
6d5ac9abe6 update docs 2022-01-03 09:19:40 -08:00
Kelly Brazil
44f6d9e132 changelog update 2022-01-03 09:18:43 -08:00
Kelly Brazil
360154559c Merge pull request #193 from kellyjonbrazil/dev
Dev add csv doublequote fix
2022-01-03 09:12:50 -08:00
Kelly Brazil
78672bd7ad Merge pull request #190 from shaikustin/csv-doubleqouted
fix doubleqoute in csv
2022-01-03 09:08:57 -08:00
Kelly Brazil
65d96e26b5 add streaming tests 2022-01-03 09:06:00 -08:00
Kelly Brazil
241d53af9a Merge pull request #192 from kellyjonbrazil/dev
Dev v1.17.6
2022-01-03 08:49:15 -08:00
Kelly Brazil
5563829df2 make dialect sniff behavior match non-streaming parser 2022-01-03 08:48:23 -08:00
Kelly Brazil
3a4a27e1f9 version bump 2022-01-02 11:44:25 -08:00
Kelly Brazil
9c887a36a8 update csv_s parser with csv changes 2022-01-02 11:44:18 -08:00
Kelly Brazil
bc7973af36 update copyright 2022-01-02 11:07:15 -08:00
Kelly Brazil
32972d8fdb doc update 2022-01-02 11:00:52 -08:00
Kelly Brazil
b128d9109c add MANIFEST.MF tests 2022-01-02 10:51:02 -08:00
Kelly Brazil
929d7273a4 doc updates 2022-01-02 10:29:56 -08:00
shaik
2a40f84274 fix doubleqoute in csv 2022-01-02 17:30:25 +02:00
Kelly Brazil
9ff6fa818f add jar-manifest 2022-01-01 10:08:40 -08:00
Kelly Brazil
dac73a4bfe Merge pull request #189 from kellyjonbrazil/master
use github releases instead of packaging site
2022-01-01 10:06:46 -08:00
Kelly Brazil
d7895547f7 Merge pull request #188 from listuser/new_branch
Create key value pairs from a MANIFEST.MF file, to include key multiline value pairs.
2022-01-01 10:04:47 -08:00
listuser
b7d439cb87 Renamed jar-manifest.py to jar_manifest.py, added multi manifest outputs to tests... 2021-12-31 12:15:50 -08:00
listuser
7cc903a5f5 Removed inflating from examples in jar-manifest.py 2021-12-30 15:10:53 -08:00
listuser
c495a8291b Updated test output in MANIFEST.MF.json 2021-12-30 15:06:06 -08:00
listuser
5e1d7d777c Renamed metamf.py to jar-manifest.py, plus other changes 2021-12-30 15:02:48 -08:00
listuser
7edad3f676 Create key value pairs from a MANIFEST.MF file, to include key multiline value pairs. 2021-12-29 12:15:15 -08:00
Kelly Brazil
f7331001d4 use github releases instead of packaging site 2021-12-23 11:35:39 -08:00
Kelly Brazil
f8e09ae2ff Merge pull request #187 from kellyjonbrazil/master
sync to dev
2021-12-21 15:46:39 -06:00
Kelly Brazil
433c7cc0f0 formatting 2021-12-21 13:42:24 -08:00
Kelly Brazil
d753e71a74 Merge pull request #186 from kellyjonbrazil/dev
Dev v1.17.5
2021-12-21 15:21:18 -06:00
Kelly Brazil
2e4f5a508b version bump 2021-12-21 12:19:17 -08:00
Kelly Brazil
88b960eff6 doc update 2021-12-21 12:14:20 -08:00
Kelly Brazil
88c77bd89e add zipinfo tests 2021-12-21 12:08:16 -08:00
Kelly Brazil
51a7a4251f add multi-archive test output 2021-12-21 11:11:56 -08:00
Kelly Brazil
51d2f316f3 add multi-archive support 2021-12-21 11:11:44 -08:00
Kelly Brazil
ff78a46c48 add zipinfo parser 2021-12-21 08:13:17 -08:00
Kelly Brazil
ed4a9dc1d4 formatting 2021-12-21 08:13:00 -08:00
Kelly Brazil
63182dba26 Merge pull request #185 from listuser/new_branch
contributed zipinfo parser
2021-12-21 10:08:46 -06:00
Matt J
9c1eaa9389 revised zipinfo.py nested version 2021-12-20 21:53:34 -08:00
Matt J
bc520fcbcd added zipinfo.py nested version 2021-12-20 14:29:50 -08:00
Matt J
46faac1a12 add test data zipinfo.json and zipinfo.out 2021-12-19 18:08:11 -08:00
Matt J
3c424c0cb3 initial commit zipinfo.py to new_branch 2021-12-19 14:05:48 -08:00
Kelly Brazil
3ac8d0362b use quotes around python versions 2021-12-16 07:04:35 -08:00
Kelly Brazil
d88b998e6c formatting 2021-12-09 10:58:06 -08:00
Kelly Brazil
a9ed55c006 fix spelling 2021-12-09 10:54:04 -08:00
Kelly Brazil
ea61434123 fix schema docs 2021-12-09 10:21:37 -08:00
53 changed files with 5141 additions and 43 deletions

View File

@@ -14,7 +14,7 @@ jobs:
strategy:
matrix:
os: [macos-latest, ubuntu-latest, windows-latest]
python-version: [3.7, 3.8, 3.9, 3.10.0]
python-version: ["3.7", "3.8", "3.9", "3.10"]
steps:
- uses: actions/checkout@v2

View File

@@ -1,5 +1,15 @@
jc changelog
20220106 v1.17.7
- Add stat command streaming parser tested on linux and macOS
20220103 v1.17.6
- Add jar-manifest file parser (for MANIFEST.MF files)
- Fix CSV parsers for some files that include doublequotes
20211221 v1.17.5
- Add zipinfo parser tested on linux and macOS
20211207 v1.17.4
- Add support for the NO_COLOR environment variable to set mono (http://no-color.org/)
- Add -C option to force color output even when using pipes (overrides -m and NO_COLOR)

View File

@@ -1713,6 +1713,27 @@ iw dev wlan0 scan | jc --iw-scan -p # or: jc -p iw dev wlan0 scan
}
]
```
### jar-manifest
```bash
cat MANIFEST.MF | jc --jar-manifest -p
```
```json
[
{
"Import_Package": "com.conversantmedia.util.concurrent;resolution:=optional,com.fasterxml.jackson.annotation;version=\"[2.12,3)\";resolution:=optional,com.fasterxml.jackson.core;version=\"[2.12,3)\";resolution:=optional,com.fasterxml.jackson.core.type;version=\"[2.12,3)\";resolution:=optional,com.fasterxml.jackson.cor...",
"Export_Package": "org.apache.logging.log4j.core;uses:=\"org.apache.logging.log4j,org.apache.logging.log4j.core.config,org.apache.logging.log4j.core.impl,org.apache.logging.log4j.core.layout,org.apache.logging.log4j.core.time,org.apache.logging.log4j.message,org.apache.logging.log4j.spi,org.apache.logging.log4j.status...",
"Manifest_Version": "1.0",
"Bundle_License": "https://www.apache.org/licenses/LICENSE-2.0.txt",
"Bundle_SymbolicName": "org.apache.logging.log4j.core",
"Built_By": "matt",
"Bnd_LastModified": "1639373735804",
"Implementation_Vendor_Id": "org.apache.logging.log4j",
"Specification_Title": "Apache Log4j Core",
"Log4jReleaseManager": "Matt Sicker",
...
}
]
```
### jobs
```bash
jobs -l | jc --jobs -p
@@ -3793,5 +3814,36 @@ cat istio.yaml | jc --yaml -p
}
]
```
### zipinfo
```bash
zipinfo file.zip | jc --zipinfo -p # or: jc -p zipinfo file.zip
```
```json
[
{
"archive": "file.zip",
"size": 4116,
"size_unit": "bytes",
"number_entries": 1,
"number_files": 1,
"bytes_uncompressed": 11837,
"bytes_compressed": 3966,
"percent_compressed": 66.5,
"files": [
{
"flags": "-rw-r--r--",
"zipversion": "2.1",
"zipunder": "unx",
"filesize": 11837,
"type": "bX",
"method": "defN",
"date": "21-Dec-08",
"time": "20:50",
"filename": "compressed_file"
}
]
}
]
```
© 2019-2021 Kelly Brazil

View File

@@ -91,7 +91,10 @@ pip3 install jc
| FreeBSD | `portsnap fetch update && cd /usr/ports/textproc/py-jc && make install clean` |
| Ansible filter plugin | `ansible-galaxy collection install community.general` |
> For more packages and binaries, see the [jc packaging](https://kellyjonbrazil.github.io/jc-packaging/) site.
> For more OS Packages, see https://repology.org/project/jc/versions.
### Binaries and Packages
For precompiled binaries and packages, see [Releases](https://github.com/kellyjonbrazil/jc/releases) on Github.
## Usage
`jc` accepts piped input from `STDIN` and outputs a JSON representation of the previous command's output to `STDOUT`.
@@ -142,6 +145,7 @@ The JSON output can be compact (default) or pretty formatted with the `-p` optio
- `--iostat-s` enables the `iostat` command streaming parser ([documentation](https://kellyjonbrazil.github.io/jc/docs/parsers/iostat_s))
- `--iptables` enables the `iptables` command parser ([documentation](https://kellyjonbrazil.github.io/jc/docs/parsers/iptables))
- `--iw-scan` enables the `iw dev [device] scan` command parser ([documentation](https://kellyjonbrazil.github.io/jc/docs/parsers/iw_scan))
- `--jar-manifest` enables the MANIFEST.MF file parser ([documentation](https://kellyjonbrazil.github.io/jc/docs/parsers/jar_manifest))
- `--jobs` enables the `jobs` command parser ([documentation](https://kellyjonbrazil.github.io/jc/docs/parsers/jobs))
- `--kv` enables the Key/Value file parser ([documentation](https://kellyjonbrazil.github.io/jc/docs/parsers/kv))
- `--last` enables the `last` and `lastb` command parser ([documentation](https://kellyjonbrazil.github.io/jc/docs/parsers/last))
@@ -166,6 +170,7 @@ The JSON output can be compact (default) or pretty formatted with the `-p` optio
- `--shadow` enables the `/etc/shadow` file parser ([documentation](https://kellyjonbrazil.github.io/jc/docs/parsers/shadow))
- `--ss` enables the `ss` command parser ([documentation](https://kellyjonbrazil.github.io/jc/docs/parsers/ss))
- `--stat` enables the `stat` command parser ([documentation](https://kellyjonbrazil.github.io/jc/docs/parsers/stat))
- `--stat-s` enables the `stat` command streaming parser ([documentation](https://kellyjonbrazil.github.io/jc/docs/parsers/stat_s))
- `--sysctl` enables the `sysctl` command parser ([documentation](https://kellyjonbrazil.github.io/jc/docs/parsers/sysctl))
- `--systemctl` enables the `systemctl` command parser ([documentation](https://kellyjonbrazil.github.io/jc/docs/parsers/systemctl))
- `--systemctl-lj` enables the `systemctl list-jobs` command parser ([documentation](https://kellyjonbrazil.github.io/jc/docs/parsers/systemctl_lj))
@@ -188,6 +193,7 @@ The JSON output can be compact (default) or pretty formatted with the `-p` optio
- `--who` enables the `who` command parser ([documentation](https://kellyjonbrazil.github.io/jc/docs/parsers/who))
- `--xml` enables the XML file parser ([documentation](https://kellyjonbrazil.github.io/jc/docs/parsers/xml))
- `--yaml` enables the YAML file parser ([documentation](https://kellyjonbrazil.github.io/jc/docs/parsers/yaml))
- `--zipinfo` enables the `zipinfo` command parser ([documentation](https://kellyjonbrazil.github.io/jc/docs/parsers/zipinfo))
### Options
- `-a` about `jc`. Prints information about `jc` and the parsers (in JSON, of course!)
@@ -1005,4 +1011,4 @@ cat istio.yaml | jc --yaml -p
]
```
© 2019-2021 Kelly Brazil
© 2019-2022 Kelly Brazil

View File

@@ -22,8 +22,10 @@ Schema:
{
"variables": [
"name": string,
"value": string
{
"name": string,
"value": string
}
],
"schedule": [
{

View File

@@ -18,8 +18,10 @@ Schema:
{
"variables": [
"name": string,
"value": string
{
"name": string,
"value": string
}
],
"schedule": [
{

View File

@@ -99,4 +99,4 @@ Returns:
## Parser Information
Compatibility: linux, darwin, cygwin, win32, aix, freebsd
Version 1.3 by Kelly Brazil (kellyjonbrazil@gmail.com)
Version 1.4 by Kelly Brazil (kellyjonbrazil@gmail.com)

View File

@@ -27,11 +27,11 @@ Schema:
{
"column_name1": string,
"column_name2": string,
"_jc_meta": # This object only exists if using -qq or ignore_exceptions=True
"_jc_meta": # This object only exists if using -qq or ignore_exceptions=True
{
"success": booean, # true if successfully parsed, false if error
"error": string, # exists if "success" is false
"line": string # exists if "success" is false
"success": boolean, # true if successfully parsed, false if error
"error": string, # exists if "success" is false
"line": string # exists if "success" is false
}
}
@@ -82,4 +82,4 @@ Returns:
## Parser Information
Compatibility: linux, darwin, cygwin, win32, aix, freebsd
Version 1.1 by Kelly Brazil (kellyjonbrazil@gmail.com)
Version 1.2 by Kelly Brazil (kellyjonbrazil@gmail.com)

View File

@@ -73,7 +73,7 @@ Schema:
"percent_wrqm": float,
"_jc_meta": # This object only exists if using -qq or ignore_exceptions=True
{
"success": booean, # true if successfully parsed, false if error
"success": boolean, # true if successfully parsed, false if error
"error": string, # exists if "success" is false
"line": string # exists if "success" is false
}

View File

@@ -0,0 +1,104 @@
[Home](https://kellyjonbrazil.github.io/jc/)
# jc.parsers.jar_manifest
jc - JSON CLI output utility `MANIFEST.MF` file parser
Usage (cli):
$ cat MANIFEST.MF | jc --jar-manifest
Usage (module):
import jc.parsers.jar_manifest
result = jc.parsers.jar_manifest.parse(jar_manifest_file_output)
Schema:
[
{
"key1": string,
"key2": string
}
]
Examples:
$ cat MANIFEST.MF | jc --jar-manifest -p
$ unzip -c apache-log4j-2.16.0-bin/log4j-core-2.16.0.jar META-INF/MANIFEST.MF | jc --jar-manifest -p
$ unzip -c 'apache-log4j-2.16.0-bin/*.jar' META-INF/MANIFEST.MF | jc --jar-manifest -p
$ cat MANIFEST.MF | jc --jar-manifest -p
[
{
"Import_Package": "com.conversantmedia.util.concurrent;resolution:=optional,com.fasterxml.jackson.annotation;version="[2.12,3)";resolution:=optional,com.fasterxml.jackson.core;version="[2.12,3)";resolution:=optional,com.fasterxml.jackson.core.type;version="[2.12,3)";resolution:=optional,com.fasterxml.jackson.cor...",
"Export_Package": "org.apache.logging.log4j.core;uses:="org.apache.logging.log4j,org.apache.logging.log4j.core.config,org.apache.logging.log4j.core.impl,org.apache.logging.log4j.core.layout,org.apache.logging.log4j.core.time,org.apache.logging.log4j.message,org.apache.logging.log4j.spi,org.apache.logging.log4j.status...",
"Manifest_Version": "1.0",
"Bundle_License": "https://www.apache.org/licenses/LICENSE-2.0.txt",
"Bundle_SymbolicName": "org.apache.logging.log4j.core",
"Built_By": "matt",
"Bnd_LastModified": "1639373735804",
"Implementation_Vendor_Id": "org.apache.logging.log4j",
"Specification_Title": "Apache Log4j Core",
"Log4jReleaseManager": "Matt Sicker",
...
}
]
$ unzip -c 'apache-log4j-2.16.0-bin/*.jar' META-INF/MANIFEST.MF | jc --jar-manifest -p
[
...
{
"Archive": "apache-log4j-2.16.0-bin/log4j-spring-boot-2.16.0-sources.jar",
"Manifest_Version": "1.0",
"Built_By": "matt",
"Created_By": "Apache Maven 3.8.4",
"Build_Jdk": "1.8.0_312"
},
{
"Archive": "apache-log4j-2.16.0-bin/log4j-spring-boot-2.16.0-javadoc.jar",
"Manifest_Version": "1.0",
"Built_By": "matt",
"Created_By": "Apache Maven 3.8.4",
"Build_Jdk": "1.8.0_312"
},
{
"Bundle_SymbolicName": "org.apache.logging.log4j.spring-cloud-config-client.logging.log4j.core.util;version="[2.16,3)",org.springframework.boot.autoconfigure.condition,org.springframework.cloud.context.environment,org.springframework.context,org.springframework.stereotype",
"Export_Package": "org.apache.logging.log4j.spring.cloud.config.controller;version="2.16.0"ient",
"Archive": "apache-log4j-2.16.0-bin/log4j-spring-cloud-config-client-2.16.0.jar",
"Manifest_Version": "1.0",
"Bundle_License": "https://www.apache.org/licenses/LICENSE-2.0.txt",
...
}
...
]
## info
```python
info()
```
Provides parser metadata (version, author, etc.)
## parse
```python
parse(data, raw=False, quiet=False)
```
Main text parsing function
Parameters:
data: (string) text data to parse
raw: (boolean) output preprocessed JSON if True
quiet: (boolean) suppress warning messages if True
Returns:
List of Dictionaries. Raw or processed structured data.
## Parser Information
Compatibility: linux, darwin, cygwin, win32, aix, freebsd
Version 0.01 by Matt J (https://github.com/listuser)

View File

@@ -39,7 +39,7 @@ Schema:
"epoch_utc": integer, # timezone aware timestamp if date field is in UTC and can be converted
"_jc_meta": # This object only exists if using -qq or ignore_exceptions=True
{
"success": booean, # true if successfully parsed, false if error
"success": boolean, # true if successfully parsed, false if error
"error": string, # exists if "success" is false
"line": string # exists if "success" is false
}

View File

@@ -46,7 +46,7 @@ Schema:
"round_trip_ms_stddev": float,
"_jc_meta": # This object only exists if using -qq or ignore_exceptions=True
{
"success": booean, # true if successfully parsed, false if error
"success": boolean, # true if successfully parsed, false if error
"error": string, # exists if "success" is false
"line": string # exists if "success" is false
}

105
docs/parsers/stat_s.md Normal file
View File

@@ -0,0 +1,105 @@
[Home](https://kellyjonbrazil.github.io/jc/)
# jc.parsers.stat_s
jc - JSON CLI output utility `stat` command output streaming parser
> This streaming parser outputs JSON Lines
The `xxx_epoch` calculated timestamp fields are naive (i.e. based on the local time of the system the parser is run on).
The `xxx_epoch_utc` calculated timestamp fields are timezone-aware and are only available if the timezone field is UTC.
Usage (cli):
$ stat * | jc --stat-s
Usage (module):
import jc.parsers.stat_s
result = jc.parsers.stat_s.parse(stat_command_output.splitlines()) # result is an iterable object
for item in result:
# do something
Schema:
{
"file": string,
"link_to" string,
"size": integer,
"blocks": integer,
"io_blocks": integer,
"type": string,
"device": string,
"inode": integer,
"links": integer,
"access": string,
"flags": string,
"uid": integer,
"user": string,
"gid": integer,
"group": string,
"access_time": string, # - = null
"access_time_epoch": integer, # naive timestamp
"access_time_epoch_utc": integer, # timezone-aware timestamp
"modify_time": string, # - = null
"modify_time_epoch": integer, # naive timestamp
"modify_time_epoch_utc": integer, # timezone-aware timestamp
"change_time": string, # - = null
"change_time_epoch": integer, # naive timestamp
"change_time_epoch_utc": integer, # timezone-aware timestamp
"birth_time": string, # - = null
"birth_time_epoch": integer, # naive timestamp
"birth_time_epoch_utc": integer, # timezone-aware timestamp
"unix_device": integer,
"rdev": integer,
"block_size": integer,
"unix_flags": string,
"_jc_meta": # This object only exists if using -qq or ignore_exceptions=True
{
"success": boolean, # true if successfully parsed, false if error
"error": string, # exists if "success" is false
"line": string # exists if "success" is false
}
}
Examples:
$ stat | jc --stat-s
{"file":"(stdin)","unix_device":1027739696,"inode":1155,"flags":"crw--w----","links":1,"user":"kbrazil","group":"tty","rdev":268435456,"size":0,"access_time":"Jan 4 15:27:44 2022","modify_time":"Jan 4 15:27:44 2022","change_time":"Jan 4 15:27:44 2022","birth_time":"Dec 31 16:00:00 1969","block_size":131072,"blocks":0,"unix_flags":"0","access_time_epoch":1641338864,"access_time_epoch_utc":null,"modify_time_epoch":1641338864,"modify_time_epoch_utc":null,"change_time_epoch":1641338864,"change_time_epoch_utc":null,"birth_time_epoch":null,"birth_time_epoch_utc":null}
$ stat | jc --stat-s -r
{"file":"(stdin)","unix_device":"1027739696","inode":"1155","flags":"crw--w----","links":"1","user":"kbrazil","group":"tty","rdev":"268435456","size":"0","access_time":"Jan 4 15:28:08 2022","modify_time":"Jan 4 15:28:08 2022","change_time":"Jan 4 15:28:08 2022","birth_time":"Dec 31 16:00:00 1969","block_size":"131072","blocks":"0","unix_flags":"0"}
## info
```python
info()
```
Provides parser metadata (version, author, etc.)
## parse
```python
parse(data, raw=False, quiet=False, ignore_exceptions=False)
```
Main text parsing generator function. Returns an iterator object.
Parameters:
data: (iterable) line-based text data to parse (e.g. sys.stdin or str.splitlines())
raw: (boolean) output preprocessed JSON if True
quiet: (boolean) suppress warning messages if True
ignore_exceptions: (boolean) ignore parsing exceptions if True
Yields:
Dictionary. Raw or processed structured data.
Returns:
Iterator object
## Parser Information
Compatibility: linux, darwin, freebsd
Version 0.5 by Kelly Brazil (kellyjonbrazil@gmail.com)

View File

@@ -63,7 +63,7 @@ Schema:
"epoch_utc": integer # aware timestamp if -t flag is used and UTC TZ
"_jc_meta": # This object only exists if using -qq or ignore_exceptions=True
{
"success": booean, # true if successfully parsed, false if error
"success": boolean, # true if successfully parsed, false if error
"error": string, # exists if "success" is false
"line": string # exists if "success" is false
}

107
docs/parsers/zipinfo.md Normal file
View File

@@ -0,0 +1,107 @@
[Home](https://kellyjonbrazil.github.io/jc/)
# jc.parsers.zipinfo
jc - JSON CLI output utility `zipinfo` command output parser
Options supported:
- none
Note: The default listing format.
Usage (cli):
$ zipinfo <archive> | jc --zipinfo
or
$ jc zipinfo
Usage (module):
import jc.parsers.zipinfo
result = jc.parsers.zipinfo.parse(zipinfo_command_output)
Schema:
[
{
"archive": string,
"size": integer,
"size_unit": string,
"number_entries": integer,
"number_files": integer,
"bytes_uncompressed": integer,
"bytes_compressed": integer,
"percent_compressed": float,
"files": [
{
"flags": string,
"zipversion": string,
"zipunder": string
"filesize": integer,
"type": string,
"method": string,
"date": string,
"time": string,
"filename": string
}
]
}
]
Examples:
$ zipinfo log4j-core-2.16.0.jar | jc --zipinfo -p
[
{
"archive": "log4j-core-2.16.0.jar",
"size": 1789565,
"size_unit": "bytes",
"number_entries": 1218,
"number_files": 1218,
"bytes_uncompressed": 3974141,
"bytes_compressed": 1515455,
"percent_compressed": 61.9,
"files": [
{
"flags": "-rw-r--r--",
"zipversion": "2.0",
"zipunder": "unx",
"filesize": 19810,
"type": "bl",
"method": "defN",
"date": "21-Dec-12",
"time": "23:35",
"filename": "META-INF/MANIFEST.MF"
},
...
## info
```python
info()
```
Provides parser metadata (version, author, etc.)
## parse
```python
parse(data, raw=False, quiet=False)
```
Main text parsing function
Parameters:
data: (string) text data to parse
raw: (boolean) output preprocessed JSON if True
quiet: (boolean) suppress warning messages if True
Returns:
List of Dictionaries. Raw or processed structured data.
## Parser Information
Compatibility: linux, darwin
Version 0.01 by Matt J (https://github.com/listuser)

View File

@@ -73,4 +73,4 @@ Module Example:
"""
name = 'jc'
__version__ = '1.17.4'
__version__ = '1.17.7'

View File

@@ -37,7 +37,7 @@ class info():
author = 'Kelly Brazil'
author_email = 'kellyjonbrazil@gmail.com'
website = 'https://github.com/kellyjonbrazil/jc'
copyright = '© 2019-2021 Kelly Brazil'
copyright = '© 2019-2022 Kelly Brazil'
license = 'MIT License'
@@ -80,6 +80,7 @@ parsers = [
'iostat-s',
'iptables',
'iw-scan',
'jar-manifest',
'jobs',
'kv',
'last',
@@ -104,6 +105,7 @@ parsers = [
'shadow',
'ss',
'stat',
'stat-s',
'sysctl',
'systemctl',
'systemctl-lj',
@@ -125,7 +127,8 @@ parsers = [
'wc',
'who',
'xml',
'yaml'
'yaml',
'zipinfo'
]
JC_ERROR_EXIT = 100

View File

@@ -19,8 +19,10 @@ Schema:
{
"variables": [
"name": string,
"value": string
{
"name": string,
"value": string
}
],
"schedule": [
{

View File

@@ -15,8 +15,10 @@ Schema:
{
"variables": [
"name": string,
"value": string
{
"name": string,
"value": string
}
],
"schedule": [
{

View File

@@ -75,7 +75,7 @@ import csv
class info():
"""Provides parser metadata (version, author, etc.)"""
version = '1.3'
version = '1.4'
description = 'CSV file parser'
author = 'Kelly Brazil'
author_email = 'kellyjonbrazil@gmail.com'
@@ -130,9 +130,11 @@ def parse(data, raw=False, quiet=False):
if jc.utils.has_data(data):
dialect = None
dialect = 'excel' # default in csv module
try:
dialect = csv.Sniffer().sniff(data[:1024])
if '""' in data:
dialect.doublequote = True
except Exception:
pass

View File

@@ -24,11 +24,11 @@ Schema:
{
"column_name1": string,
"column_name2": string,
"_jc_meta": # This object only exists if using -qq or ignore_exceptions=True
"_jc_meta": # This object only exists if using -qq or ignore_exceptions=True
{
"success": booean, # true if successfully parsed, false if error
"error": string, # exists if "success" is false
"line": string # exists if "success" is false
"success": boolean, # true if successfully parsed, false if error
"error": string, # exists if "success" is false
"line": string # exists if "success" is false
}
}
@@ -56,7 +56,7 @@ from jc.exceptions import ParseError
class info():
"""Provides parser metadata (version, author, etc.)"""
version = '1.1'
version = '1.2'
description = 'CSV file streaming parser'
author = 'Kelly Brazil'
author_email = 'kellyjonbrazil@gmail.com'
@@ -113,18 +113,20 @@ def parse(data, raw=False, quiet=False, ignore_exceptions=False):
# first, load the first 100 lines into a list to detect the CSV dialect
for line in itertools.islice(data, 100):
temp_list.append(line)
temp_list.append(line.rstrip())
# check for Python bug that does not split on `\r` newlines from sys.stdin correctly
# https://bugs.python.org/issue45617
if len(temp_list) == 1:
raise ParseError('Unable to detect line endings. Please try the non-streaming CSV parser instead.')
sniffdata = '\n'.join(temp_list)
sniffdata = '\n'.join(temp_list)[:1024]
dialect = 'excel' # default in csv module
dialect = None
try:
dialect = csv.Sniffer().sniff(sniffdata)
if '""' in sniffdata:
dialect.doublequote = True
except Exception:
pass

View File

@@ -21,7 +21,7 @@ Schema:
"foo": string,
"_jc_meta": # This object only exists if using -qq or ignore_exceptions=True
{
"success": booean, # true if successfully parsed, false if error
"success": boolean, # true if successfully parsed, false if error
"error": string, # exists if "success" is false
"line": string # exists if "success" is false
}

View File

@@ -70,7 +70,7 @@ Schema:
"percent_wrqm": float,
"_jc_meta": # This object only exists if using -qq or ignore_exceptions=True
{
"success": booean, # true if successfully parsed, false if error
"success": boolean, # true if successfully parsed, false if error
"error": string, # exists if "success" is false
"line": string # exists if "success" is false
}

215
jc/parsers/jar_manifest.py Normal file
View File

@@ -0,0 +1,215 @@
"""jc - JSON CLI output utility `MANIFEST.MF` file parser
Usage (cli):
$ cat MANIFEST.MF | jc --jar-manifest
Usage (module):
import jc.parsers.jar_manifest
result = jc.parsers.jar_manifest.parse(jar_manifest_file_output)
Schema:
[
{
"key1": string,
"key2": string
}
]
Examples:
$ cat MANIFEST.MF | jc --jar-manifest -p
$ unzip -c apache-log4j-2.16.0-bin/log4j-core-2.16.0.jar META-INF/MANIFEST.MF | jc --jar-manifest -p
$ unzip -c 'apache-log4j-2.16.0-bin/*.jar' META-INF/MANIFEST.MF | jc --jar-manifest -p
$ cat MANIFEST.MF | jc --jar-manifest -p
[
{
"Import_Package": "com.conversantmedia.util.concurrent;resolution:=optional,com.fasterxml.jackson.annotation;version=\"[2.12,3)\";resolution:=optional,com.fasterxml.jackson.core;version=\"[2.12,3)\";resolution:=optional,com.fasterxml.jackson.core.type;version=\"[2.12,3)\";resolution:=optional,com.fasterxml.jackson.cor...",
"Export_Package": "org.apache.logging.log4j.core;uses:=\"org.apache.logging.log4j,org.apache.logging.log4j.core.config,org.apache.logging.log4j.core.impl,org.apache.logging.log4j.core.layout,org.apache.logging.log4j.core.time,org.apache.logging.log4j.message,org.apache.logging.log4j.spi,org.apache.logging.log4j.status...",
"Manifest_Version": "1.0",
"Bundle_License": "https://www.apache.org/licenses/LICENSE-2.0.txt",
"Bundle_SymbolicName": "org.apache.logging.log4j.core",
"Built_By": "matt",
"Bnd_LastModified": "1639373735804",
"Implementation_Vendor_Id": "org.apache.logging.log4j",
"Specification_Title": "Apache Log4j Core",
"Log4jReleaseManager": "Matt Sicker",
...
}
]
$ unzip -c 'apache-log4j-2.16.0-bin/*.jar' META-INF/MANIFEST.MF | jc --jar-manifest -p
[
...
{
"Archive": "apache-log4j-2.16.0-bin/log4j-spring-boot-2.16.0-sources.jar",
"Manifest_Version": "1.0",
"Built_By": "matt",
"Created_By": "Apache Maven 3.8.4",
"Build_Jdk": "1.8.0_312"
},
{
"Archive": "apache-log4j-2.16.0-bin/log4j-spring-boot-2.16.0-javadoc.jar",
"Manifest_Version": "1.0",
"Built_By": "matt",
"Created_By": "Apache Maven 3.8.4",
"Build_Jdk": "1.8.0_312"
},
{
"Bundle_SymbolicName": "org.apache.logging.log4j.spring-cloud-config-client.logging.log4j.core.util;version=\"[2.16,3)\",org.springframework.boot.autoconfigure.condition,org.springframework.cloud.context.environment,org.springframework.context,org.springframework.stereotype",
"Export_Package": "org.apache.logging.log4j.spring.cloud.config.controller;version=\"2.16.0\"ient",
"Archive": "apache-log4j-2.16.0-bin/log4j-spring-cloud-config-client-2.16.0.jar",
"Manifest_Version": "1.0",
"Bundle_License": "https://www.apache.org/licenses/LICENSE-2.0.txt",
...
}
...
]
"""
import jc.utils
import re
class info():
    """Provides parser metadata (version, author, etc.)"""
    version = '0.01'  # initial contributed version of this parser
    description = 'MANIFEST.MF file parser'
    author = 'Matt J'
    # NOTE(review): this field holds a GitHub profile URL, not an email
    # address — confirm whether jc tooling displays it verbatim.
    author_email = 'https://github.com/listuser'
    # platforms on which this parser's output has been (or is expected to be) valid
    compatible = ['linux', 'darwin', 'cygwin', 'win32', 'aix', 'freebsd']


__version__ = info.version
def _process(proc_data):
"""
Final processing to conform to the schema.
Parameters:
proc_data: (List of Dictionaries) raw structured data to process
Returns:
List of Dictionaries. Structured data to conform to the schema.
"""
return proc_data
def parse(data, raw=False, quiet=False):
    """
    Main text parsing function

    Parses one or more MANIFEST.MF documents. Multiple manifests (e.g. from
    `unzip -c '*.jar' META-INF/MANIFEST.MF`) are split on blank lines, and
    lines beginning with whitespace are treated as continuations of the
    previous key's value.

    Parameters:

        data:        (string)  text data to parse
        raw:         (boolean) output preprocessed JSON if True
        quiet:       (boolean) suppress warning messages if True

    Returns:

        List of Dictionaries. Raw or processed structured data.
    """
    jc.utils.compatibility(__name__, info.compatible, quiet)
    jc.utils.input_type_check(data)

    raw_output = []
    archives = []

    if jc.utils.has_data(data):
        datalines = data.splitlines()

        # remove last line of multi-archive output since it is not needed
        if datalines[-1].endswith('archives were successfully processed.'):
            datalines.pop(-1)

        # extract each archive into its own list of lines.
        # archives are separated by a blank line
        this_archive = []
        for row in datalines:
            if row == '':
                archives.append(this_archive)
                this_archive = []
                continue
            this_archive.append(row)

        # flush the final archive (input need not end with a blank line)
        if this_archive:
            archives.append(this_archive)

        # iterate through list of archives and parse
        for archive_item in archives:
            manifests = []       # one single-key dict per completed key/value pair
            this_manifest = {}   # key/value pair currently being concatenated
            plines = []          # indices of lines consumed by multiline handling

            for i, line in enumerate(archive_item):
                # NOTE(review): recomputed every iteration, and archive_item
                # is mutated by the pop() below, so "last" can shift mid-loop.
                last = archive_item[-1]

                # remove line since it is not needed and starts with "space"
                # NOTE(review): popping from the list being enumerated skips
                # the element that slides into position i — verify an
                # "inflating:" line is never immediately followed by another
                # line that must not be skipped.
                if (re.match(r'^\s+inflating\s*:\s*META-INF/MANIFEST.MF', line, re.IGNORECASE)):
                    archive_item.pop(i)
                    continue

                # if line starts with "space"
                # begin key multiline value pair concatenation
                if (re.match(r'\s', line)):
                    # expectation is this "if" sets a key once
                    if (not this_manifest):
                        # previous line contains a key
                        k, v = archive_item[i - 1].split(":", maxsplit=1)
                        v = v + line
                        v = re.sub(r'\s', '', v)   # strip ALL whitespace from the joined value
                        this_manifest = {k: v}
                        plines.append(i - 1)       # mark the key line for later removal
                        plines.append(i)           # mark this continuation line too
                    # continue key multiline value pair concatenation
                    else:
                        plines.append(i)
                        linecmp = line             # keep original line for identity check below
                        for k, v in this_manifest.items():
                            line = v + line
                            line = re.sub(r'\s', '', line)
                            this_manifest.update({k:line})
                        # NOTE(review): "is" identity comparison against the
                        # (possibly shifted) last element — confirm this can't
                        # false-negative on duplicate line content.
                        if linecmp is not last:
                            nextline = archive_item[i + 1]
                            # if next line starts with not "space",
                            # end key multiline value pair concatenation
                            if (re.match(r'\S', nextline)):
                                manifests.append(this_manifest)
                                # False acts as a "no pair in progress" sentinel;
                                # both {} and False are falsy for the check above
                                this_manifest = False
                        else:
                            # the continuation is the final line of the archive
                            manifests.append(this_manifest)

            # pop key multiline value pair lines
            # (reversed so earlier indices stay valid while popping)
            if plines:
                for p in reversed(plines):
                    archive_item.pop(p)

            # all other key value pairs (remaining lines are single-line "Key: value")
            for i, line in enumerate(archive_item):
                k, v = line.split(":", maxsplit=1)
                v = v.strip()
                manifests.append({k: v})

            # merge the per-pair dicts into one dict per archive,
            # normalizing keys: strip whitespace, dash -> underscore
            if manifests:
                this_manifest = {}
                for d in manifests:
                    for k, v in d.items():
                        k = re.sub(r'\s', '', k)
                        k = re.sub(r'-', '_', k)
                        this_manifest.update({k: v})
                raw_output.append(this_manifest)

    return raw_output if raw else _process(raw_output)

View File

@@ -36,7 +36,7 @@ Schema:
"epoch_utc": integer, # timezone aware timestamp if date field is in UTC and can be converted
"_jc_meta": # This object only exists if using -qq or ignore_exceptions=True
{
"success": booean, # true if successfully parsed, false if error
"success": boolean, # true if successfully parsed, false if error
"error": string, # exists if "success" is false
"line": string # exists if "success" is false
}

View File

@@ -43,7 +43,7 @@ Schema:
"round_trip_ms_stddev": float,
"_jc_meta": # This object only exists if using -qq or ignore_exceptions=True
{
"success": booean, # true if successfully parsed, false if error
"success": boolean, # true if successfully parsed, false if error
"error": string, # exists if "success" is false
"line": string # exists if "success" is false
}

280
jc/parsers/stat_s.py Normal file
View File

@@ -0,0 +1,280 @@
"""jc - JSON CLI output utility `stat` command output streaming parser
> This streaming parser outputs JSON Lines
The `xxx_epoch` calculated timestamp fields are naive (i.e. based on the local time of the system the parser is run on).
The `xxx_epoch_utc` calculated timestamp fields are timezone-aware and are only available if the timezone field is UTC.
Usage (cli):
$ stat * | jc --stat-s
Usage (module):
import jc.parsers.stat_s
result = jc.parsers.stat_s.parse(stat_command_output.splitlines()) # result is an iterable object
for item in result:
# do something
Schema:
{
"file": string,
"link_to" string,
"size": integer,
"blocks": integer,
"io_blocks": integer,
"type": string,
"device": string,
"inode": integer,
"links": integer,
"access": string,
"flags": string,
"uid": integer,
"user": string,
"gid": integer,
"group": string,
"access_time": string, # - = null
"access_time_epoch": integer, # naive timestamp
"access_time_epoch_utc": integer, # timezone-aware timestamp
"modify_time": string, # - = null
"modify_time_epoch": integer, # naive timestamp
"modify_time_epoch_utc": integer, # timezone-aware timestamp
"change_time": string, # - = null
"change_time_epoch": integer, # naive timestamp
"change_time_epoch_utc": integer, # timezone-aware timestamp
"birth_time": string, # - = null
"birth_time_epoch": integer, # naive timestamp
"birth_time_epoch_utc": integer, # timezone-aware timestamp
"unix_device": integer,
"rdev": integer,
"block_size": integer,
"unix_flags": string,
"_jc_meta": # This object only exists if using -qq or ignore_exceptions=True
{
"success": boolean, # true if successfully parsed, false if error
"error": string, # exists if "success" is false
"line": string # exists if "success" is false
}
}
Examples:
$ stat | jc --stat-s
{"file":"(stdin)","unix_device":1027739696,"inode":1155,"flags":"crw--w----","links":1,"user":"kbrazil","group":"tty","rdev":268435456,"size":0,"access_time":"Jan 4 15:27:44 2022","modify_time":"Jan 4 15:27:44 2022","change_time":"Jan 4 15:27:44 2022","birth_time":"Dec 31 16:00:00 1969","block_size":131072,"blocks":0,"unix_flags":"0","access_time_epoch":1641338864,"access_time_epoch_utc":null,"modify_time_epoch":1641338864,"modify_time_epoch_utc":null,"change_time_epoch":1641338864,"change_time_epoch_utc":null,"birth_time_epoch":null,"birth_time_epoch_utc":null}
$ stat | jc --stat-s -r
{"file":"(stdin)","unix_device":"1027739696","inode":"1155","flags":"crw--w----","links":"1","user":"kbrazil","group":"tty","rdev":"268435456","size":"0","access_time":"Jan 4 15:28:08 2022","modify_time":"Jan 4 15:28:08 2022","change_time":"Jan 4 15:28:08 2022","birth_time":"Dec 31 16:00:00 1969","block_size":"131072","blocks":"0","unix_flags":"0"}
"""
import shlex
import jc.utils
from jc.utils import stream_success, stream_error
from jc.exceptions import ParseError
class info():
    """Provides parser metadata (version, author, etc.)"""
    version = '0.5'
    description = '`stat` command streaming parser'
    author = 'Kelly Brazil'
    author_email = 'kellyjonbrazil@gmail.com'
    # platforms this parser is known to work on
    compatible = ['linux', 'darwin', 'freebsd']
    # marks this parser as a streaming parser (processes input line by line)
    streaming = True
__version__ = info.version
def _process(proc_data):
    """
    Final processing to conform to the schema.

    Parameters:

        proc_data:   (Dictionary) raw structured data to process

    Returns:

        Dictionary. Structured data to conform to the schema.
    """
    int_fields = {'size', 'blocks', 'io_blocks', 'inode', 'links', 'uid', 'gid',
                  'unix_device', 'rdev', 'block_size'}
    time_fields = ('access_time', 'modify_time', 'change_time', 'birth_time')

    # convert whichever integer fields are present in this entry
    for field in int_fields.intersection(proc_data):
        proc_data[field] = jc.utils.convert_to_int(proc_data[field])

    # turn - into null for time fields and add calculated timestamp fields
    for field in time_fields:
        if field not in proc_data:
            continue
        if proc_data[field] == '-':
            proc_data[field] = None
        ts = jc.utils.timestamp(proc_data[field])
        proc_data[field + '_epoch'] = ts.naive
        proc_data[field + '_epoch_utc'] = ts.utc

    return proc_data
def parse(data, raw=False, quiet=False, ignore_exceptions=False):
    """
    Main text parsing generator function. Returns an iterator object.

    Parameters:

        data:              (iterable)  line-based text data to parse
                                       (e.g. sys.stdin or str.splitlines())
        raw:               (boolean)   output preprocessed JSON if True
        quiet:             (boolean)   suppress warning messages if True
        ignore_exceptions: (boolean)   ignore parsing exceptions if True

    Yields:

        Dictionary. Raw or processed structured data.

    Returns:

        Iterator object
    """
    jc.utils.compatibility(__name__, info.compatible, quiet)
    jc.utils.streaming_input_type_check(data)

    # accumulator for one file's fields (linux output spans multiple lines)
    output_line = {}
    # set to 'linux' once a linux-style "File:" header line is seen;
    # any other input is treated as single-line FreeBSD/OSX output
    os_type = ''

    for line in data:
        try:
            jc.utils.streaming_line_input_type_check(line)
            line = line.rstrip()

            # ignore blank lines
            if line == '':
                continue

            # linux output
            if line.startswith(' File: '):
                os_type = 'linux'

            if os_type == 'linux':
                # stats output contains 9 lines
                # line #1
                if line.startswith(' File: '):
                    # a new "File:" line begins the next entry, so emit the
                    # previously accumulated entry (if any) before resetting
                    if output_line:
                        yield stream_success(output_line, ignore_exceptions) if raw else stream_success(_process(output_line), ignore_exceptions)
                    output_line = {}
                    line_list = line.split(maxsplit=1)
                    output_line['file'] = line_list[1]

                    # populate link_to field if -> found
                    # (strip the curly quote characters \u2018/\u2019 that
                    # stat wraps around filenames)
                    if ' -> ' in output_line['file']:
                        filename = output_line['file'].split(' -> ')[0].strip('\u2018').rstrip('\u2019')
                        link = output_line['file'].split(' -> ')[1].strip('\u2018').rstrip('\u2019')
                        output_line['file'] = filename
                        output_line['link_to'] = link
                    else:
                        filename = output_line['file'].split(' -> ')[0].strip('\u2018').rstrip('\u2019')
                        output_line['file'] = filename
                    continue

                # line #2
                if line.startswith(' Size: '):
                    line_list = line.split(maxsplit=7)
                    output_line['size'] = line_list[1]
                    output_line['blocks'] = line_list[3]
                    output_line['io_blocks'] = line_list[6]
                    output_line['type'] = line_list[7]
                    continue

                # line #3
                if line.startswith('Device: '):
                    line_list = line.split()
                    output_line['device'] = line_list[1]
                    output_line['inode'] = line_list[3]
                    output_line['links'] = line_list[5]
                    continue

                # line #4
                # parens and slashes separate the mode/uid/gid fields, so
                # flatten them to spaces before splitting
                if line.startswith('Access: ('):
                    line = line.replace('(', ' ').replace(')', ' ').replace('/', ' ')
                    line_list = line.split()
                    output_line['access'] = line_list[1]
                    output_line['flags'] = line_list[2]
                    output_line['uid'] = line_list[4]
                    output_line['user'] = line_list[5]
                    output_line['gid'] = line_list[7]
                    output_line['group'] = line_list[8]
                    continue

                # line #5
                # not implemented
                if line.startswith('Context: '):
                    continue

                # line #6
                # the "2" distinguishes the access timestamp line from the
                # "Access: (" permissions line handled above
                if line.startswith('Access: 2'):
                    line_list = line.split(maxsplit=1)
                    output_line['access_time'] = line_list[1]
                    continue

                # line #7
                if line.startswith('Modify: '):
                    line_list = line.split(maxsplit=1)
                    output_line['modify_time'] = line_list[1]
                    continue

                # line #8
                if line.startswith('Change: '):
                    line_list = line.split(maxsplit=1)
                    output_line['change_time'] = line_list[1]
                    continue

                # line #9
                if line.startswith(' Birth: '):
                    line_list = line.split(maxsplit=1)
                    output_line['birth_time'] = line_list[1]
                    continue

                # catch non-stat data
                raise ParseError('Not stat data')

            # FreeBSD/OSX output (one complete entry per line)
            if os_type != 'linux':
                value = shlex.split(line)

                # sanity check: the first two fields (unix_device and inode)
                # must be numeric or this is not stat data
                if not value[0].isdigit() or not value[1].isdigit():
                    raise ParseError('Not stat data')

                # filename is everything from field 15 on (re-joined since
                # shlex.split breaks names containing spaces into tokens)
                output_line = {
                    'file': ' '.join(value[15:]),
                    'unix_device': value[0],
                    'inode': value[1],
                    'flags': value[2],
                    'links': value[3],
                    'user': value[4],
                    'group': value[5],
                    'rdev': value[6],
                    'size': value[7],
                    'access_time': value[8],
                    'modify_time': value[9],
                    'change_time': value[10],
                    'birth_time': value[11],
                    'block_size': value[12],
                    'blocks': value[13],
                    'unix_flags': value[14]
                }

                # single-line format is complete - emit immediately
                if output_line:
                    yield stream_success(output_line, ignore_exceptions) if raw else stream_success(_process(output_line), ignore_exceptions)
                    output_line = {}

        except Exception as e:
            yield stream_error(e, ignore_exceptions, line)
            output_line = {}

    # gather final item (linux entries are only emitted when the next
    # "File:" header arrives, so the last one is still pending here)
    if output_line:
        try:
            yield stream_success(output_line, ignore_exceptions) if raw else stream_success(_process(output_line), ignore_exceptions)
        except Exception as e:
            yield stream_error(e, ignore_exceptions, line)

View File

@@ -60,7 +60,7 @@ Schema:
"epoch_utc": integer # aware timestamp if -t flag is used and UTC TZ
"_jc_meta": # This object only exists if using -qq or ignore_exceptions=True
{
"success": booean, # true if successfully parsed, false if error
"success": boolean, # true if successfully parsed, false if error
"error": string, # exists if "success" is false
"line": string # exists if "success" is false
}

206
jc/parsers/zipinfo.py Normal file
View File

@@ -0,0 +1,206 @@
"""jc - JSON CLI output utility `zipinfo` command output parser
Options supported:
- none
Note: The default listing format.
Usage (cli):
$ zipinfo <archive> | jc --zipinfo
or
$ jc zipinfo
Usage (module):
import jc.parsers.zipinfo
result = jc.parsers.zipinfo.parse(zipinfo_command_output)
Schema:
[
{
"archive": string,
"size": integer,
"size_unit": string,
"number_entries": integer,
"number_files": integer,
"bytes_uncompressed": integer,
"bytes_compressed": integer,
"percent_compressed": float,
"files": [
{
"flags": string,
"zipversion": string,
"zipunder": string
"filesize": integer,
"type": string,
"method": string,
"date": string,
"time": string,
"filename": string
}
]
}
]
Examples:
$ zipinfo log4j-core-2.16.0.jar | jc --zipinfo -p
[
{
"archive": "log4j-core-2.16.0.jar",
"size": 1789565,
"size_unit": "bytes",
"number_entries": 1218,
"number_files": 1218,
"bytes_uncompressed": 3974141,
"bytes_compressed": 1515455,
"percent_compressed": 61.9,
"files": [
{
"flags": "-rw-r--r--",
"zipversion": "2.0",
"zipunder": "unx",
"filesize": 19810,
"type": "bl",
"method": "defN",
"date": "21-Dec-12",
"time": "23:35",
"filename": "META-INF/MANIFEST.MF"
},
...
"""
import jc.utils
import jc.parsers.universal
class info():
    """Provides parser metadata (version, author, etc.)"""
    version = '0.01'
    description = '`zipinfo` command parser'
    author = 'Matt J'
    author_email = 'https://github.com/listuser'
    # platforms this parser is known to work on
    compatible = ['linux', 'darwin']
    # command names that invoke this parser via `jc <command>` syntax
    magic_commands = ['zipinfo']
__version__ = info.version
def _process(proc_data):
    """
    Final processing to conform to the schema.

    Parameters:

        proc_data:   (List of Dictionaries) raw structured data to process

    Returns:

        List of Dictionaries. Structured data to conform to the schema.
    """
    # conversion targets are the same for every archive, so define them once
    int_list = ['bytes_compressed', 'bytes_uncompressed', 'number_entries',
                'number_files', 'size', 'filesize']
    float_list = ['percent_compressed']

    for entry in proc_data:
        for key in entry:
            if key in int_list:
                entry[key] = jc.utils.convert_to_int(entry[key])
            if key in float_list:
                entry[key] = jc.utils.convert_to_float(entry[key])
            # descend into the per-file table and convert its numeric fields
            if 'files' in key:
                for file_entry in entry['files']:
                    for file_key in file_entry:
                        if file_key in int_list:
                            file_entry[file_key] = jc.utils.convert_to_int(file_entry[file_key])

    return proc_data
def parse(data, raw=False, quiet=False):
    """
    Main text parsing function

    Parameters:

        data:        (string)  text data to parse
        raw:         (boolean) output preprocessed JSON if True
        quiet:       (boolean) suppress warning messages if True

    Returns:

        List of Dictionaries. Raw or processed structured data.
    """
    jc.utils.compatibility(__name__, info.compatible, quiet)
    jc.utils.input_type_check(data)

    raw_output = []
    archives = []

    if jc.utils.has_data(data):
        datalines = data.splitlines()

        # remove last line of multi-archive output since it is not needed
        if datalines[-1].endswith('archives were successfully processed.'):
            datalines.pop(-1)

        # extract each archive into its own list of lines.
        # archives are separated by a blank line
        this_archive = []
        for row in datalines:
            if row == '':
                archives.append(this_archive)
                this_archive = []
                continue
            this_archive.append(row)

        if this_archive:
            archives.append(this_archive)

        # iterate through list of archives and parse
        for archive_item in archives:
            # 1st line
            # Archive: log4j-core-2.16.0.jar
            # split only on the first whitespace run so archive filenames
            # containing spaces are preserved intact
            line = archive_item.pop(0)
            _, archive = line.split(maxsplit=1)

            # 2nd line
            # Zip file size: 1789565 bytes, number of entries: 1218
            line = archive_item.pop(0)
            _, _, _, size, size_unit, *_, number_entries = line.split()
            size_unit = size_unit.rstrip(',')

            # last line
            # 1218 files, 3974141 bytes uncompressed, 1515455 bytes compressed: 61.9%
            line = archive_item.pop(-1)
            number_files, _, bytes_uncompressed, _, _, bytes_compressed, *_, percent_compressed = line.split()
            percent_compressed = percent_compressed.rstrip("%")

            # Add header row so the remaining lines parse as a simple table
            archive_item[:0] = ['flags zipversion zipunder filesize type method date time filename']
            file_list = jc.parsers.universal.simple_table_parse(archive_item)

            raw_output.append({
                'archive': archive,
                'size': size,
                'size_unit': size_unit,
                'number_entries': number_entries,
                'number_files': number_files,
                'bytes_uncompressed': bytes_uncompressed,
                'bytes_compressed': bytes_compressed,
                'percent_compressed': percent_compressed,
                'files': file_list
            })

    return raw_output if raw else _process(raw_output)

View File

@@ -1,4 +1,4 @@
.TH jc 1 2021-12-08 1.17.4 "JSON CLI output utility"
.TH jc 1 2022-01-14 1.17.7 "JSON CLI output utility"
.SH NAME
jc \- JSONifies the output of many CLI tools and file-types
.SH SYNOPSIS
@@ -197,6 +197,11 @@ INI file parser
\fB--iw-scan\fP
`iw dev [device] scan` command parser
.TP
.B
\fB--jar-manifest\fP
MANIFEST.MF file parser
.TP
.B
\fB--jobs\fP
@@ -317,6 +322,11 @@ Key/Value file parser
\fB--stat\fP
`stat` command parser
.TP
.B
\fB--stat-s\fP
`stat` command streaming parser
.TP
.B
\fB--sysctl\fP
@@ -427,6 +437,11 @@ XML file parser
\fB--yaml\fP
YAML file parser
.TP
.B
\fB--zipinfo\fP
`zipinfo` command parser
.RE
.PP
@@ -634,6 +649,6 @@ Kelly Brazil (kellyjonbrazil@gmail.com)
https://github.com/kellyjonbrazil/jc
.SH COPYRIGHT
Copyright (c) 2019-2021 Kelly Brazil
Copyright (c) 2019-2022 Kelly Brazil
License: MIT License

View File

@@ -5,7 +5,7 @@ with open('README.md', 'r') as f:
setuptools.setup(
name='jc',
version='1.17.4',
version='1.17.7',
author='Kelly Brazil',
author_email='kellyjonbrazil@gmail.com',
description='Converts the output of popular command-line tools and file-types to JSON.',

View File

@@ -91,7 +91,10 @@ pip3 install jc
| FreeBSD | `portsnap fetch update && cd /usr/ports/textproc/py-jc && make install clean` |
| Ansible filter plugin | `ansible-galaxy collection install community.general` |
> For more packages and binaries, see the [jc packaging](https://kellyjonbrazil.github.io/jc-packaging/) site.
> For more OS Packages, see https://repology.org/project/jc/versions.
### Binaries and Packages
For precompiled binaries and packages, see [Releases](https://github.com/kellyjonbrazil/jc/releases) on Github.
## Usage
`jc` accepts piped input from `STDIN` and outputs a JSON representation of the previous command's output to `STDOUT`.

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1 @@
[{"A":"1","B":"this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this"},{"A":"2","B":"this is a field with \" in it\""}]

View File

@@ -0,0 +1,3 @@
A,B
1,"this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this"
2,"this is a field with "" in it"
1 A B
2 1 this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this
3 2 this is a field with " in it

View File

@@ -0,0 +1,4 @@
[
{"A": "1", "B": "this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this is 1024 bytes long field this"},
{"A": "2", "B": "this is a field with \" in it"}
]

View File

@@ -0,0 +1 @@
[{"file":"file name with spaces.txt","unix_device":16777220,"inode":161929661,"flags":"-rw-r--r--","links":1,"user":"kbrazil","group":"staff","rdev":0,"size":0,"access_time":"Aug 13 15:03:52 2021","modify_time":"Aug 13 14:37:03 2021","change_time":"Aug 13 14:37:03 2021","birth_time":"Aug 13 14:37:03 2021","block_size":4096,"blocks":0,"unix_flags":"0","access_time_epoch":1628892232,"access_time_epoch_utc":null,"modify_time_epoch":1628890623,"modify_time_epoch_utc":null,"change_time_epoch":1628890623,"change_time_epoch_utc":null,"birth_time_epoch":1628890623,"birth_time_epoch_utc":null}]

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1 @@
[{"archive":"jc1.zip","size":4116,"size_unit":"bytes","number_entries":1,"number_files":1,"bytes_uncompressed":11837,"bytes_compressed":3966,"percent_compressed":66.5,"files":[{"flags":"-rw-r--r--","zipversion":"2.1","zipunder":"unx","filesize":11837,"type":"bX","method":"defN","date":"21-Dec-08","time":"20:50","filename":"jc.1"}]},{"archive":"testzip.zip","size":8106,"size_unit":"bytes","number_entries":2,"number_files":2,"bytes_uncompressed":8539,"bytes_compressed":7651,"percent_compressed":10.4,"files":[{"flags":"-rw-r--r--","zipversion":"3.0","zipunder":"unx","filesize":197,"type":"tx","method":"defN","date":"21-Aug-03","time":"15:12","filename":"round-table.gv"},{"flags":"-rw-r--r--","zipversion":"3.0","zipunder":"unx","filesize":8342,"type":"bx","method":"defN","date":"21-Aug-03","time":"15:12","filename":"round-table.gv.pdf"}]},{"archive":"micro.zip","size":6144,"size_unit":"bytes","number_entries":8,"number_files":8,"bytes_uncompressed":22839,"bytes_compressed":4908,"percent_compressed":78.5,"files":[{"flags":"-rw-r--r--","zipversion":"2.1","zipunder":"unx","filesize":10688,"type":"bX","method":"defN","date":"19-Sep-30","time":"16:47","filename":"microsimservermac.py"},{"flags":"drwxrwxr-x","zipversion":"2.1","zipunder":"unx","filesize":0,"type":"bx","method":"stor","date":"21-Dec-20","time":"14:33","filename":"__MACOSX/"},{"flags":"-rw-r--r--","zipversion":"2.1","zipunder":"unx","filesize":176,"type":"bX","method":"defN","date":"19-Sep-30","time":"16:47","filename":"__MACOSX/._microsimservermac.py"},{"flags":"-rw-r--r--","zipversion":"2.1","zipunder":"unx","filesize":528,"type":"bX","method":"defN","date":"19-Aug-27","time":"07:46","filename":"Dockerfile"},{"flags":"-rw-r--r--","zipversion":"2.1","zipunder":"unx","filesize":10538,"type":"bX","method":"defN","date":"19-Oct-01","time":"13:22","filename":"microsimserver.py"},{"flags":"-rw-r--r--","zipversion":"2.1","zipunder":"unx","filesize":380,"type":"bX","method":"defN","date":"19-Oct-01","time":"13:22","f
ilename":"changelog.txt"},{"flags":"-rwxr-xr-x","zipversion":"2.1","zipunder":"unx","filesize":263,"type":"bX","method":"defN","date":"19-Oct-01","time":"12:09","filename":"dockerhub.sh"},{"flags":"-rw-r--r--","zipversion":"2.1","zipunder":"unx","filesize":266,"type":"bX","method":"defN","date":"19-Oct-01","time":"12:09","filename":"__MACOSX/._dockerhub.sh"}]}]

View File

@@ -0,0 +1,24 @@
Archive: jc1.zip
Zip file size: 4116 bytes, number of entries: 1
-rw-r--r-- 2.1 unx 11837 bX defN 21-Dec-08 20:50 jc.1
1 file, 11837 bytes uncompressed, 3966 bytes compressed: 66.5%
Archive: testzip.zip
Zip file size: 8106 bytes, number of entries: 2
-rw-r--r-- 3.0 unx 197 tx defN 21-Aug-03 15:12 round-table.gv
-rw-r--r-- 3.0 unx 8342 bx defN 21-Aug-03 15:12 round-table.gv.pdf
2 files, 8539 bytes uncompressed, 7651 bytes compressed: 10.4%
Archive: micro.zip
Zip file size: 6144 bytes, number of entries: 8
-rw-r--r-- 2.1 unx 10688 bX defN 19-Sep-30 16:47 microsimservermac.py
drwxrwxr-x 2.1 unx 0 bx stor 21-Dec-20 14:33 __MACOSX/
-rw-r--r-- 2.1 unx 176 bX defN 19-Sep-30 16:47 __MACOSX/._microsimservermac.py
-rw-r--r-- 2.1 unx 528 bX defN 19-Aug-27 07:46 Dockerfile
-rw-r--r-- 2.1 unx 10538 bX defN 19-Oct-01 13:22 microsimserver.py
-rw-r--r-- 2.1 unx 380 bX defN 19-Oct-01 13:22 changelog.txt
-rwxr-xr-x 2.1 unx 263 bX defN 19-Oct-01 12:09 dockerhub.sh
-rw-r--r-- 2.1 unx 266 bX defN 19-Oct-01 12:09 __MACOSX/._dockerhub.sh
8 files, 22839 bytes uncompressed, 4908 bytes compressed: 78.5%
3 archives were successfully processed.

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large Load Diff

File diff suppressed because one or more lines are too long

292
tests/fixtures/rhel-8/MANIFEST.MF.out vendored Normal file
View File

@@ -0,0 +1,292 @@
Manifest-Version: 1.0
Bundle-License: https://www.apache.org/licenses/LICENSE-2.0.txt
Bundle-SymbolicName: org.apache.logging.log4j.core
Built-By: rgoers
Bnd-LastModified: 1639792304782
Implementation-Vendor-Id: org.apache.logging.log4j
Specification-Title: Apache Log4j Core
Log4jReleaseManager: Ralph Goers
Bundle-DocURL: https://www.apache.org/
Import-Package: com.conversantmedia.util.concurrent;resolution:=option
al,com.fasterxml.jackson.annotation;version="[2.12,3)";resolution:=op
tional,com.fasterxml.jackson.core;version="[2.12,3)";resolution:=opti
onal,com.fasterxml.jackson.core.type;version="[2.12,3)";resolution:=o
ptional,com.fasterxml.jackson.core.util;version="[2.12,3)";resolution
:=optional,com.fasterxml.jackson.databind;version="[2.12,3)";resoluti
on:=optional,com.fasterxml.jackson.databind.annotation;version="[2.12
,3)";resolution:=optional,com.fasterxml.jackson.databind.deser.std;ve
rsion="[2.12,3)";resolution:=optional,com.fasterxml.jackson.databind.
module;version="[2.12,3)";resolution:=optional,com.fasterxml.jackson.
databind.node;version="[2.12,3)";resolution:=optional,com.fasterxml.j
ackson.databind.ser;version="[2.12,3)";resolution:=optional,com.faste
rxml.jackson.databind.ser.impl;version="[2.12,3)";resolution:=optiona
l,com.fasterxml.jackson.databind.ser.std;version="[2.12,3)";resolutio
n:=optional,com.fasterxml.jackson.dataformat.xml;version="[2.12,3)";r
esolution:=optional,com.fasterxml.jackson.dataformat.xml.annotation;v
ersion="[2.12,3)";resolution:=optional,com.fasterxml.jackson.dataform
at.xml.util;version="[2.12,3)";resolution:=optional,com.fasterxml.jac
kson.dataformat.yaml;version="[2.12,3)";resolution:=optional,com.lmax
.disruptor;version="[3.4,4)";resolution:=optional,com.lmax.disruptor.
dsl;version="[3.4,4)";resolution:=optional,javax.activation;version="
[1.2,2)";resolution:=optional,javax.annotation.processing,javax.crypt
o,javax.jms;version="[1.1,2)";resolution:=optional,javax.lang.model,j
avax.lang.model.element,javax.lang.model.util,javax.mail;version="[1.
6,2)";resolution:=optional,javax.mail.internet;version="[1.6,2)";reso
lution:=optional,javax.mail.util;version="[1.6,2)";resolution:=option
al,javax.management,javax.naming,javax.net,javax.net.ssl,javax.script
,javax.sql,javax.tools,javax.xml.parsers,javax.xml.stream,javax.xml.t
ransform,javax.xml.transform.stream,javax.xml.validation,org.apache.c
ommons.compress.compressors;version="[1.21,2)";resolution:=optional,o
rg.apache.commons.compress.utils;version="[1.21,2)";resolution:=optio
nal,org.apache.commons.csv;version="[1.9,2)";resolution:=optional,org
.apache.kafka.clients.producer;resolution:=optional,org.apache.loggin
g.log4j;version="[2.17,3)",org.apache.logging.log4j.core,org.apache.l
ogging.log4j.core.appender,org.apache.logging.log4j.core.appender.db,
org.apache.logging.log4j.core.appender.rewrite,org.apache.logging.log
4j.core.appender.rolling,org.apache.logging.log4j.core.appender.rolli
ng.action,org.apache.logging.log4j.core.async,org.apache.logging.log4
j.core.config,org.apache.logging.log4j.core.config.arbiters,org.apach
e.logging.log4j.core.config.builder.api,org.apache.logging.log4j.core
.config.builder.impl,org.apache.logging.log4j.core.config.composite,o
rg.apache.logging.log4j.core.config.json,org.apache.logging.log4j.cor
e.config.plugins,org.apache.logging.log4j.core.config.plugins.convert
,org.apache.logging.log4j.core.config.plugins.processor,org.apache.lo
gging.log4j.core.config.plugins.util,org.apache.logging.log4j.core.co
nfig.plugins.validation,org.apache.logging.log4j.core.config.plugins.
validation.constraints,org.apache.logging.log4j.core.config.plugins.v
alidation.validators,org.apache.logging.log4j.core.config.plugins.vis
itors,org.apache.logging.log4j.core.config.status,org.apache.logging.
log4j.core.filter,org.apache.logging.log4j.core.impl,org.apache.loggi
ng.log4j.core.jackson,org.apache.logging.log4j.core.jmx,org.apache.lo
gging.log4j.core.layout,org.apache.logging.log4j.core.layout.internal
,org.apache.logging.log4j.core.lookup,org.apache.logging.log4j.core.n
et,org.apache.logging.log4j.core.net.ssl,org.apache.logging.log4j.cor
e.pattern,org.apache.logging.log4j.core.script,org.apache.logging.log
4j.core.selector,org.apache.logging.log4j.core.time,org.apache.loggin
g.log4j.core.tools.picocli,org.apache.logging.log4j.core.util,org.apa
che.logging.log4j.core.util.datetime,org.apache.logging.log4j.message
;version="[2.17,3)",org.apache.logging.log4j.spi;version="[2.17,3)",o
rg.apache.logging.log4j.status;version="[2.17,3)",org.apache.logging.
log4j.util;version="[2.17,3)",org.codehaus.stax2;version="[4.2,5)";re
solution:=optional,org.fusesource.jansi;version="[2.3,3)";resolution:
=optional,org.jctools.queues;resolution:=optional,org.osgi.framework;
version="[1.6,2)",org.osgi.framework.wiring;version="[1.0,2)",org.w3c
.dom,org.xml.sax,org.zeromq;version="[0.4,1)";resolution:=optional,su
n.reflect;resolution:=optional
Require-Capability: osgi.ee;filter:="(&(osgi.ee=JavaSE)(version=1.8))"
Export-Package: org.apache.logging.log4j.core;uses:="org.apache.loggin
g.log4j,org.apache.logging.log4j.core.config,org.apache.logging.log4j
.core.impl,org.apache.logging.log4j.core.layout,org.apache.logging.lo
g4j.core.time,org.apache.logging.log4j.message,org.apache.logging.log
4j.spi,org.apache.logging.log4j.status,org.apache.logging.log4j.util"
;version="2.17.0",org.apache.logging.log4j.core.appender;uses:="org.a
pache.logging.log4j,org.apache.logging.log4j.core,org.apache.logging.
log4j.core.appender.rolling,org.apache.logging.log4j.core.async,org.a
pache.logging.log4j.core.config,org.apache.logging.log4j.core.config.
plugins,org.apache.logging.log4j.core.config.plugins.validation.const
raints,org.apache.logging.log4j.core.filter,org.apache.logging.log4j.
core.impl,org.apache.logging.log4j.core.layout,org.apache.logging.log
4j.core.net,org.apache.logging.log4j.core.net.ssl,org.apache.logging.
log4j.core.script,org.apache.logging.log4j.core.util,org.apache.loggi
ng.log4j.status";version="2.17.0",org.apache.logging.log4j.core.appen
der.db;uses:="org.apache.logging.log4j.core,org.apache.logging.log4j.
core.appender,org.apache.logging.log4j.core.config,org.apache.logging
.log4j.core.config.plugins,org.apache.logging.log4j.core.util";versio
n="2.17.0",org.apache.logging.log4j.core.appender.db.jdbc;uses:="org.
apache.logging.log4j,org.apache.logging.log4j.core,org.apache.logging
.log4j.core.appender.db,org.apache.logging.log4j.core.config,org.apac
he.logging.log4j.core.config.plugins,org.apache.logging.log4j.core.co
nfig.plugins.validation.constraints,org.apache.logging.log4j.core.lay
out,org.apache.logging.log4j.core.util";version="2.17.0",org.apache.l
ogging.log4j.core.appender.mom;uses:="javax.jms,org.apache.logging.lo
g4j.core,org.apache.logging.log4j.core.appender,org.apache.logging.lo
g4j.core.config,org.apache.logging.log4j.core.config.plugins,org.apac
he.logging.log4j.core.net,org.apache.logging.log4j.core.util";version
="2.17.0",org.apache.logging.log4j.core.appender.mom.jeromq;uses:="or
g.apache.logging.log4j.core,org.apache.logging.log4j.core.appender,or
g.apache.logging.log4j.core.config,org.apache.logging.log4j.core.conf
ig.plugins,org.apache.logging.log4j.core.config.plugins.validation.co
nstraints,org.zeromq";version="2.17.0",org.apache.logging.log4j.core.
appender.mom.kafka;uses:="org.apache.kafka.clients.producer,org.apach
e.logging.log4j.core,org.apache.logging.log4j.core.appender,org.apach
e.logging.log4j.core.config,org.apache.logging.log4j.core.config.plug
ins,org.apache.logging.log4j.core.util";version="2.17.0",org.apache.l
ogging.log4j.core.appender.nosql;uses:="org.apache.logging.log4j.core
,org.apache.logging.log4j.core.appender,org.apache.logging.log4j.core
.appender.db,org.apache.logging.log4j.core.config.plugins,org.apache.
logging.log4j.core.util";version="2.17.0",org.apache.logging.log4j.co
re.appender.rewrite;uses:="org.apache.logging.log4j,org.apache.loggin
g.log4j.core,org.apache.logging.log4j.core.appender,org.apache.loggin
g.log4j.core.config,org.apache.logging.log4j.core.config.plugins,org.
apache.logging.log4j.core.util";version="2.17.0",org.apache.logging.l
og4j.core.appender.rolling;uses:="org.apache.logging.log4j,org.apache
.logging.log4j.core,org.apache.logging.log4j.core.appender,org.apache
.logging.log4j.core.appender.rolling.action,org.apache.logging.log4j.
core.config,org.apache.logging.log4j.core.config.plugins,org.apache.l
ogging.log4j.core.lookup,org.apache.logging.log4j.core.util";version=
"2.17.0",org.apache.logging.log4j.core.appender.rolling.action;uses:=
"org.apache.logging.log4j,org.apache.logging.log4j.core.config,org.ap
ache.logging.log4j.core.config.plugins,org.apache.logging.log4j.core.
lookup,org.apache.logging.log4j.core.script,org.apache.logging.log4j.
core.util";version="2.17.0",org.apache.logging.log4j.core.appender.ro
uting;uses:="org.apache.logging.log4j.core,org.apache.logging.log4j.c
ore.appender,org.apache.logging.log4j.core.appender.rewrite,org.apach
e.logging.log4j.core.config,org.apache.logging.log4j.core.config.plug
ins,org.apache.logging.log4j.core.script,org.apache.logging.log4j.cor
e.util";version="2.17.0",org.apache.logging.log4j.core.async;uses:="c
om.conversantmedia.util.concurrent,com.lmax.disruptor,org.apache.logg
ing.log4j,org.apache.logging.log4j.core,org.apache.logging.log4j.core
.appender,org.apache.logging.log4j.core.config,org.apache.logging.log
4j.core.config.plugins,org.apache.logging.log4j.core.config.plugins.v
alidation.constraints,org.apache.logging.log4j.core.impl,org.apache.l
ogging.log4j.core.jmx,org.apache.logging.log4j.core.selector,org.apac
he.logging.log4j.core.time,org.apache.logging.log4j.core.util,org.apa
che.logging.log4j.message,org.apache.logging.log4j.util";version="2.1
7.0",org.apache.logging.log4j.core.config;uses:="org.apache.logging.l
og4j,org.apache.logging.log4j.core,org.apache.logging.log4j.core.asyn
c,org.apache.logging.log4j.core.config.builder.api,org.apache.logging
.log4j.core.config.plugins,org.apache.logging.log4j.core.config.plugi
ns.util,org.apache.logging.log4j.core.config.plugins.validation.const
raints,org.apache.logging.log4j.core.filter,org.apache.logging.log4j.
core.impl,org.apache.logging.log4j.core.lookup,org.apache.logging.log
4j.core.net,org.apache.logging.log4j.core.script,org.apache.logging.l
og4j.core.util,org.apache.logging.log4j.message,org.apache.logging.lo
g4j.util";version="2.17.0",org.apache.logging.log4j.core.config.arbit
ers;uses:="org.apache.logging.log4j.core.config,org.apache.logging.lo
g4j.core.config.plugins,org.apache.logging.log4j.core.util";version="
2.17.0",org.apache.logging.log4j.core.config.builder.api;uses:="org.a
pache.logging.log4j,org.apache.logging.log4j.core,org.apache.logging.
log4j.core.config,org.apache.logging.log4j.core.config.builder.impl,o
rg.apache.logging.log4j.core.util";version="2.17.0",org.apache.loggin
g.log4j.core.config.builder.impl;uses:="javax.xml.transform,org.apach
e.logging.log4j,org.apache.logging.log4j.core,org.apache.logging.log4
j.core.config,org.apache.logging.log4j.core.config.builder.api,org.ap
ache.logging.log4j.core.config.plugins.util,org.apache.logging.log4j.
core.config.status";version="2.17.0",org.apache.logging.log4j.core.co
nfig.composite;uses:="org.apache.logging.log4j.core.config,org.apache
.logging.log4j.core.config.plugins.util";version="2.17.0",org.apache.
logging.log4j.core.config.json;uses:="com.fasterxml.jackson.databind,
org.apache.logging.log4j.core,org.apache.logging.log4j.core.config,or
g.apache.logging.log4j.core.config.plugins";version="2.17.0",org.apac
he.logging.log4j.core.config.plugins;uses:="org.apache.logging.log4j.
core.config.plugins.visitors";version="2.17.0",org.apache.logging.log
4j.core.config.plugins.convert;uses:="org.apache.logging.log4j,org.ap
ache.logging.log4j.core.appender.rolling.action,org.apache.logging.lo
g4j.core.config.plugins,org.apache.logging.log4j.core.util";version="
2.17.0",org.apache.logging.log4j.core.config.plugins.processor;uses:=
"javax.annotation.processing,javax.lang.model,javax.lang.model.elemen
t";version="2.17.0",org.apache.logging.log4j.core.config.plugins.util
;uses:="org.apache.logging.log4j.core,org.apache.logging.log4j.core.c
onfig,org.apache.logging.log4j.core.config.plugins.processor,org.apac
he.logging.log4j.core.util";version="2.17.0",org.apache.logging.log4j
.core.config.plugins.validation;version="2.17.0",org.apache.logging.l
og4j.core.config.plugins.validation.constraints;uses:="org.apache.log
ging.log4j.core.config.plugins.validation,org.apache.logging.log4j.co
re.config.plugins.validation.validators";version="2.17.0",org.apache.
logging.log4j.core.config.plugins.validation.validators;uses:="org.ap
ache.logging.log4j.core.config.plugins.validation,org.apache.logging.
log4j.core.config.plugins.validation.constraints";version="2.17.0",or
g.apache.logging.log4j.core.config.plugins.visitors;uses:="org.apache
.logging.log4j,org.apache.logging.log4j.core,org.apache.logging.log4j
.core.config,org.apache.logging.log4j.core.config.plugins,org.apache.
logging.log4j.core.lookup";version="2.17.0",org.apache.logging.log4j.
core.config.properties;uses:="org.apache.logging.log4j.core,org.apach
e.logging.log4j.core.config,org.apache.logging.log4j.core.config.buil
der.api,org.apache.logging.log4j.core.config.builder.impl,org.apache.
logging.log4j.core.config.plugins,org.apache.logging.log4j.core.util"
;version="2.17.0",org.apache.logging.log4j.core.config.status;uses:="
org.apache.logging.log4j";version="2.17.0",org.apache.logging.log4j.c
ore.config.xml;uses:="org.apache.logging.log4j.core,org.apache.loggin
g.log4j.core.config,org.apache.logging.log4j.core.config.plugins";ver
sion="2.17.0",org.apache.logging.log4j.core.config.yaml;uses:="com.fa
sterxml.jackson.databind,org.apache.logging.log4j.core,org.apache.log
ging.log4j.core.config,org.apache.logging.log4j.core.config.json,org.
apache.logging.log4j.core.config.plugins";version="2.17.0",org.apache
.logging.log4j.core.filter;uses:="org.apache.logging.log4j,org.apache
.logging.log4j.core,org.apache.logging.log4j.core.config,org.apache.l
ogging.log4j.core.config.plugins,org.apache.logging.log4j.core.script
,org.apache.logging.log4j.core.util,org.apache.logging.log4j.message,
org.apache.logging.log4j.util";version="2.17.0",org.apache.logging.lo
g4j.core.impl;uses:="org.apache.logging.log4j,org.apache.logging.log4
j.core,org.apache.logging.log4j.core.config,org.apache.logging.log4j.
core.pattern,org.apache.logging.log4j.core.selector,org.apache.loggin
g.log4j.core.time,org.apache.logging.log4j.core.util,org.apache.loggi
ng.log4j.message,org.apache.logging.log4j.spi,org.apache.logging.log4
j.util";version="2.17.0",org.apache.logging.log4j.core.jackson;uses:=
"com.fasterxml.jackson.core,com.fasterxml.jackson.databind,com.faster
xml.jackson.databind.deser.std,com.fasterxml.jackson.databind.ser.std
,com.fasterxml.jackson.dataformat.xml,com.fasterxml.jackson.dataforma
t.yaml,org.apache.logging.log4j.message,org.apache.logging.log4j.util
";version="2.17.0",org.apache.logging.log4j.core.jmx;uses:="com.lmax.
disruptor,javax.management,org.apache.logging.log4j,org.apache.loggin
g.log4j.core,org.apache.logging.log4j.core.appender,org.apache.loggin
g.log4j.core.config,org.apache.logging.log4j.core.selector,org.apache
.logging.log4j.status";version="2.17.0",org.apache.logging.log4j.core
.layout;uses:="com.fasterxml.jackson.annotation,com.fasterxml.jackson
.core,com.fasterxml.jackson.databind,com.fasterxml.jackson.dataformat
.xml.annotation,org.apache.commons.csv,org.apache.logging.log4j,org.a
pache.logging.log4j.core,org.apache.logging.log4j.core.config,org.apa
che.logging.log4j.core.config.plugins,org.apache.logging.log4j.core.i
mpl,org.apache.logging.log4j.core.net,org.apache.logging.log4j.core.p
attern,org.apache.logging.log4j.core.script,org.apache.logging.log4j.
core.util,org.apache.logging.log4j.message";version="2.17.0",org.apac
he.logging.log4j.core.layout.internal;version="2.17.0",org.apache.log
ging.log4j.core.lookup;uses:="org.apache.logging.log4j.core,org.apach
e.logging.log4j.core.config,org.apache.logging.log4j.core.config.plug
ins";version="2.17.0",org.apache.logging.log4j.core.message;uses:="or
g.apache.logging.log4j.message";version="2.17.0",org.apache.logging.l
og4j.core.net;uses:="javax.mail,javax.mail.internet,javax.naming,org.
apache.logging.log4j,org.apache.logging.log4j.core,org.apache.logging
.log4j.core.appender,org.apache.logging.log4j.core.config,org.apache.
logging.log4j.core.config.plugins,org.apache.logging.log4j.core.net.s
sl,org.apache.logging.log4j.core.util";version="2.17.0",org.apache.lo
gging.log4j.core.net.ssl;uses:="javax.net.ssl,org.apache.logging.log4
j.core.config.plugins,org.apache.logging.log4j.status";version="2.17.
0",org.apache.logging.log4j.core.osgi;uses:="org.apache.logging.log4j
.core,org.apache.logging.log4j.core.selector,org.osgi.framework";vers
ion="2.17.0",org.apache.logging.log4j.core.parser;uses:="org.apache.l
ogging.log4j.core";version="2.17.0",org.apache.logging.log4j.core.pat
tern;uses:="org.apache.logging.log4j,org.apache.logging.log4j.core,or
g.apache.logging.log4j.core.config,org.apache.logging.log4j.core.conf
ig.plugins,org.apache.logging.log4j.core.impl,org.apache.logging.log4
j.core.time,org.apache.logging.log4j.message,org.fusesource.jansi";ve
rsion="2.17.0",org.apache.logging.log4j.core.script;uses:="javax.scri
pt,org.apache.logging.log4j,org.apache.logging.log4j.core.config,org.
apache.logging.log4j.core.config.plugins,org.apache.logging.log4j.cor
e.util";version="2.17.0",org.apache.logging.log4j.core.selector;uses:
="org.apache.logging.log4j.core,org.apache.logging.log4j.spi,org.apac
he.logging.log4j.status";version="2.17.0",org.apache.logging.log4j.co
re.time;uses:="org.apache.logging.log4j.core.util,org.apache.logging.
log4j.util";version="2.17.0",org.apache.logging.log4j.core.time.inter
nal;uses:="org.apache.logging.log4j.core.time";version="2.17.0",org.a
pache.logging.log4j.core.tools;version="2.17.0",org.apache.logging.lo
g4j.core.tools.picocli;version="2.17.0",org.apache.logging.log4j.core
.util;uses:="javax.crypto,javax.naming,org.apache.logging.log4j,org.a
pache.logging.log4j.core,org.apache.logging.log4j.core.config,org.apa
che.logging.log4j.core.config.plugins,org.apache.logging.log4j.util";
version="2.17.0",org.apache.logging.log4j.core.util.datetime;uses:="o
rg.apache.logging.log4j.core.time";version="2.17.0"
Bundle-Name: Apache Log4j Core
Log4jReleaseVersionJava6: 2.3
Multi-Release: true
Bundle-Activator: org.apache.logging.log4j.core.osgi.Activator
Log4jReleaseVersionJava7: 2.12.2
Log4jReleaseVersion: 2.17.0
Implementation-Title: Apache Log4j Core
Bundle-Description: The Apache Log4j Implementation
Automatic-Module-Name: org.apache.logging.log4j.core
Implementation-Version: 2.17.0
Specification-Vendor: The Apache Software Foundation
Bundle-ManifestVersion: 2
Bundle-Vendor: The Apache Software Foundation
Tool: Bnd-3.5.0.201709291849
Implementation-Vendor: The Apache Software Foundation
Bundle-Version: 2.17.0
X-Compile-Target-JDK: 1.8
X-Compile-Source-JDK: 1.8
Created-By: Apache Maven Bundle Plugin
Build-Jdk: 1.8.0_144
Specification-Version: 2.17.0
Implementation-URL: https://logging.apache.org/log4j/2.x/log4j-core/
Log4jReleaseKey: B3D8E1BA

1
tests/fixtures/rhel-8/zipinfo.json vendored Normal file

File diff suppressed because one or more lines are too long

1221
tests/fixtures/rhel-8/zipinfo.out vendored Normal file

File diff suppressed because it is too large Load Diff

File diff suppressed because one or more lines are too long

View File

@@ -37,6 +37,9 @@ class MyTests(unittest.TestCase):
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/generic/csv-insurance.csv'), 'r', encoding='utf-8') as f:
self.generic_csv_insurance = f.read()
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/generic/csv-doubleqouted.csv'), 'r', encoding='utf-8') as f:
self.generic_csv_doubleqouted = f.read()
# output
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/generic/csv-biostats.json'), 'r', encoding='utf-8') as f:
self.generic_csv_biostats_json = json.loads(f.read())
@@ -65,6 +68,9 @@ class MyTests(unittest.TestCase):
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/generic/csv-insurance.json'), 'r', encoding='utf-8') as f:
self.generic_csv_insurance_json = json.loads(f.read())
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/generic/csv-doubleqouted.json'), 'r', encoding='utf-8') as f:
self.generic_csv_doubleqouted_json = json.loads(f.read())
def test_csv_nodata(self):
"""
Test with no data
@@ -125,6 +131,12 @@ class MyTests(unittest.TestCase):
"""
self.assertEqual(jc.parsers.csv.parse(self.generic_csv_insurance, quiet=True), self.generic_csv_insurance_json)
def test_doubleqouted(self):
"""
Test 'csv-doubleqouted.csv' file
"""
self.assertEqual(jc.parsers.csv.parse(self.generic_csv_doubleqouted, quiet=True), self.generic_csv_doubleqouted_json)
if __name__ == '__main__':
unittest.main()

View File

@@ -42,6 +42,9 @@ class MyTests(unittest.TestCase):
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/generic/csv-10k-sales-records.csv'), 'r', encoding='utf-8') as f:
self.generic_csv_10k_sales_records = f.read()
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/generic/csv-doubleqouted.csv'), 'r', encoding='utf-8') as f:
self.generic_csv_doubleqouted = f.read()
# output
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/generic/csv-biostats-streaming.json'), 'r', encoding='utf-8') as f:
self.generic_csv_biostats_streaming_json = json.loads(f.read())
@@ -70,6 +73,9 @@ class MyTests(unittest.TestCase):
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/generic/csv-10k-sales-records-streaming.json'), 'r', encoding='utf-8') as f:
self.generic_csv_10k_sales_records_streaming_json = json.loads(f.read())
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/generic/csv-doubleqouted-streaming.json'), 'r', encoding='utf-8') as f:
self.generic_csv_doublequoted_streaming_json = json.loads(f.read())
def test_csv_s_nodata(self):
"""
Test CSV parser with no data
@@ -141,6 +147,12 @@ class MyTests(unittest.TestCase):
"""
self.assertEqual(list(jc.parsers.csv_s.parse(self.generic_csv_10k_sales_records.splitlines(), quiet=True)), self.generic_csv_10k_sales_records_streaming_json)
def test_csv_s_doublequoted(self):
"""
Test 'doublequoted.csv' file
"""
self.assertEqual(list(jc.parsers.csv_s.parse(self.generic_csv_doubleqouted.splitlines(), quiet=True)), self.generic_csv_doublequoted_streaming_json)
if __name__ == '__main__':
unittest.main()

View File

@@ -0,0 +1,46 @@
import os
import unittest
import json
import jc.parsers.jar_manifest

THIS_DIR = os.path.dirname(os.path.abspath(__file__))


def _fixture(rel_path):
    """Return the absolute path of a fixture file relative to the repo root."""
    return os.path.join(THIS_DIR, os.pardir, rel_path)


class MyTests(unittest.TestCase):
    """Tests for the jar_manifest parser using RHEL 8 MANIFEST.MF fixtures."""

    def setUp(self):
        # input fixtures: raw MANIFEST.MF text as produced by cat/unzip
        with open(_fixture('tests/fixtures/rhel-8/MANIFEST.MF.out'), 'r', encoding='utf-8') as f:
            self.rhel_8_manifest_mf = f.read()

        with open(_fixture('tests/fixtures/rhel-8/MANIFEST.MF.MULTI.out'), 'r', encoding='utf-8') as f:
            self.rhel_8_manifest_mf_multi = f.read()

        # expected output fixtures: pre-computed parser results stored as JSON
        with open(_fixture('tests/fixtures/rhel-8/MANIFEST.MF.json'), 'r', encoding='utf-8') as f:
            self.rhel_8_manifest_mf_json = json.loads(f.read())

        with open(_fixture('tests/fixtures/rhel-8/MANIFEST.MF.MULTI.json'), 'r', encoding='utf-8') as f:
            self.rhel_8_manifest_mf_multi_json = json.loads(f.read())

    def test_jar_manifest_nodata(self):
        """
        Test 'jar_manifest' parser with no data
        """
        self.assertEqual(jc.parsers.jar_manifest.parse('', quiet=True), [])

    def test_jar_manifest_rhel_8(self):
        """
        Test 'cat MANIFEST.MF | jc --jar_manifest'
        """
        self.assertEqual(
            jc.parsers.jar_manifest.parse(self.rhel_8_manifest_mf, quiet=True),
            self.rhel_8_manifest_mf_json)

    def test_jar_manifest_multi_rhel_8(self):
        """
        Test 'unzip -c apache-log4j-2.16.0-bin/log4j-core-2.16.0.jar META-INF/MANIFEST.MF | jc --jar_manifest'
        """
        self.assertEqual(
            jc.parsers.jar_manifest.parse(self.rhel_8_manifest_mf_multi, quiet=True),
            self.rhel_8_manifest_mf_multi_json)


if __name__ == '__main__':
    unittest.main()

100
tests/test_stat_s.py Normal file
View File

@@ -0,0 +1,100 @@
import os
import sys
import time
import json
import unittest
from jc.exceptions import ParseError
import jc.parsers.stat_s

THIS_DIR = os.path.dirname(os.path.abspath(__file__))

# Set the timezone on POSIX systems. Need to manually set for Windows tests
if not sys.platform.startswith('win32'):
    os.environ['TZ'] = 'America/Los_Angeles'
    time.tzset()


# To create streaming output use:
# $ cat stat.out | jc --stat-s | jello -c > stat-streaming.json


class MyTests(unittest.TestCase):
    """Tests for the stat_s (streaming) parser against per-OS fixtures."""

    # attribute name -> raw 'stat' command output fixture (inputs)
    _INPUT_FIXTURES = {
        'centos_7_7_stat': 'tests/fixtures/centos-7.7/stat.out',
        'ubuntu_18_4_stat': 'tests/fixtures/ubuntu-18.04/stat.out',
        'osx_10_14_6_stat': 'tests/fixtures/osx-10.14.6/stat.out',
        'osx_10_14_6_stat_filename_with_spaces': 'tests/fixtures/osx-10.14.6/stat-filename-with-spaces.out',
        'freebsd12_stat': 'tests/fixtures/freebsd12/stat.out',
    }

    # attribute name -> expected streaming parser output stored as JSON
    _OUTPUT_FIXTURES = {
        'centos_7_7_stat_streaming_json': 'tests/fixtures/centos-7.7/stat-streaming.json',
        'ubuntu_18_4_stat_streaming_json': 'tests/fixtures/ubuntu-18.04/stat-streaming.json',
        'osx_10_14_6_stat_streaming_json': 'tests/fixtures/osx-10.14.6/stat-streaming.json',
        'osx_10_14_6_stat_filename_with_spaces_streaming_json': 'tests/fixtures/osx-10.14.6/stat-filename-with-spaces-streaming.json',
        'freebsd12_stat_streaming_json': 'tests/fixtures/freebsd12/stat-streaming.json',
    }

    def setUp(self):
        # load raw input fixtures as plain text
        for attr, rel_path in self._INPUT_FIXTURES.items():
            with open(os.path.join(THIS_DIR, os.pardir, rel_path), 'r', encoding='utf-8') as f:
                setattr(self, attr, f.read())

        # load expected output fixtures as parsed JSON
        for attr, rel_path in self._OUTPUT_FIXTURES.items():
            with open(os.path.join(THIS_DIR, os.pardir, rel_path), 'r', encoding='utf-8') as f:
                setattr(self, attr, json.loads(f.read()))

    def test_stat_s_nodata(self):
        """
        Test 'stat' with no data
        """
        self.assertEqual(list(jc.parsers.stat_s.parse([], quiet=True)), [])

    def test_stat_s_unparsable(self):
        # non-stat input must raise ParseError once the generator is consumed
        data = 'unparsable data'
        g = jc.parsers.stat_s.parse(data.splitlines(), quiet=True)
        with self.assertRaises(ParseError):
            list(g)

    def test_stat_s_centos_7_7(self):
        """
        Test 'stat /bin/*' on Centos 7.7
        """
        self.assertEqual(
            list(jc.parsers.stat_s.parse(self.centos_7_7_stat.splitlines(), quiet=True)),
            self.centos_7_7_stat_streaming_json)

    def test_stat_s_ubuntu_18_4(self):
        """
        Test 'stat /bin/*' on Ubuntu 18.4
        """
        self.assertEqual(
            list(jc.parsers.stat_s.parse(self.ubuntu_18_4_stat.splitlines(), quiet=True)),
            self.ubuntu_18_4_stat_streaming_json)

    def test_stat_s_osx_10_14_6(self):
        """
        Test 'stat /bin/*' on OSX 10.14.6
        """
        self.assertEqual(
            list(jc.parsers.stat_s.parse(self.osx_10_14_6_stat.splitlines(), quiet=True)),
            self.osx_10_14_6_stat_streaming_json)

    def test_stat_s_filename_with_spaces_osx_10_14_6(self):
        """
        Test 'stat' filename with spaces on OSX 10.14.6
        """
        self.assertEqual(
            list(jc.parsers.stat_s.parse(self.osx_10_14_6_stat_filename_with_spaces.splitlines(), quiet=True)),
            self.osx_10_14_6_stat_filename_with_spaces_streaming_json)

    def test_stat_s_freebsd12(self):
        """
        Test 'stat /foo/*' on FreeBSD12
        """
        self.assertEqual(
            list(jc.parsers.stat_s.parse(self.freebsd12_stat.splitlines(), quiet=True)),
            self.freebsd12_stat_streaming_json)


if __name__ == '__main__':
    unittest.main()

46
tests/test_zipinfo.py Normal file
View File

@@ -0,0 +1,46 @@
import os
import unittest
import json
import jc.parsers.zipinfo

THIS_DIR = os.path.dirname(os.path.abspath(__file__))


def _read_fixture(rel_path):
    """Read a fixture file (path relative to the repo root) as UTF-8 text."""
    with open(os.path.join(THIS_DIR, os.pardir, rel_path), 'r', encoding='utf-8') as f:
        return f.read()


class MyTests(unittest.TestCase):
    """Tests for the zipinfo parser using RHEL 8 and OSX fixtures."""

    def setUp(self):
        # input fixtures: raw 'zipinfo' command output
        self.rhel_8_zipinfo = _read_fixture('tests/fixtures/rhel-8/zipinfo.out')
        self.osx_10_14_6_zipinfo_multi = _read_fixture('tests/fixtures/osx-10.14.6/zipinfo-multi.out')

        # expected output fixtures: pre-computed parser results stored as JSON
        self.rhel_8_zipinfo_json = json.loads(_read_fixture('tests/fixtures/rhel-8/zipinfo.json'))
        self.osx_10_14_6_zipinfo_multi_json = json.loads(_read_fixture('tests/fixtures/osx-10.14.6/zipinfo-multi.json'))

    def test_zipinfo_nodata(self):
        """
        Test 'zipinfo' parser with no data
        """
        self.assertEqual(jc.parsers.zipinfo.parse('', quiet=True), [])

    def test_zipinfo_rhel_8(self):
        """
        Test 'zipinfo' on Red Hat 8
        """
        self.assertEqual(
            jc.parsers.zipinfo.parse(self.rhel_8_zipinfo, quiet=True),
            self.rhel_8_zipinfo_json)

    def test_zipinfo_multi_osx_10_14_6(self):
        """
        Test 'zipinfo' with multiple archives on OSX 10.14.6
        """
        self.assertEqual(
            jc.parsers.zipinfo.parse(self.osx_10_14_6_zipinfo_multi, quiet=True),
            self.osx_10_14_6_zipinfo_multi_json)


if __name__ == '__main__':
    unittest.main()