mirror of https://github.com/kellyjonbrazil/jc.git synced 2025-07-09 01:05:53 +02:00

Merge pull request #260 from kellyjonbrazil/dev

Dev v1.20.2
This commit is contained in:
Kelly Brazil
2022-07-05 19:39:40 -07:00
committed by GitHub
55 changed files with 18879 additions and 57 deletions


@@ -1,5 +1,10 @@
jc changelog
20220705 v1.20.2
- Add `gpg --with-colons` parser tested on linux
- Add DER and PEM encoded X.509 Certificate parser
- Add Bash and Zsh completion scripts to DEB and RPM packages
20220615 v1.20.1
- Add `postconf -M` parser tested on linux
- Update `asciitable` and `asciitable-m` parsers to preserve case in key


@@ -1121,6 +1121,37 @@ git log --stat | jc --git-log -p or: jc -p git log --stat
}
]
```
### gpg --with-colons
```bash
gpg --with-colons --show-keys file.gpg | jc --gpg -p # or jc -p gpg --with-colons --show-keys file.gpg
```
```json
[
{
"type": "pub",
"validity": "f",
"key_length": "1024",
"pub_key_alg": "17",
"key_id": "6C7EE1B8621CC013",
"creation_date": "899817715",
"expiration_date": "1055898235",
"certsn_uidhash_trustinfo": null,
"owner_trust": "m",
"user_id": null,
"signature_class": null,
"key_capabilities": "scESC",
"cert_fingerprint_other": null,
"flag": null,
"token_sn": null,
"hash_alg": null,
"curve_name": null,
"compliance_flags": null,
"last_update_date": null,
"origin": null,
"comment": null
}
]
```
### /etc/group file
```bash
cat /etc/group | jc --group -p
@@ -4091,6 +4122,85 @@ who -a | jc --who -p # or: jc -p who -a
}
]
```
### X.509 PEM and DER certificate files
```bash
cat entrust.pem | jc --x509-cert -p
```
```json
[
{
"tbs_certificate": {
"version": "v3",
"serial_number": "a6:8b:79:29:00:00:00:00:50:d0:91:f9",
"signature": {
"algorithm": "sha384_ecdsa",
"parameters": null
},
"issuer": {
"country_name": "US",
"organization_name": "Entrust, Inc.",
"organizational_unit_name": [
"See www.entrust.net/legal-terms",
"(c) 2012 Entrust, Inc. - for authorized use only"
],
"common_name": "Entrust Root Certification Authority - EC1"
},
"validity": {
"not_before": 1355844336,
"not_after": 2144764536,
"not_before_iso": "2012-12-18T15:25:36+00:00",
"not_after_iso": "2037-12-18T15:55:36+00:00"
},
"subject": {
"country_name": "US",
"organization_name": "Entrust, Inc.",
"organizational_unit_name": [
"See www.entrust.net/legal-terms",
"(c) 2012 Entrust, Inc. - for authorized use only"
],
"common_name": "Entrust Root Certification Authority - EC1"
},
"subject_public_key_info": {
"algorithm": {
"algorithm": "ec",
"parameters": "secp384r1"
},
"public_key": "04:84:13:c9:d0:ba:6d:41:7b:e2:6c:d0:eb:55:5f:66:02:1a:24:f4:5b:89:69:47:e3:b8:c2:7d:f1:f2:02:c5:9f:a0:f6:5b:d5:8b:06:19:86:4f:53:10:6d:07:24:27:a1:a0:f8:d5:47:19:61:4c:7d:ca:93:27:ea:74:0c:ef:6f:96:09:fe:63:ec:70:5d:36:ad:67:77:ae:c9:9d:7c:55:44:3a:a2:63:51:1f:f5:e3:62:d4:a9:47:07:3e:cc:20"
},
"issuer_unique_id": null,
"subject_unique_id": null,
"extensions": [
{
"extn_id": "key_usage",
"critical": true,
"extn_value": [
"key_cert_sign",
"crl_sign"
]
},
{
"extn_id": "basic_constraints",
"critical": true,
"extn_value": {
"ca": true,
"path_len_constraint": null
}
},
{
"extn_id": "key_identifier",
"critical": false,
"extn_value": "b7:63:e7:1a:dd:8d:e9:08:a6:55:83:a4:e0:6a:50:41:65:11:42:49"
}
]
},
"signature_algorithm": {
"algorithm": "sha384_ecdsa",
"parameters": null
},
"signature_value": "30:64:02:30:61:79:d8:e5:42:47:df:1c:ae:53:99:17:b6:6f:1c:7d:e1:bf:11:94:d1:03:88:75:e4:8d:89:a4:8a:77:46:de:6d:61:ef:02:f5:fb:b5:df:cc:fe:4e:ff:fe:a9:e6:a7:02:30:5b:99:d7:85:37:06:b5:7b:08:fd:eb:27:8b:4a:94:f9:e1:fa:a7:8e:26:08:e8:7c:92:68:6d:73:d8:6f:26:ac:21:02:b8:99:b7:26:41:5b:25:60:ae:d0:48:1a:ee:06"
}
]
```
### XML files
```bash
cat cd_catalog.xml


@@ -175,6 +175,7 @@ option.
| ` --fstab` | `/etc/fstab` file parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/fstab) |
| ` --git-log` | `git log` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/git_log) |
| ` --git-log-s` | `git log` command streaming parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/git_log_s) |
| ` --gpg` | `gpg --with-colons` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/gpg) |
| ` --group` | `/etc/group` file parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/group) |
| ` --gshadow` | `/etc/gshadow` file parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/gshadow) |
| ` --hash` | `hash` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/hash) |
@@ -247,6 +248,7 @@ option.
| ` --w` | `w` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/w) |
| ` --wc` | `wc` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/wc) |
| ` --who` | `who` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/who) |
| ` --x509-cert` | X.509 PEM and DER certificate file parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/x509_cert) |
| ` --xml` | XML file parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/xml) |
| ` --xrandr` | `xrandr` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/xrandr) |
| ` --yaml` | YAML file parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/yaml) |

build-completions.py Executable file

@@ -0,0 +1,9 @@
#!/usr/bin/env python3
# build Bash and Zsh completion scripts and add to the completions folder
from jc.shell_completions import bash_completion, zsh_completion
with open('completions/jc_bash_completion.sh', 'w') as f:
print(bash_completion(), file=f)
with open('completions/jc_zsh_completion.sh', 'w') as f:
print(zsh_completion(), file=f)


@@ -0,0 +1,84 @@
_jc()
{
local cur prev words cword jc_commands jc_parsers jc_options \
jc_about_options jc_about_mod_options jc_help_options jc_special_options
jc_commands=(acpi airport arp blkid chage cksum crontab date df dig dmidecode dpkg du env file finger free git gpg hciconfig id ifconfig iostat iptables iw jobs last lastb ls lsblk lsmod lsof lsusb md5 md5sum mount mpstat netstat nmcli ntpq pidstat ping ping6 pip pip3 postconf printenv ps route rpm rsync sfdisk sha1sum sha224sum sha256sum sha384sum sha512sum shasum ss stat sum sysctl systemctl systeminfo timedatectl top tracepath tracepath6 traceroute traceroute6 ufw uname update-alternatives upower uptime vdir vmstat w wc who xrandr zipinfo)
jc_parsers=(--acpi --airport --airport-s --arp --asciitable --asciitable-m --blkid --chage --cksum --crontab --crontab-u --csv --csv-s --date --df --dig --dir --dmidecode --dpkg-l --du --env --file --finger --free --fstab --git-log --git-log-s --gpg --group --gshadow --hash --hashsum --hciconfig --history --hosts --id --ifconfig --ini --iostat --iostat-s --iptables --iw-scan --jar-manifest --jobs --kv --last --ls --ls-s --lsblk --lsmod --lsof --lsusb --mount --mpstat --mpstat-s --netstat --nmcli --ntpq --passwd --pidstat --pidstat-s --ping --ping-s --pip-list --pip-show --postconf --ps --route --rpm-qi --rsync --rsync-s --sfdisk --shadow --ss --stat --stat-s --sysctl --systemctl --systemctl-lj --systemctl-ls --systemctl-luf --systeminfo --time --timedatectl --top --top-s --tracepath --traceroute --ufw --ufw-appinfo --uname --update-alt-gs --update-alt-q --upower --uptime --vmstat --vmstat-s --w --wc --who --x509-cert --xml --xrandr --yaml --zipinfo)
jc_options=(--force-color -C --debug -d --monochrome -m --pretty -p --quiet -q --raw -r --unbuffer -u --yaml-out -y)
jc_about_options=(--about -a)
jc_about_mod_options=(--pretty -p --yaml-out -y --monochrome -m --force-color -C)
jc_help_options=(--help -h)
jc_special_options=(--version -v --bash-comp -B --zsh-comp -Z)
COMPREPLY=()
_get_comp_words_by_ref cur prev words cword
# if jc_about_options are found anywhere in the line, then only complete from jc_about_mod_options
for i in "${words[@]::${#words[@]}-1}"; do
if [[ " ${jc_about_options[*]} " =~ " ${i} " ]]; then
COMPREPLY=( $( compgen -W "${jc_about_mod_options[*]}" \
-- "${cur}" ) )
return 0
fi
done
# if jc_help_options and a parser are found anywhere in the line, then no more completions
if
(
for i in "${words[@]::${#words[@]}-1}"; do
if [[ " ${jc_help_options[*]} " =~ " ${i} " ]]; then
return 0
fi
done
return 1
) && (
for i in "${words[@]::${#words[@]}-1}"; do
if [[ " ${jc_parsers[*]} " =~ " ${i} " ]]; then
return 0
fi
done
return 1
); then
return 0
fi
# if jc_help_options are found anywhere in the line, then only complete with parsers
for i in "${words[@]::${#words[@]}-1}"; do
if [[ " ${jc_help_options[*]} " =~ " ${i} " ]]; then
COMPREPLY=( $( compgen -W "${jc_parsers[*]}" \
-- "${cur}" ) )
return 0
fi
done
# if special options are found anywhere in the line, then no more completions
for i in "${words[@]::${#words[@]}-1}"; do
if [[ " ${jc_special_options[*]} " =~ " ${i} " ]]; then
return 0
fi
done
# if magic command is found anywhere in the line, use called command's autocompletion
for i in "${words[@]::${#words[@]}-1}"; do
if [[ " ${jc_commands[*]} " =~ " ${i} " ]]; then
_command
return 0
fi
done
# if a parser arg is found anywhere in the line, only show options and help options
for i in "${words[@]::${#words[@]}-1}"; do
if [[ " ${jc_parsers[*]} " =~ " ${i} " ]]; then
COMPREPLY=( $( compgen -W "${jc_options[*]} ${jc_help_options[*]}" \
-- "${cur}" ) )
return 0
fi
done
# default completion
COMPREPLY=( $( compgen -W "${jc_options[*]} ${jc_about_options[*]} ${jc_help_options[*]} ${jc_special_options[*]} ${jc_parsers[*]} ${jc_commands[*]}" \
-- "${cur}" ) )
} &&
complete -F _jc jc


@@ -0,0 +1,325 @@
#compdef jc
_jc() {
local -a jc_commands jc_commands_describe \
jc_parsers jc_parsers_describe \
jc_options jc_options_describe \
jc_about_options jc_about_options_describe \
jc_about_mod_options jc_about_mod_options_describe \
jc_help_options jc_help_options_describe \
jc_special_options jc_special_options_describe
jc_commands=(acpi airport arp blkid chage cksum crontab date df dig dmidecode dpkg du env file finger free git gpg hciconfig id ifconfig iostat iptables iw jobs last lastb ls lsblk lsmod lsof lsusb md5 md5sum mount mpstat netstat nmcli ntpq pidstat ping ping6 pip pip3 postconf printenv ps route rpm rsync sfdisk sha1sum sha224sum sha256sum sha384sum sha512sum shasum ss stat sum sysctl systemctl systeminfo timedatectl top tracepath tracepath6 traceroute traceroute6 ufw uname update-alternatives upower uptime vdir vmstat w wc who xrandr zipinfo)
jc_commands_describe=(
'acpi:run "acpi" command with magic syntax.'
'airport:run "airport" command with magic syntax.'
'arp:run "arp" command with magic syntax.'
'blkid:run "blkid" command with magic syntax.'
'chage:run "chage" command with magic syntax.'
'cksum:run "cksum" command with magic syntax.'
'crontab:run "crontab" command with magic syntax.'
'date:run "date" command with magic syntax.'
'df:run "df" command with magic syntax.'
'dig:run "dig" command with magic syntax.'
'dmidecode:run "dmidecode" command with magic syntax.'
'dpkg:run "dpkg" command with magic syntax.'
'du:run "du" command with magic syntax.'
'env:run "env" command with magic syntax.'
'file:run "file" command with magic syntax.'
'finger:run "finger" command with magic syntax.'
'free:run "free" command with magic syntax.'
'git:run "git" command with magic syntax.'
'gpg:run "gpg" command with magic syntax.'
'hciconfig:run "hciconfig" command with magic syntax.'
'id:run "id" command with magic syntax.'
'ifconfig:run "ifconfig" command with magic syntax.'
'iostat:run "iostat" command with magic syntax.'
'iptables:run "iptables" command with magic syntax.'
'iw:run "iw" command with magic syntax.'
'jobs:run "jobs" command with magic syntax.'
'last:run "last" command with magic syntax.'
'lastb:run "lastb" command with magic syntax.'
'ls:run "ls" command with magic syntax.'
'lsblk:run "lsblk" command with magic syntax.'
'lsmod:run "lsmod" command with magic syntax.'
'lsof:run "lsof" command with magic syntax.'
'lsusb:run "lsusb" command with magic syntax.'
'md5:run "md5" command with magic syntax.'
'md5sum:run "md5sum" command with magic syntax.'
'mount:run "mount" command with magic syntax.'
'mpstat:run "mpstat" command with magic syntax.'
'netstat:run "netstat" command with magic syntax.'
'nmcli:run "nmcli" command with magic syntax.'
'ntpq:run "ntpq" command with magic syntax.'
'pidstat:run "pidstat" command with magic syntax.'
'ping:run "ping" command with magic syntax.'
'ping6:run "ping6" command with magic syntax.'
'pip:run "pip" command with magic syntax.'
'pip3:run "pip3" command with magic syntax.'
'postconf:run "postconf" command with magic syntax.'
'printenv:run "printenv" command with magic syntax.'
'ps:run "ps" command with magic syntax.'
'route:run "route" command with magic syntax.'
'rpm:run "rpm" command with magic syntax.'
'rsync:run "rsync" command with magic syntax.'
'sfdisk:run "sfdisk" command with magic syntax.'
'sha1sum:run "sha1sum" command with magic syntax.'
'sha224sum:run "sha224sum" command with magic syntax.'
'sha256sum:run "sha256sum" command with magic syntax.'
'sha384sum:run "sha384sum" command with magic syntax.'
'sha512sum:run "sha512sum" command with magic syntax.'
'shasum:run "shasum" command with magic syntax.'
'ss:run "ss" command with magic syntax.'
'stat:run "stat" command with magic syntax.'
'sum:run "sum" command with magic syntax.'
'sysctl:run "sysctl" command with magic syntax.'
'systemctl:run "systemctl" command with magic syntax.'
'systeminfo:run "systeminfo" command with magic syntax.'
'timedatectl:run "timedatectl" command with magic syntax.'
'top:run "top" command with magic syntax.'
'tracepath:run "tracepath" command with magic syntax.'
'tracepath6:run "tracepath6" command with magic syntax.'
'traceroute:run "traceroute" command with magic syntax.'
'traceroute6:run "traceroute6" command with magic syntax.'
'ufw:run "ufw" command with magic syntax.'
'uname:run "uname" command with magic syntax.'
'update-alternatives:run "update-alternatives" command with magic syntax.'
'upower:run "upower" command with magic syntax.'
'uptime:run "uptime" command with magic syntax.'
'vdir:run "vdir" command with magic syntax.'
'vmstat:run "vmstat" command with magic syntax.'
'w:run "w" command with magic syntax.'
'wc:run "wc" command with magic syntax.'
'who:run "who" command with magic syntax.'
'xrandr:run "xrandr" command with magic syntax.'
'zipinfo:run "zipinfo" command with magic syntax.'
)
jc_parsers=(--acpi --airport --airport-s --arp --asciitable --asciitable-m --blkid --chage --cksum --crontab --crontab-u --csv --csv-s --date --df --dig --dir --dmidecode --dpkg-l --du --env --file --finger --free --fstab --git-log --git-log-s --gpg --group --gshadow --hash --hashsum --hciconfig --history --hosts --id --ifconfig --ini --iostat --iostat-s --iptables --iw-scan --jar-manifest --jobs --kv --last --ls --ls-s --lsblk --lsmod --lsof --lsusb --mount --mpstat --mpstat-s --netstat --nmcli --ntpq --passwd --pidstat --pidstat-s --ping --ping-s --pip-list --pip-show --postconf --ps --route --rpm-qi --rsync --rsync-s --sfdisk --shadow --ss --stat --stat-s --sysctl --systemctl --systemctl-lj --systemctl-ls --systemctl-luf --systeminfo --time --timedatectl --top --top-s --tracepath --traceroute --ufw --ufw-appinfo --uname --update-alt-gs --update-alt-q --upower --uptime --vmstat --vmstat-s --w --wc --who --x509-cert --xml --xrandr --yaml --zipinfo)
jc_parsers_describe=(
'--acpi:`acpi` command parser'
'--airport:`airport -I` command parser'
'--airport-s:`airport -s` command parser'
'--arp:`arp` command parser'
'--asciitable:ASCII and Unicode table parser'
'--asciitable-m:multi-line ASCII and Unicode table parser'
'--blkid:`blkid` command parser'
'--chage:`chage --list` command parser'
'--cksum:`cksum` and `sum` command parser'
'--crontab:`crontab` command and file parser'
'--crontab-u:`crontab` file parser with user support'
'--csv:CSV file parser'
'--csv-s:CSV file streaming parser'
'--date:`date` command parser'
'--df:`df` command parser'
'--dig:`dig` command parser'
'--dir:`dir` command parser'
'--dmidecode:`dmidecode` command parser'
'--dpkg-l:`dpkg -l` command parser'
'--du:`du` command parser'
'--env:`env` command parser'
'--file:`file` command parser'
'--finger:`finger` command parser'
'--free:`free` command parser'
'--fstab:`/etc/fstab` file parser'
'--git-log:`git log` command parser'
'--git-log-s:`git log` command streaming parser'
'--gpg:`gpg --with-colons` command parser'
'--group:`/etc/group` file parser'
'--gshadow:`/etc/gshadow` file parser'
'--hash:`hash` command parser'
'--hashsum:hashsum command parser (`md5sum`, `shasum`, etc.)'
'--hciconfig:`hciconfig` command parser'
'--history:`history` command parser'
'--hosts:`/etc/hosts` file parser'
'--id:`id` command parser'
'--ifconfig:`ifconfig` command parser'
'--ini:INI file parser'
'--iostat:`iostat` command parser'
'--iostat-s:`iostat` command streaming parser'
'--iptables:`iptables` command parser'
'--iw-scan:`iw dev [device] scan` command parser'
'--jar-manifest:MANIFEST.MF file parser'
'--jobs:`jobs` command parser'
'--kv:Key/Value file parser'
'--last:`last` and `lastb` command parser'
'--ls:`ls` command parser'
'--ls-s:`ls` command streaming parser'
'--lsblk:`lsblk` command parser'
'--lsmod:`lsmod` command parser'
'--lsof:`lsof` command parser'
'--lsusb:`lsusb` command parser'
'--mount:`mount` command parser'
'--mpstat:`mpstat` command parser'
'--mpstat-s:`mpstat` command streaming parser'
'--netstat:`netstat` command parser'
'--nmcli:`nmcli` command parser'
'--ntpq:`ntpq -p` command parser'
'--passwd:`/etc/passwd` file parser'
'--pidstat:`pidstat -h` command parser'
'--pidstat-s:`pidstat -h` command streaming parser'
'--ping:`ping` and `ping6` command parser'
'--ping-s:`ping` and `ping6` command streaming parser'
'--pip-list:`pip list` command parser'
'--pip-show:`pip show` command parser'
'--postconf:`postconf -M` command parser'
'--ps:`ps` command parser'
'--route:`route` command parser'
'--rpm-qi:`rpm -qi` command parser'
'--rsync:`rsync` command parser'
'--rsync-s:`rsync` command streaming parser'
'--sfdisk:`sfdisk` command parser'
'--shadow:`/etc/shadow` file parser'
'--ss:`ss` command parser'
'--stat:`stat` command parser'
'--stat-s:`stat` command streaming parser'
'--sysctl:`sysctl` command parser'
'--systemctl:`systemctl` command parser'
'--systemctl-lj:`systemctl list-jobs` command parser'
'--systemctl-ls:`systemctl list-sockets` command parser'
'--systemctl-luf:`systemctl list-unit-files` command parser'
'--systeminfo:`systeminfo` command parser'
'--time:`/usr/bin/time` command parser'
'--timedatectl:`timedatectl status` command parser'
'--top:`top -b` command parser'
'--top-s:`top -b` command streaming parser'
'--tracepath:`tracepath` and `tracepath6` command parser'
'--traceroute:`traceroute` and `traceroute6` command parser'
'--ufw:`ufw status` command parser'
'--ufw-appinfo:`ufw app info [application]` command parser'
'--uname:`uname -a` command parser'
'--update-alt-gs:`update-alternatives --get-selections` command parser'
'--update-alt-q:`update-alternatives --query` command parser'
'--upower:`upower` command parser'
'--uptime:`uptime` command parser'
'--vmstat:`vmstat` command parser'
'--vmstat-s:`vmstat` command streaming parser'
'--w:`w` command parser'
'--wc:`wc` command parser'
'--who:`who` command parser'
'--x509-cert:X.509 PEM and DER certificate file parser'
'--xml:XML file parser'
'--xrandr:`xrandr` command parser'
'--yaml:YAML file parser'
'--zipinfo:`zipinfo` command parser'
)
jc_options=(--force-color -C --debug -d --monochrome -m --pretty -p --quiet -q --raw -r --unbuffer -u --yaml-out -y)
jc_options_describe=(
'--force-color:force color output even when using pipes (overrides -m)'
'-C:force color output even when using pipes (overrides -m)'
'--debug:debug (double for verbose debug)'
'-d:debug (double for verbose debug)'
'--monochrome:monochrome output'
'-m:monochrome output'
'--pretty:pretty print output'
'-p:pretty print output'
'--quiet:suppress warnings (double to ignore streaming errors)'
'-q:suppress warnings (double to ignore streaming errors)'
'--raw:raw output'
'-r:raw output'
'--unbuffer:unbuffer output'
'-u:unbuffer output'
'--yaml-out:YAML output'
'-y:YAML output'
)
jc_about_options=(--about -a)
jc_about_options_describe=(
'--about:about jc'
'-a:about jc'
)
jc_about_mod_options=(--pretty -p --yaml-out -y --monochrome -m --force-color -C)
jc_about_mod_options_describe=(
'--pretty:pretty print output'
'-p:pretty print output'
'--yaml-out:YAML output'
'-y:YAML output'
'--monochrome:monochrome output'
'-m:monochrome output'
'--force-color:force color output even when using pipes (overrides -m)'
'-C:force color output even when using pipes (overrides -m)'
)
jc_help_options=(--help -h)
jc_help_options_describe=(
'--help:help (--help --parser_name for parser documentation)'
'-h:help (--help --parser_name for parser documentation)'
)
jc_special_options=(--version -v --bash-comp -B --zsh-comp -Z)
jc_special_options_describe=(
'--version:version info'
'-v:version info'
'--bash-comp:gen Bash completion: jc -B > /etc/bash_completion.d/jc'
'-B:gen Bash completion: jc -B > /etc/bash_completion.d/jc'
'--zsh-comp:gen Zsh completion: jc -Z > "${fpath[1]}/_jc"'
'-Z:gen Zsh completion: jc -Z > "${fpath[1]}/_jc"'
)
# if jc_about_options are found anywhere in the line, then only complete from jc_about_mod_options
for i in ${words:0:-1}; do
if (( $jc_about_options[(Ie)${i}] )); then
_describe 'commands' jc_about_mod_options_describe
return 0
fi
done
# if jc_help_options and a parser are found anywhere in the line, then no more completions
if
(
for i in ${words:0:-1}; do
if (( $jc_help_options[(Ie)${i}] )); then
return 0
fi
done
return 1
) && (
for i in ${words:0:-1}; do
if (( $jc_parsers[(Ie)${i}] )); then
return 0
fi
done
return 1
); then
return 0
fi
# if jc_help_options are found anywhere in the line, then only complete with parsers
for i in ${words:0:-1}; do
if (( $jc_help_options[(Ie)${i}] )); then
_describe 'commands' jc_parsers_describe
return 0
fi
done
# if special options are found anywhere in the line, then no more completions
for i in ${words:0:-1}; do
if (( $jc_special_options[(Ie)${i}] )); then
return 0
fi
done
# if magic command is found anywhere in the line, use called command's autocompletion
for i in ${words:0:-1}; do
if (( $jc_commands[(Ie)${i}] )); then
# hack to remove options between jc and the magic command
shift $(( ${#words} - 2 )) words
words[1,0]=(jc)
CURRENT=${#words}
# run the magic command's completions
_arguments '*::arguments:_normal'
return 0
fi
done
# if a parser arg is found anywhere in the line, only show options and help options
for i in ${words:0:-1}; do
if (( $jc_parsers[(Ie)${i}] )); then
_describe 'commands' jc_options_describe -- jc_help_options_describe
return 0
fi
done
# default completion
_describe 'commands' jc_options_describe -- jc_about_options_describe -- jc_help_options_describe -- jc_special_options_describe -- jc_parsers_describe -- jc_commands_describe
}
_jc

docs/parsers/gpg.md Normal file

@@ -0,0 +1,145 @@
[Home](https://kellyjonbrazil.github.io/jc/)
<a id="jc.parsers.gpg"></a>
# jc.parsers.gpg
jc - JSON Convert `gpg --with-colons` command output parser
Usage (cli):
$ gpg --with-colons --show-keys file.gpg | jc --gpg
or
$ jc gpg --with-colons --show-keys file.gpg
Usage (module):
import jc
result = jc.parse('gpg', gpg_command_output)
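A minimal end-to-end sketch of the module usage above (not part of the parser docs; the `subprocess` call and key file name are assumptions for illustration):
```python
# Hypothetical example: capture `gpg --with-colons` output and parse it.
import subprocess
import jc

proc = subprocess.run(
    ['gpg', '--with-colons', '--show-keys', 'file.gpg'],  # file name assumed
    capture_output=True, text=True, check=True)

for item in jc.parse('gpg', proc.stdout):
    if item['type'] == 'pub':
        print(item['key_id'], item['key_capabilities'])
```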
Schema:
Field definitions from https://git.gnupg.org/cgi-bin/gitweb.cgi?p=gnupg.git;a=blob_plain;f=doc/DETAILS
> Note: Number values are not converted to integers because many field
> specifications are overloaded and future augmentations are implied in the
> documentation.
[
{
"type": string,
"validity": string,
"key_length": string,
"pub_key_alg": string,
"key_id": string,
"creation_date": string,
"expiration_date": string,
"certsn_uidhash_trustinfo": string,
"owner_trust": string,
"user_id": string,
"signature_class": string,
"key_capabilities": string,
"cert_fingerprint_other": string,
"flag": string,
"token_sn": string,
"hash_alg": string,
"curve_name": string,
"compliance_flags": string,
"last_update_date": string,
"origin": string,
"comment": string,
"index": string, # [0]
"bits": string, # [0]
"value": string, # [0]
"version": string, # [1], [4]
"signature_count": string, # [1]
"encryption_count": string, # [1]
"policy": string, # [1]
"signature_first_seen": string, # [1]
"signature_most_recent_seen": string, # [1]
"encryption_first_done": string, # [1]
"encryption_most_recent_done": string, # [1]
"staleness_reason": string, # [2]
"trust_model": string, # [2]
"trust_db_created": string, # [2]
"trust_db_expires": string, # [2]
"marginally_trusted_users": string, # [2]
"completely_trusted_users": string, # [2]
"cert_chain_max_depth": string, # [2]
"subpacket_number": string, # [3]
"hex_flags": string, # [3]
"subpacket_length": string, # [3]
"subpacket_data": string, # [3]
"pubkey": string, # [4]
"cipher": string, # [4]
"digest": string, # [4]
"compress": string, # [4]
"group": string, # [4]
"members": string, # [4]
"curve_names": string, # [4]
}
]
All blank values are converted to null/None.
[0] for 'pkd' type
[1] for 'tfs' type
[2] for 'tru' type
[3] for 'skp' type
[4] for 'cfg' type
Examples:
$ gpg --with-colons --show-keys file.gpg | jc --gpg -p
[
{
"type": "pub",
"validity": "f",
"key_length": "1024",
"pub_key_alg": "17",
"key_id": "6C7EE1B8621CC013",
"creation_date": "899817715",
"expiration_date": "1055898235",
"certsn_uidhash_trustinfo": null,
"owner_trust": "m",
"user_id": null,
"signature_class": null,
"key_capabilities": "scESC",
"cert_fingerprint_other": null,
"flag": null,
"token_sn": null,
"hash_alg": null,
"curve_name": null,
"compliance_flags": null,
"last_update_date": null,
"origin": null,
"comment": null
},
...
]
<a id="jc.parsers.gpg.parse"></a>
### parse
```python
def parse(data: str, raw: bool = False, quiet: bool = False) -> List[Dict]
```
Main text parsing function
Parameters:
data: (string) text data to parse
raw: (boolean) unprocessed output if True
quiet: (boolean) suppress warning messages if True
Returns:
List of Dictionaries. Raw or processed structured data.
### Parser Information
Compatibility: linux
Version 1.0 by Kelly Brazil (kellyjonbrazil@gmail.com)


@@ -186,4 +186,4 @@ Returns:
### Parser Information
Compatibility: linux
-Version 1.7 by Kelly Brazil (kellyjonbrazil@gmail.com)
+Version 1.8 by Kelly Brazil (kellyjonbrazil@gmail.com)

docs/parsers/x509_cert.md Normal file

@@ -0,0 +1,225 @@
[Home](https://kellyjonbrazil.github.io/jc/)
<a id="jc.parsers.x509_cert"></a>
# jc.parsers.x509\_cert
jc - JSON Convert X.509 Certificate format file parser
This parser will convert DER and PEM encoded X.509 certificate files.
Usage (cli):
$ cat certificate.pem | jc --x509-cert
Usage (module):
import jc
result = jc.parse('x509_cert', x509_cert_file_output)
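A minimal sketch of the module usage above (illustrative only; the file name is a placeholder). Since `parse()` accepts `str` or `bytes`, the certificate can be read in binary mode and passed directly, whether PEM or DER encoded:
```python
import jc

# Hypothetical certificate file; works for PEM text or raw DER bytes.
with open('certificate.pem', 'rb') as f:
    data = f.read()

cert = jc.parse('x509_cert', data)[0]
print(cert['tbs_certificate']['subject']['common_name'])
print(cert['tbs_certificate']['validity']['not_after_iso'])
```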
Schema:
[
{
"tbs_certificate": {
"version": string,
"serial_number": string, # [0]
"signature": {
"algorithm": string,
"parameters": string/null,
},
"issuer": {
"country_name": string,
"state_or_province_name": string,
"locality_name": string,
"organization_name": array/string,
"organizational_unit_name": array/string,
"common_name": string,
"email_address": string
},
"validity": {
"not_before": integer, # [1]
"not_after": integer, # [1]
"not_before_iso": string,
"not_after_iso": string
},
"subject": {
"country_name": string,
"state_or_province_name": string,
"locality_name": string,
"organization_name": array/string,
"organizational_unit_name": array/string,
"common_name": string,
"email_address": string
},
"subject_public_key_info": {
"algorithm": {
"algorithm": string,
"parameters": string/null,
},
"public_key": {
"modulus": string, # [0]
"public_exponent": integer
}
},
"issuer_unique_id": string/null,
"subject_unique_id": string/null,
"extensions": [
{
"extn_id": string,
"critical": boolean,
"extn_value": array/object/string/integer # [2]
}
]
},
"signature_algorithm": {
"algorithm": string,
"parameters": string/null
},
"signature_value": string # [0]
}
]
[0] in colon-delimited hex notation
[1] time-zone-aware (UTC) epoch timestamp
[2] See below for well-known Extension schemas:
Basic Constraints:
{
"extn_id": "basic_constraints",
"critical": boolean,
"extn_value": {
"ca": boolean,
"path_len_constraint": string/null
}
}
Key Usage:
{
"extn_id": "key_usage",
"critical": boolean,
"extn_value": [
string
]
}
Key Identifier:
{
"extn_id": "key_identifier",
"critical": boolean,
"extn_value": string # [0]
}
Authority Key Identifier:
{
"extn_id": "authority_key_identifier",
"critical": boolean,
"extn_value": {
"key_identifier": string, # [0]
"authority_cert_issuer": string/null,
"authority_cert_serial_number": string/null
}
}
Examples:
$ cat entrust-ec1.pem | jc --x509-cert -p
[
{
"tbs_certificate": {
"version": "v3",
"serial_number": "a6:8b:79:29:00:00:00:00:50:d0:91:f9",
"signature": {
"algorithm": "sha384_ecdsa",
"parameters": null
},
"issuer": {
"country_name": "US",
"organization_name": "Entrust, Inc.",
"organizational_unit_name": [
"See www.entrust.net/legal-terms",
"(c) 2012 Entrust, Inc. - for authorized use only"
],
"common_name": "Entrust Root Certification Authority - EC1"
},
"validity": {
"not_before": 1355844336,
"not_after": 2144764536,
"not_before_iso": "2012-12-18T15:25:36+00:00",
"not_after_iso": "2037-12-18T15:55:36+00:00"
},
"subject": {
"country_name": "US",
"organization_name": "Entrust, Inc.",
"organizational_unit_name": [
"See www.entrust.net/legal-terms",
"(c) 2012 Entrust, Inc. - for authorized use only"
],
"common_name": "Entrust Root Certification Authority - EC1"
},
"subject_public_key_info": {
"algorithm": {
"algorithm": "ec",
"parameters": "secp384r1"
},
"public_key": "04:84:13:c9:d0:ba:6d:41:7b:e2:6c:d0:eb:55:..."
},
"issuer_unique_id": null,
"subject_unique_id": null,
"extensions": [
{
"extn_id": "key_usage",
"critical": true,
"extn_value": [
"crl_sign",
"key_cert_sign"
]
},
{
"extn_id": "basic_constraints",
"critical": true,
"extn_value": {
"ca": true,
"path_len_constraint": null
}
},
{
"extn_id": "key_identifier",
"critical": false,
"extn_value": "b7:63:e7:1a:dd:8d:e9:08:a6:55:83:a4:e0:6a:..."
}
]
},
"signature_algorithm": {
"algorithm": "sha384_ecdsa",
"parameters": null
},
"signature_value": "30:64:02:30:61:79:d8:e5:42:47:df:1c:ae:53:..."
}
]
<a id="jc.parsers.x509_cert.parse"></a>
### parse
```python
def parse(data: Union[str, bytes],
raw: bool = False,
quiet: bool = False) -> List[Dict]
```
Main text parsing function
Parameters:
data: (string) text data to parse
raw: (boolean) unprocessed output if True
quiet: (boolean) suppress warning messages if True
Returns:
List of Dictionaries. Raw or processed structured data.
### Parser Information
Compatibility: linux, darwin, cygwin, win32, aix, freebsd
Version 1.0 by Kelly Brazil (kellyjonbrazil@gmail.com)


@@ -102,20 +102,23 @@ Returns:
### has\_data
```python
-def has_data(data: str) -> bool
+def has_data(data: Union[str, bytes]) -> bool
```
-Checks if the input contains data. If there are any non-whitespace
-characters then return `True`, else return `False`.
+Checks if the string input contains data. If there are any
+non-whitespace characters then return `True`, else return `False`.
+For bytes, returns True if there is any data.
Parameters:
-data: (string) input to check whether it contains data
+data: (string, bytes) input to check whether it contains data
Returns:
-Boolean True if input string (data) contains non-whitespace
-characters, otherwise False
+Boolean True if input string (data) contains non-whitespace
+characters, otherwise False. For bytes data, returns
+True if there is any data, otherwise False.
<a id="jc.utils.convert_to_int"></a>
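A short illustration of the updated `has_data` behavior described above (a sketch, assuming `jc.utils` is imported directly; not part of the docs):
```python
from jc.utils import has_data

has_data('hello')       # True  - contains non-whitespace characters
has_data('   \n\t')     # False - whitespace only
has_data(b'\x00\x01')   # True  - for bytes, any data at all counts
has_data(b'')           # False - empty bytes
```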


@@ -14,7 +14,7 @@ from .lib import (__version__, parser_info, all_parser_info, parsers,
_get_parser, _parser_is_streaming, standard_parser_mod_list,
plugin_parser_mod_list, streaming_parser_mod_list)
from . import utils
-from .cli_data import long_options_map
+from .cli_data import long_options_map, new_pygments_colors, old_pygments_colors
from .shell_completions import bash_completion, zsh_completion
from . import tracebackplus
from .exceptions import LibraryNotInstalled, ParseError
@@ -48,43 +48,9 @@ class info():
# startswith is sufficient and avoids potential exceptions from split and int.
if PYGMENTS_INSTALLED:
if pygments.__version__.startswith('2.3.'):
-PYGMENT_COLOR = {
-'black': '#ansiblack',
-'red': '#ansidarkred',
-'green': '#ansidarkgreen',
-'yellow': '#ansibrown',
-'blue': '#ansidarkblue',
-'magenta': '#ansipurple',
-'cyan': '#ansiteal',
-'gray': '#ansilightgray',
-'brightblack': '#ansidarkgray',
-'brightred': '#ansired',
-'brightgreen': '#ansigreen',
-'brightyellow': '#ansiyellow',
-'brightblue': '#ansiblue',
-'brightmagenta': '#ansifuchsia',
-'brightcyan': '#ansiturquoise',
-'white': '#ansiwhite',
-}
+PYGMENT_COLOR = old_pygments_colors
else:
-PYGMENT_COLOR = {
-'black': 'ansiblack',
-'red': 'ansired',
-'green': 'ansigreen',
-'yellow': 'ansiyellow',
-'blue': 'ansiblue',
-'magenta': 'ansimagenta',
-'cyan': 'ansicyan',
-'gray': 'ansigray',
-'brightblack': 'ansibrightblack',
-'brightred': 'ansibrightred',
-'brightgreen': 'ansibrightgreen',
-'brightyellow': 'ansibrightyellow',
-'brightblue': 'ansibrightblue',
-'brightmagenta': 'ansibrightmagenta',
-'brightcyan': 'ansibrightcyan',
-'white': 'ansiwhite',
-}
+PYGMENT_COLOR = new_pygments_colors
def set_env_colors(env_colors=None):
@@ -622,7 +588,7 @@ def main():
try:
# differentiate between regular and streaming parsers
-# streaming
+# streaming (only supports UTF-8 string data for now)
if _parser_is_streaming(parser):
result = parser.parse(sys.stdin,
raw=raw,
@@ -639,9 +605,17 @@ def main():
sys.exit(combined_exit_code(magic_exit_code, 0))
-# regular
+# regular (supports binary and UTF-8 string data)
else:
-data = magic_stdout or sys.stdin.read()
+data = magic_stdout or sys.stdin.buffer.read()
# convert to UTF-8, if possible. Otherwise, leave as bytes
try:
if isinstance(data, bytes):
data = data.decode('utf-8')
except UnicodeDecodeError:
pass
result = parser.parse(data,
raw=raw,
quiet=quiet)


@@ -16,3 +16,41 @@ long_options_map: Dict[str, List[str]] = {
'--bash-comp': ['B', 'gen Bash completion: jc -B > /etc/bash_completion.d/jc'],
'--zsh-comp': ['Z', 'gen Zsh completion: jc -Z > "${fpath[1]}/_jc"']
}
new_pygments_colors = {
'black': 'ansiblack',
'red': 'ansired',
'green': 'ansigreen',
'yellow': 'ansiyellow',
'blue': 'ansiblue',
'magenta': 'ansimagenta',
'cyan': 'ansicyan',
'gray': 'ansigray',
'brightblack': 'ansibrightblack',
'brightred': 'ansibrightred',
'brightgreen': 'ansibrightgreen',
'brightyellow': 'ansibrightyellow',
'brightblue': 'ansibrightblue',
'brightmagenta': 'ansibrightmagenta',
'brightcyan': 'ansibrightcyan',
'white': 'ansiwhite',
}
old_pygments_colors = {
'black': '#ansiblack',
'red': '#ansidarkred',
'green': '#ansidarkgreen',
'yellow': '#ansibrown',
'blue': '#ansidarkblue',
'magenta': '#ansipurple',
'cyan': '#ansiteal',
'gray': '#ansilightgray',
'brightblack': '#ansidarkgray',
'brightred': '#ansired',
'brightgreen': '#ansigreen',
'brightyellow': '#ansiyellow',
'brightblue': '#ansiblue',
'brightmagenta': '#ansifuchsia',
'brightcyan': '#ansiturquoise',
'white': '#ansiwhite',
}


@@ -6,7 +6,7 @@ import importlib
from typing import Dict, List, Iterable, Union, Iterator
from jc import appdirs
-__version__ = '1.20.1'
+__version__ = '1.20.2'
parsers = [
'acpi',
@@ -36,6 +36,7 @@ parsers = [
'fstab',
'git-log',
'git-log-s',
'gpg',
'group',
'gshadow',
'hash',
@@ -108,6 +109,7 @@ parsers = [
'w',
'wc',
'who',
'x509-cert',
'xml',
'xrandr',
'yaml',


@@ -0,0 +1,47 @@
# coding: utf-8
from __future__ import unicode_literals, division, absolute_import, print_function
from .version import __version__, __version_info__
__all__ = [
'__version__',
'__version_info__',
'load_order',
]
def load_order():
"""
Returns a list of the module and sub-module names for asn1crypto in
dependency load order, for the sake of live reloading code
:return:
A list of unicode strings of module names, as they would appear in
sys.modules, ordered by which module should be reloaded first
"""
return [
'asn1crypto._errors',
'asn1crypto._int',
'asn1crypto._ordereddict',
'asn1crypto._teletex_codec',
'asn1crypto._types',
'asn1crypto._inet',
'asn1crypto._iri',
'asn1crypto.version',
'asn1crypto.pem',
'asn1crypto.util',
'asn1crypto.parser',
'asn1crypto.core',
'asn1crypto.algos',
'asn1crypto.keys',
'asn1crypto.x509',
'asn1crypto.crl',
'asn1crypto.csr',
'asn1crypto.ocsp',
'asn1crypto.cms',
'asn1crypto.pdf',
'asn1crypto.pkcs12',
'asn1crypto.tsp',
'asn1crypto',
]


@@ -0,0 +1,54 @@
# coding: utf-8
"""
Exports the following items:
- unwrap()
- APIException()
"""
from __future__ import unicode_literals, division, absolute_import, print_function
import re
import textwrap
class APIException(Exception):
"""
An exception indicating an API has been removed from asn1crypto
"""
pass
def unwrap(string, *params):
"""
Takes a multi-line string and does the following:
- dedents
- converts newlines with text before and after into a single line
- strips leading and trailing whitespace
:param string:
The string to format
:param *params:
Params to interpolate into the string
:return:
The formatted string
"""
output = textwrap.dedent(string)
# Unwrap lines, taking into account bulleted lists, ordered lists and
# underlines consisting of = signs
if output.find('\n') != -1:
output = re.sub('(?<=\\S)\n(?=[^ \n\t\\d\\*\\-=])', ' ', output)
if params:
output = output % params
output = output.strip()
return output
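# Illustrative usage (not part of the original source): unwrap() dedents,
# joins the wrapped lines into one, interpolates params, and strips
# surrounding whitespace. The variable name below is hypothetical.
_example_message = unwrap(
    '''
    value must be a unicode string,
    not %s
    ''',
    'bytes'
)
# _example_message == 'value must be a unicode string, not bytes'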


@@ -0,0 +1,170 @@
# coding: utf-8
from __future__ import unicode_literals, division, absolute_import, print_function
import socket
import struct
from ._errors import unwrap
from ._types import byte_cls, bytes_to_list, str_cls, type_name
def inet_ntop(address_family, packed_ip):
"""
Windows compatibility shim for socket.inet_ntop().
:param address_family:
socket.AF_INET for IPv4 or socket.AF_INET6 for IPv6
:param packed_ip:
A byte string of the network form of an IP address
:return:
A unicode string of the IP address
"""
if address_family not in set([socket.AF_INET, socket.AF_INET6]):
raise ValueError(unwrap(
'''
address_family must be socket.AF_INET (%s) or socket.AF_INET6 (%s),
not %s
''',
repr(socket.AF_INET),
repr(socket.AF_INET6),
repr(address_family)
))
if not isinstance(packed_ip, byte_cls):
raise TypeError(unwrap(
'''
packed_ip must be a byte string, not %s
''',
type_name(packed_ip)
))
required_len = 4 if address_family == socket.AF_INET else 16
if len(packed_ip) != required_len:
raise ValueError(unwrap(
'''
packed_ip must be %d bytes long - is %d
''',
required_len,
len(packed_ip)
))
if address_family == socket.AF_INET:
return '%d.%d.%d.%d' % tuple(bytes_to_list(packed_ip))
octets = struct.unpack(b'!HHHHHHHH', packed_ip)
runs_of_zero = {}
longest_run = 0
zero_index = None
for i, octet in enumerate(octets + (-1,)):
if octet != 0:
if zero_index is not None:
length = i - zero_index
if length not in runs_of_zero:
runs_of_zero[length] = zero_index
longest_run = max(longest_run, length)
zero_index = None
elif zero_index is None:
zero_index = i
hexed = [hex(o)[2:] for o in octets]
if longest_run < 2:
return ':'.join(hexed)
zero_start = runs_of_zero[longest_run]
zero_end = zero_start + longest_run
return ':'.join(hexed[:zero_start]) + '::' + ':'.join(hexed[zero_end:])
def inet_pton(address_family, ip_string):
"""
Windows compatibility shim for socket.inet_pton().
:param address_family:
socket.AF_INET for IPv4 or socket.AF_INET6 for IPv6
:param ip_string:
A unicode string of an IP address
:return:
A byte string of the network form of the IP address
"""
if address_family not in set([socket.AF_INET, socket.AF_INET6]):
raise ValueError(unwrap(
'''
address_family must be socket.AF_INET (%s) or socket.AF_INET6 (%s),
not %s
''',
repr(socket.AF_INET),
repr(socket.AF_INET6),
repr(address_family)
))
if not isinstance(ip_string, str_cls):
raise TypeError(unwrap(
'''
ip_string must be a unicode string, not %s
''',
type_name(ip_string)
))
if address_family == socket.AF_INET:
octets = ip_string.split('.')
error = len(octets) != 4
if not error:
ints = []
for o in octets:
o = int(o)
if o > 255 or o < 0:
error = True
break
ints.append(o)
if error:
raise ValueError(unwrap(
'''
ip_string must be a dotted string with four integers in the
range of 0 to 255, got %s
''',
repr(ip_string)
))
return struct.pack(b'!BBBB', *ints)
error = False
omitted = ip_string.count('::')
if omitted > 1:
error = True
elif omitted == 0:
octets = ip_string.split(':')
error = len(octets) != 8
else:
begin, end = ip_string.split('::')
begin_octets = begin.split(':')
end_octets = end.split(':')
missing = 8 - len(begin_octets) - len(end_octets)
octets = begin_octets + (['0'] * missing) + end_octets
if not error:
ints = []
for o in octets:
o = int(o, 16)
if o > 65535 or o < 0:
error = True
break
ints.append(o)
return struct.pack(b'!HHHHHHHH', *ints)
raise ValueError(unwrap(
'''
ip_string must be a valid ipv6 string, got %s
''',
repr(ip_string)
))
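# Illustrative round trip (not part of the original source); these shims
# mirror the behavior of the standard socket functions. Variable names
# below are hypothetical.
_packed_v4 = inet_pton(socket.AF_INET, '192.0.2.1')       # b'\xc0\x00\x02\x01'
_addr_v4 = inet_ntop(socket.AF_INET, _packed_v4)          # '192.0.2.1'
_addr_v6 = inet_ntop(socket.AF_INET6,
                     inet_pton(socket.AF_INET6, '2001:db8::1'))  # '2001:db8::1'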


@@ -0,0 +1,22 @@
# coding: utf-8
from __future__ import unicode_literals, division, absolute_import, print_function
def fill_width(bytes_, width):
"""
Ensure a byte string representing a positive integer is a specific width
(in bytes)
:param bytes_:
The integer byte string
:param width:
The desired width as an integer
:return:
A byte string of the width specified
"""
while len(bytes_) < width:
bytes_ = b'\x00' + bytes_
return bytes_
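# Illustrative usage (not part of the original source): left-pad an
# integer byte string with zero bytes to the requested width.
_padded = fill_width(b'\x01\x02', 4)    # b'\x00\x00\x01\x02'
_same = fill_width(b'\x01\x02', 2)      # unchanged - already 2 bytes wide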


@@ -0,0 +1,291 @@
# coding: utf-8
"""
Functions to convert unicode IRIs into ASCII byte string URIs and back. Exports
the following items:
- iri_to_uri()
- uri_to_iri()
"""
from __future__ import unicode_literals, division, absolute_import, print_function
from encodings import idna # noqa
import codecs
import re
import sys
from ._errors import unwrap
from ._types import byte_cls, str_cls, type_name, bytes_to_list, int_types
if sys.version_info < (3,):
from urlparse import urlsplit, urlunsplit
from urllib import (
quote as urlquote,
unquote as unquote_to_bytes,
)
else:
from urllib.parse import (
quote as urlquote,
unquote_to_bytes,
urlsplit,
urlunsplit,
)
def iri_to_uri(value, normalize=False):
"""
Encodes a unicode IRI into an ASCII byte string URI
:param value:
A unicode string of an IRI
:param normalize:
A bool that controls URI normalization
:return:
A byte string of the ASCII-encoded URI
"""
if not isinstance(value, str_cls):
raise TypeError(unwrap(
'''
value must be a unicode string, not %s
''',
type_name(value)
))
scheme = None
# Python 2.6 doesn't split properly if the URL doesn't start with http:// or https://
if sys.version_info < (2, 7) and not value.startswith('http://') and not value.startswith('https://'):
real_prefix = None
prefix_match = re.match('^[^:]*://', value)
if prefix_match:
real_prefix = prefix_match.group(0)
value = 'http://' + value[len(real_prefix):]
parsed = urlsplit(value)
if real_prefix:
value = real_prefix + value[7:]
scheme = _urlquote(real_prefix[:-3])
else:
parsed = urlsplit(value)
if scheme is None:
scheme = _urlquote(parsed.scheme)
hostname = parsed.hostname
if hostname is not None:
hostname = hostname.encode('idna')
# RFC 3986 allows userinfo to contain sub-delims
username = _urlquote(parsed.username, safe='!$&\'()*+,;=')
password = _urlquote(parsed.password, safe='!$&\'()*+,;=')
port = parsed.port
if port is not None:
port = str_cls(port).encode('ascii')
netloc = b''
if username is not None:
netloc += username
if password:
netloc += b':' + password
netloc += b'@'
if hostname is not None:
netloc += hostname
if port is not None:
default_http = scheme == b'http' and port == b'80'
default_https = scheme == b'https' and port == b'443'
if not normalize or (not default_http and not default_https):
netloc += b':' + port
# RFC 3986 allows a path to contain sub-delims, plus "@" and ":"
path = _urlquote(parsed.path, safe='/!$&\'()*+,;=@:')
# RFC 3986 allows the query to contain sub-delims, plus "@", ":" , "/" and "?"
query = _urlquote(parsed.query, safe='/?!$&\'()*+,;=@:')
# RFC 3986 allows the fragment to contain sub-delims, plus "@", ":" , "/" and "?"
fragment = _urlquote(parsed.fragment, safe='/?!$&\'()*+,;=@:')
if normalize and query is None and fragment is None and path == b'/':
path = None
# Python 2.7 compat
if path is None:
path = ''
output = urlunsplit((scheme, netloc, path, query, fragment))
if isinstance(output, str_cls):
output = output.encode('latin1')
return output
def uri_to_iri(value):
"""
Converts an ASCII URI byte string into a unicode IRI
:param value:
An ASCII-encoded byte string of the URI
:return:
A unicode string of the IRI
"""
if not isinstance(value, byte_cls):
raise TypeError(unwrap(
'''
value must be a byte string, not %s
''',
type_name(value)
))
parsed = urlsplit(value)
scheme = parsed.scheme
if scheme is not None:
scheme = scheme.decode('ascii')
username = _urlunquote(parsed.username, remap=[':', '@'])
password = _urlunquote(parsed.password, remap=[':', '@'])
hostname = parsed.hostname
if hostname:
hostname = hostname.decode('idna')
port = parsed.port
if port and not isinstance(port, int_types):
port = port.decode('ascii')
netloc = ''
if username is not None:
netloc += username
if password:
netloc += ':' + password
netloc += '@'
if hostname is not None:
netloc += hostname
if port is not None:
netloc += ':' + str_cls(port)
path = _urlunquote(parsed.path, remap=['/'], preserve=True)
query = _urlunquote(parsed.query, remap=['&', '='], preserve=True)
fragment = _urlunquote(parsed.fragment)
return urlunsplit((scheme, netloc, path, query, fragment))
def _iri_utf8_errors_handler(exc):
"""
Error handler for decoding UTF-8 parts of a URI into an IRI. Leaves byte
sequences encoded in %XX format, but as part of a unicode string.
:param exc:
The UnicodeDecodeError exception
:return:
A 2-element tuple of (replacement unicode string, integer index to
resume at)
"""
bytes_as_ints = bytes_to_list(exc.object[exc.start:exc.end])
replacements = ['%%%02x' % num for num in bytes_as_ints]
return (''.join(replacements), exc.end)
codecs.register_error('iriutf8', _iri_utf8_errors_handler)
def _urlquote(string, safe=''):
"""
Quotes a unicode string for use in a URL
:param string:
A unicode string
:param safe:
A unicode string of character to not encode
:return:
None (if string is None) or an ASCII byte string of the quoted string
"""
if string is None or string == '':
return None
# Anything already hex quoted is pulled out of the URL and unquoted if
# possible
escapes = []
if re.search('%[0-9a-fA-F]{2}', string):
# Try to unquote any percent values, restoring them if they are not
# valid UTF-8. Also, requote any safe chars since encoded versions of
# those are functionally different than the unquoted ones.
def _try_unescape(match):
byte_string = unquote_to_bytes(match.group(0))
unicode_string = byte_string.decode('utf-8', 'iriutf8')
for safe_char in list(safe):
unicode_string = unicode_string.replace(safe_char, '%%%02x' % ord(safe_char))
return unicode_string
string = re.sub('(?:%[0-9a-fA-F]{2})+', _try_unescape, string)
# Once we have the minimal set of hex quoted values, remove them from
# the string so that they are not double quoted
def _extract_escape(match):
escapes.append(match.group(0).encode('ascii'))
return '\x00'
string = re.sub('%[0-9a-fA-F]{2}', _extract_escape, string)
output = urlquote(string.encode('utf-8'), safe=safe.encode('utf-8'))
if not isinstance(output, byte_cls):
output = output.encode('ascii')
# Restore the existing quoted values that we extracted
if len(escapes) > 0:
def _return_escape(_):
return escapes.pop(0)
output = re.sub(b'%00', _return_escape, output)
return output
def _urlunquote(byte_string, remap=None, preserve=None):
"""
Unquotes a URI portion from a byte string into unicode using UTF-8
:param byte_string:
A byte string of the data to unquote
:param remap:
A list of characters (as unicode) that should be re-mapped to a
%XX encoding. This is used when characters are not valid in part of a
URL.
:param preserve:
A bool - indicates that the chars to be remapped if they occur in
non-hex form, should be preserved. E.g. / for URL path.
:return:
A unicode string
"""
if byte_string is None:
return byte_string
if byte_string == b'':
return ''
if preserve:
replacements = ['\x1A', '\x1C', '\x1D', '\x1E', '\x1F']
preserve_unmap = {}
for char in remap:
replacement = replacements.pop(0)
preserve_unmap[replacement] = char
byte_string = byte_string.replace(char.encode('ascii'), replacement.encode('ascii'))
byte_string = unquote_to_bytes(byte_string)
if remap:
for char in remap:
byte_string = byte_string.replace(char.encode('ascii'), ('%%%02x' % ord(char)).encode('ascii'))
output = byte_string.decode('utf-8', 'iriutf8')
if preserve:
for replacement, original in preserve_unmap.items():
output = output.replace(replacement, original)
return output
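# Illustrative sketch (not part of the original source): encode a
# non-ASCII IRI to an ASCII byte-string URI and decode it back. The exact
# percent-encoding shown is an expectation, not a verified output, and the
# variable names are hypothetical.
_uri = iri_to_uri('https://example.com/caf\u00e9?q=1')
# e.g. b'https://example.com/caf%C3%A9?q=1'
_iri = uri_to_iri(_uri)
# e.g. 'https://example.com/caf\xe9?q=1'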


@@ -0,0 +1,135 @@
# Copyright (c) 2009 Raymond Hettinger
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
import sys
if not sys.version_info < (2, 7):
from collections import OrderedDict
else:
from UserDict import DictMixin
class OrderedDict(dict, DictMixin):
def __init__(self, *args, **kwds):
if len(args) > 1:
raise TypeError('expected at most 1 arguments, got %d' % len(args))
try:
self.__end
except AttributeError:
self.clear()
self.update(*args, **kwds)
def clear(self):
self.__end = end = []
end += [None, end, end] # sentinel node for doubly linked list
self.__map = {} # key --> [key, prev, next]
dict.clear(self)
def __setitem__(self, key, value):
if key not in self:
end = self.__end
curr = end[1]
curr[2] = end[1] = self.__map[key] = [key, curr, end]
dict.__setitem__(self, key, value)
def __delitem__(self, key):
dict.__delitem__(self, key)
key, prev, next_ = self.__map.pop(key)
prev[2] = next_
next_[1] = prev
def __iter__(self):
end = self.__end
curr = end[2]
while curr is not end:
yield curr[0]
curr = curr[2]
def __reversed__(self):
end = self.__end
curr = end[1]
while curr is not end:
yield curr[0]
curr = curr[1]
def popitem(self, last=True):
if not self:
raise KeyError('dictionary is empty')
if last:
key = reversed(self).next()
else:
key = iter(self).next()
value = self.pop(key)
return key, value
def __reduce__(self):
items = [[k, self[k]] for k in self]
tmp = self.__map, self.__end
del self.__map, self.__end
inst_dict = vars(self).copy()
self.__map, self.__end = tmp
if inst_dict:
return (self.__class__, (items,), inst_dict)
return self.__class__, (items,)
def keys(self):
return list(self)
setdefault = DictMixin.setdefault
update = DictMixin.update
pop = DictMixin.pop
values = DictMixin.values
items = DictMixin.items
iterkeys = DictMixin.iterkeys
itervalues = DictMixin.itervalues
iteritems = DictMixin.iteritems
def __repr__(self):
if not self:
return '%s()' % (self.__class__.__name__,)
return '%s(%r)' % (self.__class__.__name__, self.items())
def copy(self):
return self.__class__(self)
@classmethod
def fromkeys(cls, iterable, value=None):
d = cls()
for key in iterable:
d[key] = value
return d
def __eq__(self, other):
if isinstance(other, OrderedDict):
if len(self) != len(other):
return False
for p, q in zip(self.items(), other.items()):
if p != q:
return False
return True
return dict.__eq__(self, other)
def __ne__(self, other):
return not self == other


@@ -0,0 +1,331 @@
# coding: utf-8
"""
Implementation of the teletex T.61 codec. Exports the following items:
- register()
"""
from __future__ import unicode_literals, division, absolute_import, print_function
import codecs
class TeletexCodec(codecs.Codec):
def encode(self, input_, errors='strict'):
return codecs.charmap_encode(input_, errors, ENCODING_TABLE)
def decode(self, input_, errors='strict'):
return codecs.charmap_decode(input_, errors, DECODING_TABLE)
class TeletexIncrementalEncoder(codecs.IncrementalEncoder):
def encode(self, input_, final=False):
return codecs.charmap_encode(input_, self.errors, ENCODING_TABLE)[0]
class TeletexIncrementalDecoder(codecs.IncrementalDecoder):
def decode(self, input_, final=False):
return codecs.charmap_decode(input_, self.errors, DECODING_TABLE)[0]
class TeletexStreamWriter(TeletexCodec, codecs.StreamWriter):
pass
class TeletexStreamReader(TeletexCodec, codecs.StreamReader):
pass
def teletex_search_function(name):
"""
Search function for teletex codec that is passed to codecs.register()
"""
if name != 'teletex':
return None
return codecs.CodecInfo(
name='teletex',
encode=TeletexCodec().encode,
decode=TeletexCodec().decode,
incrementalencoder=TeletexIncrementalEncoder,
incrementaldecoder=TeletexIncrementalDecoder,
streamreader=TeletexStreamReader,
streamwriter=TeletexStreamWriter,
)
def register():
"""
Registers the teletex codec
"""
codecs.register(teletex_search_function)
# http://en.wikipedia.org/wiki/ITU_T.61
DECODING_TABLE = (
'\u0000'
'\u0001'
'\u0002'
'\u0003'
'\u0004'
'\u0005'
'\u0006'
'\u0007'
'\u0008'
'\u0009'
'\u000A'
'\u000B'
'\u000C'
'\u000D'
'\u000E'
'\u000F'
'\u0010'
'\u0011'
'\u0012'
'\u0013'
'\u0014'
'\u0015'
'\u0016'
'\u0017'
'\u0018'
'\u0019'
'\u001A'
'\u001B'
'\u001C'
'\u001D'
'\u001E'
'\u001F'
'\u0020'
'\u0021'
'\u0022'
'\ufffe'
'\ufffe'
'\u0025'
'\u0026'
'\u0027'
'\u0028'
'\u0029'
'\u002A'
'\u002B'
'\u002C'
'\u002D'
'\u002E'
'\u002F'
'\u0030'
'\u0031'
'\u0032'
'\u0033'
'\u0034'
'\u0035'
'\u0036'
'\u0037'
'\u0038'
'\u0039'
'\u003A'
'\u003B'
'\u003C'
'\u003D'
'\u003E'
'\u003F'
'\u0040'
'\u0041'
'\u0042'
'\u0043'
'\u0044'
'\u0045'
'\u0046'
'\u0047'
'\u0048'
'\u0049'
'\u004A'
'\u004B'
'\u004C'
'\u004D'
'\u004E'
'\u004F'
'\u0050'
'\u0051'
'\u0052'
'\u0053'
'\u0054'
'\u0055'
'\u0056'
'\u0057'
'\u0058'
'\u0059'
'\u005A'
'\u005B'
'\ufffe'
'\u005D'
'\ufffe'
'\u005F'
'\ufffe'
'\u0061'
'\u0062'
'\u0063'
'\u0064'
'\u0065'
'\u0066'
'\u0067'
'\u0068'
'\u0069'
'\u006A'
'\u006B'
'\u006C'
'\u006D'
'\u006E'
'\u006F'
'\u0070'
'\u0071'
'\u0072'
'\u0073'
'\u0074'
'\u0075'
'\u0076'
'\u0077'
'\u0078'
'\u0079'
'\u007A'
'\ufffe'
'\u007C'
'\ufffe'
'\ufffe'
'\u007F'
'\u0080'
'\u0081'
'\u0082'
'\u0083'
'\u0084'
'\u0085'
'\u0086'
'\u0087'
'\u0088'
'\u0089'
'\u008A'
'\u008B'
'\u008C'
'\u008D'
'\u008E'
'\u008F'
'\u0090'
'\u0091'
'\u0092'
'\u0093'
'\u0094'
'\u0095'
'\u0096'
'\u0097'
'\u0098'
'\u0099'
'\u009A'
'\u009B'
'\u009C'
'\u009D'
'\u009E'
'\u009F'
'\u00A0'
'\u00A1'
'\u00A2'
'\u00A3'
'\u0024'
'\u00A5'
'\u0023'
'\u00A7'
'\u00A4'
'\ufffe'
'\ufffe'
'\u00AB'
'\ufffe'
'\ufffe'
'\ufffe'
'\ufffe'
'\u00B0'
'\u00B1'
'\u00B2'
'\u00B3'
'\u00D7'
'\u00B5'
'\u00B6'
'\u00B7'
'\u00F7'
'\ufffe'
'\ufffe'
'\u00BB'
'\u00BC'
'\u00BD'
'\u00BE'
'\u00BF'
'\ufffe'
'\u0300'
'\u0301'
'\u0302'
'\u0303'
'\u0304'
'\u0306'
'\u0307'
'\u0308'
'\ufffe'
'\u030A'
'\u0327'
'\u0332'
'\u030B'
'\u0328'
'\u030C'
'\ufffe'
'\ufffe'
'\ufffe'
'\ufffe'
'\ufffe'
'\ufffe'
'\ufffe'
'\ufffe'
'\ufffe'
'\ufffe'
'\ufffe'
'\ufffe'
'\ufffe'
'\ufffe'
'\ufffe'
'\ufffe'
'\u2126'
'\u00C6'
'\u00D0'
'\u00AA'
'\u0126'
'\ufffe'
'\u0132'
'\u013F'
'\u0141'
'\u00D8'
'\u0152'
'\u00BA'
'\u00DE'
'\u0166'
'\u014A'
'\u0149'
'\u0138'
'\u00E6'
'\u0111'
'\u00F0'
'\u0127'
'\u0131'
'\u0133'
'\u0140'
'\u0142'
'\u00F8'
'\u0153'
'\u00DF'
'\u00FE'
'\u0167'
'\u014B'
'\ufffe'
)
ENCODING_TABLE = codecs.charmap_build(DECODING_TABLE)
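# Illustrative usage sketch (not part of the upstream file). Once register()
# has been called, 'teletex' behaves like any other Python codec and maps
# bytes through DECODING_TABLE above (0xE0, for example, is the Ohm sign):
#
#     register()
#     b'\xe0'.decode('teletex')    # '\u2126'
#     'a'.encode('teletex')        # b'\x61'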

View File

@ -0,0 +1,46 @@
# coding: utf-8
from __future__ import unicode_literals, division, absolute_import, print_function
import inspect
import sys
if sys.version_info < (3,):
str_cls = unicode # noqa
byte_cls = str
int_types = (int, long) # noqa
def bytes_to_list(byte_string):
return [ord(b) for b in byte_string]
chr_cls = chr
else:
str_cls = str
byte_cls = bytes
int_types = int
bytes_to_list = list
def chr_cls(num):
return bytes([num])
def type_name(value):
"""
Returns a user-readable name for the type of an object
:param value:
A value to get the type name of
:return:
A unicode string of the object's type name
"""
if inspect.isclass(value):
cls = value
else:
cls = value.__class__
if cls.__module__ in set(['builtins', '__builtin__']):
return cls.__name__
return '%s.%s' % (cls.__module__, cls.__name__)
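# Illustrative usage sketch (not part of the upstream file):
#
#     type_name(1)          # 'int' - builtin types are reported bare
#     type_name(b'data')    # 'bytes' on Python 3, 'str' on Python 2
#
#     class Widget(object):
#         pass
#     type_name(Widget())   # '__main__.Widget' when defined in a script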

File diff suppressed because it is too large Load Diff

1003
jc/parsers/asn1crypto/cms.py Normal file

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,536 @@
# coding: utf-8
"""
ASN.1 type classes for certificate revocation lists (CRL). Exports the
following items:
- CertificateList()
Other type classes are defined that help compose the types listed above.
"""
from __future__ import unicode_literals, division, absolute_import, print_function
import hashlib
from .algos import SignedDigestAlgorithm
from .core import (
Boolean,
Enumerated,
GeneralizedTime,
Integer,
ObjectIdentifier,
OctetBitString,
ParsableOctetString,
Sequence,
SequenceOf,
)
from .x509 import (
AuthorityInfoAccessSyntax,
AuthorityKeyIdentifier,
CRLDistributionPoints,
DistributionPointName,
GeneralNames,
Name,
ReasonFlags,
Time,
)
# The structures in this file are taken from https://tools.ietf.org/html/rfc5280
class Version(Integer):
_map = {
0: 'v1',
1: 'v2',
2: 'v3',
}
class IssuingDistributionPoint(Sequence):
_fields = [
('distribution_point', DistributionPointName, {'explicit': 0, 'optional': True}),
('only_contains_user_certs', Boolean, {'implicit': 1, 'default': False}),
('only_contains_ca_certs', Boolean, {'implicit': 2, 'default': False}),
('only_some_reasons', ReasonFlags, {'implicit': 3, 'optional': True}),
('indirect_crl', Boolean, {'implicit': 4, 'default': False}),
('only_contains_attribute_certs', Boolean, {'implicit': 5, 'default': False}),
]
class TBSCertListExtensionId(ObjectIdentifier):
_map = {
'2.5.29.18': 'issuer_alt_name',
'2.5.29.20': 'crl_number',
'2.5.29.27': 'delta_crl_indicator',
'2.5.29.28': 'issuing_distribution_point',
'2.5.29.35': 'authority_key_identifier',
'2.5.29.46': 'freshest_crl',
'1.3.6.1.5.5.7.1.1': 'authority_information_access',
}
class TBSCertListExtension(Sequence):
_fields = [
('extn_id', TBSCertListExtensionId),
('critical', Boolean, {'default': False}),
('extn_value', ParsableOctetString),
]
_oid_pair = ('extn_id', 'extn_value')
_oid_specs = {
'issuer_alt_name': GeneralNames,
'crl_number': Integer,
'delta_crl_indicator': Integer,
'issuing_distribution_point': IssuingDistributionPoint,
'authority_key_identifier': AuthorityKeyIdentifier,
'freshest_crl': CRLDistributionPoints,
'authority_information_access': AuthorityInfoAccessSyntax,
}
class TBSCertListExtensions(SequenceOf):
_child_spec = TBSCertListExtension
class CRLReason(Enumerated):
_map = {
0: 'unspecified',
1: 'key_compromise',
2: 'ca_compromise',
3: 'affiliation_changed',
4: 'superseded',
5: 'cessation_of_operation',
6: 'certificate_hold',
8: 'remove_from_crl',
9: 'privilege_withdrawn',
10: 'aa_compromise',
}
@property
def human_friendly(self):
"""
:return:
A unicode string with a revocation description that is suitable to
show to end-users. Starts with a lower case letter and is phrased in
such a way that it makes sense after the phrase "because of" or
"due to".
"""
return {
'unspecified': 'an unspecified reason',
'key_compromise': 'a compromised key',
'ca_compromise': 'the CA being compromised',
'affiliation_changed': 'an affiliation change',
'superseded': 'certificate supersession',
'cessation_of_operation': 'a cessation of operation',
'certificate_hold': 'a certificate hold',
'remove_from_crl': 'removal from the CRL',
'privilege_withdrawn': 'privilege withdrawal',
'aa_compromise': 'the AA being compromised',
}[self.native]
class CRLEntryExtensionId(ObjectIdentifier):
_map = {
'2.5.29.21': 'crl_reason',
'2.5.29.23': 'hold_instruction_code',
'2.5.29.24': 'invalidity_date',
'2.5.29.29': 'certificate_issuer',
}
class CRLEntryExtension(Sequence):
_fields = [
('extn_id', CRLEntryExtensionId),
('critical', Boolean, {'default': False}),
('extn_value', ParsableOctetString),
]
_oid_pair = ('extn_id', 'extn_value')
_oid_specs = {
'crl_reason': CRLReason,
'hold_instruction_code': ObjectIdentifier,
'invalidity_date': GeneralizedTime,
'certificate_issuer': GeneralNames,
}
class CRLEntryExtensions(SequenceOf):
_child_spec = CRLEntryExtension
class RevokedCertificate(Sequence):
_fields = [
('user_certificate', Integer),
('revocation_date', Time),
('crl_entry_extensions', CRLEntryExtensions, {'optional': True}),
]
_processed_extensions = False
_critical_extensions = None
_crl_reason_value = None
_invalidity_date_value = None
_certificate_issuer_value = None
_issuer_name = False
def _set_extensions(self):
"""
Sets common named extensions to private attributes and creates a list
of critical extensions
"""
self._critical_extensions = set()
for extension in self['crl_entry_extensions']:
name = extension['extn_id'].native
attribute_name = '_%s_value' % name
if hasattr(self, attribute_name):
setattr(self, attribute_name, extension['extn_value'].parsed)
if extension['critical'].native:
self._critical_extensions.add(name)
self._processed_extensions = True
@property
def critical_extensions(self):
"""
Returns a set of the names (or OID if not a known extension) of the
extensions marked as critical
:return:
A set of unicode strings
"""
if not self._processed_extensions:
self._set_extensions()
return self._critical_extensions
@property
def crl_reason_value(self):
"""
This extension indicates the reason that a certificate was revoked.
:return:
None or a CRLReason object
"""
if self._processed_extensions is False:
self._set_extensions()
return self._crl_reason_value
@property
def invalidity_date_value(self):
"""
This extension indicates the suspected date/time the private key was
compromised or the certificate became invalid. This would usually be
before the revocation date, which is when the CA processed the
revocation.
:return:
None or a GeneralizedTime object
"""
if self._processed_extensions is False:
self._set_extensions()
return self._invalidity_date_value
@property
def certificate_issuer_value(self):
"""
This extension indicates the issuer of the certificate in question,
and is used in indirect CRLs. CRL entries without this extension are
for certificates issued from the last seen issuer.
:return:
None or an x509.GeneralNames object
"""
if self._processed_extensions is False:
self._set_extensions()
return self._certificate_issuer_value
@property
def issuer_name(self):
"""
:return:
None, or an asn1crypto.x509.Name object for the issuer of the cert
"""
if self._issuer_name is False:
self._issuer_name = None
if self.certificate_issuer_value:
for general_name in self.certificate_issuer_value:
if general_name.name == 'directory_name':
self._issuer_name = general_name.chosen
break
return self._issuer_name
class RevokedCertificates(SequenceOf):
_child_spec = RevokedCertificate
class TbsCertList(Sequence):
_fields = [
('version', Version, {'optional': True}),
('signature', SignedDigestAlgorithm),
('issuer', Name),
('this_update', Time),
('next_update', Time, {'optional': True}),
('revoked_certificates', RevokedCertificates, {'optional': True}),
('crl_extensions', TBSCertListExtensions, {'explicit': 0, 'optional': True}),
]
class CertificateList(Sequence):
_fields = [
('tbs_cert_list', TbsCertList),
('signature_algorithm', SignedDigestAlgorithm),
('signature', OctetBitString),
]
_processed_extensions = False
_critical_extensions = None
_issuer_alt_name_value = None
_crl_number_value = None
_delta_crl_indicator_value = None
_issuing_distribution_point_value = None
_authority_key_identifier_value = None
_freshest_crl_value = None
_authority_information_access_value = None
_issuer_cert_urls = None
_delta_crl_distribution_points = None
_sha1 = None
_sha256 = None
def _set_extensions(self):
"""
Sets common named extensions to private attributes and creates a list
of critical extensions
"""
self._critical_extensions = set()
for extension in self['tbs_cert_list']['crl_extensions']:
name = extension['extn_id'].native
attribute_name = '_%s_value' % name
if hasattr(self, attribute_name):
setattr(self, attribute_name, extension['extn_value'].parsed)
if extension['critical'].native:
self._critical_extensions.add(name)
self._processed_extensions = True
@property
def critical_extensions(self):
"""
Returns a set of the names (or OID if not a known extension) of the
extensions marked as critical
:return:
A set of unicode strings
"""
if not self._processed_extensions:
self._set_extensions()
return self._critical_extensions
@property
def issuer_alt_name_value(self):
"""
This extension allows associating one or more alternative names with
the issuer of the CRL.
:return:
None or an x509.GeneralNames object
"""
if self._processed_extensions is False:
self._set_extensions()
return self._issuer_alt_name_value
@property
def crl_number_value(self):
"""
This extension adds a monotonically increasing number to the CRL and is
used to distinguish different versions of the CRL.
:return:
None or an Integer object
"""
if self._processed_extensions is False:
self._set_extensions()
return self._crl_number_value
@property
def delta_crl_indicator_value(self):
"""
This extension indicates a CRL is a delta CRL, and contains the CRL
number of the base CRL that it is a delta from.
:return:
None or an Integer object
"""
if self._processed_extensions is False:
self._set_extensions()
return self._delta_crl_indicator_value
@property
def issuing_distribution_point_value(self):
"""
This extension includes information about what types of revocations
and certificates are part of the CRL.
:return:
None or an IssuingDistributionPoint object
"""
if self._processed_extensions is False:
self._set_extensions()
return self._issuing_distribution_point_value
@property
def authority_key_identifier_value(self):
"""
This extension helps in identifying the public key with which to
validate the authenticity of the CRL.
:return:
None or an AuthorityKeyIdentifier object
"""
if self._processed_extensions is False:
self._set_extensions()
return self._authority_key_identifier_value
@property
def freshest_crl_value(self):
"""
This extension is used in complete CRLs to indicate where a delta CRL
may be located.
:return:
None or a CRLDistributionPoints object
"""
if self._processed_extensions is False:
self._set_extensions()
return self._freshest_crl_value
@property
def authority_information_access_value(self):
"""
This extension is used to provide a URL with which to download the
certificate used to sign this CRL.
:return:
None or an AuthorityInfoAccessSyntax object
"""
if self._processed_extensions is False:
self._set_extensions()
return self._authority_information_access_value
@property
def issuer(self):
"""
:return:
An asn1crypto.x509.Name object for the issuer of the CRL
"""
return self['tbs_cert_list']['issuer']
@property
def authority_key_identifier(self):
"""
:return:
None or a byte string of the key_identifier from the authority key
identifier extension
"""
if not self.authority_key_identifier_value:
return None
return self.authority_key_identifier_value['key_identifier'].native
@property
def issuer_cert_urls(self):
"""
:return:
A list of unicode strings that are URLs that should contain either
an individual DER-encoded X.509 certificate, or a DER-encoded CMS
message containing multiple certificates
"""
if self._issuer_cert_urls is None:
self._issuer_cert_urls = []
if self.authority_information_access_value:
for entry in self.authority_information_access_value:
if entry['access_method'].native == 'ca_issuers':
location = entry['access_location']
if location.name != 'uniform_resource_identifier':
continue
url = location.native
if url.lower()[0:7] == 'http://':
self._issuer_cert_urls.append(url)
return self._issuer_cert_urls
@property
def delta_crl_distribution_points(self):
"""
Returns delta CRL URLs - only applies to complete CRLs
:return:
A list of zero or more DistributionPoint objects
"""
if self._delta_crl_distribution_points is None:
self._delta_crl_distribution_points = []
if self.freshest_crl_value is not None:
for distribution_point in self.freshest_crl_value:
distribution_point_name = distribution_point['distribution_point']
# RFC 5280 indicates conforming CA should not use the relative form
if distribution_point_name.name == 'name_relative_to_crl_issuer':
continue
# This library is currently only concerned with HTTP-based CRLs
for general_name in distribution_point_name.chosen:
if general_name.name == 'uniform_resource_identifier':
self._delta_crl_distribution_points.append(distribution_point)
return self._delta_crl_distribution_points
@property
def signature(self):
"""
:return:
A byte string of the signature
"""
return self['signature'].native
@property
def sha1(self):
"""
:return:
The SHA1 hash of the DER-encoded bytes of this certificate list
"""
if self._sha1 is None:
self._sha1 = hashlib.sha1(self.dump()).digest()
return self._sha1
@property
def sha256(self):
"""
:return:
The SHA-256 hash of the DER-encoded bytes of this certificate list
"""
if self._sha256 is None:
self._sha256 = hashlib.sha256(self.dump()).digest()
return self._sha256
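# Illustrative usage sketch (not part of the upstream file; crl_der is a
# hypothetical byte string holding a DER-encoded CRL):
#
#     crl = CertificateList.load(crl_der)
#     crl.issuer.native          # ordered dict of issuer name components
#     crl.crl_number_value       # Integer object, or None if the extension is absent
#     crl.sha256                 # byte string SHA-256 hash of the DER encoding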

View File

@ -0,0 +1,133 @@
# coding: utf-8
"""
ASN.1 type classes for certificate signing requests (CSR). Exports the
following items:
- CertificationRequest()
Other type classes are defined that help compose the types listed above.
"""
from __future__ import unicode_literals, division, absolute_import, print_function
from .algos import SignedDigestAlgorithm
from .core import (
Any,
BitString,
BMPString,
Integer,
ObjectIdentifier,
OctetBitString,
Sequence,
SetOf,
UTF8String
)
from .keys import PublicKeyInfo
from .x509 import DirectoryString, Extensions, Name
# The structures in this file are taken from https://tools.ietf.org/html/rfc2986
# and https://tools.ietf.org/html/rfc2985
class Version(Integer):
_map = {
0: 'v1',
}
class CSRAttributeType(ObjectIdentifier):
_map = {
'1.2.840.113549.1.9.7': 'challenge_password',
'1.2.840.113549.1.9.9': 'extended_certificate_attributes',
'1.2.840.113549.1.9.14': 'extension_request',
# https://docs.microsoft.com/en-us/openspecs/windows_protocols/ms-wcce/a5eaae36-e9f3-4dc5-a687-bfa7115954f1
'1.3.6.1.4.1.311.13.2.2': 'microsoft_enrollment_csp_provider',
# https://docs.microsoft.com/en-us/openspecs/windows_protocols/ms-wcce/7c677cba-030d-48be-ba2b-01e407705f34
'1.3.6.1.4.1.311.13.2.3': 'microsoft_os_version',
# https://docs.microsoft.com/en-us/openspecs/windows_protocols/ms-wcce/64e5ff6d-c6dd-4578-92f7-b3d895f9b9c7
'1.3.6.1.4.1.311.21.20': 'microsoft_request_client_info',
}
class SetOfDirectoryString(SetOf):
_child_spec = DirectoryString
class Attribute(Sequence):
_fields = [
('type', ObjectIdentifier),
('values', SetOf, {'spec': Any}),
]
class SetOfAttributes(SetOf):
_child_spec = Attribute
class SetOfExtensions(SetOf):
_child_spec = Extensions
class MicrosoftEnrollmentCSProvider(Sequence):
_fields = [
('keyspec', Integer),
('cspname', BMPString), # cryptographic service provider name
('signature', BitString),
]
class SetOfMicrosoftEnrollmentCSProvider(SetOf):
_child_spec = MicrosoftEnrollmentCSProvider
class MicrosoftRequestClientInfo(Sequence):
_fields = [
('clientid', Integer),
('machinename', UTF8String),
('username', UTF8String),
('processname', UTF8String),
]
class SetOfMicrosoftRequestClientInfo(SetOf):
_child_spec = MicrosoftRequestClientInfo
class CRIAttribute(Sequence):
_fields = [
('type', CSRAttributeType),
('values', Any),
]
_oid_pair = ('type', 'values')
_oid_specs = {
'challenge_password': SetOfDirectoryString,
'extended_certificate_attributes': SetOfAttributes,
'extension_request': SetOfExtensions,
'microsoft_enrollment_csp_provider': SetOfMicrosoftEnrollmentCSProvider,
'microsoft_os_version': SetOfDirectoryString,
'microsoft_request_client_info': SetOfMicrosoftRequestClientInfo,
}
class CRIAttributes(SetOf):
_child_spec = CRIAttribute
class CertificationRequestInfo(Sequence):
_fields = [
('version', Version),
('subject', Name),
('subject_pk_info', PublicKeyInfo),
('attributes', CRIAttributes, {'implicit': 0, 'optional': True}),
]
class CertificationRequest(Sequence):
_fields = [
('certification_request_info', CertificationRequestInfo),
('signature_algorithm', SignedDigestAlgorithm),
('signature', OctetBitString),
]
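# Illustrative usage sketch (not part of the upstream file; csr_der is a
# hypothetical byte string holding a DER-encoded PKCS#10 request):
#
#     csr = CertificationRequest.load(csr_der)
#     info = csr['certification_request_info']
#     info['subject'].native                                      # subject name components
#     info['subject_pk_info']['algorithm']['algorithm'].native    # e.g. 'rsa'
#     csr['signature_algorithm']['algorithm'].native               # e.g. 'sha256_rsa'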

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,703 @@
# coding: utf-8
"""
ASN.1 type classes for the online certificate status protocol (OCSP). Exports
the following items:
- OCSPRequest()
- OCSPResponse()
Other type classes are defined that help compose the types listed above.
"""
from __future__ import unicode_literals, division, absolute_import, print_function
from ._errors import unwrap
from .algos import DigestAlgorithm, SignedDigestAlgorithm
from .core import (
Boolean,
Choice,
Enumerated,
GeneralizedTime,
IA5String,
Integer,
Null,
ObjectIdentifier,
OctetBitString,
OctetString,
ParsableOctetString,
Sequence,
SequenceOf,
)
from .crl import AuthorityInfoAccessSyntax, CRLReason
from .keys import PublicKeyAlgorithm
from .x509 import Certificate, GeneralName, GeneralNames, Name
# The structures in this file are taken from https://tools.ietf.org/html/rfc6960
class Version(Integer):
_map = {
0: 'v1'
}
class CertId(Sequence):
_fields = [
('hash_algorithm', DigestAlgorithm),
('issuer_name_hash', OctetString),
('issuer_key_hash', OctetString),
('serial_number', Integer),
]
class ServiceLocator(Sequence):
_fields = [
('issuer', Name),
('locator', AuthorityInfoAccessSyntax),
]
class RequestExtensionId(ObjectIdentifier):
_map = {
'1.3.6.1.5.5.7.48.1.7': 'service_locator',
}
class RequestExtension(Sequence):
_fields = [
('extn_id', RequestExtensionId),
('critical', Boolean, {'default': False}),
('extn_value', ParsableOctetString),
]
_oid_pair = ('extn_id', 'extn_value')
_oid_specs = {
'service_locator': ServiceLocator,
}
class RequestExtensions(SequenceOf):
_child_spec = RequestExtension
class Request(Sequence):
_fields = [
('req_cert', CertId),
('single_request_extensions', RequestExtensions, {'explicit': 0, 'optional': True}),
]
_processed_extensions = False
_critical_extensions = None
_service_locator_value = None
def _set_extensions(self):
"""
Sets common named extensions to private attributes and creates a list
of critical extensions
"""
self._critical_extensions = set()
for extension in self['single_request_extensions']:
name = extension['extn_id'].native
attribute_name = '_%s_value' % name
if hasattr(self, attribute_name):
setattr(self, attribute_name, extension['extn_value'].parsed)
if extension['critical'].native:
self._critical_extensions.add(name)
self._processed_extensions = True
@property
def critical_extensions(self):
"""
Returns a set of the names (or OID if not a known extension) of the
extensions marked as critical
:return:
A set of unicode strings
"""
if not self._processed_extensions:
self._set_extensions()
return self._critical_extensions
@property
def service_locator_value(self):
"""
This extension is used when communicating with an OCSP responder that
acts as a proxy for OCSP requests
:return:
None or a ServiceLocator object
"""
if self._processed_extensions is False:
self._set_extensions()
return self._service_locator_value
class Requests(SequenceOf):
_child_spec = Request
class ResponseType(ObjectIdentifier):
_map = {
'1.3.6.1.5.5.7.48.1.1': 'basic_ocsp_response',
}
class AcceptableResponses(SequenceOf):
_child_spec = ResponseType
class PreferredSignatureAlgorithm(Sequence):
_fields = [
('sig_identifier', SignedDigestAlgorithm),
('cert_identifier', PublicKeyAlgorithm, {'optional': True}),
]
class PreferredSignatureAlgorithms(SequenceOf):
_child_spec = PreferredSignatureAlgorithm
class TBSRequestExtensionId(ObjectIdentifier):
_map = {
'1.3.6.1.5.5.7.48.1.2': 'nonce',
'1.3.6.1.5.5.7.48.1.4': 'acceptable_responses',
'1.3.6.1.5.5.7.48.1.8': 'preferred_signature_algorithms',
}
class TBSRequestExtension(Sequence):
_fields = [
('extn_id', TBSRequestExtensionId),
('critical', Boolean, {'default': False}),
('extn_value', ParsableOctetString),
]
_oid_pair = ('extn_id', 'extn_value')
_oid_specs = {
'nonce': OctetString,
'acceptable_responses': AcceptableResponses,
'preferred_signature_algorithms': PreferredSignatureAlgorithms,
}
class TBSRequestExtensions(SequenceOf):
_child_spec = TBSRequestExtension
class TBSRequest(Sequence):
_fields = [
('version', Version, {'explicit': 0, 'default': 'v1'}),
('requestor_name', GeneralName, {'explicit': 1, 'optional': True}),
('request_list', Requests),
('request_extensions', TBSRequestExtensions, {'explicit': 2, 'optional': True}),
]
class Certificates(SequenceOf):
_child_spec = Certificate
class Signature(Sequence):
_fields = [
('signature_algorithm', SignedDigestAlgorithm),
('signature', OctetBitString),
('certs', Certificates, {'explicit': 0, 'optional': True}),
]
class OCSPRequest(Sequence):
_fields = [
('tbs_request', TBSRequest),
('optional_signature', Signature, {'explicit': 0, 'optional': True}),
]
_processed_extensions = False
_critical_extensions = None
_nonce_value = None
_acceptable_responses_value = None
_preferred_signature_algorithms_value = None
def _set_extensions(self):
"""
Sets common named extensions to private attributes and creates a list
of critical extensions
"""
self._critical_extensions = set()
for extension in self['tbs_request']['request_extensions']:
name = extension['extn_id'].native
attribute_name = '_%s_value' % name
if hasattr(self, attribute_name):
setattr(self, attribute_name, extension['extn_value'].parsed)
if extension['critical'].native:
self._critical_extensions.add(name)
self._processed_extensions = True
@property
def critical_extensions(self):
"""
Returns a set of the names (or OID if not a known extension) of the
extensions marked as critical
:return:
A set of unicode strings
"""
if not self._processed_extensions:
self._set_extensions()
return self._critical_extensions
@property
def nonce_value(self):
"""
This extension is used to prevent replay attacks by including a unique,
random value with each request/response pair
:return:
None or an OctetString object
"""
if self._processed_extensions is False:
self._set_extensions()
return self._nonce_value
@property
def acceptable_responses_value(self):
"""
This extension is used to allow the client and server to communicate
with alternative response formats other than just basic_ocsp_response,
although no other formats are defined in the standard.
:return:
None or an AcceptableResponses object
"""
if self._processed_extensions is False:
self._set_extensions()
return self._acceptable_responses_value
@property
def preferred_signature_algorithms_value(self):
"""
This extension is used by the client to define what signature algorithms
are preferred, including both the hash algorithm and the public key
algorithm, with a level of detail down to even the public key algorithm
parameters, such as curve name.
:return:
None or a PreferredSignatureAlgorithms object
"""
if self._processed_extensions is False:
self._set_extensions()
return self._preferred_signature_algorithms_value
class OCSPResponseStatus(Enumerated):
_map = {
0: 'successful',
1: 'malformed_request',
2: 'internal_error',
3: 'try_later',
5: 'sign_required',
6: 'unauthorized',
}
class ResponderId(Choice):
_alternatives = [
('by_name', Name, {'explicit': 1}),
('by_key', OctetString, {'explicit': 2}),
]
# Custom class to return a meaningful .native attribute from CertStatus()
class StatusGood(Null):
def set(self, value):
"""
Sets the value of the object
:param value:
None or 'good'
"""
if value is not None and value != 'good' and not isinstance(value, Null):
raise ValueError(unwrap(
'''
value must be one of None, "good", not %s
''',
repr(value)
))
self.contents = b''
@property
def native(self):
return 'good'
# Custom class to return a meaningful .native attribute from CertStatus()
class StatusUnknown(Null):
def set(self, value):
"""
Sets the value of the object
:param value:
None or 'unknown'
"""
if value is not None and value != 'unknown' and not isinstance(value, Null):
raise ValueError(unwrap(
'''
value must be one of None, "unknown", not %s
''',
repr(value)
))
self.contents = b''
@property
def native(self):
return 'unknown'
class RevokedInfo(Sequence):
_fields = [
('revocation_time', GeneralizedTime),
('revocation_reason', CRLReason, {'explicit': 0, 'optional': True}),
]
class CertStatus(Choice):
_alternatives = [
('good', StatusGood, {'implicit': 0}),
('revoked', RevokedInfo, {'implicit': 1}),
('unknown', StatusUnknown, {'implicit': 2}),
]
class CrlId(Sequence):
_fields = [
('crl_url', IA5String, {'explicit': 0, 'optional': True}),
('crl_num', Integer, {'explicit': 1, 'optional': True}),
('crl_time', GeneralizedTime, {'explicit': 2, 'optional': True}),
]
class SingleResponseExtensionId(ObjectIdentifier):
_map = {
'1.3.6.1.5.5.7.48.1.3': 'crl',
'1.3.6.1.5.5.7.48.1.6': 'archive_cutoff',
# These are CRLEntryExtension values from
# https://tools.ietf.org/html/rfc5280
'2.5.29.21': 'crl_reason',
'2.5.29.24': 'invalidity_date',
'2.5.29.29': 'certificate_issuer',
# https://tools.ietf.org/html/rfc6962.html#page-13
'1.3.6.1.4.1.11129.2.4.5': 'signed_certificate_timestamp_list',
}
class SingleResponseExtension(Sequence):
_fields = [
('extn_id', SingleResponseExtensionId),
('critical', Boolean, {'default': False}),
('extn_value', ParsableOctetString),
]
_oid_pair = ('extn_id', 'extn_value')
_oid_specs = {
'crl': CrlId,
'archive_cutoff': GeneralizedTime,
'crl_reason': CRLReason,
'invalidity_date': GeneralizedTime,
'certificate_issuer': GeneralNames,
'signed_certificate_timestamp_list': OctetString,
}
class SingleResponseExtensions(SequenceOf):
_child_spec = SingleResponseExtension
class SingleResponse(Sequence):
_fields = [
('cert_id', CertId),
('cert_status', CertStatus),
('this_update', GeneralizedTime),
('next_update', GeneralizedTime, {'explicit': 0, 'optional': True}),
('single_extensions', SingleResponseExtensions, {'explicit': 1, 'optional': True}),
]
_processed_extensions = False
_critical_extensions = None
_crl_value = None
_archive_cutoff_value = None
_crl_reason_value = None
_invalidity_date_value = None
_certificate_issuer_value = None
def _set_extensions(self):
"""
Sets common named extensions to private attributes and creates a list
of critical extensions
"""
self._critical_extensions = set()
for extension in self['single_extensions']:
name = extension['extn_id'].native
attribute_name = '_%s_value' % name
if hasattr(self, attribute_name):
setattr(self, attribute_name, extension['extn_value'].parsed)
if extension['critical'].native:
self._critical_extensions.add(name)
self._processed_extensions = True
@property
def critical_extensions(self):
"""
Returns a set of the names (or OID if not a known extension) of the
extensions marked as critical
:return:
A set of unicode strings
"""
if not self._processed_extensions:
self._set_extensions()
return self._critical_extensions
@property
def crl_value(self):
"""
This extension is used to locate the CRL that a certificate's revocation
is contained within.
:return:
None or a CrlId object
"""
if self._processed_extensions is False:
self._set_extensions()
return self._crl_value
@property
def archive_cutoff_value(self):
"""
This extension is used to indicate the date at which an archived
(historical) certificate status entry will no longer be available.
:return:
None or a GeneralizedTime object
"""
if self._processed_extensions is False:
self._set_extensions()
return self._archive_cutoff_value
@property
def crl_reason_value(self):
"""
This extension indicates the reason that a certificate was revoked.
:return:
None or a CRLReason object
"""
if self._processed_extensions is False:
self._set_extensions()
return self._crl_reason_value
@property
def invalidity_date_value(self):
"""
This extension indicates the suspected date/time the private key was
compromised or the certificate became invalid. This would usually be
before the revocation date, which is when the CA processed the
revocation.
:return:
None or a GeneralizedTime object
"""
if self._processed_extensions is False:
self._set_extensions()
return self._invalidity_date_value
@property
def certificate_issuer_value(self):
"""
This extension indicates the issuer of the certificate in question.
:return:
None or an x509.GeneralNames object
"""
if self._processed_extensions is False:
self._set_extensions()
return self._certificate_issuer_value
class Responses(SequenceOf):
_child_spec = SingleResponse
class ResponseDataExtensionId(ObjectIdentifier):
_map = {
'1.3.6.1.5.5.7.48.1.2': 'nonce',
'1.3.6.1.5.5.7.48.1.9': 'extended_revoke',
}
class ResponseDataExtension(Sequence):
_fields = [
('extn_id', ResponseDataExtensionId),
('critical', Boolean, {'default': False}),
('extn_value', ParsableOctetString),
]
_oid_pair = ('extn_id', 'extn_value')
_oid_specs = {
'nonce': OctetString,
'extended_revoke': Null,
}
class ResponseDataExtensions(SequenceOf):
_child_spec = ResponseDataExtension
class ResponseData(Sequence):
_fields = [
('version', Version, {'explicit': 0, 'default': 'v1'}),
('responder_id', ResponderId),
('produced_at', GeneralizedTime),
('responses', Responses),
('response_extensions', ResponseDataExtensions, {'explicit': 1, 'optional': True}),
]
class BasicOCSPResponse(Sequence):
_fields = [
('tbs_response_data', ResponseData),
('signature_algorithm', SignedDigestAlgorithm),
('signature', OctetBitString),
('certs', Certificates, {'explicit': 0, 'optional': True}),
]
class ResponseBytes(Sequence):
_fields = [
('response_type', ResponseType),
('response', ParsableOctetString),
]
_oid_pair = ('response_type', 'response')
_oid_specs = {
'basic_ocsp_response': BasicOCSPResponse,
}
class OCSPResponse(Sequence):
_fields = [
('response_status', OCSPResponseStatus),
('response_bytes', ResponseBytes, {'explicit': 0, 'optional': True}),
]
_processed_extensions = False
_critical_extensions = None
_nonce_value = None
_extended_revoke_value = None
def _set_extensions(self):
"""
Sets common named extensions to private attributes and creates a list
of critical extensions
"""
self._critical_extensions = set()
for extension in self['response_bytes']['response'].parsed['tbs_response_data']['response_extensions']:
name = extension['extn_id'].native
attribute_name = '_%s_value' % name
if hasattr(self, attribute_name):
setattr(self, attribute_name, extension['extn_value'].parsed)
if extension['critical'].native:
self._critical_extensions.add(name)
self._processed_extensions = True
@property
def critical_extensions(self):
"""
Returns a set of the names (or OID if not a known extension) of the
extensions marked as critical
:return:
A set of unicode strings
"""
if not self._processed_extensions:
self._set_extensions()
return self._critical_extensions
@property
def nonce_value(self):
"""
This extension is used to prevent replay attacks on the request/response
exchange
:return:
None or an OctetString object
"""
if self._processed_extensions is False:
self._set_extensions()
return self._nonce_value
@property
def extended_revoke_value(self):
"""
This extension is used to signal that the responder will return a
"revoked" status for non-issued certificates.
:return:
None or a Null object (if present)
"""
if self._processed_extensions is False:
self._set_extensions()
return self._extended_revoke_value
@property
def basic_ocsp_response(self):
"""
A shortcut into the BasicOCSPResponse sequence
:return:
None or an asn1crypto.ocsp.BasicOCSPResponse object
"""
return self['response_bytes']['response'].parsed
@property
def response_data(self):
"""
A shortcut into the parsed, ResponseData sequence
:return:
None or an asn1crypto.ocsp.ResponseData object
"""
return self['response_bytes']['response'].parsed['tbs_response_data']
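# Illustrative usage sketch (not part of the upstream file; resp_der is a
# hypothetical byte string holding a DER-encoded OCSP response):
#
#     resp = OCSPResponse.load(resp_der)
#     resp['response_status'].native      # e.g. 'successful'
#     single = resp.response_data['responses'][0]
#     single['cert_status'].name          # 'good', 'revoked' or 'unknown'
#     single['this_update'].native        # datetime the status was produced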

View File

@ -0,0 +1,292 @@
# coding: utf-8
"""
Functions for parsing and dumping using the ASN.1 DER encoding. Exports the
following items:
- emit()
- parse()
- peek()
Other type classes are defined that help compose the types listed above.
"""
from __future__ import unicode_literals, division, absolute_import, print_function
import sys
from ._types import byte_cls, chr_cls, type_name
from .util import int_from_bytes, int_to_bytes
_PY2 = sys.version_info <= (3,)
_INSUFFICIENT_DATA_MESSAGE = 'Insufficient data - %s bytes requested but only %s available'
_MAX_DEPTH = 10
def emit(class_, method, tag, contents):
"""
Constructs a byte string of an ASN.1 DER-encoded value
This is typically not useful. Instead, use one of the standard classes from
asn1crypto.core, or construct a new class with specific fields, and call the
.dump() method.
:param class_:
An integer ASN.1 class value: 0 (universal), 1 (application),
2 (context), 3 (private)
:param method:
An integer ASN.1 method value: 0 (primitive), 1 (constructed)
:param tag:
An integer ASN.1 tag value
:param contents:
A byte string of the encoded byte contents
:return:
A byte string of the ASN.1 DER value (header and contents)
"""
if not isinstance(class_, int):
raise TypeError('class_ must be an integer, not %s' % type_name(class_))
if class_ < 0 or class_ > 3:
raise ValueError('class_ must be one of 0, 1, 2 or 3, not %s' % class_)
if not isinstance(method, int):
raise TypeError('method must be an integer, not %s' % type_name(method))
if method < 0 or method > 1:
raise ValueError('method must be 0 or 1, not %s' % method)
if not isinstance(tag, int):
raise TypeError('tag must be an integer, not %s' % type_name(tag))
if tag < 0:
raise ValueError('tag must be greater than zero, not %s' % tag)
if not isinstance(contents, byte_cls):
raise TypeError('contents must be a byte string, not %s' % type_name(contents))
return _dump_header(class_, method, tag, contents) + contents
def parse(contents, strict=False):
"""
Parses a byte string of ASN.1 BER/DER-encoded data.
This is typically not useful. Instead, use one of the standard classes from
asn1crypto.core, or construct a new class with specific fields, and call the
.load() class method.
:param contents:
A byte string of BER/DER-encoded data
:param strict:
A boolean indicating if trailing data should be forbidden - if so, a
ValueError will be raised when trailing data exists
:raises:
ValueError - when the contents do not contain an ASN.1 header or are truncated in some way
TypeError - when contents is not a byte string
:return:
A 6-element tuple:
- 0: integer class (0 to 3)
- 1: integer method
- 2: integer tag
- 3: byte string header
- 4: byte string content
- 5: byte string trailer
"""
if not isinstance(contents, byte_cls):
raise TypeError('contents must be a byte string, not %s' % type_name(contents))
contents_len = len(contents)
info, consumed = _parse(contents, contents_len)
if strict and consumed != contents_len:
raise ValueError('Extra data - %d bytes of trailing data were provided' % (contents_len - consumed))
return info
def peek(contents):
"""
Parses a byte string of ASN.1 BER/DER-encoded data to find the length
This is typically used to look into an encoded value to see how long the
next chunk of ASN.1-encoded data is. Primarily it is useful when a
value is a concatenation of multiple values.
:param contents:
A byte string of BER/DER-encoded data
:raises:
ValueError - when the contents do not contain an ASN.1 header or are truncated in some way
TypeError - when contents is not a byte string
:return:
An integer with the number of bytes occupied by the ASN.1 value
"""
if not isinstance(contents, byte_cls):
raise TypeError('contents must be a byte string, not %s' % type_name(contents))
info, consumed = _parse(contents, len(contents))
return consumed
def _parse(encoded_data, data_len, pointer=0, lengths_only=False, depth=0):
"""
Parses a byte string into component parts
:param encoded_data:
A byte string that contains BER-encoded data
:param data_len:
The integer length of the encoded data
:param pointer:
The index in the byte string to parse from
:param lengths_only:
A boolean to cause the call to return a 2-element tuple of the integer
number of bytes in the header and the integer number of bytes in the
contents. Internal use only.
:param depth:
The recursion depth when evaluating indefinite-length encoding.
:return:
A 2-element tuple:
- 0: A tuple of (class_, method, tag, header, content, trailer)
- 1: An integer indicating how many bytes were consumed
"""
if depth > _MAX_DEPTH:
raise ValueError('Indefinite-length recursion limit exceeded')
start = pointer
if data_len < pointer + 1:
raise ValueError(_INSUFFICIENT_DATA_MESSAGE % (1, data_len - pointer))
first_octet = ord(encoded_data[pointer]) if _PY2 else encoded_data[pointer]
pointer += 1
tag = first_octet & 31
constructed = (first_octet >> 5) & 1
# High tag number form: tag encoded base-128, 8th bit as continuation indicator
if tag == 31:
tag = 0
while True:
if data_len < pointer + 1:
raise ValueError(_INSUFFICIENT_DATA_MESSAGE % (1, data_len - pointer))
num = ord(encoded_data[pointer]) if _PY2 else encoded_data[pointer]
pointer += 1
if num == 0x80 and tag == 0:
raise ValueError('Non-minimal tag encoding')
tag *= 128
tag += num & 127
if num >> 7 == 0:
break
if tag < 31:
raise ValueError('Non-minimal tag encoding')
if data_len < pointer + 1:
raise ValueError(_INSUFFICIENT_DATA_MESSAGE % (1, data_len - pointer))
length_octet = ord(encoded_data[pointer]) if _PY2 else encoded_data[pointer]
pointer += 1
trailer = b''
if length_octet >> 7 == 0:
contents_end = pointer + (length_octet & 127)
else:
length_octets = length_octet & 127
if length_octets:
if data_len < pointer + length_octets:
raise ValueError(_INSUFFICIENT_DATA_MESSAGE % (length_octets, data_len - pointer))
pointer += length_octets
contents_end = pointer + int_from_bytes(encoded_data[pointer - length_octets:pointer], signed=False)
else:
# To properly parse indefinite length values, we need to scan forward
# parsing headers until we find a value with a length of zero. If we
# just scanned looking for \x00\x00, nested indefinite length values
# would not work.
if not constructed:
raise ValueError('Indefinite-length element must be constructed')
contents_end = pointer
while data_len < contents_end + 2 or encoded_data[contents_end:contents_end+2] != b'\x00\x00':
_, contents_end = _parse(encoded_data, data_len, contents_end, lengths_only=True, depth=depth+1)
contents_end += 2
trailer = b'\x00\x00'
if contents_end > data_len:
raise ValueError(_INSUFFICIENT_DATA_MESSAGE % (contents_end - pointer, data_len - pointer))
if lengths_only:
return (pointer, contents_end)
return (
(
first_octet >> 6,
constructed,
tag,
encoded_data[start:pointer],
encoded_data[pointer:contents_end-len(trailer)],
trailer
),
contents_end
)
def _dump_header(class_, method, tag, contents):
"""
Constructs the header bytes for an ASN.1 object
:param class_:
An integer ASN.1 class value: 0 (universal), 1 (application),
2 (context), 3 (private)
:param method:
An integer ASN.1 method value: 0 (primitive), 1 (constructed)
:param tag:
An integer ASN.1 tag value
:param contents:
A byte string of the encoded byte contents
:return:
A byte string of the ASN.1 DER header
"""
header = b''
id_num = 0
id_num |= class_ << 6
id_num |= method << 5
if tag >= 31:
cont_bit = 0
while tag > 0:
header = chr_cls(cont_bit | (tag & 0x7f)) + header
if not cont_bit:
cont_bit = 0x80
tag = tag >> 7
header = chr_cls(id_num | 31) + header
else:
header += chr_cls(id_num | tag)
length = len(contents)
if length <= 127:
header += chr_cls(length)
else:
length_bytes = int_to_bytes(length)
header += chr_cls(0x80 | len(length_bytes))
header += length_bytes
return header
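# Illustrative round-trip sketch (not part of the upstream file); the values
# follow directly from emit()/_dump_header() and parse() above:
#
#     emit(0, 0, 2, b'\x05')            # b'\x02\x01\x05' - universal primitive INTEGER 5
#     parse(b'\x02\x01\x05')            # (0, 0, 2, b'\x02\x01', b'\x05', b'')
#     peek(b'\x02\x01\x05' + b'junk')   # 3 - bytes occupied by the first value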

View File

@ -0,0 +1,84 @@
# coding: utf-8
"""
ASN.1 type classes for PDF signature structures. Adds extra oid mapping and
value parsing to asn1crypto.x509.Extension() and asn1crypto.cms.CMSAttribute().
"""
from __future__ import unicode_literals, division, absolute_import, print_function
from .cms import CMSAttributeType, CMSAttribute
from .core import (
Boolean,
Integer,
Null,
ObjectIdentifier,
OctetString,
Sequence,
SequenceOf,
SetOf,
)
from .crl import CertificateList
from .ocsp import OCSPResponse
from .x509 import (
Extension,
ExtensionId,
GeneralName,
KeyPurposeId,
)
class AdobeArchiveRevInfo(Sequence):
_fields = [
('version', Integer)
]
class AdobeTimestamp(Sequence):
_fields = [
('version', Integer),
('location', GeneralName),
('requires_auth', Boolean, {'optional': True, 'default': False}),
]
class OtherRevInfo(Sequence):
_fields = [
('type', ObjectIdentifier),
('value', OctetString),
]
class SequenceOfCertificateList(SequenceOf):
_child_spec = CertificateList
class SequenceOfOCSPResponse(SequenceOf):
_child_spec = OCSPResponse
class SequenceOfOtherRevInfo(SequenceOf):
_child_spec = OtherRevInfo
class RevocationInfoArchival(Sequence):
_fields = [
('crl', SequenceOfCertificateList, {'explicit': 0, 'optional': True}),
('ocsp', SequenceOfOCSPResponse, {'explicit': 1, 'optional': True}),
('other_rev_info', SequenceOfOtherRevInfo, {'explicit': 2, 'optional': True}),
]
class SetOfRevocationInfoArchival(SetOf):
_child_spec = RevocationInfoArchival
ExtensionId._map['1.2.840.113583.1.1.9.2'] = 'adobe_archive_rev_info'
ExtensionId._map['1.2.840.113583.1.1.9.1'] = 'adobe_timestamp'
ExtensionId._map['1.2.840.113583.1.1.10'] = 'adobe_ppklite_credential'
Extension._oid_specs['adobe_archive_rev_info'] = AdobeArchiveRevInfo
Extension._oid_specs['adobe_timestamp'] = AdobeTimestamp
Extension._oid_specs['adobe_ppklite_credential'] = Null
KeyPurposeId._map['1.2.840.113583.1.1.5'] = 'pdf_signing'
CMSAttributeType._map['1.2.840.113583.1.1.8'] = 'adobe_revocation_info_archival'
CMSAttribute._oid_specs['adobe_revocation_info_archival'] = SetOfRevocationInfoArchival
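# Illustrative effect of the registrations above (not part of the upstream
# file): after importing this module, the Adobe OIDs resolve to friendly names
# when X.509 extensions and CMS attributes are parsed, e.g.
#
#     ExtensionId._map['1.2.840.113583.1.1.9.1']    # 'adobe_timestamp'
#     KeyPurposeId._map['1.2.840.113583.1.1.5']     # 'pdf_signing'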

View File

@ -0,0 +1,222 @@
# coding: utf-8
"""
Encoding DER to PEM and decoding PEM to DER. Exports the following items:
- armor()
- detect()
- unarmor()
"""
from __future__ import unicode_literals, division, absolute_import, print_function
import base64
import re
import sys
from ._errors import unwrap
from ._types import type_name as _type_name, str_cls, byte_cls
if sys.version_info < (3,):
from cStringIO import StringIO as BytesIO
else:
from io import BytesIO
def detect(byte_string):
"""
Detect if a byte string seems to contain a PEM-encoded block
:param byte_string:
A byte string to look through
:return:
A boolean, indicating if a PEM-encoded block is contained in the byte
string
"""
if not isinstance(byte_string, byte_cls):
raise TypeError(unwrap(
'''
byte_string must be a byte string, not %s
''',
_type_name(byte_string)
))
return byte_string.find(b'-----BEGIN') != -1 or byte_string.find(b'---- BEGIN') != -1
def armor(type_name, der_bytes, headers=None):
"""
Armors a DER-encoded byte string in PEM
:param type_name:
A unicode string that will be capitalized and placed in the header
and footer of the block. E.g. "CERTIFICATE", "PRIVATE KEY", etc. This
will appear as "-----BEGIN CERTIFICATE-----" and
"-----END CERTIFICATE-----".
:param der_bytes:
A byte string to be armored
:param headers:
An OrderedDict of the header lines to write after the BEGIN line
:return:
A byte string of the PEM block
"""
if not isinstance(der_bytes, byte_cls):
raise TypeError(unwrap(
'''
der_bytes must be a byte string, not %s
''' % _type_name(der_bytes)
))
if not isinstance(type_name, str_cls):
raise TypeError(unwrap(
'''
type_name must be a unicode string, not %s
''',
_type_name(type_name)
))
type_name = type_name.upper().encode('ascii')
output = BytesIO()
output.write(b'-----BEGIN ')
output.write(type_name)
output.write(b'-----\n')
if headers:
for key in headers:
output.write(key.encode('ascii'))
output.write(b': ')
output.write(headers[key].encode('ascii'))
output.write(b'\n')
output.write(b'\n')
b64_bytes = base64.b64encode(der_bytes)
b64_len = len(b64_bytes)
i = 0
while i < b64_len:
output.write(b64_bytes[i:i + 64])
output.write(b'\n')
i += 64
output.write(b'-----END ')
output.write(type_name)
output.write(b'-----\n')
return output.getvalue()
def _unarmor(pem_bytes):
"""
Convert a PEM-encoded byte string into one or more DER-encoded byte strings
:param pem_bytes:
A byte string of the PEM-encoded data
:raises:
ValueError - when the pem_bytes do not appear to be PEM-encoded bytes
:return:
A generator of 3-element tuples in the format: (object_type, headers,
der_bytes). The object_type is a unicode string of what is between
"-----BEGIN " and "-----". Examples include: "CERTIFICATE",
"PUBLIC KEY", "PRIVATE KEY". The headers is a dict containing any lines
in the form "Name: Value" that are right after the begin line.
"""
if not isinstance(pem_bytes, byte_cls):
raise TypeError(unwrap(
'''
pem_bytes must be a byte string, not %s
''',
_type_name(pem_bytes)
))
# Valid states include: "trash", "headers", "body"
state = 'trash'
headers = {}
base64_data = b''
object_type = None
found_start = False
found_end = False
for line in pem_bytes.splitlines(False):
if line == b'':
continue
if state == "trash":
# Look for a starting line since some CA cert bundles show the cert
# info in a parsed format above each PEM block
type_name_match = re.match(b'^(?:---- |-----)BEGIN ([A-Z0-9 ]+)(?: ----|-----)', line)
if not type_name_match:
continue
object_type = type_name_match.group(1).decode('ascii')
found_start = True
state = 'headers'
continue
if state == 'headers':
if line.find(b':') == -1:
state = 'body'
else:
decoded_line = line.decode('ascii')
name, value = decoded_line.split(':', 1)
headers[name] = value.strip()
continue
if state == 'body':
if line[0:5] in (b'-----', b'---- '):
der_bytes = base64.b64decode(base64_data)
yield (object_type, headers, der_bytes)
state = 'trash'
headers = {}
base64_data = b''
object_type = None
found_end = True
continue
base64_data += line
if not found_start or not found_end:
raise ValueError(unwrap(
'''
pem_bytes does not appear to contain PEM-encoded data - no
BEGIN/END combination found
'''
))
def unarmor(pem_bytes, multiple=False):
"""
Convert a PEM-encoded byte string into a DER-encoded byte string
:param pem_bytes:
A byte string of the PEM-encoded data
:param multiple:
If True, function will return a generator
:raises:
ValueError - when the pem_bytes do not appear to be PEM-encoded bytes
:return:
A 3-element tuple (object_name, headers, der_bytes). The object_name is
a unicode string of what is between "-----BEGIN " and "-----". Examples
include: "CERTIFICATE", "PUBLIC KEY", "PRIVATE KEY". The headers is a
dict containing any lines in the form "Name: Value" that are right
after the begin line.
"""
generator = _unarmor(pem_bytes)
if not multiple:
return next(generator)
return generator
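# Illustrative round-trip sketch (not part of the upstream file; der_bytes is
# a hypothetical DER-encoded certificate):
#
#     pem_bytes = armor('CERTIFICATE', der_bytes)
#     detect(pem_bytes)                            # True
#     object_type, headers, der_out = unarmor(pem_bytes)
#     object_type                                  # 'CERTIFICATE'
#     der_out == der_bytes                         # True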

View File

@ -0,0 +1,193 @@
# coding: utf-8
"""
ASN.1 type classes for PKCS#12 files. Exports the following items:
- CertBag()
- CrlBag()
- Pfx()
- SafeBag()
- SecretBag()
Other type classes are defined that help compose the types listed above.
"""
from __future__ import unicode_literals, division, absolute_import, print_function
from .algos import DigestInfo
from .cms import ContentInfo, SignedData
from .core import (
Any,
BMPString,
Integer,
ObjectIdentifier,
OctetString,
ParsableOctetString,
Sequence,
SequenceOf,
SetOf,
)
from .keys import PrivateKeyInfo, EncryptedPrivateKeyInfo
from .x509 import Certificate, KeyPurposeId
# The structures in this file are taken from https://tools.ietf.org/html/rfc7292
class MacData(Sequence):
_fields = [
('mac', DigestInfo),
('mac_salt', OctetString),
('iterations', Integer, {'default': 1}),
]
class Version(Integer):
_map = {
3: 'v3'
}
class AttributeType(ObjectIdentifier):
_map = {
# https://tools.ietf.org/html/rfc2985#page-18
'1.2.840.113549.1.9.20': 'friendly_name',
'1.2.840.113549.1.9.21': 'local_key_id',
# https://support.microsoft.com/en-us/kb/287547
'1.3.6.1.4.1.311.17.1': 'microsoft_local_machine_keyset',
# https://github.com/frohoff/jdk8u-dev-jdk/blob/master/src/share/classes/sun/security/pkcs12/PKCS12KeyStore.java
# this is a set of OIDs representing key usage; the usual value is a SET containing the single OID 2.5.29.37.0
'2.16.840.1.113894.746875.1.1': 'trusted_key_usage',
}
class SetOfAny(SetOf):
_child_spec = Any
class SetOfBMPString(SetOf):
_child_spec = BMPString
class SetOfOctetString(SetOf):
_child_spec = OctetString
class SetOfKeyPurposeId(SetOf):
_child_spec = KeyPurposeId
class Attribute(Sequence):
_fields = [
('type', AttributeType),
('values', None),
]
_oid_specs = {
'friendly_name': SetOfBMPString,
'local_key_id': SetOfOctetString,
'microsoft_csp_name': SetOfBMPString,
'trusted_key_usage': SetOfKeyPurposeId,
}
def _values_spec(self):
return self._oid_specs.get(self['type'].native, SetOfAny)
_spec_callbacks = {
'values': _values_spec
}
class Attributes(SetOf):
_child_spec = Attribute
class Pfx(Sequence):
_fields = [
('version', Version),
('auth_safe', ContentInfo),
('mac_data', MacData, {'optional': True})
]
_authenticated_safe = None
@property
def authenticated_safe(self):
if self._authenticated_safe is None:
content = self['auth_safe']['content']
if isinstance(content, SignedData):
content = content['content_info']['content']
self._authenticated_safe = AuthenticatedSafe.load(content.native)
return self._authenticated_safe
class AuthenticatedSafe(SequenceOf):
_child_spec = ContentInfo
class BagId(ObjectIdentifier):
_map = {
'1.2.840.113549.1.12.10.1.1': 'key_bag',
'1.2.840.113549.1.12.10.1.2': 'pkcs8_shrouded_key_bag',
'1.2.840.113549.1.12.10.1.3': 'cert_bag',
'1.2.840.113549.1.12.10.1.4': 'crl_bag',
'1.2.840.113549.1.12.10.1.5': 'secret_bag',
'1.2.840.113549.1.12.10.1.6': 'safe_contents',
}
class CertId(ObjectIdentifier):
_map = {
'1.2.840.113549.1.9.22.1': 'x509',
'1.2.840.113549.1.9.22.2': 'sdsi',
}
class CertBag(Sequence):
_fields = [
('cert_id', CertId),
('cert_value', ParsableOctetString, {'explicit': 0}),
]
_oid_pair = ('cert_id', 'cert_value')
_oid_specs = {
'x509': Certificate,
}
class CrlBag(Sequence):
_fields = [
('crl_id', ObjectIdentifier),
('crl_value', OctetString, {'explicit': 0}),
]
class SecretBag(Sequence):
_fields = [
('secret_type_id', ObjectIdentifier),
('secret_value', OctetString, {'explicit': 0}),
]
class SafeContents(SequenceOf):
pass
class SafeBag(Sequence):
_fields = [
('bag_id', BagId),
('bag_value', Any, {'explicit': 0}),
('bag_attributes', Attributes, {'optional': True}),
]
_oid_pair = ('bag_id', 'bag_value')
_oid_specs = {
'key_bag': PrivateKeyInfo,
'pkcs8_shrouded_key_bag': EncryptedPrivateKeyInfo,
'cert_bag': CertBag,
'crl_bag': CrlBag,
'secret_bag': SecretBag,
'safe_contents': SafeContents
}
SafeContents._child_spec = SafeBag
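# Illustrative usage sketch (not part of the upstream file; p12_der is a
# hypothetical byte string holding a DER-encoded PKCS#12/PFX container):
#
#     pfx = Pfx.load(p12_der)
#     pfx['version'].native           # 'v3'
#     safe = pfx.authenticated_safe   # AuthenticatedSafe: a sequence of ContentInfo
#     # each ContentInfo wraps (possibly encrypted) SafeContents made up of SafeBag values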

View File

@ -0,0 +1,310 @@
# coding: utf-8
"""
ASN.1 type classes for the time stamp protocol (TSP). Exports the following
items:
- TimeStampReq()
- TimeStampResp()
Also adds TimeStampedData() support to asn1crypto.cms.ContentInfo(),
TimeStampedData() and TSTInfo() support to
asn1crypto.cms.EncapsulatedContentInfo() and some oids and value parsers to
asn1crypto.cms.CMSAttribute().
Other type classes are defined that help compose the types listed above.
"""
from __future__ import unicode_literals, division, absolute_import, print_function
from .algos import DigestAlgorithm
from .cms import (
CMSAttribute,
CMSAttributeType,
ContentInfo,
ContentType,
EncapsulatedContentInfo,
)
from .core import (
Any,
BitString,
Boolean,
Choice,
GeneralizedTime,
IA5String,
Integer,
ObjectIdentifier,
OctetString,
Sequence,
SequenceOf,
SetOf,
UTF8String,
)
from .crl import CertificateList
from .x509 import (
Attributes,
CertificatePolicies,
GeneralName,
GeneralNames,
)
# The structures in this file are based on https://tools.ietf.org/html/rfc3161,
# https://tools.ietf.org/html/rfc4998, https://tools.ietf.org/html/rfc5544,
# https://tools.ietf.org/html/rfc5035, https://tools.ietf.org/html/rfc2634
class Version(Integer):
_map = {
0: 'v0',
1: 'v1',
2: 'v2',
3: 'v3',
4: 'v4',
5: 'v5',
}
class MessageImprint(Sequence):
_fields = [
('hash_algorithm', DigestAlgorithm),
('hashed_message', OctetString),
]
class Accuracy(Sequence):
_fields = [
('seconds', Integer, {'optional': True}),
('millis', Integer, {'implicit': 0, 'optional': True}),
('micros', Integer, {'implicit': 1, 'optional': True}),
]
class Extension(Sequence):
_fields = [
('extn_id', ObjectIdentifier),
('critical', Boolean, {'default': False}),
('extn_value', OctetString),
]
class Extensions(SequenceOf):
_child_spec = Extension
class TSTInfo(Sequence):
_fields = [
('version', Version),
('policy', ObjectIdentifier),
('message_imprint', MessageImprint),
('serial_number', Integer),
('gen_time', GeneralizedTime),
('accuracy', Accuracy, {'optional': True}),
('ordering', Boolean, {'default': False}),
('nonce', Integer, {'optional': True}),
('tsa', GeneralName, {'explicit': 0, 'optional': True}),
('extensions', Extensions, {'implicit': 1, 'optional': True}),
]
class TimeStampReq(Sequence):
_fields = [
('version', Version),
('message_imprint', MessageImprint),
('req_policy', ObjectIdentifier, {'optional': True}),
('nonce', Integer, {'optional': True}),
('cert_req', Boolean, {'default': False}),
('extensions', Extensions, {'implicit': 0, 'optional': True}),
]
class PKIStatus(Integer):
_map = {
0: 'granted',
1: 'granted_with_mods',
2: 'rejection',
3: 'waiting',
4: 'revocation_warning',
5: 'revocation_notification',
}
class PKIFreeText(SequenceOf):
_child_spec = UTF8String
class PKIFailureInfo(BitString):
_map = {
0: 'bad_alg',
2: 'bad_request',
5: 'bad_data_format',
14: 'time_not_available',
15: 'unaccepted_policy',
16: 'unaccepted_extensions',
17: 'add_info_not_available',
25: 'system_failure',
}
class PKIStatusInfo(Sequence):
_fields = [
('status', PKIStatus),
('status_string', PKIFreeText, {'optional': True}),
('fail_info', PKIFailureInfo, {'optional': True}),
]
class TimeStampResp(Sequence):
_fields = [
('status', PKIStatusInfo),
('time_stamp_token', ContentInfo),
]
class MetaData(Sequence):
_fields = [
('hash_protected', Boolean),
('file_name', UTF8String, {'optional': True}),
('media_type', IA5String, {'optional': True}),
('other_meta_data', Attributes, {'optional': True}),
]
class TimeStampAndCRL(Sequence):
_fields = [
('time_stamp', EncapsulatedContentInfo),
('crl', CertificateList, {'optional': True}),
]
class TimeStampTokenEvidence(SequenceOf):
_child_spec = TimeStampAndCRL
class DigestAlgorithms(SequenceOf):
_child_spec = DigestAlgorithm
class EncryptionInfo(Sequence):
_fields = [
('encryption_info_type', ObjectIdentifier),
('encryption_info_value', Any),
]
class PartialHashtree(SequenceOf):
_child_spec = OctetString
class PartialHashtrees(SequenceOf):
_child_spec = PartialHashtree
class ArchiveTimeStamp(Sequence):
_fields = [
('digest_algorithm', DigestAlgorithm, {'implicit': 0, 'optional': True}),
('attributes', Attributes, {'implicit': 1, 'optional': True}),
('reduced_hashtree', PartialHashtrees, {'implicit': 2, 'optional': True}),
('time_stamp', ContentInfo),
]
class ArchiveTimeStampSequence(SequenceOf):
_child_spec = ArchiveTimeStamp
class EvidenceRecord(Sequence):
_fields = [
('version', Version),
('digest_algorithms', DigestAlgorithms),
('crypto_infos', Attributes, {'implicit': 0, 'optional': True}),
('encryption_info', EncryptionInfo, {'implicit': 1, 'optional': True}),
('archive_time_stamp_sequence', ArchiveTimeStampSequence),
]
class OtherEvidence(Sequence):
_fields = [
('oe_type', ObjectIdentifier),
('oe_value', Any),
]
class Evidence(Choice):
_alternatives = [
('tst_evidence', TimeStampTokenEvidence, {'implicit': 0}),
('ers_evidence', EvidenceRecord, {'implicit': 1}),
('other_evidence', OtherEvidence, {'implicit': 2}),
]
class TimeStampedData(Sequence):
_fields = [
('version', Version),
('data_uri', IA5String, {'optional': True}),
('meta_data', MetaData, {'optional': True}),
('content', OctetString, {'optional': True}),
('temporal_evidence', Evidence),
]
class IssuerSerial(Sequence):
_fields = [
('issuer', GeneralNames),
('serial_number', Integer),
]
class ESSCertID(Sequence):
_fields = [
('cert_hash', OctetString),
('issuer_serial', IssuerSerial, {'optional': True}),
]
class ESSCertIDs(SequenceOf):
_child_spec = ESSCertID
class SigningCertificate(Sequence):
_fields = [
('certs', ESSCertIDs),
('policies', CertificatePolicies, {'optional': True}),
]
class SetOfSigningCertificates(SetOf):
_child_spec = SigningCertificate
class ESSCertIDv2(Sequence):
_fields = [
('hash_algorithm', DigestAlgorithm, {'default': {'algorithm': 'sha256'}}),
('cert_hash', OctetString),
('issuer_serial', IssuerSerial, {'optional': True}),
]
class ESSCertIDv2s(SequenceOf):
_child_spec = ESSCertIDv2
class SigningCertificateV2(Sequence):
_fields = [
('certs', ESSCertIDv2s),
('policies', CertificatePolicies, {'optional': True}),
]
class SetOfSigningCertificatesV2(SetOf):
_child_spec = SigningCertificateV2
EncapsulatedContentInfo._oid_specs['tst_info'] = TSTInfo
EncapsulatedContentInfo._oid_specs['timestamped_data'] = TimeStampedData
ContentInfo._oid_specs['timestamped_data'] = TimeStampedData
ContentType._map['1.2.840.113549.1.9.16.1.4'] = 'tst_info'
ContentType._map['1.2.840.113549.1.9.16.1.31'] = 'timestamped_data'
CMSAttributeType._map['1.2.840.113549.1.9.16.2.12'] = 'signing_certificate'
CMSAttribute._oid_specs['signing_certificate'] = SetOfSigningCertificates
CMSAttributeType._map['1.2.840.113549.1.9.16.2.47'] = 'signing_certificate_v2'
CMSAttribute._oid_specs['signing_certificate_v2'] = SetOfSigningCertificatesV2
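The classes above only declare the ASN.1 structure; asn1crypto handles the DER encoding and decoding. A minimal sketch of loading a DER-encoded `TSTInfo` with these definitions (the vendored import path and the input file name below are assumptions, not part of this diff):
```python
# Hypothetical usage of the TSTInfo class defined above.
from jc.parsers.asn1crypto.tsp import TSTInfo  # assumed vendored module path

with open('tst_info.der', 'rb') as f:          # hypothetical input file
    tst = TSTInfo.load(f.read())               # decode the DER bytes

print(tst['policy'].native)                    # timestamp policy OID
print(tst['gen_time'].native)                  # generation time as a datetime
```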

View File

@ -0,0 +1,878 @@
# coding: utf-8
"""
Miscellaneous data helpers, including functions for converting integers to and
from bytes and UTC timezone. Exports the following items:
- OrderedDict()
- int_from_bytes()
- int_to_bytes()
- timezone.utc
- utc_with_dst
- create_timezone()
- inet_ntop()
- inet_pton()
- uri_to_iri()
- iri_to_uri()
"""
from __future__ import unicode_literals, division, absolute_import, print_function
import math
import sys
from datetime import datetime, date, timedelta, tzinfo
from ._errors import unwrap
from ._iri import iri_to_uri, uri_to_iri # noqa
from ._ordereddict import OrderedDict # noqa
from ._types import type_name
if sys.platform == 'win32':
from ._inet import inet_ntop, inet_pton
else:
from socket import inet_ntop, inet_pton # noqa
# Python 2
if sys.version_info <= (3,):
def int_to_bytes(value, signed=False, width=None):
"""
Converts an integer to a byte string
:param value:
The integer to convert
:param signed:
If the byte string should be encoded using two's complement
:param width:
If None, the minimal possible size (but at least 1),
otherwise an integer of the byte width for the return value
:return:
A byte string
"""
if value == 0 and width == 0:
return b''
# Handle negatives in two's complement
is_neg = False
if signed and value < 0:
is_neg = True
bits = int(math.ceil(len('%x' % abs(value)) / 2.0) * 8)
value = (value + (1 << bits)) % (1 << bits)
hex_str = '%x' % value
if len(hex_str) & 1:
hex_str = '0' + hex_str
output = hex_str.decode('hex')
if signed and not is_neg and ord(output[0:1]) & 0x80:
output = b'\x00' + output
if width is not None:
if len(output) > width:
raise OverflowError('int too big to convert')
if is_neg:
pad_char = b'\xFF'
else:
pad_char = b'\x00'
output = (pad_char * (width - len(output))) + output
elif is_neg and ord(output[0:1]) & 0x80 == 0:
output = b'\xFF' + output
return output
def int_from_bytes(value, signed=False):
"""
Converts a byte string to an integer
:param value:
The byte string to convert
:param signed:
If the byte string should be interpreted using two's complement
:return:
An integer
"""
if value == b'':
return 0
num = long(value.encode("hex"), 16) # noqa
if not signed:
return num
# Check for sign bit and handle two's complement
if ord(value[0:1]) & 0x80:
bit_len = len(value) * 8
return num - (1 << bit_len)
return num
class timezone(tzinfo): # noqa
"""
Implements datetime.timezone for py2.
Only full minute offsets are supported.
DST is not supported.
"""
def __init__(self, offset, name=None):
"""
:param offset:
A timedelta with this timezone's offset from UTC
:param name:
Name of the timezone; if None, generate one.
"""
if not timedelta(hours=-24) < offset < timedelta(hours=24):
raise ValueError('Offset must be in [-23:59, 23:59]')
if offset.seconds % 60 or offset.microseconds:
raise ValueError('Offset must be full minutes')
self._offset = offset
if name is not None:
self._name = name
elif not offset:
self._name = 'UTC'
else:
self._name = 'UTC' + _format_offset(offset)
def __eq__(self, other):
"""
Compare two timezones
:param other:
The other timezone to compare to
:return:
A boolean
"""
if type(other) != timezone:
return False
return self._offset == other._offset
def __getinitargs__(self):
"""
Called by tzinfo.__reduce__ to support pickle and copy.
:return:
offset and name, to be used for __init__
"""
return self._offset, self._name
def tzname(self, dt):
"""
:param dt:
A datetime object; ignored.
:return:
Name of this timezone
"""
return self._name
def utcoffset(self, dt):
"""
:param dt:
A datetime object; ignored.
:return:
A timedelta object with the offset from UTC
"""
return self._offset
def dst(self, dt):
"""
:param dt:
A datetime object; ignored.
:return:
Zero timedelta
"""
return timedelta(0)
timezone.utc = timezone(timedelta(0))
# Python 3
else:
from datetime import timezone # noqa
def int_to_bytes(value, signed=False, width=None):
"""
Converts an integer to a byte string
:param value:
The integer to convert
:param signed:
If the byte string should be encoded using two's complement
:param width:
If None, the minimal possible size (but at least 1),
otherwise an integer of the byte width for the return value
:return:
A byte string
"""
if width is None:
if signed:
if value < 0:
bits_required = abs(value + 1).bit_length()
else:
bits_required = value.bit_length()
if bits_required % 8 == 0:
bits_required += 1
else:
bits_required = value.bit_length()
width = math.ceil(bits_required / 8) or 1
return value.to_bytes(width, byteorder='big', signed=signed)
def int_from_bytes(value, signed=False):
"""
Converts a byte string to an integer
:param value:
The byte string to convert
:param signed:
If the byte string should be interpreted using two's complement
:return:
An integer
"""
return int.from_bytes(value, 'big', signed=signed)
def _format_offset(off):
"""
Format a timedelta into "[+-]HH:MM" format or "" for None
"""
if off is None:
return ''
mins = off.days * 24 * 60 + off.seconds // 60
sign = '-' if mins < 0 else '+'
return sign + '%02d:%02d' % divmod(abs(mins), 60)
class _UtcWithDst(tzinfo):
"""
Utc class where dst does not return None; required for astimezone
"""
def tzname(self, dt):
return 'UTC'
def utcoffset(self, dt):
return timedelta(0)
def dst(self, dt):
return timedelta(0)
utc_with_dst = _UtcWithDst()
_timezone_cache = {}
def create_timezone(offset):
"""
Returns a new datetime.timezone object with the given offset.
Uses cached objects if possible.
:param offset:
A datetime.timedelta object; It needs to be in full minutes and between -23:59 and +23:59.
:return:
A datetime.timezone object
"""
try:
tz = _timezone_cache[offset]
except KeyError:
tz = _timezone_cache[offset] = timezone(offset)
return tz
class extended_date(object):
"""
A datetime.datetime-like object that represents the year 0. This is just
to handle 0000-01-01 found in some certificates. Python's datetime does
not support year 0.
The proleptic gregorian calendar repeats itself every 400 years. Therefore,
the simplest way to format is to substitute year 2000.
"""
def __init__(self, year, month, day):
"""
:param year:
The integer 0
:param month:
An integer from 1 to 12
:param day:
An integer from 1 to 31
"""
if year != 0:
raise ValueError('year must be 0')
self._y2k = date(2000, month, day)
@property
def year(self):
"""
:return:
The integer 0
"""
return 0
@property
def month(self):
"""
:return:
An integer from 1 to 12
"""
return self._y2k.month
@property
def day(self):
"""
:return:
An integer from 1 to 31
"""
return self._y2k.day
def strftime(self, format):
"""
Formats the date using strftime()
:param format:
A strftime() format string
:return:
A str, the formatted date as a unicode string
in Python 3 and a byte string in Python 2
"""
# Format the date twice, once with year 2000, once with year 4000.
# The only differences in the result will be in the millennium. Find them and replace by zeros.
y2k = self._y2k.strftime(format)
y4k = self._y2k.replace(year=4000).strftime(format)
return ''.join('0' if (c2, c4) == ('2', '4') else c2 for c2, c4 in zip(y2k, y4k))
def isoformat(self):
"""
Formats the date as %Y-%m-%d
:return:
The date formatted to %Y-%m-%d as a unicode string in Python 3
and a byte string in Python 2
"""
return self.strftime('0000-%m-%d')
def replace(self, year=None, month=None, day=None):
"""
Returns a new datetime.date or asn1crypto.util.extended_date
object with the specified components replaced
:return:
A datetime.date or asn1crypto.util.extended_date object
"""
if year is None:
year = self.year
if month is None:
month = self.month
if day is None:
day = self.day
if year > 0:
cls = date
else:
cls = extended_date
return cls(
year,
month,
day
)
def __str__(self):
"""
:return:
A str representing this extended_date, e.g. "0000-01-01"
"""
return self.strftime('%Y-%m-%d')
def __eq__(self, other):
"""
Compare two extended_date objects
:param other:
The other extended_date to compare to
:return:
A boolean
"""
# datetime.date object wouldn't compare equal because it can't be year 0
if not isinstance(other, self.__class__):
return False
return self.__cmp__(other) == 0
def __ne__(self, other):
"""
Compare two extended_date objects
:param other:
The other extended_date to compare to
:return:
A boolean
"""
return not self.__eq__(other)
def _comparison_error(self, other):
raise TypeError(unwrap(
'''
An asn1crypto.util.extended_date object can only be compared to
an asn1crypto.util.extended_date or datetime.date object, not %s
''',
type_name(other)
))
def __cmp__(self, other):
"""
Compare two extended_date or datetime.date objects
:param other:
The other extended_date object to compare to
:return:
An integer smaller than, equal to, or larger than 0
"""
# self is year 0, other is >= year 1
if isinstance(other, date):
return -1
if not isinstance(other, self.__class__):
self._comparison_error(other)
if self._y2k < other._y2k:
return -1
if self._y2k > other._y2k:
return 1
return 0
def __lt__(self, other):
return self.__cmp__(other) < 0
def __le__(self, other):
return self.__cmp__(other) <= 0
def __gt__(self, other):
return self.__cmp__(other) > 0
def __ge__(self, other):
return self.__cmp__(other) >= 0
class extended_datetime(object):
"""
A datetime.datetime-like object that represents the year 0. This is just
to handle 0000-01-01 found in some certificates. Python's datetime does
not support year 0.
The proleptic gregorian calendar repeats itself every 400 years. Therefore,
the simplest way to format is to substitute year 2000.
"""
# There are 97 leap days during 400 years.
DAYS_IN_400_YEARS = 400 * 365 + 97
DAYS_IN_2000_YEARS = 5 * DAYS_IN_400_YEARS
def __init__(self, year, *args, **kwargs):
"""
:param year:
The integer 0
:param args:
Other positional arguments; see datetime.datetime.
:param kwargs:
Other keyword arguments; see datetime.datetime.
"""
if year != 0:
raise ValueError('year must be 0')
self._y2k = datetime(2000, *args, **kwargs)
@property
def year(self):
"""
:return:
The integer 0
"""
return 0
@property
def month(self):
"""
:return:
An integer from 1 to 12
"""
return self._y2k.month
@property
def day(self):
"""
:return:
An integer from 1 to 31
"""
return self._y2k.day
@property
def hour(self):
"""
:return:
An integer from 0 to 23
"""
return self._y2k.hour
@property
def minute(self):
"""
:return:
An integer from 0 to 59
"""
return self._y2k.minute
@property
def second(self):
"""
:return:
An integer from 0 to 59
"""
return self._y2k.second
@property
def microsecond(self):
"""
:return:
An integer from 0 to 999999
"""
return self._y2k.microsecond
@property
def tzinfo(self):
"""
:return:
If object is timezone aware, a datetime.tzinfo object, else None.
"""
return self._y2k.tzinfo
def utcoffset(self):
"""
:return:
If object is timezone aware, a datetime.timedelta object, else None.
"""
return self._y2k.utcoffset()
def time(self):
"""
:return:
A datetime.time object
"""
return self._y2k.time()
def date(self):
"""
:return:
An asn1crypto.util.extended_date of the date
"""
return extended_date(0, self.month, self.day)
def strftime(self, format):
"""
Performs strftime(), always returning a str
:param format:
A strftime() format string
:return:
A str of the formatted datetime
"""
# Format the datetime twice, once with year 2000, once with year 4000.
# The only differences in the result will be in the millennium. Find them and replace by zeros.
y2k = self._y2k.strftime(format)
y4k = self._y2k.replace(year=4000).strftime(format)
return ''.join('0' if (c2, c4) == ('2', '4') else c2 for c2, c4 in zip(y2k, y4k))
def isoformat(self, sep='T'):
"""
Formats the date as "%Y-%m-%d %H:%M:%S" with the sep param between the
date and time portions
:param sep:
A single character of the separator to place between the date and
time
:return:
The formatted datetime as a unicode string in Python 3 and a byte
string in Python 2
"""
s = '0000-%02d-%02d%c%02d:%02d:%02d' % (self.month, self.day, sep, self.hour, self.minute, self.second)
if self.microsecond:
s += '.%06d' % self.microsecond
return s + _format_offset(self.utcoffset())
def replace(self, year=None, *args, **kwargs):
"""
Returns a new datetime.datetime or asn1crypto.util.extended_datetime
object with the specified components replaced
:param year:
The new year to substitute. None to keep it.
:param args:
Other positional arguments; see datetime.datetime.replace.
:param kwargs:
Other keyword arguments; see datetime.datetime.replace.
:return:
A datetime.datetime or asn1crypto.util.extended_datetime object
"""
if year:
return self._y2k.replace(year, *args, **kwargs)
return extended_datetime.from_y2k(self._y2k.replace(2000, *args, **kwargs))
def astimezone(self, tz):
"""
Convert this extended_datetime to another timezone.
:param tz:
A datetime.tzinfo object.
:return:
A new extended_datetime or datetime.datetime object
"""
return extended_datetime.from_y2k(self._y2k.astimezone(tz))
def timestamp(self):
"""
Return POSIX timestamp. Only supported in python >= 3.3
:return:
A float representing the seconds since 1970-01-01 UTC. This will be a negative value.
"""
return self._y2k.timestamp() - self.DAYS_IN_2000_YEARS * 86400
def __str__(self):
"""
:return:
A str representing this extended_datetime, e.g. "0000-01-01 00:00:00.000001-10:00"
"""
return self.isoformat(sep=' ')
def __eq__(self, other):
"""
Compare two extended_datetime objects
:param other:
The other extended_datetime to compare to
:return:
A boolean
"""
# Only compare against other datetime or extended_datetime objects
if not isinstance(other, (self.__class__, datetime)):
return False
# Offset-naive and offset-aware datetimes are never the same
if (self.tzinfo is None) != (other.tzinfo is None):
return False
return self.__cmp__(other) == 0
def __ne__(self, other):
"""
Compare two extended_datetime objects
:param other:
The other extended_datetime to compare to
:return:
A boolean
"""
return not self.__eq__(other)
def _comparison_error(self, other):
"""
Raises a TypeError about the other object not being suitable for
comparison
:param other:
The object being compared to
"""
raise TypeError(unwrap(
'''
An asn1crypto.util.extended_datetime object can only be compared to
an asn1crypto.util.extended_datetime or datetime.datetime object,
not %s
''',
type_name(other)
))
def __cmp__(self, other):
"""
Compare two extended_datetime or datetime.datetime objects
:param other:
The other extended_datetime or datetime.datetime object to compare to
:return:
An integer smaller than, equal to, or larger than 0
"""
if not isinstance(other, (self.__class__, datetime)):
self._comparison_error(other)
if (self.tzinfo is None) != (other.tzinfo is None):
raise TypeError("can't compare offset-naive and offset-aware datetimes")
diff = self - other
zero = timedelta(0)
if diff < zero:
return -1
if diff > zero:
return 1
return 0
def __lt__(self, other):
return self.__cmp__(other) < 0
def __le__(self, other):
return self.__cmp__(other) <= 0
def __gt__(self, other):
return self.__cmp__(other) > 0
def __ge__(self, other):
return self.__cmp__(other) >= 0
def __add__(self, other):
"""
Adds a timedelta
:param other:
A datetime.timedelta object to add.
:return:
A new extended_datetime or datetime.datetime object.
"""
return extended_datetime.from_y2k(self._y2k + other)
def __sub__(self, other):
"""
Subtracts a timedelta or another datetime.
:param other:
A datetime.timedelta or datetime.datetime or extended_datetime object to subtract.
:return:
If a timedelta is passed, a new extended_datetime or datetime.datetime object.
Else a datetime.timedelta object.
"""
if isinstance(other, timedelta):
return extended_datetime.from_y2k(self._y2k - other)
if isinstance(other, extended_datetime):
return self._y2k - other._y2k
if isinstance(other, datetime):
return self._y2k - other - timedelta(days=self.DAYS_IN_2000_YEARS)
return NotImplemented
def __rsub__(self, other):
return -(self - other)
@classmethod
def from_y2k(cls, value):
"""
Revert substitution of year 2000.
:param value:
A datetime.datetime object which is 2000 years in the future.
:return:
A new extended_datetime or datetime.datetime object.
"""
year = value.year - 2000
if year > 0:
new_cls = datetime
else:
new_cls = cls
return new_cls(
year,
value.month,
value.day,
value.hour,
value.minute,
value.second,
value.microsecond,
value.tzinfo
)
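A minimal sketch of the helpers defined above; the import path assumes this file is the copy vendored under `jc.parsers.asn1crypto`:
```python
from datetime import timedelta
from jc.parsers.asn1crypto.util import (          # assumed vendored module path
    int_to_bytes, int_from_bytes, create_timezone, extended_date)

assert int_to_bytes(65537) == b'\x01\x00\x01'      # big-endian, minimal width
assert int_from_bytes(b'\x01\x00\x01') == 65537    # round-trip

tz = create_timezone(timedelta(hours=-8))          # cached timezone object
print(tz.tzname(None))                             # 'UTC-08:00'

print(extended_date(0, 1, 1).isoformat())          # '0000-01-01' (year 0 support)
```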

View File

@ -0,0 +1,6 @@
# coding: utf-8
from __future__ import unicode_literals, division, absolute_import, print_function
__version__ = '1.5.1'
__version_info__ = (1, 5, 1)

File diff suppressed because it is too large.

337 jc/parsers/gpg.py Normal file
View File

@ -0,0 +1,337 @@
"""jc - JSON Convert `gpg --with-colons` command output parser
Usage (cli):
$ gpg --with-colons --show-keys file.gpg | jc --gpg
or
$ jc gpg --with-colons --show-keys file.gpg
Usage (module):
import jc
result = jc.parse('gpg', gpg_command_output)
Schema:
Field definitions from https://git.gnupg.org/cgi-bin/gitweb.cgi?p=gnupg.git;a=blob_plain;f=doc/DETAILS
> Note: Number values are not converted to integers because many field
> specifications are overloaded and future augmentations are implied in the
> documentation.
[
{
"type": string,
"validity": string,
"key_length": string,
"pub_key_alg": string,
"key_id": string,
"creation_date": string,
"expiration_date": string,
"certsn_uidhash_trustinfo": string,
"owner_trust": string,
"user_id": string,
"signature_class": string,
"key_capabilities": string,
"cert_fingerprint_other": string,
"flag": string,
"token_sn": string,
"hash_alg": string,
"curve_name": string,
"compliance_flags": string,
"last_update_date": string,
"origin": string,
"comment": string,
"index": string, # [0]
"bits": string, # [0]
"value": string, # [0]
"version": string, # [1], [4]
"signature_count": string, # [1]
"encryption_count": string, # [1]
"policy": string, # [1]
"signature_first_seen": string, # [1]
"signature_most_recent_seen": string, # [1]
"encryption_first_done": string, # [1]
"encryption_most_recent_done": string, # [1]
"staleness_reason": string, # [2]
"trust_model": string, # [2]
"trust_db_created": string, # [2]
"trust_db_expires": string, # [2]
"marginally_trusted_users": string, # [2]
"completely_trusted_users": string, # [2]
"cert_chain_max_depth": string, # [2]
"subpacket_number": string, # [3]
"hex_flags": string, # [3]
"subpacket_length": string, # [3]
"subpacket_data": string, # [3]
"pubkey": string, # [4]
"cipher": string, # [4]
"digest": string, # [4]
"compress": string, # [4]
"group": string, # [4]
"members": string, # [4]
"curve_names": string, # [4]
}
]
All blank values are converted to null/None.
[0] for 'pkd' type
[1] for 'tfs' type
[2] for 'tru' type
[3] for 'skp' type
[4] for 'cfg' type
Examples:
$ gpg --with-colons --show-keys file.gpg | jc --gpg -p
[
{
"type": "pub",
"validity": "f",
"key_length": "1024",
"pub_key_alg": "17",
"key_id": "6C7EE1B8621CC013",
"creation_date": "899817715",
"expiration_date": "1055898235",
"certsn_uidhash_trustinfo": null,
"owner_trust": "m",
"user_id": null,
"signature_class": null,
"key_capabilities": "scESC",
"cert_fingerprint_other": null,
"flag": null,
"token_sn": null,
"hash_alg": null,
"curve_name": null,
"compliance_flags": null,
"last_update_date": null,
"origin": null,
"comment": null
},
...
]
"""
from typing import List, Dict, Optional
import jc.utils
class info():
"""Provides parser metadata (version, author, etc.)"""
version = '1.0'
description = '`gpg --with-colons` command parser'
author = 'Kelly Brazil'
author_email = 'kellyjonbrazil@gmail.com'
compatible = ['linux']
magic_commands = ['gpg --with-colons']
__version__ = info.version
def _process(proc_data: List[Dict]) -> List[Dict]:
"""
Final processing to conform to the schema.
Parameters:
proc_data: (List of Dictionaries) raw structured data to process
Returns:
List of Dictionaries. Structured to conform to the schema.
"""
return proc_data
def _list_get(my_list: List, index: int, default_val=None) -> Optional[str]:
"""get a list value or return None/default value if out of range."""
if index <= len(my_list) - 1:
return my_list[index] or None
return default_val
def parse(
data: str,
raw: bool = False,
quiet: bool = False
) -> List[Dict]:
"""
Main text parsing function
Parameters:
data: (string) text data to parse
raw: (boolean) unprocessed output if True
quiet: (boolean) suppress warning messages if True
Returns:
List of Dictionaries. Raw or processed structured data.
"""
jc.utils.compatibility(__name__, info.compatible, quiet)
jc.utils.input_type_check(data)
raw_output: List = []
if jc.utils.has_data(data):
for line in filter(None, data.splitlines()):
values = line.split(':')
temp_obj = {
'type': _list_get(values, 0),
'validity': _list_get(values, 1),
'key_length': _list_get(values, 2),
'pub_key_alg': _list_get(values, 3),
'key_id': _list_get(values, 4),
'creation_date': _list_get(values, 5),
'expiration_date': _list_get(values, 6),
'certsn_uidhash_trustinfo': _list_get(values, 7),
'owner_trust': _list_get(values, 8),
'user_id': _list_get(values, 9),
'signature_class': _list_get(values, 10),
'key_capabilities': _list_get(values, 11),
'cert_fingerprint_other': _list_get(values, 12),
'flag': _list_get(values, 13),
'token_sn': _list_get(values, 14),
'hash_alg': _list_get(values, 15),
'curve_name': _list_get(values, 16),
'compliance_flags': _list_get(values, 17),
'last_update_date': _list_get(values, 18),
'origin': _list_get(values, 19),
'comment': _list_get(values, 20)
}
# field mappings change for special types: pkd, tfs, tru, skp, cfg
if temp_obj['type'] == 'pkd':
# pkd:0:1024:B665B1435F4C2 .... FF26ABB:
# ! ! !-- the value
# ! !------ for information number of bits in the value
# !--------- index (eg. DSA goes from 0 to 3: p,q,g,y)
line_obj = {
'type': temp_obj['type'],
'index': temp_obj['validity'],
'bits': temp_obj['key_length'],
'value': temp_obj['pub_key_alg']
}
elif temp_obj['type'] == 'tfs':
# - Field 2 :: tfs record version (must be 1)
# - Field 3 :: validity - A number with validity code.
# - Field 4 :: signcount - The number of signatures seen.
# - Field 5 :: encrcount - The number of encryptions done.
# - Field 6 :: policy - A string with the policy
# - Field 7 :: signature-first-seen - a timestamp or 0 if not known.
# - Field 8 :: signature-most-recent-seen - a timestamp or 0 if not known.
# - Field 9 :: encryption-first-done - a timestamp or 0 if not known.
# - Field 10 :: encryption-most-recent-done - a timestamp or 0 if not known.
line_obj = {
'type': temp_obj['type'],
'version': temp_obj['validity'],
'validity': temp_obj['key_length'],
'signature_count': temp_obj['pub_key_alg'],
'encryption_count': temp_obj['key_id'],
'policy': temp_obj['creation_date'],
'signature_first_seen': temp_obj['expiration_date'],
'signature_most_recent_seen': temp_obj['certsn_uidhash_trustinfo'],
'encryption_first_done': temp_obj['owner_trust'],
'encryption_most_recent_done': temp_obj['user_id']
}
elif temp_obj['type'] == 'tru':
# tru:o:0:1166697654:1:3:1:5
# - Field 2 :: Reason for staleness of trust.
# - Field 3 :: Trust model
# - Field 4 :: Date trustdb was created in seconds since Epoch.
# - Field 5 :: Date trustdb will expire in seconds since Epoch.
# - Field 6 :: Number of marginally trusted users to introduce a new key signer.
# - Field 7 :: Number of completely trusted users to introduce a new key signer.
# - Field 8 :: Maximum depth of a certification chain.
line_obj = {
'type': temp_obj['type'],
'staleness_reason': temp_obj['validity'],
'trust_model': temp_obj['key_length'],
'trust_db_created': temp_obj['pub_key_alg'],
'trust_db_expires': temp_obj['key_id'],
'marginally_trusted_users': temp_obj['creation_date'],
'completely_trusted_users': temp_obj['expiration_date'],
'cert_chain_max_depth': temp_obj['certsn_uidhash_trustinfo']
}
elif temp_obj['type'] == 'skp':
# - Field 2 :: Subpacket number as per RFC-4880 and later.
# - Field 3 :: Flags in hex.
# - Field 4 :: Length of the subpacket.
# - Field 5 :: The subpacket data.
line_obj = {
'type': temp_obj['type'],
'subpacket_number': temp_obj['validity'],
'hex_flags': temp_obj['key_length'],
'subpacket_length': temp_obj['pub_key_alg'],
'subpacket_data': temp_obj['key_id']
}
elif temp_obj['type'] == 'cfg':
# there are several 'cfg' formats
if temp_obj['validity'] == 'version':
# cfg:version:1.3.5
line_obj = {
'type': temp_obj['type'],
'version': temp_obj['key_length']
}
elif temp_obj['validity'] == 'pubkey':
# cfg:pubkey:1;2;3;16;17
line_obj = {
'type': temp_obj['type'],
'pubkey': temp_obj['key_length']
}
elif temp_obj['validity'] == 'cipher':
# cfg:cipher:2;3;4;7;8;9;10
line_obj = {
'type': temp_obj['type'],
'cipher': temp_obj['key_length']
}
elif temp_obj['validity'] == 'digest':
# cfg:digest:1;2;3;8;9;10
line_obj = {
'type': temp_obj['type'],
'digest': temp_obj['key_length']
}
elif temp_obj['validity'] == 'compress':
# cfg:compress:0;1;2;3
line_obj = {
'type': temp_obj['type'],
'compress': temp_obj['key_length']
}
elif temp_obj['validity'] == 'group':
# cfg:group:mynames:patti;joe;0x12345678;paige
line_obj = {
'type': temp_obj['type'],
'group': temp_obj['key_length'],
'members': temp_obj['pub_key_alg']
}
elif temp_obj['validity'] == 'curve':
# cfg:curve:ed25519;nistp256;nistp384;nistp521
line_obj = {
'type': temp_obj['type'],
'curve_names': temp_obj['key_length']
}
else:
line_obj = temp_obj
raw_output.append(line_obj)
return raw_output if raw else _process(raw_output)
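A short example of driving the parser above through the documented `jc.parse()` API; the sample input lines are taken from the `gpg.out` test fixture added in this commit:
```python
import jc

gpg_output = '''pub:f:1024:17:6C7EE1B8621CC013:899817715:1055898235::m:::scESC:
fpr:::::::::ECAF7590EB3443B5C7CF3ACB6C7EE1B8621CC013:
'''

records = jc.parse('gpg', gpg_output)
pub_key_ids = [r['key_id'] for r in records if r['type'] == 'pub']
print(pub_key_ids)   # ['6C7EE1B8621CC013']
```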

View File

@ -163,7 +163,7 @@ import jc.utils
class info():
    """Provides parser metadata (version, author, etc.)"""
-    version = '1.7'
+    version = '1.8'
    description = '`iptables` command parser'
    author = 'Kelly Brazil'
    author_email = 'kellyjonbrazil@gmail.com'
@ -264,7 +264,6 @@ def parse(data, raw=False, quiet=False):
            continue
        elif line.startswith('target') or line.find('pkts') == 1 or line.startswith('num'):
-            headers = []
            headers = [h for h in ' '.join(line.lower().strip().split()).split() if h]
            headers.append("options")

349 jc/parsers/x509_cert.py Normal file
View File

@ -0,0 +1,349 @@
"""jc - JSON Convert X.509 Certificate format file parser
This parser will convert DER and PEM encoded X.509 certificate files.
Usage (cli):
$ cat certificate.pem | jc --x509-cert
Usage (module):
import jc
result = jc.parse('x509_cert', x509_cert_file_output)
Schema:
[
{
"tbs_certificate": {
"version": string,
"serial_number": string, # [0]
"signature": {
"algorithm": string,
"parameters": string/null,
},
"issuer": {
"country_name": string,
"state_or_province_name" string,
"locality_name": string,
"organization_name": array/string,
"organizational_unit_name": array/string,
"common_name": string,
"email_address": string
},
"validity": {
"not_before": integer, # [1]
"not_after": integer, # [1]
"not_before_iso": string,
"not_after_iso": string
},
"subject": {
"country_name": string,
"state_or_province_name": string,
"locality_name": string,
"organization_name": array/string,
"organizational_unit_name": array/string,
"common_name": string,
"email_address": string
},
"subject_public_key_info": {
"algorithm": {
"algorithm": string,
"parameters": string/null,
},
"public_key": {
"modulus": string, # [0]
"public_exponent": integer
}
},
"issuer_unique_id": string/null,
"subject_unique_id": string/null,
"extensions": [
{
"extn_id": string,
"critical": boolean,
"extn_value": array/object/string/integer # [2]
}
]
},
"signature_algorithm": {
"algorithm": string,
"parameters": string/null
},
"signature_value": string # [0]
}
]
[0] in colon-delimited hex notation
[1] time-zone-aware (UTC) epoch timestamp
[2] See below for well-known Extension schemas:
Basic Constraints:
{
"extn_id": "basic_constraints",
"critical": boolean,
"extn_value": {
"ca": boolean,
"path_len_constraint": string/null
}
}
Key Usage:
{
"extn_id": "key_usage",
"critical": boolean,
"extn_value": [
string
]
}
Key Identifier:
{
"extn_id": "key_identifier",
"critical": boolean,
"extn_value": string # [0]
}
Authority Key Identifier:
{
"extn_id": "authority_key_identifier",
"critical": boolean,
"extn_value": {
"key_identifier": string, # [0]
"authority_cert_issuer": string/null,
"authority_cert_serial_number": string/null
}
}
Examples:
$ cat entrust-ec1.pem | jc --x509-cert -p
[
{
"tbs_certificate": {
"version": "v3",
"serial_number": "a6:8b:79:29:00:00:00:00:50:d0:91:f9",
"signature": {
"algorithm": "sha384_ecdsa",
"parameters": null
},
"issuer": {
"country_name": "US",
"organization_name": "Entrust, Inc.",
"organizational_unit_name": [
"See www.entrust.net/legal-terms",
"(c) 2012 Entrust, Inc. - for authorized use only"
],
"common_name": "Entrust Root Certification Authority - EC1"
},
"validity": {
"not_before": 1355844336,
"not_after": 2144764536,
"not_before_iso": "2012-12-18T15:25:36+00:00",
"not_after_iso": "2037-12-18T15:55:36+00:00"
},
"subject": {
"country_name": "US",
"organization_name": "Entrust, Inc.",
"organizational_unit_name": [
"See www.entrust.net/legal-terms",
"(c) 2012 Entrust, Inc. - for authorized use only"
],
"common_name": "Entrust Root Certification Authority - EC1"
},
"subject_public_key_info": {
"algorithm": {
"algorithm": "ec",
"parameters": "secp384r1"
},
"public_key": "04:84:13:c9:d0:ba:6d:41:7b:e2:6c:d0:eb:55:..."
},
"issuer_unique_id": null,
"subject_unique_id": null,
"extensions": [
{
"extn_id": "key_usage",
"critical": true,
"extn_value": [
"crl_sign",
"key_cert_sign"
]
},
{
"extn_id": "basic_constraints",
"critical": true,
"extn_value": {
"ca": true,
"path_len_constraint": null
}
},
{
"extn_id": "key_identifier",
"critical": false,
"extn_value": "b7:63:e7:1a:dd:8d:e9:08:a6:55:83:a4:e0:6a:..."
}
]
},
"signature_algorithm": {
"algorithm": "sha384_ecdsa",
"parameters": null
},
"signature_value": "30:64:02:30:61:79:d8:e5:42:47:df:1c:ae:53:..."
}
]
"""
import binascii
from collections import OrderedDict
from datetime import datetime
from typing import List, Dict, Union
import jc.utils
from jc.parsers.asn1crypto import pem, x509
class info():
"""Provides parser metadata (version, author, etc.)"""
version = '1.0'
description = 'X.509 PEM and DER certificate file parser'
author = 'Kelly Brazil'
author_email = 'kellyjonbrazil@gmail.com'
details = 'Using the asn1crypto library at https://github.com/wbond/asn1crypto/releases/tag/1.5.1'
compatible = ['linux', 'darwin', 'cygwin', 'win32', 'aix', 'freebsd']
__version__ = info.version
def _process(proc_data: List[Dict]) -> List[Dict]:
"""
Final processing to conform to the schema.
Parameters:
proc_data: (List of Dictionaries) raw structured data to process
Returns:
List of Dictionaries. Structured to conform to the schema.
"""
return proc_data
def _i2b(integer: int) -> bytes:
"""Convert long integers into a bytes object (big endian)"""
return integer.to_bytes((integer.bit_length() + 7) // 8, byteorder='big')
def _b2a(byte_string: bytes) -> str:
"""Convert a byte string to a colon-delimited hex ascii string"""
# need try/except since the separator parameter was only introduced in python 3.8.
# provides compatibility for python 3.6 and 3.7.
try:
return binascii.hexlify(byte_string, ':').decode('utf-8')
except TypeError:
hex_string = binascii.hexlify(byte_string).decode('utf-8')
colon_seperated = ':'.join(hex_string[i:i+2] for i in range(0, len(hex_string), 2))
return colon_seperated
def _fix_objects(obj):
"""
Recursively traverse the nested dictionary or list and convert objects
into JSON serializable types.
"""
if isinstance(obj, set):
obj = sorted(list(obj))
if isinstance(obj, OrderedDict):
obj = dict(obj)
if isinstance(obj, dict):
for k, v in obj.copy().items():
if k == 'serial_number':
obj.update({k: _b2a(_i2b(v))})
continue
if k == 'modulus':
obj.update({k: _b2a(_i2b(v))})
continue
if isinstance(v, datetime):
iso = v.isoformat()
v = int(round(v.timestamp()))
obj.update({k: v, f'{k}_iso': iso})
continue
if isinstance(v, bytes):
v = _b2a(v)
obj.update({k: v})
continue
if isinstance(v, set):
v = sorted(list(v))
obj.update({k: v})
if isinstance(v, OrderedDict):
v = dict(v)
obj.update({k: v})
if isinstance(v, dict):
obj.update({k: _fix_objects(v)})
continue
if isinstance(v, list):
newlist =[]
for i in v:
newlist.append(_fix_objects(i))
obj.update({k: newlist})
continue
if isinstance(obj, list):
new_list = []
for i in obj:
new_list.append(_fix_objects(i))
obj = new_list
return obj
def parse(
data: Union[str, bytes],
raw: bool = False,
quiet: bool = False
) -> List[Dict]:
"""
Main text parsing function
Parameters:
data: (string) text data to parse
raw: (boolean) unprocessed output if True
quiet: (boolean) suppress warning messages if True
Returns:
List of Dictionaries. Raw or processed structured data.
"""
jc.utils.compatibility(__name__, info.compatible, quiet)
raw_output: List = []
if jc.utils.has_data(data):
# convert to bytes, if not already, for PEM detection since that's
# what pem.detect() needs. (cli.py will auto-convert to UTF-8 if it can)
try:
der_bytes = bytes(data, 'utf-8') # type: ignore
except TypeError:
der_bytes = data # type: ignore
certs = []
if pem.detect(der_bytes):
for type_name, headers, der_bytes in pem.unarmor(der_bytes, multiple=True):
if type_name == 'CERTIFICATE':
certs.append(x509.Certificate.load(der_bytes))
else:
certs.append(x509.Certificate.load(der_bytes))
raw_output = [_fix_objects(cert.native) for cert in certs]
return raw_output if raw else _process(raw_output)
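A short sketch of calling the parser above via `jc.parse()` as described in its docstring; the PEM file name is only illustrative:
```python
import jc

with open('entrust-ec1.pem', 'r') as f:          # illustrative file name
    cert = jc.parse('x509_cert', f.read())[0]    # first certificate in the file

validity = cert['tbs_certificate']['validity']
print(validity['not_before_iso'], '->', validity['not_after_iso'])
```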

View File

@ -140,22 +140,28 @@ def compatibility(mod_name: str, compatible: List, quiet: bool = False) -> None:
    ])
-def has_data(data: str) -> bool:
+def has_data(data: Union[str, bytes]) -> bool:
    """
-    Checks if the input contains data. If there are any non-whitespace
-    characters then return `True`, else return `False`.
+    Checks if the string input contains data. If there are any
+    non-whitespace characters then return `True`, else return `False`.
+    For bytes, returns True if there is any data.
    Parameters:
-        data:        (string) input to check whether it contains data
+        data:        (string, bytes) input to check whether it contains data
    Returns:
-        Boolean      True if input string (data) contains non-whitespace
-                     characters, otherwise False
+        Boolean      True if input string (data) contains non-whitespace
+                     characters, otherwise False. For bytes data, returns
+                     True if there is any data, otherwise False.
    """
+    if isinstance(data, str):
        return bool(data and not data.isspace())
+    return bool(data)
def convert_to_int(value: Union[str, float]) -> Optional[int]:
    """

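A minimal sketch of the updated `has_data()` behavior for both `str` and `bytes` input:
```python
from jc.utils import has_data

assert has_data('linux 5.4\n') is True    # non-whitespace characters present
assert has_data('   \n') is False         # whitespace only
assert has_data(b'\x30\x82') is True      # any bytes count as data
assert has_data(b'') is False             # empty bytes
```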
View File

@ -1,4 +1,4 @@
-.TH jc 1 2022-06-15 1.20.1 "JSON Convert"
+.TH jc 1 2022-07-05 1.20.2 "JSON Convert"
.SH NAME
\fBjc\fP \- JSON Convert JSONifies the output of many CLI tools and file-types
.SH SYNOPSIS
@ -152,6 +152,11 @@ CSV file streaming parser
\fB--git-log-s\fP
`git log` command streaming parser
+.TP
+.B
+\fB--gpg\fP
+`gpg --with-colons` command parser
.TP
.B
\fB--group\fP
@ -512,6 +517,11 @@ Key/Value file parser
\fB--who\fP
`who` command parser
+.TP
+.B
+\fB--x509-cert\fP
+X.509 PEM and DER certificate file parser
.TP
.B
\fB--xml\fP

View File

@ -5,7 +5,7 @@ with open('README.md', 'r') as f:
setuptools.setup(
    name='jc',
-    version='1.20.1',
+    version='1.20.2',
    author='Kelly Brazil',
    author_email='kellyjonbrazil@gmail.com',
    description='Converts the output of popular command-line tools and file-types to JSON.',

1 tests/fixtures/generic/gpg.json vendored Normal file
View File

@ -0,0 +1 @@
[{"type":"pub","validity":"f","key_length":"1024","pub_key_alg":"17","key_id":"6C7EE1B8621CC013","creation_date":"899817715","expiration_date":"1055898235","certsn_uidhash_trustinfo":null,"owner_trust":"m","user_id":null,"signature_class":null,"key_capabilities":"scESC","cert_fingerprint_other":null,"flag":null,"token_sn":null,"hash_alg":null,"curve_name":null,"compliance_flags":null,"last_update_date":null,"origin":null,"comment":null},{"type":"fpr","validity":null,"key_length":null,"pub_key_alg":null,"key_id":null,"creation_date":null,"expiration_date":null,"certsn_uidhash_trustinfo":null,"owner_trust":null,"user_id":"ECAF7590EB3443B5C7CF3ACB6C7EE1B8621CC013","signature_class":null,"key_capabilities":null,"cert_fingerprint_other":null,"flag":null,"token_sn":null,"hash_alg":null,"curve_name":null,"compliance_flags":null,"last_update_date":null,"origin":null,"comment":null},{"type":"uid","validity":"f","key_length":null,"pub_key_alg":null,"key_id":null,"creation_date":null,"expiration_date":null,"certsn_uidhash_trustinfo":null,"owner_trust":null,"user_id":"Werner Koch <wk@g10code.com>","signature_class":null,"key_capabilities":null,"cert_fingerprint_other":null,"flag":null,"token_sn":null,"hash_alg":null,"curve_name":null,"compliance_flags":null,"last_update_date":null,"origin":null,"comment":null},{"type":"uid","validity":"f","key_length":null,"pub_key_alg":null,"key_id":null,"creation_date":null,"expiration_date":null,"certsn_uidhash_trustinfo":null,"owner_trust":null,"user_id":"Werner Koch <wk@gnupg.org>","signature_class":null,"key_capabilities":null,"cert_fingerprint_other":null,"flag":null,"token_sn":null,"hash_alg":null,"curve_name":null,"compliance_flags":null,"last_update_date":null,"origin":null,"comment":null},{"type":"sub","validity":"f","key_length":"1536","pub_key_alg":"16","key_id":"06AD222CADF6A6E1","creation_date":"919537416","expiration_date":"1036177416","certsn_uidhash_trustinfo":null,"owner_trust":null,"user_id":null,"signature_class":null,"key_capabilities":"e","cert_fingerprint_other":null,"flag":null,"token_sn":null,"hash_alg":null,"curve_name":null,"compliance_flags":null,"last_update_date":null,"origin":null,"comment":null},{"type":"fpr","validity":null,"key_length":null,"pub_key_alg":null,"key_id":null,"creation_date":null,"expiration_date":null,"certsn_uidhash_trustinfo":null,"owner_trust":null,"user_id":"CF8BCC4B18DE08FCD8A1615906AD222CADF6A6E1","signature_class":null,"key_capabilities":null,"cert_fingerprint_other":null,"flag":null,"token_sn":null,"hash_alg":null,"curve_name":null,"compliance_flags":null,"last_update_date":null,"origin":null,"comment":null},{"type":"sub","validity":"r","key_length":"1536","pub_key_alg":"20","key_id":"5CE086B5B5A18FF4","creation_date":"899817788","expiration_date":"1025961788","certsn_uidhash_trustinfo":null,"owner_trust":null,"user_id":null,"signature_class":null,"key_capabilities":"esc","cert_fingerprint_other":null,"flag":null,"token_sn":null,"hash_alg":null,"curve_name":null,"compliance_flags":null,"last_update_date":null,"origin":null,"comment":null},{"type":"fpr","validity":null,"key_length":null,"pub_key_alg":null,"key_id":null,"creation_date":null,"expiration_date":null,"certsn_uidhash_trustinfo":null,"owner_trust":null,"user_id":"AB059359A3B81F410FCFF97F5CE086B5B5A18FF4","signature_class":null,"key_capabilities":null,"cert_fingerprint_other":null,"flag":null,"token_sn":null,"hash_alg":null,"curve_name":null,"compliance_flags":null,"last_update_date":null,"origin":null,"comment":null},{"type":"pkd","index":"0","bits":"10
24","value":"B665B1435F4C2FF26ABB"},{"type":"tfs","version":"f1","validity":"f2","signature_count":"f3","encryption_count":"f4","policy":"f5","signature_first_seen":"f6","signature_most_recent_seen":"f7","encryption_first_done":"f8","encryption_most_recent_done":"f9"},{"type":"tru","staleness_reason":"o","trust_model":"0","trust_db_created":"1166697654","trust_db_expires":"1","marginally_trusted_users":"3","completely_trusted_users":"1","cert_chain_max_depth":"5"},{"type":"skp","subpacket_number":"f1","hex_flags":"f2","subpacket_length":"f3","subpacket_data":"f4"},{"type":"cfg","version":"1.3.5"},{"type":"cfg","pubkey":"1;2;3;16;17"},{"type":"cfg","cipher":"2;3;4;7;8;9;10"},{"type":"cfg","digest":"1;2;3;8;9;10"},{"type":"cfg","compress":"0;1;2;3"},{"type":"cfg","group":"mynames","members":"patti;joe;0x12345678;paige"},{"type":"cfg","curve_names":"ed25519;nistp256;nistp384;nistp521"}]

21 tests/fixtures/generic/gpg.out vendored Normal file
View File

@ -0,0 +1,21 @@
pub:f:1024:17:6C7EE1B8621CC013:899817715:1055898235::m:::scESC:
fpr:::::::::ECAF7590EB3443B5C7CF3ACB6C7EE1B8621CC013:
uid:f::::::::Werner Koch <wk@g10code.com>:
uid:f::::::::Werner Koch <wk@gnupg.org>:
sub:f:1536:16:06AD222CADF6A6E1:919537416:1036177416:::::e:
fpr:::::::::CF8BCC4B18DE08FCD8A1615906AD222CADF6A6E1:
sub:r:1536:20:5CE086B5B5A18FF4:899817788:1025961788:::::esc:
fpr:::::::::AB059359A3B81F410FCFF97F5CE086B5B5A18FF4:
pkd:0:1024:B665B1435F4C2FF26ABB:
tfs:f1:f2:f3:f4:f5:f6:f7:f8:f9:
tru:o:0:1166697654:1:3:1:5
skp:f1:f2:f3:f4:
cfg:version:1.3.5
cfg:pubkey:1;2;3;16;17
cfg:cipher:2;3;4;7;8;9;10
cfg:digest:1;2;3;8;9;10
cfg:compress:0;1;2;3
cfg:group:mynames:patti;joe;0x12345678;paige
cfg:curve:ed25519;nistp256;nistp384;nistp521

BIN tests/fixtures/generic/x509-ca-cert.der vendored Normal file

Binary file not shown.

View File

@ -0,0 +1 @@
[{"tbs_certificate":{"version":"v3","serial_number":"60:01:97:b7:46:a7:ea:b4:b4:9a:d6:4b:2f:f7:90:fb","signature":{"algorithm":"sha256_rsa","parameters":null},"issuer":{"country_name":"US","organization_name":"thawte, Inc.","organizational_unit_name":["Certification Services Division","(c) 2008 thawte, Inc. - For authorized use only"],"common_name":"thawte Primary Root CA - G3"},"validity":{"not_before":1207094400,"not_after":2143324799,"not_before_iso":"2008-04-02T00:00:00+00:00","not_after_iso":"2037-12-01T23:59:59+00:00"},"subject":{"country_name":"US","organization_name":"thawte, Inc.","organizational_unit_name":["Certification Services Division","(c) 2008 thawte, Inc. - For authorized use only"],"common_name":"thawte Primary Root CA - G3"},"subject_public_key_info":{"algorithm":{"algorithm":"rsa","parameters":null},"public_key":{"modulus":"b2:bf:27:2c:fb:db:d8:5b:dd:78:7b:1b:9e:77:66:81:cb:3e:bc:7c:ae:f3:a6:27:9a:34:a3:68:31:71:38:33:62:e4:f3:71:66:79:b1:a9:65:a3:a5:8b:d5:8f:60:2d:3f:42:cc:aa:6b:32:c0:23:cb:2c:41:dd:e4:df:fc:61:9c:e2:73:b2:22:95:11:43:18:5f:c4:b6:1f:57:6c:0a:05:58:22:c8:36:4c:3a:7c:a5:d1:cf:86:af:88:a7:44:02:13:74:71:73:0a:42:59:02:f8:1b:14:6b:42:df:6f:5f:ba:6b:82:a2:9d:5b:e7:4a:bd:1e:01:72:db:4b:74:e8:3b:7f:7f:7d:1f:04:b4:26:9b:e0:b4:5a:ac:47:3d:55:b8:d7:b0:26:52:28:01:31:40:66:d8:d9:24:bd:f6:2a:d8:ec:21:49:5c:9b:f6:7a:e9:7f:55:35:7e:96:6b:8d:93:93:27:cb:92:bb:ea:ac:40:c0:9f:c2:f8:80:cf:5d:f4:5a:dc:ce:74:86:a6:3e:6c:0b:53:ca:bd:92:ce:19:06:72:e6:0c:5c:38:69:c7:04:d6:bc:6c:ce:5b:f6:f7:68:9c:dc:25:15:48:88:a1:e9:a9:f8:98:9c:e0:f3:d5:31:28:61:11:6c:67:96:8d:39:99:cb:c2:45:24:39","public_exponent":65537}},"issuer_unique_id":null,"subject_unique_id":null,"extensions":[{"extn_id":"basic_constraints","critical":true,"extn_value":{"ca":true,"path_len_constraint":null}},{"extn_id":"key_usage","critical":true,"extn_value":["crl_sign","key_cert_sign"]},{"extn_id":"key_identifier","critical":false,"extn_value":"ad:6c:aa:94:60:9c:ed:e4:ff:fa:3e:0a:74:2b:63:03:f7:b6:59:bf"}]},"signature_algorithm":{"algorithm":"sha256_rsa","parameters":null},"signature_value":"1a:40:d8:95:65:ac:09:92:89:c6:39:f4:10:e5:a9:0e:66:53:5d:78:de:fa:24:91:bb:e7:44:51:df:c6:16:34:0a:ef:6a:44:51:ea:2b:07:8a:03:7a:c3:eb:3f:0a:2c:52:16:a0:2b:43:b9:25:90:3f:70:a9:33:25:6d:45:1a:28:3b:27:cf:aa:c3:29:42:1b:df:3b:4c:c0:33:34:5b:41:88:bf:6b:2b:65:af:28:ef:b2:f5:c3:aa:66:ce:7b:56:ee:b7:c8:cb:67:c1:c9:9c:1a:18:b8:c4:c3:49:03:f1:60:0e:50:cd:46:c5:f3:77:79:f7:b6:15:e0:38:db:c7:2f:28:a0:0c:3f:77:26:74:d9:25:12:da:31:da:1a:1e:dc:29:41:91:22:3c:69:a7:bb:02:f2:b6:5c:27:03:89:f4:06:ea:9b:e4:72:82:e3:a1:09:c1:e9:00:19:d3:3e:d4:70:6b:ba:71:a6:aa:58:ae:f4:bb:e9:6c:b6:ef:87:cc:9b:bb:ff:39:e6:56:61:d3:0a:a7:c4:5c:4c:60:7b:05:77:26:7a:bf:d8:07:52:2c:62:f7:70:63:d9:39:bc:6f:1c:c2:79:dc:76:29:af:ce:c5:2c:64:04:5e:88:36:6e:31:d4:40:1a:62:34:36:3f:35:01:ae:ac:63:a0"}]

View File

@ -0,0 +1 @@
[{"tbs_certificate":{"version":"v3","serial_number":"f7:f9:4e:5f:30:7d:ba:c6","signature":{"algorithm":"sha256_rsa","parameters":null},"issuer":{"country_name":"US","state_or_province_name":"California","locality_name":"San Francisco","organization_name":"BadSSL","common_name":"BadSSL Client Root Certificate Authority"},"validity":{"not_before":1652822124,"not_after":1715894124,"not_before_iso":"2022-05-17T21:15:24+00:00","not_after_iso":"2024-05-16T21:15:24+00:00"},"subject":{"country_name":"US","state_or_province_name":"California","locality_name":"San Francisco","organization_name":"BadSSL","common_name":"BadSSL Client Certificate"},"subject_public_key_info":{"algorithm":{"algorithm":"rsa","parameters":null},"public_key":{"modulus":"c7:37:5f:11:eb:1e:4e:cf:eb:ba:48:e5:cb:a3:12:2c:73:3e:46:1d:1e:9c:0d:c0:8b:83:23:da:c7:65:df:5c:77:49:b3:e8:7a:7d:3c:ba:d5:61:8c:f9:a5:c4:85:1d:92:23:06:e3:e7:df:7b:b3:7e:26:d0:cb:1b:be:42:6b:16:69:f4:2c:72:b5:7e:e4:cb:0a:28:44:12:6c:46:74:21:99:03:dc:6b:c3:11:58:02:41:23:3f:b0:fc:bf:b7:00:59:13:22:a5:81:7f:24:fe:d5:53:bc:4d:52:8f:90:4a:46:74:b0:e8:bd:93:a6:cd:90:00:4a:2f:7f:b2:3f:a3:ea:03:3b:01:a0:a2:0d:e6:53:7f:61:12:eb:a6:9b:03:9a:4e:a7:ad:10:e8:e1:1d:c2:0f:ef:09:42:5f:6a:b8:4a:0e:98:bd:b6:3d:cf:ea:a4:e8:cb:d6:38:0e:20:54:84:e7:2d:e0:c1:bc:c3:95:f0:98:a0:02:f9:57:e6:f2:d6:fb:b4:c8:94:a1:4d:32:bc:a2:8e:70:be:98:5c:15:f1:07:69:0f:70:e6:31:60:da:1b:5d:ab:df:54:11:1d:c1:2a:e3:43:b8:bf:b3:7a:3a:86:41:90:96:6f:45:ec:93:c4:b9:58:1b:97:f2:5d:c1:ae:b8:39:82:2a:8d","public_exponent":65537}},"issuer_unique_id":null,"subject_unique_id":null,"extensions":[{"extn_id":"basic_constraints","critical":false,"extn_value":{"ca":false,"path_len_constraint":null}},{"extn_id":"netscape_certificate_type","critical":false,"extn_value":["ssl_client"]},{"extn_id":"key_usage","critical":false,"extn_value":["digital_signature","key_encipherment","non_repudiation"]}]},"signature_algorithm":{"algorithm":"sha256_rsa","parameters":null},"signature_value":"52:34:ca:43:bc:95:21:c5:fa:1d:bd:0c:3b:94:3f:d6:c0:96:ae:3e:7b:61:86:a6:da:94:80:cd:4a:13:2c:e7:11:7d:13:af:0b:c6:63:a9:54:b8:4d:f9:c9:3f:1e:0d:74:ee:db:c9:bf:04:7b:48:6e:18:93:cf:2c:3c:e9:bf:35:48:e0:03:34:1e:11:6c:30:f2:5a:4a:49:f5:d5:54:2d:69:79:c9:a3:bc:a5:73:ea:43:0a:ac:bc:79:09:12:14:40:43:16:95:c5:65:f1:67:f0:6c:b1:33:60:f5:a1:23:68:e6:47:94:52:ef:44:85:85:92:9b:54:ba:61:aa:45:c1:0f:0d:38:6a:4a:f0:47:9d:cf:b3:7e:1c:e1:45:7e:b3:53:54:78:ed:96:7d:89:66:86:49:f6:cd:de:3a:df:69:88:a3:41:1f:7d:60:63:c1:6c:b3:f8:a0:f0:1b:5f:94:d9:a2:19:ee:15:68:06:4f:50:1c:f4:83:f1:9f:13:64:db:47:a0:cc:5b:19:f6:8b:f6:b2:bf:b9:39:16:d9:e6:19:0f:ce:c2:10:15:ea:58:06:58:0c:04:7a:5a:2b:ae:a1:f3:3f:6e:2f:9c:56:0c:7c:85:c2:7f:d0:17:fb:ab:c4:1d:42:fb:fc:4b:96:ff:3c:30:d2:d6:9d:ae:09:25:2c:b6:cc:43:51:df:4b:3e:78:f2:d8:bc:34:b9:81:6d:f2:3a:38:12:4d:64:25:32:e8:a8:8b:e5:5b:24:3a:9e:a5:67:29:3c:34:57:34:c0:b2:b2:6e:80:b5:96:0e:69:7f:fb:e0:f0:36:98:2d:93:fd:1c:2f:28:30:c9:31:9b:3a:3f:48:bb:fd:e8:83:40:59:05:64:74:35:d7:5e:17:b1:6f:5a:ab:63:24:8f:d0:51:58:c8:2c:ab:a8:84:aa:44:b2:13:09:51:26:3b:6e:35:7d:85:41:45:24:54:a9:92:7f:8f:d6:e9:20:03:06:45:64:d6:58:f3:d1:7e:01:7e:16:0b:45:e1:b9:a1:e3:2c:43:ff:1c:9a:aa:e4:c7:82:cb:80:86:d7:3f:17:2c:96:31:93:1b:d4:41:64:24:c0:36:6e:14:b9:ed:eb:da:6d:48:52:1f:31:c1:11:c0:69:71:e0:04:97:11:4f:a4:c6:fc:3a:69:93:b9:02:0a:e0:d2:6b:9e:88:0e:69:1a:e0:fd:17:37:80:01:f4:d0:27:c3:01:f4:64:c5:fc:44:ca:d7:e9:75:55:be:61:fd:5d:7c:ee:47:1d:5b:f6:15:d8:5e:00:dd:23:b3:fa:95:f4:61:79:04:6a:b6:82:97:6c:ab:be:78:c1:8d"}]

View File

@ -0,0 +1,64 @@
Bag Attributes
localKeyID: 68 B5 22 00 77 DE 8B BE AE D8 E1 C2 54 0F EC 6C 16 B4 18 A8
subject=/C=US/ST=California/L=San Francisco/O=BadSSL/CN=BadSSL Client Certificate
issuer=/C=US/ST=California/L=San Francisco/O=BadSSL/CN=BadSSL Client Root Certificate Authority
-----BEGIN CERTIFICATE-----
MIIEnTCCAoWgAwIBAgIJAPf5Tl8wfbrGMA0GCSqGSIb3DQEBCwUAMH4xCzAJBgNV
BAYTAlVTMRMwEQYDVQQIDApDYWxpZm9ybmlhMRYwFAYDVQQHDA1TYW4gRnJhbmNp
c2NvMQ8wDQYDVQQKDAZCYWRTU0wxMTAvBgNVBAMMKEJhZFNTTCBDbGllbnQgUm9v
dCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkwHhcNMjIwNTE3MjExNTI0WhcNMjQwNTE2
MjExNTI0WjBvMQswCQYDVQQGEwJVUzETMBEGA1UECAwKQ2FsaWZvcm5pYTEWMBQG
A1UEBwwNU2FuIEZyYW5jaXNjbzEPMA0GA1UECgwGQmFkU1NMMSIwIAYDVQQDDBlC
YWRTU0wgQ2xpZW50IENlcnRpZmljYXRlMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A
MIIBCgKCAQEAxzdfEeseTs/rukjly6MSLHM+Rh0enA3Ai4Mj2sdl31x3SbPoen08
utVhjPmlxIUdkiMG4+ffe7N+JtDLG75CaxZp9CxytX7kywooRBJsRnQhmQPca8MR
WAJBIz+w/L+3AFkTIqWBfyT+1VO8TVKPkEpGdLDovZOmzZAASi9/sj+j6gM7AaCi
DeZTf2ES66abA5pOp60Q6OEdwg/vCUJfarhKDpi9tj3P6qToy9Y4DiBUhOct4MG8
w5XwmKAC+Vfm8tb7tMiUoU0yvKKOcL6YXBXxB2kPcOYxYNobXavfVBEdwSrjQ7i/
s3o6hkGQlm9F7JPEuVgbl/Jdwa64OYIqjQIDAQABoy0wKzAJBgNVHRMEAjAAMBEG
CWCGSAGG+EIBAQQEAwIHgDALBgNVHQ8EBAMCBeAwDQYJKoZIhvcNAQELBQADggIB
AFI0ykO8lSHF+h29DDuUP9bAlq4+e2GGptqUgM1KEyznEX0TrwvGY6lUuE35yT8e
DXTu28m/BHtIbhiTzyw86b81SOADNB4RbDDyWkpJ9dVULWl5yaO8pXPqQwqsvHkJ
EhRAQxaVxWXxZ/BssTNg9aEjaOZHlFLvRIWFkptUumGqRcEPDThqSvBHnc+zfhzh
RX6zU1R47ZZ9iWaGSfbN3jrfaYijQR99YGPBbLP4oPAbX5TZohnuFWgGT1Ac9IPx
nxNk20egzFsZ9ov2sr+5ORbZ5hkPzsIQFepYBlgMBHpaK66h8z9uL5xWDHyFwn/Q
F/urxB1C+/xLlv88MNLWna4JJSy2zENR30s+ePLYvDS5gW3yOjgSTWQlMuioi+Vb
JDqepWcpPDRXNMCysm6AtZYOaX/74PA2mC2T/RwvKDDJMZs6P0i7/eiDQFkFZHQ1
114XsW9aq2Mkj9BRWMgsq6iEqkSyEwlRJjtuNX2FQUUkVKmSf4/W6SADBkVk1ljz
0X4BfhYLReG5oeMsQ/8cmqrkx4LLgIbXPxcsljGTG9RBZCTANm4Uue3r2m1IUh8x
wRHAaXHgBJcRT6TG/Dppk7kCCuDSa56IDmka4P0XN4AB9NAnwwH0ZMX8RMrX6XVV
vmH9XXzuRx1b9hXYXgDdI7P6lfRheQRqtoKXbKu+eMGN
-----END CERTIFICATE-----
Bag Attributes
localKeyID: 68 B5 22 00 77 DE 8B BE AE D8 E1 C2 54 0F EC 6C 16 B4 18 A8
Key Attributes: <No Attributes>
-----BEGIN ENCRYPTED PRIVATE KEY-----
MIIFDjBABgkqhkiG9w0BBQ0wMzAbBgkqhkiG9w0BBQwwDgQIRZnUXGoJtbUCAggA
MBQGCCqGSIb3DQMHBAjgV2LmquuUdwSCBMjaZtNVgqmwUsevUBY4Rx3+Y+Z4lfHT
7BJqv53p0hOMZdaLRAhK4hHfTLrtQRYK+rEroTF1gElfTUnmjdGBs0b2sLaNrRAW
3W5nv//r6HahZ4RmsBOHS2GBj4V3BEa2c2VdjyZ+HuZ/nnqMQyNzBooLXctkyp0/
10UO4a8IU0iPPRI+Zu4d/A1UAGDW2RRbNiYBDbUNdplZr0T1y5vC2felHHptlp8i
cslBR4ASRllYG/kjC6fyI+Jz5yGaFKd3Jlr9wc7cMml8rMHTQrhYScHK2D1L3Bgp
yK3sa9EVu7lBlBAEs8vjl5RE5rO6G487wZpmCX21j3lh6IOyjEEJXPfiFOZxt6Zn
437Aumr84MvuTyPP8mI65B/grdsvth8R8k8dns0n9SKWmW84jtkwRa3yHx3g/9ef
lz5uqHoO1J3+em/rFrj+eJXTrXE4Sqe2Ohb96bs+6ho2Hz8+B+zoQfeGV2/nPduC
Lty/VVnr5YoRPfbmin4rmbmVrURLLRnQ4RujbVKp4msmEQIYa/B6sMzY7gSKFFpT
jJm2nLy0eU0FEWbXPTDn0qi5vcHo7lIahBLX6TRTcV4vkhh9miPQc64TwB+tIjhf
B0/k3gjBblmIbBIb+6N9LZRqO5D4CIN7Su6w3e0lmdVIR+NvU1NMynpVsJvkM44j
YVfQQFckHe0BdmAdsZrcjyTe35P/lNwssZ58XiM3P3W7zrhL2infXJ5sZQ9QOdqS
hIoSSBigGF7CAl2UqZ4G/Yf2MvbswtH9RjRIwZGDA09PrKwfa5lK2y9RnyEeC0Qn
OPis+XmzIA+brOwpDOIdVN6AmZWtyxQnlp+Ad26wm5ACAM5hH1uAJ3Pro9+sDK0J
dDp3LH6eehjt1NJfpmj9yDxgXchsouo2JeE/+Nc46uDc+vEe23bmi9xMqvhlidIT
XtGWjyIWs69iEa78qg+HonHM+ahq7ugSj3zwkKXDc3+67ia5/J6RWh4CiH5+AIt+
5hpgEYZLtAR13JaMm/fH+2loV2vgX55for3CoVbo5pYm2s5Kk+tJ8MFVHw7Khr6i
l+3EEuxfmtT/CqXhusMJ5Yh1sNpCIw14wBGI5Bey9Dw5Su57vE1cZndO94O6XI7t
kK4zxtYy47UU5wOGf5VrUJhEr/YY0hjB5zDRlLoEkXGQ9NmcW3X956b2NeqnX4Tz
xP5hEvOYFKKyn3XPXtjvY+X8cGMmvhOSQ7/C8uz0G0+IpiYI99H8Yxl+26OiPxdI
u4XLEJg3/L82OKs0ekUkNYowGJnlxDQ40wWZUBQonn1imqY3OKn4gXr5OxDvCqX2
fXF2eYkOTmOQGpfjBPcdfDwcRU7eCUS/csW1Ri/XyGI+xdX9SG+2grXLqA1o/BFX
aTxSx5Yw6tZ7knhj53KtxFX8AFSPLQ5CJxsC5YyNAuj7ALsKZtJlZA34kXh23+bS
yAZ/ABg/Us6+F7TXnGPItETChwgCwEK637vQmxeQ4A/Y63HWUEAWkdUiinWjKDXS
NjVDK67Qj3KoOz4Qr4lNZuFkbIE7ipWhQpRudM2NBlYOLlSp2eg2AE97BPi9zM3v
TWSxEAt03CPm1fZtUvsxnlT5LFfZt/EiZpWRaHAyNIAu7kHDThBcYb2ms82aSRVq
DUc=
-----END ENCRYPTED PRIVATE KEY-----

File diff suppressed because one or more lines are too long.

View File

@ -0,0 +1,56 @@
-----BEGIN CERTIFICATE-----
MIIJ7zCCCNegAwIBAgISBMFHpRZxqK2Eb+XP7MpCzMKtMA0GCSqGSIb3DQEBCwUA
MDIxCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1MZXQncyBFbmNyeXB0MQswCQYDVQQD
EwJSMzAeFw0yMjA2MTUwMjQwMzZaFw0yMjA5MTMwMjQwMzVaMB0xGzAZBgNVBAMT
EnRscy5hdXRvbWF0dGljLmNvbTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC
ggEBAMJLRf7n8exofN3lm7YtKzHdXJ/UTwcZ19bFamtEOOznxYj/iPJ1Ru8Q5Sid
Lc8/YNFluWlEm1mZ+4sAzXGIhwowKhe/XZfhwFaYrYfFAJ7GvSV43p7X7lNanxYj
UV7zqAlCcNEtbxFulH7bHUX8Cg355aKHM/Rx0jnlIiKbhjGXtT3RNWiijXUuTK4U
K1HNkM/WQ9RJgD9Cqx8hNwUe6ggN4E3gtsxIu/R+jukNOgKFia7Q9Jr3hWsNWMkf
ptusDNU9YrhFqHcxP1HGhNz+HNi1o5MueJ3k/nJ6gelvJv5MYTpVbb34SjhoXpfj
NsPWvDErx8it7mRWPw/qS/mlsWsCAwEAAaOCBxIwggcOMA4GA1UdDwEB/wQEAwIF
oDAdBgNVHSUEFjAUBggrBgEFBQcDAQYIKwYBBQUHAwIwDAYDVR0TAQH/BAIwADAd
BgNVHQ4EFgQUY89QX9I+oHVhhtlgG+zZ2N3HMFowHwYDVR0jBBgwFoAUFC6zF7dY
VsuuUAlA5h+vnYsUwsYwVQYIKwYBBQUHAQEESTBHMCEGCCsGAQUFBzABhhVodHRw
Oi8vcjMuby5sZW5jci5vcmcwIgYIKwYBBQUHMAKGFmh0dHA6Ly9yMy5pLmxlbmNy
Lm9yZy8wggTgBgNVHREEggTXMIIE04IIYmFmZmwuY2GCFGJsb2cua2VsbHlicmF6
aWwuY29tggpicm8tcGEub3JnghRjb21wZXRlbmNlLmdhbWUuYmxvZ4IaZGlydHly
b3VsZXR0ZTM2Ni5nYW1lLmJsb2eCEGdpZnRzYnlwZWFybC5jb22CCWdpdW1hbi5t
ZYIYZ2xvYmFsdHJhbnNhY3RvcnNsdGQuY29tghhncmlsbHRpbWVyZXN0YXVyYW50
cy5jb22CG2d1cmVhbWV0c2V0YWtvbG9yYXRlZ2lhLmNvbYIUaGFwcHlsdWNreWVu
am95LmJsb2eCHmhlYWx0aGJhc2VkYmVhdXR5LmZpdG5lc3MuYmxvZ4IVaGVhbHRo
aXNrbm93bGVkZ2UuY29tghJpbWFrZTNkZGVzaWducy5jb22CFGphdmFzY3JpcHQu
Z2FtZS5ibG9ngiRqb3VybmV5aW5nYmFja3Rvd2VsbG5lc3MuaGVhbHRoLmJsb2eC
EGpxdWVyeS5nYW1lLmJsb2eCGWthaGxlcnRyZWdpb25hbGNhbmNlci5vcmeCG2th
cm1hZGVzaWduc3R1ZGlvcy5ncmFwaGljc4IUbm90aWNpYS5zY2llbmNlLmJsb2eC
FXJleXZpbmdhbWVyLmdhbWUuYmxvZ4IWc2FpbGluZ3Jlc2ZlcmJlci5jby51a4IS
c3RhcmR1c3QuZ2FtZS5ibG9nghZzd2VldGxvdmUuZmFzaGlvbi5ibG9nghJ0bHMu
YXV0b21hdHRpYy5jb22CE3dhbmRlcmx1c3R3YXR0cy5jb22CGHd3dy5hbmdlbGd1
YXJkaWFucy5sZWdhbIIMd3d3LmJhZmZsLmNhghh3d3cuYmxvZy5rZWxseWJyYXpp
bC5jb22CDnd3dy5icm8tcGEub3Jnghh3d3cuY29tcGV0ZW5jZS5nYW1lLmJsb2eC
Hnd3dy5kaXJ0eXJvdWxldHRlMzY2LmdhbWUuYmxvZ4IUd3d3LmdpZnRzYnlwZWFy
bC5jb22CDXd3dy5naXVtYW4ubWWCHHd3dy5nbG9iYWx0cmFuc2FjdG9yc2x0ZC5j
b22CHHd3dy5ncmlsbHRpbWVyZXN0YXVyYW50cy5jb22CH3d3dy5ndXJlYW1ldHNl
dGFrb2xvcmF0ZWdpYS5jb22CGHd3dy5oYXBweWx1Y2t5ZW5qb3kuYmxvZ4Iid3d3
LmhlYWx0aGJhc2VkYmVhdXR5LmZpdG5lc3MuYmxvZ4IZd3d3LmhlYWx0aGlza25v
d2xlZGdlLmNvbYIWd3d3LmltYWtlM2RkZXNpZ25zLmNvbYIYd3d3LmphdmFzY3Jp
cHQuZ2FtZS5ibG9ngih3d3cuam91cm5leWluZ2JhY2t0b3dlbGxuZXNzLmhlYWx0
aC5ibG9nghR3d3cuanF1ZXJ5LmdhbWUuYmxvZ4Idd3d3LmthaGxlcnRyZWdpb25h
bGNhbmNlci5vcmeCH3d3dy5rYXJtYWRlc2lnbnN0dWRpb3MuZ3JhcGhpY3OCGXd3
dy5yZXl2aW5nYW1lci5nYW1lLmJsb2eCHHd3dy5ydXBsYXlpbmdib2FyZC5nYW1l
LmJsb2eCFnd3dy5zdGFyZHVzdC5nYW1lLmJsb2eCGnd3dy5zd2VldGxvdmUuZmFz
aGlvbi5ibG9nMEwGA1UdIARFMEMwCAYGZ4EMAQIBMDcGCysGAQQBgt8TAQEBMCgw
JgYIKwYBBQUHAgEWGmh0dHA6Ly9jcHMubGV0c2VuY3J5cHQub3JnMIIBBAYKKwYB
BAHWeQIEAgSB9QSB8gDwAHYARqVV63X6kSAwtaKJafTzfREsQXS+/Um4havy/HD+
bUcAAAGBZXLjRAAABAMARzBFAiEA3mFZbt4cdXwo1z54gNOFX4r/k4WN40rk9y+Z
HTa0xGICIGrNI2v0J0GkHZyVzTa+LSSbh6o8FBVwX/Xp+tEU3N/aAHYAb1N2rDHw
MRnYmQCkURX/dxUcEdkCwQApBo2yCJo32RMAAAGBZXLkGAAABAMARzBFAiEA9UBQ
vJnC6aJ0eoP57GvlxYliWjewDFHkHBH3i7ukl9kCIAc1Tl1BO++DnxjgWGAGY1Ei
LYyCrrJep8Jc+C1LUBSGMA0GCSqGSIb3DQEBCwUAA4IBAQCorctPt+y50cBQjt3h
FJPLvuVuRQcq65Lwvj29DuZFx415PgnXQEjWiiJDEDkUEej5uKVSMpAMkpR0V79u
PbA/zqEGX5viCtpa/4NPKCusy2cDC3uGqtdgwktv/Wa9jWHRSCQpXENJTnkqjjpG
O86b9uWf3O3IrdSm7uMzr0w0QSfeuNVj30WPpxF4cSin2ClfC4viB0TJVL7go3eC
LQdc8E4KEQZr3JD33/tgKJbwgdxNJKhTD+PQ8CL6XqfrqeRf1Mxwt8W5fEvpOqob
qMgqsod51W1jtC5+1ySa/A2PrAS7mM4Frm8HC0nN1q35N33/G/zmOiWb6tO4u6eD
RIRt
-----END CERTIFICATE-----

File diff suppressed because one or more lines are too long

View File

@ -0,0 +1,285 @@
-----BEGIN CERTIFICATE-----
MIIGJzCCBA+gAwIBAgIBATANBgkqhkiG9w0BAQUFADCBsjELMAkGA1UEBhMCRlIx
DzANBgNVBAgMBkFsc2FjZTETMBEGA1UEBwwKU3RyYXNib3VyZzEYMBYGA1UECgwP
d3d3LmZyZWVsYW4ub3JnMRAwDgYDVQQLDAdmcmVlbGFuMS0wKwYDVQQDDCRGcmVl
bGFuIFNhbXBsZSBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkxIjAgBgkqhkiG9w0BCQEW
E2NvbnRhY3RAZnJlZWxhbi5vcmcwHhcNMTIwNDI3MTAzMTE4WhcNMjIwNDI1MTAz
MTE4WjB+MQswCQYDVQQGEwJGUjEPMA0GA1UECAwGQWxzYWNlMRgwFgYDVQQKDA93
d3cuZnJlZWxhbi5vcmcxEDAOBgNVBAsMB2ZyZWVsYW4xDjAMBgNVBAMMBWFsaWNl
MSIwIAYJKoZIhvcNAQkBFhNjb250YWN0QGZyZWVsYW4ub3JnMIICIjANBgkqhkiG
9w0BAQEFAAOCAg8AMIICCgKCAgEA3W29+ID6194bH6ejLrIC4hb2Ugo8v6ZC+Mrc
k2dNYMNPjcOKABvxxEtBamnSaeU/IY7FC/giN622LEtV/3oDcrua0+yWuVafyxmZ
yTKUb4/GUgafRQPf/eiX9urWurtIK7XgNGFNUjYPq4dSJQPPhwCHE/LKAykWnZBX
RrX0Dq4XyApNku0IpjIjEXH+8ixE12wH8wt7DEvdO7T3N3CfUbaITl1qBX+Nm2Z6
q4Ag/u5rl8NJfXg71ZmXA3XOj7zFvpyapRIZcPmkvZYn7SMCp8dXyXHPdpSiIWL2
uB3KiO4JrUYvt2GzLBUThp+lNSZaZ/Q3yOaAAUkOx+1h08285Pi+P8lO+H2Xic4S
vMq1xtLg2bNoPC5KnbRfuFPuUD2/3dSiiragJ6uYDLOyWJDivKGt/72OVTEPAL9o
6T2pGZrwbQuiFGrGTMZOvWMSpQtNl+tCCXlT4mWqJDRwuMGrI4DnnGzt3IKqNwS4
Qyo9KqjMIPwnXZAmWPm3FOKe4sFwc5fpawKO01JZewDsYTDxVj+cwXwFxbE2yBiF
z2FAHwfopwaH35p3C6lkcgP2k/zgAlnBluzACUI+MKJ/G0gv/uAhj1OHJQ3L6kn1
SpvQ41/ueBjlunExqQSYD7GtZ1Kg8uOcq2r+WISE3Qc9MpQFFkUVllmgWGwYDuN3
Zsez95kCAwEAAaN7MHkwCQYDVR0TBAIwADAsBglghkgBhvhCAQ0EHxYdT3BlblNT
TCBHZW5lcmF0ZWQgQ2VydGlmaWNhdGUwHQYDVR0OBBYEFFlfyRO6G8y5qEFKikl5
ajb2fT7XMB8GA1UdIwQYMBaAFCNsLT0+KV14uGw+quK7Lh5sh/JTMA0GCSqGSIb3
DQEBBQUAA4ICAQAT5wJFPqervbja5+90iKxi1d0QVtVGB+z6aoAMuWK+qgi0vgvr
mu9ot2lvTSCSnRhjeiP0SIdqFMORmBtOCFk/kYDp9M/91b+vS+S9eAlxrNCB5VOf
PqxEPp/wv1rBcE4GBO/c6HcFon3F+oBYCsUQbZDKSSZxhDm3mj7pb67FNbZbJIzJ
70HDsRe2O04oiTx+h6g6pW3cOQMgIAvFgKN5Ex727K4230B0NIdGkzuj4KSML0NM
slSAcXZ41OoSKNjy44BVEZv0ZdxTDrRM4EwJtNyggFzmtTuV02nkUj1bYYYC5f0L
ADr6s0XMyaNk8twlWYlYDZ5uKDpVRVBfiGcq0uJIzIvemhuTrofh8pBQQNkPRDFT
Rq1iTo1Ihhl3/Fl1kXk1WR3jTjNb4jHX7lIoXwpwp767HAPKGhjQ9cFbnHMEtkro
RlJYdtRq5mccDtwT0GFyoJLLBZdHHMHJz0F9H7FNk2tTQQMhK5MVYwg+LIaee586
CQVqfbscp7evlgjLW98H+5zylRHAgoH2G79aHljNKMp9BOuq6SnEglEsiWGVtu2l
hnx8SB3sVJZHeer8f/UQQwqbAO+Kdy70NmbSaqaVtp8jOxLiidWkwSyRTsuU6D8i
DiH5uEqBXExjrj0FslxcVKdVj5glVcSmkLwZKbEU1OKwleT/iXFhvooWhQ==
-----END CERTIFICATE-----
Certificate:
Data:
Version: 3 (0x2)
Serial Number: 2 (0x2)
Signature Algorithm: sha1WithRSAEncryption
Issuer: C=FR, ST=Alsace, L=Strasbourg, O=www.freelan.org, OU=freelan, CN=Freelan Sample Certificate Authority/emailAddress=contact@freelan.org
Validity
Not Before: Apr 27 10:54:40 2012 GMT
Not After : Apr 25 10:54:40 2022 GMT
Subject: C=FR, ST=Alsace, O=www.freelan.org, OU=freelan, CN=bob/emailAddress=contact@freelan.org
Subject Public Key Info:
Public Key Algorithm: rsaEncryption
Public-Key: (4096 bit)
Modulus:
00:c2:3f:43:14:4a:d4:dd:43:5a:b9:43:5e:2d:bb:
89:a1:17:18:f7:ae:47:4b:7a:f4:d4:dc:a3:e1:b7:
85:3a:10:20:eb:bc:51:18:d8:8b:25:c6:04:95:4f:
80:e9:05:5c:00:f4:7c:23:7b:d1:ad:81:58:f1:9d:
43:c3:37:ee:7f:61:03:b5:ff:29:bb:10:1a:fb:a8:
77:97:9b:de:4c:7d:3f:ca:ff:53:8c:37:30:b6:88:
f2:0e:be:7c:dc:92:76:c9:5f:22:96:19:0b:91:ea:
9c:18:96:9f:43:d1:9d:22:9e:d9:c3:12:9f:80:05:
85:1f:70:bb:87:5d:63:c1:5a:51:3d:7e:69:3d:76:
6d:b0:56:ea:db:3f:ae:f0:cd:0c:19:48:b1:f2:d5:
2e:e7:fa:12:dd:15:bc:8c:dc:09:c2:26:9c:dc:22:
52:8e:c8:1c:c1:cd:01:bd:1a:24:c5:be:4f:18:08:
f3:de:59:1c:8f:63:a6:63:1d:4f:5a:92:68:7a:49:
94:26:54:d1:83:be:16:e4:5e:8f:73:2f:81:3a:3a:
30:80:fd:57:a9:7f:1b:7b:e5:0f:6c:01:68:f7:1f:
45:49:fe:06:3c:08:57:64:27:a5:0b:55:18:b7:30:
be:08:45:70:8b:cd:43:ea:fc:80:1e:03:5c:c3:52:
8d:a9:55:53:55:f4:61:2e:8b:50:64:6a:30:a7:6f:
bd:b8:80:12:ee:66:98:d8:78:5f:a0:f5:65:6a:6d:
f5:09:cc:62:4d:55:56:80:21:75:48:73:4d:b9:e3:
f9:1d:96:c9:2c:5d:79:4d:3c:c5:7a:9e:84:ff:9d:
c7:94:87:0a:3e:69:81:d2:7f:c0:5f:67:9c:06:8c:
33:5c:a3:9f:52:e7:04:c7:d3:81:ef:b2:77:1e:d0:
57:1f:1f:90:a5:69:c0:0d:43:c5:f6:a6:7e:f7:ea:
45:7c:60:b6:68:1f:64:59:dc:60:33:c2:13:8c:b7:
06:c2:2a:cd:cc:2b:02:de:a2:e9:70:0c:db:79:fe:
ce:eb:5e:c0:06:eb:76:43:09:e0:2a:c7:ee:1e:6a:
af:60:49:73:3c:a8:53:8c:e1:39:2c:e7:9e:fe:fd:
44:20:f0:85:9a:1f:eb:c7:40:c8:5b:90:43:e6:a1:
6a:00:50:4b:73:73:72:c5:39:77:13:1e:3c:95:be:
a9:37:6a:d1:4e:34:3d:34:ec:87:f8:1e:6c:e7:dc:
8b:7f:8e:d1:3c:78:c2:e2:09:93:d7:c0:68:ae:70:
81:b9:f0:d0:f7:26:a4:e2:c0:12:1d:2f:01:63:eb:
53:05:cb:aa:db:66:b0:fb:16:9b:e7:e7:be:c3:66:
da:5c:c9
Exponent: 65537 (0x10001)
X509v3 extensions:
X509v3 Basic Constraints:
CA:FALSE
Netscape Comment:
OpenSSL Generated Certificate
X509v3 Subject Key Identifier:
9C:D2:71:50:35:F7:10:43:DD:E8:CE:75:29:A3:53:5D:11:A7:A8:3B
X509v3 Authority Key Identifier:
keyid:23:6C:2D:3D:3E:29:5D:78:B8:6C:3E:AA:E2:BB:2E:1E:6C:87:F2:53
Signature Algorithm: sha1WithRSAEncryption
c3:b0:a4:82:f5:64:e5:4e:a0:e5:74:5e:c4:3d:d0:9c:f7:4e:
f7:8d:af:8b:2e:80:59:63:b5:6e:2f:10:5b:66:d6:29:2a:ca:
e2:01:20:68:e1:2b:ff:d6:e1:e1:f2:a6:e0:cc:f5:8f:9f:5c:
72:b8:fa:81:76:7d:5c:ee:60:29:e5:d7:de:8f:4a:9c:55:3e:
e5:27:1c:76:bc:35:e7:16:80:6f:32:77:fd:57:ae:51:87:fb:
be:c2:a1:cc:76:9a:61:01:c9:ff:86:00:ff:d1:96:cd:ff:2c:
0f:48:9e:ae:83:d8:df:d4:78:1d:4c:37:87:f5:58:5d:26:c6:
ca:16:cd:fa:16:1d:6f:42:ae:57:4a:99:45:52:80:5c:1c:76:
42:a8:f8:f3:15:9c:1b:3e:36:01:e0:09:5e:d8:19:b1:ed:a0:
ef:3b:c7:09:a7:aa:5f:b6:2d:c1:20:84:9b:2c:87:1a:2b:35:
de:9e:9c:0c:d9:0c:5e:cf:51:38:d6:d6:80:ae:91:15:b5:c6:
22:df:7e:17:9f:c3:eb:bf:fd:d5:3b:4b:ea:66:00:72:a0:b5:
b7:65:a8:5a:d9:a8:f1:67:c1:41:d8:79:dd:cc:2f:78:7a:9e:
5e:0a:9d:77:0e:59:52:49:d2:10:94:1c:eb:f4:3c:04:0e:3c:
1c:1a:75:a6:e8:23:d5:f0:73:14:90:b1:71:5a:32:57:8d:34:
d7:6a:61:dc:73:1a:da:1d:1f:56:a7:2e:ef:0d:a4:f5:fb:94:
0b:f4:cf:1d:d2:10:0f:07:cd:ba:9d:78:87:e8:04:63:6a:e5:
7a:6b:20:bd:bd:29:c2:39:5b:fc:86:84:77:0b:e3:f8:2c:37:
ac:af:1b:ed:4f:b9:d6:08:a3:ac:2f:31:07:4a:f8:8e:cf:11:
dd:92:1c:c9:aa:c7:a5:b7:62:a4:77:6e:58:20:78:17:cb:5e:
ef:6d:41:eb:b6:c2:1f:7f:a1:de:fa:bb:71:92:20:de:b1:5e:
34:84:6c:ed:6c:e1:43:86:13:f0:3f:d7:2d:c5:ba:c0:de:37:
8d:48:bc:df:c7:4f:b3:a6:a5:e5:c2:db:f1:ef:db:0c:25:69:
e6:58:8d:ba:72:bd:5e:3f:cf:81:36:b6:ab:ee:a8:67:8f:ee:
bb:fe:6f:c9:1f:8a:1f:ef:e9:c9:7a:52:40:ad:a0:3f:23:45:
7a:63:95:98:3d:12:b8:e2:f3:0b:88:10:38:04:68:b0:f1:a7:
8b:d0:61:d7:0f:2f:cf:17:51:21:eb:76:69:2d:19:e8:01:c5:
33:fd:61:cd:46:64:87:89:43:e9:31:d0:be:88:a0:a2:82:0c:
7f:9f:66:41:3a:9a:5a:6a
-----BEGIN CERTIFICATE-----
MIIGJTCCBA2gAwIBAgIBAjANBgkqhkiG9w0BAQUFADCBsjELMAkGA1UEBhMCRlIx
DzANBgNVBAgMBkFsc2FjZTETMBEGA1UEBwwKU3RyYXNib3VyZzEYMBYGA1UECgwP
d3d3LmZyZWVsYW4ub3JnMRAwDgYDVQQLDAdmcmVlbGFuMS0wKwYDVQQDDCRGcmVl
bGFuIFNhbXBsZSBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkxIjAgBgkqhkiG9w0BCQEW
E2NvbnRhY3RAZnJlZWxhbi5vcmcwHhcNMTIwNDI3MTA1NDQwWhcNMjIwNDI1MTA1
NDQwWjB8MQswCQYDVQQGEwJGUjEPMA0GA1UECAwGQWxzYWNlMRgwFgYDVQQKDA93
d3cuZnJlZWxhbi5vcmcxEDAOBgNVBAsMB2ZyZWVsYW4xDDAKBgNVBAMMA2JvYjEi
MCAGCSqGSIb3DQEJARYTY29udGFjdEBmcmVlbGFuLm9yZzCCAiIwDQYJKoZIhvcN
AQEBBQADggIPADCCAgoCggIBAMI/QxRK1N1DWrlDXi27iaEXGPeuR0t69NTco+G3
hToQIOu8URjYiyXGBJVPgOkFXAD0fCN70a2BWPGdQ8M37n9hA7X/KbsQGvuod5eb
3kx9P8r/U4w3MLaI8g6+fNySdslfIpYZC5HqnBiWn0PRnSKe2cMSn4AFhR9wu4dd
Y8FaUT1+aT12bbBW6ts/rvDNDBlIsfLVLuf6Et0VvIzcCcImnNwiUo7IHMHNAb0a
JMW+TxgI895ZHI9jpmMdT1qSaHpJlCZU0YO+FuRej3MvgTo6MID9V6l/G3vlD2wB
aPcfRUn+BjwIV2QnpQtVGLcwvghFcIvNQ+r8gB4DXMNSjalVU1X0YS6LUGRqMKdv
vbiAEu5mmNh4X6D1ZWpt9QnMYk1VVoAhdUhzTbnj+R2WySxdeU08xXqehP+dx5SH
Cj5pgdJ/wF9nnAaMM1yjn1LnBMfTge+ydx7QVx8fkKVpwA1DxfamfvfqRXxgtmgf
ZFncYDPCE4y3BsIqzcwrAt6i6XAM23n+zutewAbrdkMJ4CrH7h5qr2BJczyoU4zh
OSznnv79RCDwhZof68dAyFuQQ+ahagBQS3NzcsU5dxMePJW+qTdq0U40PTTsh/ge
bOfci3+O0Tx4wuIJk9fAaK5wgbnw0PcmpOLAEh0vAWPrUwXLqttmsPsWm+fnvsNm
2lzJAgMBAAGjezB5MAkGA1UdEwQCMAAwLAYJYIZIAYb4QgENBB8WHU9wZW5TU0wg
R2VuZXJhdGVkIENlcnRpZmljYXRlMB0GA1UdDgQWBBSc0nFQNfcQQ93oznUpo1Nd
EaeoOzAfBgNVHSMEGDAWgBQjbC09PildeLhsPqriuy4ebIfyUzANBgkqhkiG9w0B
AQUFAAOCAgEAw7CkgvVk5U6g5XRexD3QnPdO942viy6AWWO1bi8QW2bWKSrK4gEg
aOEr/9bh4fKm4Mz1j59ccrj6gXZ9XO5gKeXX3o9KnFU+5Sccdrw15xaAbzJ3/Veu
UYf7vsKhzHaaYQHJ/4YA/9GWzf8sD0ieroPY39R4HUw3h/VYXSbGyhbN+hYdb0Ku
V0qZRVKAXBx2Qqj48xWcGz42AeAJXtgZse2g7zvHCaeqX7YtwSCEmyyHGis13p6c
DNkMXs9RONbWgK6RFbXGIt9+F5/D67/91TtL6mYAcqC1t2WoWtmo8WfBQdh53cwv
eHqeXgqddw5ZUknSEJQc6/Q8BA48HBp1pugj1fBzFJCxcVoyV40012ph3HMa2h0f
Vqcu7w2k9fuUC/TPHdIQDwfNup14h+gEY2rlemsgvb0pwjlb/IaEdwvj+Cw3rK8b
7U+51gijrC8xB0r4js8R3ZIcyarHpbdipHduWCB4F8te721B67bCH3+h3vq7cZIg
3rFeNIRs7WzhQ4YT8D/XLcW6wN43jUi838dPs6al5cLb8e/bDCVp5liNunK9Xj/P
gTa2q+6oZ4/uu/5vyR+KH+/pyXpSQK2gPyNFemOVmD0SuOLzC4gQOARosPGni9Bh
1w8vzxdRIet2aS0Z6AHFM/1hzUZkh4lD6THQvoigooIMf59mQTqaWmo=
-----END CERTIFICATE-----
Certificate:
Data:
Version: 3 (0x2)
Serial Number: 3 (0x3)
Signature Algorithm: sha1WithRSAEncryption
Issuer: C=FR, ST=Alsace, L=Strasbourg, O=www.freelan.org, OU=freelan, CN=Freelan Sample Certificate Authority/emailAddress=contact@freelan.org
Validity
Not Before: Apr 27 10:54:53 2012 GMT
Not After : Apr 25 10:54:53 2022 GMT
Subject: C=FR, ST=Alsace, O=www.freelan.org, OU=freelan, CN=carol/emailAddress=contact@freelan.org
Subject Public Key Info:
Public Key Algorithm: rsaEncryption
Public-Key: (4096 bit)
Modulus:
00:d7:c0:a7:c6:e9:48:c4:53:40:b3:76:d9:2f:37:
28:3d:a3:c4:42:d0:76:cd:08:9b:50:e3:1c:51:e5:
14:72:fa:2b:a0:b1:06:23:f3:c1:ad:92:7c:79:fe:
15:54:d1:e5:67:62:da:ed:81:aa:7e:e2:b1:50:a9:
fb:d8:29:09:da:84:4d:3c:f4:6e:13:ab:0b:d5:ee:
80:63:32:7d:57:af:83:3c:1c:27:ed:ec:67:d6:fd:
1c:13:2d:40:bf:d1:da:bf:7a:b6:67:7e:b0:75:3b:
6d:61:9d:cc:6c:1a:97:f1:56:de:9f:80:d3:16:60:
bb:8a:6f:46:9b:be:34:75:c3:4c:d2:f1:c8:f3:3e:
98:28:30:e4:cb:2d:25:61:62:48:be:2e:dc:ed:90:
93:ae:74:b7:fa:49:43:65:20:ac:8e:fe:52:6c:00:
8e:51:3e:b6:9a:c6:4f:44:1c:7b:84:17:bd:5c:f6:
36:e9:4c:91:89:6f:4e:ad:ac:10:41:c5:c5:65:8a:
20:c8:f7:27:a3:ea:ac:5b:74:09:99:27:88:60:c7:
44:69:18:0c:32:1a:77:f2:47:53:46:e3:12:c5:69:
95:45:15:9a:14:60:76:20:a7:b5:8c:51:bf:5a:57:
19:5a:c7:a8:bc:0b:c4:30:ca:0b:e6:d0:f8:c4:a8:
84:d9:24:a2:92:f6:84:f2:13:ea:a4:93:97:fe:ed:
77:d8:2f:75:7a:2c:39:88:3c:44:56:0a:ef:12:57:
d5:9e:8f:35:8e:7f:84:e7:1a:d1:19:8d:23:db:b5:
ce:c5:7f:e1:88:6d:04:d6:01:de:f0:72:3e:51:95:
1d:4f:30:b6:32:0a:0f:84:b5:00:34:e4:bf:80:71:
10:62:14:c1:32:5a:a9:a6:de:c2:58:e8:52:eb:66:
5a:b8:5e:c2:06:7c:a6:6a:33:f2:1e:8a:41:07:53:
bb:6b:41:92:59:85:79:04:a9:df:56:4c:e0:62:1e:
98:87:95:07:b1:10:49:34:9c:90:4c:0b:83:25:27:
9f:01:27:fb:d0:c4:6e:50:cc:f5:02:47:2c:45:9a:
31:e5:ce:7d:86:8f:db:fd:83:ea:a6:00:49:71:14:
44:a1:8e:9d:ba:a4:a4:cf:9d:15:20:2d:67:76:42:
81:63:a2:76:4e:4b:22:b5:de:3d:d8:f8:e0:43:7f:
a3:10:f0:73:fb:6e:e1:6a:37:99:dc:87:a3:05:4c:
29:f5:63:14:9b:eb:a3:3a:9b:2b:b4:51:f5:05:03:
de:41:e5:cb:1a:8e:76:eb:47:93:53:90:71:c5:8f:
86:5f:9e:0b:4d:33:9c:3c:88:8a:90:9f:90:a6:35:
90:81:f1
Exponent: 65537 (0x10001)
X509v3 extensions:
X509v3 Basic Constraints:
CA:FALSE
Netscape Comment:
OpenSSL Generated Certificate
X509v3 Subject Key Identifier:
B5:5D:0D:4F:55:F6:75:1A:23:B3:F5:8C:BC:6B:5A:B6:96:6C:AE:E0
X509v3 Authority Key Identifier:
keyid:23:6C:2D:3D:3E:29:5D:78:B8:6C:3E:AA:E2:BB:2E:1E:6C:87:F2:53
Signature Algorithm: sha1WithRSAEncryption
bf:3f:e7:16:a2:ba:b1:cf:d6:79:f3:84:ed:a5:10:3e:60:42:
0e:d5:1a:c6:e9:b1:39:86:5a:2e:dd:ae:b6:b7:16:33:33:17:
3e:83:f7:a1:f7:b4:1b:09:74:8f:9b:0d:8e:4c:c7:a1:d6:66:
6c:02:3a:b5:f2:72:aa:c9:e4:b3:c6:9d:6e:c0:48:dc:39:21:
30:18:a0:6f:cb:09:be:de:0f:63:83:04:32:73:a7:bc:42:34:
b7:a1:dc:21:21:08:86:65:bc:2e:c5:78:ae:fb:fe:ab:fb:8b:
85:bf:61:e0:e2:aa:52:5f:1e:0d:19:22:13:94:7a:b4:bd:5c:
30:8d:43:22:b4:e9:13:62:7e:3e:f5:e2:7a:2a:3b:da:1f:57:
4a:5d:b8:6c:4c:f5:6e:34:b9:bd:b4:1f:dc:88:d0:28:20:a2:
0c:31:e8:7f:3a:23:b8:60:48:c8:4e:e1:02:62:ae:00:fb:d0:
a5:76:cb:ea:f3:d7:75:0d:9e:56:48:c1:2e:44:c7:0c:9f:03:
b3:ac:96:c5:a2:a0:06:9e:2b:c3:eb:b5:04:15:33:79:4a:9e:
28:94:1d:28:50:98:e3:eb:b5:74:69:7f:69:bc:61:72:d1:8a:
cc:fb:89:be:51:34:81:11:7b:fa:8a:cf:e7:bf:81:91:34:1a:
11:63:92:41:eb:62:7d:7a:2a:5a:2b:a3:85:36:5b:39:08:40:
6b:0d:bc:b7:ed:36:42:60:45:ee:0c:27:f1:41:38:9e:db:99:
8f:0f:ff:1b:ea:02:98:9f:19:21:33:ca:a2:47:89:cb:1d:a9:
4c:94:b6:3d:b2:e2:bf:1d:f7:12:8d:01:ff:77:d6:72:65:70:
ca:80:8e:a2:2d:78:0c:b2:9d:84:3a:50:f9:e8:8e:85:03:58:
eb:0a:d3:5b:d3:55:d0:bd:7d:de:c8:5b:80:ea:0e:53:d6:35:
86:60:10:ed:bd:06:f4:59:15:64:75:4c:bd:2f:fb:8a:fa:c1:
d0:c2:d9:68:09:2b:9a:91:c4:00:b1:65:7d:6d:a8:c2:42:d1:
d7:f1:71:ae:db:96:33:e7:a9:29:27:f3:89:8d:c8:ac:87:14:
fa:a5:cf:ec:b6:1b:a6:03:93:d7:ef:7f:49:b0:d5:22:fe:9e:
5a:1b:e1:ff:e9:e3:71:fa:e9:09:3f:b4:1a:33:ae:3a:60:27:
d2:e6:2f:12:f4:32:54:be:29:be:fc:14:a5:2a:2d:99:88:e0:
9d:d0:c6:07:e1:76:fb:96:60:0e:4c:d9:93:bd:26:29:2a:8f:
49:d9:f6:7d:7a:bc:34:31:84:81:4f:28:e1:e8:5e:cf:45:b1:
c1:8a:2b:e0:52:72:5f:19
-----BEGIN CERTIFICATE-----
MIIGJzCCBA+gAwIBAgIBAzANBgkqhkiG9w0BAQUFADCBsjELMAkGA1UEBhMCRlIx
DzANBgNVBAgMBkFsc2FjZTETMBEGA1UEBwwKU3RyYXNib3VyZzEYMBYGA1UECgwP
d3d3LmZyZWVsYW4ub3JnMRAwDgYDVQQLDAdmcmVlbGFuMS0wKwYDVQQDDCRGcmVl
bGFuIFNhbXBsZSBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkxIjAgBgkqhkiG9w0BCQEW
E2NvbnRhY3RAZnJlZWxhbi5vcmcwHhcNMTIwNDI3MTA1NDUzWhcNMjIwNDI1MTA1
NDUzWjB+MQswCQYDVQQGEwJGUjEPMA0GA1UECAwGQWxzYWNlMRgwFgYDVQQKDA93
d3cuZnJlZWxhbi5vcmcxEDAOBgNVBAsMB2ZyZWVsYW4xDjAMBgNVBAMMBWNhcm9s
MSIwIAYJKoZIhvcNAQkBFhNjb250YWN0QGZyZWVsYW4ub3JnMIICIjANBgkqhkiG
9w0BAQEFAAOCAg8AMIICCgKCAgEA18CnxulIxFNAs3bZLzcoPaPEQtB2zQibUOMc
UeUUcvoroLEGI/PBrZJ8ef4VVNHlZ2La7YGqfuKxUKn72CkJ2oRNPPRuE6sL1e6A
YzJ9V6+DPBwn7exn1v0cEy1Av9Hav3q2Z36wdTttYZ3MbBqX8Vben4DTFmC7im9G
m740dcNM0vHI8z6YKDDkyy0lYWJIvi7c7ZCTrnS3+klDZSCsjv5SbACOUT62msZP
RBx7hBe9XPY26UyRiW9OrawQQcXFZYogyPcno+qsW3QJmSeIYMdEaRgMMhp38kdT
RuMSxWmVRRWaFGB2IKe1jFG/WlcZWseovAvEMMoL5tD4xKiE2SSikvaE8hPqpJOX
/u132C91eiw5iDxEVgrvElfVno81jn+E5xrRGY0j27XOxX/hiG0E1gHe8HI+UZUd
TzC2MgoPhLUANOS/gHEQYhTBMlqppt7CWOhS62ZauF7CBnymajPyHopBB1O7a0GS
WYV5BKnfVkzgYh6Yh5UHsRBJNJyQTAuDJSefASf70MRuUMz1AkcsRZox5c59ho/b
/YPqpgBJcRREoY6duqSkz50VIC1ndkKBY6J2Tksitd492PjgQ3+jEPBz+27hajeZ
3IejBUwp9WMUm+ujOpsrtFH1BQPeQeXLGo5260eTU5BxxY+GX54LTTOcPIiKkJ+Q
pjWQgfECAwEAAaN7MHkwCQYDVR0TBAIwADAsBglghkgBhvhCAQ0EHxYdT3BlblNT
TCBHZW5lcmF0ZWQgQ2VydGlmaWNhdGUwHQYDVR0OBBYEFLVdDU9V9nUaI7P1jLxr
WraWbK7gMB8GA1UdIwQYMBaAFCNsLT0+KV14uGw+quK7Lh5sh/JTMA0GCSqGSIb3
DQEBBQUAA4ICAQC/P+cWorqxz9Z584TtpRA+YEIO1RrG6bE5hlou3a62txYzMxc+
g/eh97QbCXSPmw2OTMeh1mZsAjq18nKqyeSzxp1uwEjcOSEwGKBvywm+3g9jgwQy
c6e8QjS3odwhIQiGZbwuxXiu+/6r+4uFv2Hg4qpSXx4NGSITlHq0vVwwjUMitOkT
Yn4+9eJ6KjvaH1dKXbhsTPVuNLm9tB/ciNAoIKIMMeh/OiO4YEjITuECYq4A+9Cl
dsvq89d1DZ5WSMEuRMcMnwOzrJbFoqAGnivD67UEFTN5Sp4olB0oUJjj67V0aX9p
vGFy0YrM+4m+UTSBEXv6is/nv4GRNBoRY5JB62J9eipaK6OFNls5CEBrDby37TZC
YEXuDCfxQTie25mPD/8b6gKYnxkhM8qiR4nLHalMlLY9suK/HfcSjQH/d9ZyZXDK
gI6iLXgMsp2EOlD56I6FA1jrCtNb01XQvX3eyFuA6g5T1jWGYBDtvQb0WRVkdUy9
L/uK+sHQwtloCSuakcQAsWV9bajCQtHX8XGu25Yz56kpJ/OJjcishxT6pc/sthum
A5PX739JsNUi/p5aG+H/6eNx+ukJP7QaM646YCfS5i8S9DJUvim+/BSlKi2ZiOCd
0MYH4Xb7lmAOTNmTvSYpKo9J2fZ9erw0MYSBTyjh6F7PRbHBiivgUnJfGQ==
-----END CERTIFICATE-----

35
tests/test_gpg.py Normal file
View File

@ -0,0 +1,35 @@
import os
import unittest
import json
import jc.parsers.gpg

THIS_DIR = os.path.dirname(os.path.abspath(__file__))


class MyTests(unittest.TestCase):
    def setUp(self):
        # input
        with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/generic/gpg.out'), 'r', encoding='utf-8') as f:
            self.gpg = f.read()

        # output
        with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/generic/gpg.json'), 'r', encoding='utf-8') as f:
            self.gpg_json = json.loads(f.read())

    def test_gpg_nodata(self):
        """
        Test 'gpg' with no data
        """
        self.assertEqual(jc.parsers.gpg.parse('', quiet=True), [])

    def test_gpg(self):
        """
        Test 'gpg --with-colons --list-keys --with-fingerprint --with-fingerprint wk@gnupg.org'
        """
        self.assertEqual(jc.parsers.gpg.parse(self.gpg, quiet=True), self.gpg_json)


if __name__ == '__main__':
    unittest.main()
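
The new test module drives the parser's Python API directly. For reference, a minimal standalone sketch of the same call, assuming gpg's colon-delimited output has been saved to a local file (the file name here is hypothetical; the `parse(data, quiet=True)` signature is the one exercised by the tests above):

```python
import jc.parsers.gpg

# Hypothetical example: parse previously captured output of
# `gpg --with-colons --list-keys` into a list of dictionaries.
with open('gpg-output.txt', 'r', encoding='utf-8') as f:
    data = f.read()

keys = jc.parsers.gpg.parse(data, quiet=True)  # quiet=True suppresses parser warnings
for entry in keys:
    print(entry)
```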

71
tests/test_x509_cert.py Normal file
View File

@ -0,0 +1,71 @@
import os
import unittest
import json
import jc.parsers.x509_cert

THIS_DIR = os.path.dirname(os.path.abspath(__file__))


class MyTests(unittest.TestCase):
    def setUp(self):
        # input
        with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/generic/x509-ca-cert.der'), 'rb') as f:
            self.x509_ca_cert = f.read()

        with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/generic/x509-cert-and-key.pem'), 'r', encoding='utf-8') as f:
            self.x509_cert_and_key_pem = f.read()

        with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/generic/x509-letsencrypt.pem'), 'r', encoding='utf-8') as f:
            self.x509_letsencrypt = f.read()

        with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/generic/x509-multi-cert.pem'), 'r', encoding='utf-8') as f:
            self.x509_multi_cert = f.read()

        # output
        with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/generic/x509-ca-cert.json'), 'r', encoding='utf-8') as f:
            self.x509_ca_cert_json = json.loads(f.read())

        with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/generic/x509-cert-and-key.json'), 'r', encoding='utf-8') as f:
            self.x509_cert_and_key_pem_json = json.loads(f.read())

        with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/generic/x509-letsencrypt.json'), 'r', encoding='utf-8') as f:
            self.x509_letsencrypt_json = json.loads(f.read())

        with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/generic/x509-multi-cert.json'), 'r', encoding='utf-8') as f:
            self.x509_multi_cert_json = json.loads(f.read())

    def test_x509_cert_nodata(self):
        """
        Test 'x509_cert' with no data
        """
        self.assertEqual(jc.parsers.x509_cert.parse('', quiet=True), [])

    def test_x509_ca_cert(self):
        """
        Test 'cat x509-ca-cert.der' (CA cert in DER format)
        """
        self.assertEqual(jc.parsers.x509_cert.parse(self.x509_ca_cert, quiet=True), self.x509_ca_cert_json)

    def test_x509_cert_and_key(self):
        """
        Test 'cat x509-cert-and-key.pem' (combo cert and key file in PEM format)
        """
        self.assertEqual(jc.parsers.x509_cert.parse(self.x509_cert_and_key_pem, quiet=True), self.x509_cert_and_key_pem_json)

    def test_x509_letsencrypt(self):
        """
        Test 'cat x509-letsencrypt.pem' (letsencrypt cert in PEM format)
        """
        self.assertEqual(jc.parsers.x509_cert.parse(self.x509_letsencrypt, quiet=True), self.x509_letsencrypt_json)

    def test_x509_multi_cert(self):
        """
        Test 'cat x509-multi-cert.pem' (PEM file with multiple certificates)
        """
        self.assertEqual(jc.parsers.x509_cert.parse(self.x509_multi_cert, quiet=True), self.x509_multi_cert_json)


if __name__ == '__main__':
    unittest.main()
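
As the tests show, the x509-cert parser accepts either PEM text or raw DER bytes. A minimal standalone sketch under the same assumptions (the certificate file name is hypothetical; the `parse(..., quiet=True)` call mirrors the tests above):

```python
import json
import jc.parsers.x509_cert

# Hypothetical example: parse a PEM-encoded certificate file into JSON.
# A DER-encoded file works the same way if read in binary mode ('rb'),
# as the x509-ca-cert.der test case above demonstrates.
with open('certificate.pem', 'r', encoding='utf-8') as f:
    cert_text = f.read()

parsed = jc.parsers.x509_cert.parse(cert_text, quiet=True)
print(json.dumps(parsed, indent=2))
```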

View File

@ -19,3 +19,7 @@
wait
echo
echo "All documentation updated"
echo
echo "Building shell completion scripts"
./build-completions.py && echo "++++ shell completion build successful" || echo "---- shell completion build failed"