diff --git a/CHANGELOG b/CHANGELOG
index 4d7a93d7..6d65e74b 100644
--- a/CHANGELOG
+++ b/CHANGELOG
@@ -1,5 +1,10 @@
jc changelog
+20220705 v1.20.2
+- Add `gpg --with-colons` parser tested on linux
+- Add DER and PEM encoded X.509 Certificate parser
+- Add Bash and Zsh completion scripts to DEB and RPM packages
+
20220615 v1.20.1
- Add `postconf -M` parser tested on linux
- Update `asciitable` and `asciitable-m` parsers to preserve case in key
diff --git a/EXAMPLES.md b/EXAMPLES.md
index b50c64b1..ec4b5ef7 100644
--- a/EXAMPLES.md
+++ b/EXAMPLES.md
@@ -1121,6 +1121,37 @@ git log --stat | jc --git-log -p or: jc -p git log --stat
}
]
```
+### gpg --with-colons
+```bash
+gpg --with-colons --show-keys file.gpg | jc --gpg -p # or: jc -p gpg --with-colons --show-keys file.gpg
+```
+```json
+[
+ {
+ "type": "pub",
+ "validity": "f",
+ "key_length": "1024",
+ "pub_key_alg": "17",
+ "key_id": "6C7EE1B8621CC013",
+ "creation_date": "899817715",
+ "expiration_date": "1055898235",
+ "certsn_uidhash_trustinfo": null,
+ "owner_trust": "m",
+ "user_id": null,
+ "signature_class": null,
+ "key_capabilities": "scESC",
+ "cert_fingerprint_other": null,
+ "flag": null,
+ "token_sn": null,
+ "hash_alg": null,
+ "curve_name": null,
+ "compliance_flags": null,
+ "last_update_date": null,
+ "origin": null,
+ "comment": null
+ }
+]
+```
### /etc/group file
```bash
cat /etc/group | jc --group -p
@@ -4091,6 +4122,85 @@ who -a | jc --who -p # or: jc -p who -a
}
]
```
+### X.509 PEM and DER certificate files
+```bash
+cat entrust.pem | jc --x509-cert -p
+```
+```json
+[
+ {
+ "tbs_certificate": {
+ "version": "v3",
+ "serial_number": "a6:8b:79:29:00:00:00:00:50:d0:91:f9",
+ "signature": {
+ "algorithm": "sha384_ecdsa",
+ "parameters": null
+ },
+ "issuer": {
+ "country_name": "US",
+ "organization_name": "Entrust, Inc.",
+ "organizational_unit_name": [
+ "See www.entrust.net/legal-terms",
+ "(c) 2012 Entrust, Inc. - for authorized use only"
+ ],
+ "common_name": "Entrust Root Certification Authority - EC1"
+ },
+ "validity": {
+ "not_before": 1355844336,
+ "not_after": 2144764536,
+ "not_before_iso": "2012-12-18T15:25:36+00:00",
+ "not_after_iso": "2037-12-18T15:55:36+00:00"
+ },
+ "subject": {
+ "country_name": "US",
+ "organization_name": "Entrust, Inc.",
+ "organizational_unit_name": [
+ "See www.entrust.net/legal-terms",
+ "(c) 2012 Entrust, Inc. - for authorized use only"
+ ],
+ "common_name": "Entrust Root Certification Authority - EC1"
+ },
+ "subject_public_key_info": {
+ "algorithm": {
+ "algorithm": "ec",
+ "parameters": "secp384r1"
+ },
+ "public_key": "04:84:13:c9:d0:ba:6d:41:7b:e2:6c:d0:eb:55:5f:66:02:1a:24:f4:5b:89:69:47:e3:b8:c2:7d:f1:f2:02:c5:9f:a0:f6:5b:d5:8b:06:19:86:4f:53:10:6d:07:24:27:a1:a0:f8:d5:47:19:61:4c:7d:ca:93:27:ea:74:0c:ef:6f:96:09:fe:63:ec:70:5d:36:ad:67:77:ae:c9:9d:7c:55:44:3a:a2:63:51:1f:f5:e3:62:d4:a9:47:07:3e:cc:20"
+ },
+ "issuer_unique_id": null,
+ "subject_unique_id": null,
+ "extensions": [
+ {
+ "extn_id": "key_usage",
+ "critical": true,
+ "extn_value": [
+ "key_cert_sign",
+ "crl_sign"
+ ]
+ },
+ {
+ "extn_id": "basic_constraints",
+ "critical": true,
+ "extn_value": {
+ "ca": true,
+ "path_len_constraint": null
+ }
+ },
+ {
+ "extn_id": "key_identifier",
+ "critical": false,
+ "extn_value": "b7:63:e7:1a:dd:8d:e9:08:a6:55:83:a4:e0:6a:50:41:65:11:42:49"
+ }
+ ]
+ },
+ "signature_algorithm": {
+ "algorithm": "sha384_ecdsa",
+ "parameters": null
+ },
+ "signature_value": "30:64:02:30:61:79:d8:e5:42:47:df:1c:ae:53:99:17:b6:6f:1c:7d:e1:bf:11:94:d1:03:88:75:e4:8d:89:a4:8a:77:46:de:6d:61:ef:02:f5:fb:b5:df:cc:fe:4e:ff:fe:a9:e6:a7:02:30:5b:99:d7:85:37:06:b5:7b:08:fd:eb:27:8b:4a:94:f9:e1:fa:a7:8e:26:08:e8:7c:92:68:6d:73:d8:6f:26:ac:21:02:b8:99:b7:26:41:5b:25:60:ae:d0:48:1a:ee:06"
+ }
+]
+```
### XML files
```bash
cat cd_catalog.xml
diff --git a/README.md b/README.md
index d9b8cb2f..00f695ea 100644
--- a/README.md
+++ b/README.md
@@ -175,6 +175,7 @@ option.
| ` --fstab` | `/etc/fstab` file parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/fstab) |
| ` --git-log` | `git log` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/git_log) |
| ` --git-log-s` | `git log` command streaming parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/git_log_s) |
+| ` --gpg` | `gpg --with-colons` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/gpg) |
| ` --group` | `/etc/group` file parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/group) |
| ` --gshadow` | `/etc/gshadow` file parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/gshadow) |
| ` --hash` | `hash` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/hash) |
@@ -247,6 +248,7 @@ option.
| ` --w` | `w` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/w) |
| ` --wc` | `wc` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/wc) |
| ` --who` | `who` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/who) |
+| ` --x509-cert` | X.509 PEM and DER certificate file parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/x509_cert) |
| ` --xml` | XML file parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/xml) |
| ` --xrandr` | `xrandr` command parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/xrandr) |
| ` --yaml` | YAML file parser | [details](https://kellyjonbrazil.github.io/jc/docs/parsers/yaml) |
diff --git a/build-completions.py b/build-completions.py
new file mode 100755
index 00000000..ee238d19
--- /dev/null
+++ b/build-completions.py
@@ -0,0 +1,9 @@
+#!/usr/bin/env python3
+# build Bash and Zsh completion scripts and add them to the completions folder
+from jc.shell_completions import bash_completion, zsh_completion
+
+with open('completions/jc_bash_completion.sh', 'w') as f:
+ print(bash_completion(), file=f)
+
+with open('completions/jc_zsh_completion.sh', 'w') as f:
+ print(zsh_completion(), file=f)
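+
+# The output paths above are relative, so this script is presumably meant to
+# be run from the repository root:
+#
+#   ./build-completions.py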
diff --git a/completions/jc_bash_completion.sh b/completions/jc_bash_completion.sh
new file mode 100644
index 00000000..7addcd1f
--- /dev/null
+++ b/completions/jc_bash_completion.sh
@@ -0,0 +1,84 @@
+_jc()
+{
+ local cur prev words cword jc_commands jc_parsers jc_options \
+ jc_about_options jc_about_mod_options jc_help_options jc_special_options
+
+ jc_commands=(acpi airport arp blkid chage cksum crontab date df dig dmidecode dpkg du env file finger free git gpg hciconfig id ifconfig iostat iptables iw jobs last lastb ls lsblk lsmod lsof lsusb md5 md5sum mount mpstat netstat nmcli ntpq pidstat ping ping6 pip pip3 postconf printenv ps route rpm rsync sfdisk sha1sum sha224sum sha256sum sha384sum sha512sum shasum ss stat sum sysctl systemctl systeminfo timedatectl top tracepath tracepath6 traceroute traceroute6 ufw uname update-alternatives upower uptime vdir vmstat w wc who xrandr zipinfo)
+ jc_parsers=(--acpi --airport --airport-s --arp --asciitable --asciitable-m --blkid --chage --cksum --crontab --crontab-u --csv --csv-s --date --df --dig --dir --dmidecode --dpkg-l --du --env --file --finger --free --fstab --git-log --git-log-s --gpg --group --gshadow --hash --hashsum --hciconfig --history --hosts --id --ifconfig --ini --iostat --iostat-s --iptables --iw-scan --jar-manifest --jobs --kv --last --ls --ls-s --lsblk --lsmod --lsof --lsusb --mount --mpstat --mpstat-s --netstat --nmcli --ntpq --passwd --pidstat --pidstat-s --ping --ping-s --pip-list --pip-show --postconf --ps --route --rpm-qi --rsync --rsync-s --sfdisk --shadow --ss --stat --stat-s --sysctl --systemctl --systemctl-lj --systemctl-ls --systemctl-luf --systeminfo --time --timedatectl --top --top-s --tracepath --traceroute --ufw --ufw-appinfo --uname --update-alt-gs --update-alt-q --upower --uptime --vmstat --vmstat-s --w --wc --who --x509-cert --xml --xrandr --yaml --zipinfo)
+ jc_options=(--force-color -C --debug -d --monochrome -m --pretty -p --quiet -q --raw -r --unbuffer -u --yaml-out -y)
+ jc_about_options=(--about -a)
+ jc_about_mod_options=(--pretty -p --yaml-out -y --monochrome -m --force-color -C)
+ jc_help_options=(--help -h)
+ jc_special_options=(--version -v --bash-comp -B --zsh-comp -Z)
+
+ COMPREPLY=()
+ _get_comp_words_by_ref cur prev words cword
+
+ # if jc_about_options are found anywhere in the line, then only complete from jc_about_mod_options
+ for i in "${words[@]::${#words[@]}-1}"; do
+ if [[ " ${jc_about_options[*]} " =~ " ${i} " ]]; then
+ COMPREPLY=( $( compgen -W "${jc_about_mod_options[*]}" \
+ -- "${cur}" ) )
+ return 0
+ fi
+ done
+
+ # if jc_help_options and a parser are found anywhere in the line, then no more completions
+ if
+ (
+ for i in "${words[@]::${#words[@]}-1}"; do
+ if [[ " ${jc_help_options[*]} " =~ " ${i} " ]]; then
+ return 0
+ fi
+ done
+ return 1
+ ) && (
+ for i in "${words[@]::${#words[@]}-1}"; do
+ if [[ " ${jc_parsers[*]} " =~ " ${i} " ]]; then
+ return 0
+ fi
+ done
+ return 1
+ ); then
+ return 0
+ fi
+
+ # if jc_help_options are found anywhere in the line, then only complete with parsers
+ for i in "${words[@]::${#words[@]}-1}"; do
+ if [[ " ${jc_help_options[*]} " =~ " ${i} " ]]; then
+ COMPREPLY=( $( compgen -W "${jc_parsers[*]}" \
+ -- "${cur}" ) )
+ return 0
+ fi
+ done
+
+ # if special options are found anywhere in the line, then no more completions
+ for i in "${words[@]::${#words[@]}-1}"; do
+ if [[ " ${jc_special_options[*]} " =~ " ${i} " ]]; then
+ return 0
+ fi
+ done
+
+ # if magic command is found anywhere in the line, use called command's autocompletion
+ for i in "${words[@]::${#words[@]}-1}"; do
+ if [[ " ${jc_commands[*]} " =~ " ${i} " ]]; then
+ _command
+ return 0
+ fi
+ done
+
+ # if a parser arg is found anywhere in the line, only show options and help options
+ for i in "${words[@]::${#words[@]}-1}"; do
+ if [[ " ${jc_parsers[*]} " =~ " ${i} " ]]; then
+ COMPREPLY=( $( compgen -W "${jc_options[*]} ${jc_help_options[*]}" \
+ -- "${cur}" ) )
+ return 0
+ fi
+ done
+
+ # default completion
+ COMPREPLY=( $( compgen -W "${jc_options[*]} ${jc_about_options[*]} ${jc_help_options[*]} ${jc_special_options[*]} ${jc_parsers[*]} ${jc_commands[*]}" \
+ -- "${cur}" ) )
+} &&
+complete -F _jc jc
+
diff --git a/completions/jc_zsh_completion.sh b/completions/jc_zsh_completion.sh
new file mode 100644
index 00000000..71460fe4
--- /dev/null
+++ b/completions/jc_zsh_completion.sh
@@ -0,0 +1,325 @@
+#compdef jc
+
+_jc() {
+ local -a jc_commands jc_commands_describe \
+ jc_parsers jc_parsers_describe \
+ jc_options jc_options_describe \
+ jc_about_options jc_about_options_describe \
+ jc_about_mod_options jc_about_mod_options_describe \
+ jc_help_options jc_help_options_describe \
+ jc_special_options jc_special_options_describe
+
+ jc_commands=(acpi airport arp blkid chage cksum crontab date df dig dmidecode dpkg du env file finger free git gpg hciconfig id ifconfig iostat iptables iw jobs last lastb ls lsblk lsmod lsof lsusb md5 md5sum mount mpstat netstat nmcli ntpq pidstat ping ping6 pip pip3 postconf printenv ps route rpm rsync sfdisk sha1sum sha224sum sha256sum sha384sum sha512sum shasum ss stat sum sysctl systemctl systeminfo timedatectl top tracepath tracepath6 traceroute traceroute6 ufw uname update-alternatives upower uptime vdir vmstat w wc who xrandr zipinfo)
+ jc_commands_describe=(
+ 'acpi:run "acpi" command with magic syntax.'
+ 'airport:run "airport" command with magic syntax.'
+ 'arp:run "arp" command with magic syntax.'
+ 'blkid:run "blkid" command with magic syntax.'
+ 'chage:run "chage" command with magic syntax.'
+ 'cksum:run "cksum" command with magic syntax.'
+ 'crontab:run "crontab" command with magic syntax.'
+ 'date:run "date" command with magic syntax.'
+ 'df:run "df" command with magic syntax.'
+ 'dig:run "dig" command with magic syntax.'
+ 'dmidecode:run "dmidecode" command with magic syntax.'
+ 'dpkg:run "dpkg" command with magic syntax.'
+ 'du:run "du" command with magic syntax.'
+ 'env:run "env" command with magic syntax.'
+ 'file:run "file" command with magic syntax.'
+ 'finger:run "finger" command with magic syntax.'
+ 'free:run "free" command with magic syntax.'
+ 'git:run "git" command with magic syntax.'
+ 'gpg:run "gpg" command with magic syntax.'
+ 'hciconfig:run "hciconfig" command with magic syntax.'
+ 'id:run "id" command with magic syntax.'
+ 'ifconfig:run "ifconfig" command with magic syntax.'
+ 'iostat:run "iostat" command with magic syntax.'
+ 'iptables:run "iptables" command with magic syntax.'
+ 'iw:run "iw" command with magic syntax.'
+ 'jobs:run "jobs" command with magic syntax.'
+ 'last:run "last" command with magic syntax.'
+ 'lastb:run "lastb" command with magic syntax.'
+ 'ls:run "ls" command with magic syntax.'
+ 'lsblk:run "lsblk" command with magic syntax.'
+ 'lsmod:run "lsmod" command with magic syntax.'
+ 'lsof:run "lsof" command with magic syntax.'
+ 'lsusb:run "lsusb" command with magic syntax.'
+ 'md5:run "md5" command with magic syntax.'
+ 'md5sum:run "md5sum" command with magic syntax.'
+ 'mount:run "mount" command with magic syntax.'
+ 'mpstat:run "mpstat" command with magic syntax.'
+ 'netstat:run "netstat" command with magic syntax.'
+ 'nmcli:run "nmcli" command with magic syntax.'
+ 'ntpq:run "ntpq" command with magic syntax.'
+ 'pidstat:run "pidstat" command with magic syntax.'
+ 'ping:run "ping" command with magic syntax.'
+ 'ping6:run "ping6" command with magic syntax.'
+ 'pip:run "pip" command with magic syntax.'
+ 'pip3:run "pip3" command with magic syntax.'
+ 'postconf:run "postconf" command with magic syntax.'
+ 'printenv:run "printenv" command with magic syntax.'
+ 'ps:run "ps" command with magic syntax.'
+ 'route:run "route" command with magic syntax.'
+ 'rpm:run "rpm" command with magic syntax.'
+ 'rsync:run "rsync" command with magic syntax.'
+ 'sfdisk:run "sfdisk" command with magic syntax.'
+ 'sha1sum:run "sha1sum" command with magic syntax.'
+ 'sha224sum:run "sha224sum" command with magic syntax.'
+ 'sha256sum:run "sha256sum" command with magic syntax.'
+ 'sha384sum:run "sha384sum" command with magic syntax.'
+ 'sha512sum:run "sha512sum" command with magic syntax.'
+ 'shasum:run "shasum" command with magic syntax.'
+ 'ss:run "ss" command with magic syntax.'
+ 'stat:run "stat" command with magic syntax.'
+ 'sum:run "sum" command with magic syntax.'
+ 'sysctl:run "sysctl" command with magic syntax.'
+ 'systemctl:run "systemctl" command with magic syntax.'
+ 'systeminfo:run "systeminfo" command with magic syntax.'
+ 'timedatectl:run "timedatectl" command with magic syntax.'
+ 'top:run "top" command with magic syntax.'
+ 'tracepath:run "tracepath" command with magic syntax.'
+ 'tracepath6:run "tracepath6" command with magic syntax.'
+ 'traceroute:run "traceroute" command with magic syntax.'
+ 'traceroute6:run "traceroute6" command with magic syntax.'
+ 'ufw:run "ufw" command with magic syntax.'
+ 'uname:run "uname" command with magic syntax.'
+ 'update-alternatives:run "update-alternatives" command with magic syntax.'
+ 'upower:run "upower" command with magic syntax.'
+ 'uptime:run "uptime" command with magic syntax.'
+ 'vdir:run "vdir" command with magic syntax.'
+ 'vmstat:run "vmstat" command with magic syntax.'
+ 'w:run "w" command with magic syntax.'
+ 'wc:run "wc" command with magic syntax.'
+ 'who:run "who" command with magic syntax.'
+ 'xrandr:run "xrandr" command with magic syntax.'
+ 'zipinfo:run "zipinfo" command with magic syntax.'
+ )
+ jc_parsers=(--acpi --airport --airport-s --arp --asciitable --asciitable-m --blkid --chage --cksum --crontab --crontab-u --csv --csv-s --date --df --dig --dir --dmidecode --dpkg-l --du --env --file --finger --free --fstab --git-log --git-log-s --gpg --group --gshadow --hash --hashsum --hciconfig --history --hosts --id --ifconfig --ini --iostat --iostat-s --iptables --iw-scan --jar-manifest --jobs --kv --last --ls --ls-s --lsblk --lsmod --lsof --lsusb --mount --mpstat --mpstat-s --netstat --nmcli --ntpq --passwd --pidstat --pidstat-s --ping --ping-s --pip-list --pip-show --postconf --ps --route --rpm-qi --rsync --rsync-s --sfdisk --shadow --ss --stat --stat-s --sysctl --systemctl --systemctl-lj --systemctl-ls --systemctl-luf --systeminfo --time --timedatectl --top --top-s --tracepath --traceroute --ufw --ufw-appinfo --uname --update-alt-gs --update-alt-q --upower --uptime --vmstat --vmstat-s --w --wc --who --x509-cert --xml --xrandr --yaml --zipinfo)
+ jc_parsers_describe=(
+ '--acpi:`acpi` command parser'
+ '--airport:`airport -I` command parser'
+ '--airport-s:`airport -s` command parser'
+ '--arp:`arp` command parser'
+ '--asciitable:ASCII and Unicode table parser'
+ '--asciitable-m:multi-line ASCII and Unicode table parser'
+ '--blkid:`blkid` command parser'
+ '--chage:`chage --list` command parser'
+ '--cksum:`cksum` and `sum` command parser'
+ '--crontab:`crontab` command and file parser'
+ '--crontab-u:`crontab` file parser with user support'
+ '--csv:CSV file parser'
+ '--csv-s:CSV file streaming parser'
+ '--date:`date` command parser'
+ '--df:`df` command parser'
+ '--dig:`dig` command parser'
+ '--dir:`dir` command parser'
+ '--dmidecode:`dmidecode` command parser'
+ '--dpkg-l:`dpkg -l` command parser'
+ '--du:`du` command parser'
+ '--env:`env` command parser'
+ '--file:`file` command parser'
+ '--finger:`finger` command parser'
+ '--free:`free` command parser'
+ '--fstab:`/etc/fstab` file parser'
+ '--git-log:`git log` command parser'
+ '--git-log-s:`git log` command streaming parser'
+ '--gpg:`gpg --with-colons` command parser'
+ '--group:`/etc/group` file parser'
+ '--gshadow:`/etc/gshadow` file parser'
+ '--hash:`hash` command parser'
+ '--hashsum:hashsum command parser (`md5sum`, `shasum`, etc.)'
+ '--hciconfig:`hciconfig` command parser'
+ '--history:`history` command parser'
+ '--hosts:`/etc/hosts` file parser'
+ '--id:`id` command parser'
+ '--ifconfig:`ifconfig` command parser'
+ '--ini:INI file parser'
+ '--iostat:`iostat` command parser'
+ '--iostat-s:`iostat` command streaming parser'
+ '--iptables:`iptables` command parser'
+ '--iw-scan:`iw dev [device] scan` command parser'
+ '--jar-manifest:MANIFEST.MF file parser'
+ '--jobs:`jobs` command parser'
+ '--kv:Key/Value file parser'
+ '--last:`last` and `lastb` command parser'
+ '--ls:`ls` command parser'
+ '--ls-s:`ls` command streaming parser'
+ '--lsblk:`lsblk` command parser'
+ '--lsmod:`lsmod` command parser'
+ '--lsof:`lsof` command parser'
+ '--lsusb:`lsusb` command parser'
+ '--mount:`mount` command parser'
+ '--mpstat:`mpstat` command parser'
+ '--mpstat-s:`mpstat` command streaming parser'
+ '--netstat:`netstat` command parser'
+ '--nmcli:`nmcli` command parser'
+ '--ntpq:`ntpq -p` command parser'
+ '--passwd:`/etc/passwd` file parser'
+ '--pidstat:`pidstat -h` command parser'
+ '--pidstat-s:`pidstat -h` command streaming parser'
+ '--ping:`ping` and `ping6` command parser'
+ '--ping-s:`ping` and `ping6` command streaming parser'
+ '--pip-list:`pip list` command parser'
+ '--pip-show:`pip show` command parser'
+ '--postconf:`postconf -M` command parser'
+ '--ps:`ps` command parser'
+ '--route:`route` command parser'
+ '--rpm-qi:`rpm -qi` command parser'
+ '--rsync:`rsync` command parser'
+ '--rsync-s:`rsync` command streaming parser'
+ '--sfdisk:`sfdisk` command parser'
+ '--shadow:`/etc/shadow` file parser'
+ '--ss:`ss` command parser'
+ '--stat:`stat` command parser'
+ '--stat-s:`stat` command streaming parser'
+ '--sysctl:`sysctl` command parser'
+ '--systemctl:`systemctl` command parser'
+ '--systemctl-lj:`systemctl list-jobs` command parser'
+ '--systemctl-ls:`systemctl list-sockets` command parser'
+ '--systemctl-luf:`systemctl list-unit-files` command parser'
+ '--systeminfo:`systeminfo` command parser'
+ '--time:`/usr/bin/time` command parser'
+ '--timedatectl:`timedatectl status` command parser'
+ '--top:`top -b` command parser'
+ '--top-s:`top -b` command streaming parser'
+ '--tracepath:`tracepath` and `tracepath6` command parser'
+ '--traceroute:`traceroute` and `traceroute6` command parser'
+ '--ufw:`ufw status` command parser'
+ '--ufw-appinfo:`ufw app info [application]` command parser'
+ '--uname:`uname -a` command parser'
+ '--update-alt-gs:`update-alternatives --get-selections` command parser'
+ '--update-alt-q:`update-alternatives --query` command parser'
+ '--upower:`upower` command parser'
+ '--uptime:`uptime` command parser'
+ '--vmstat:`vmstat` command parser'
+ '--vmstat-s:`vmstat` command streaming parser'
+ '--w:`w` command parser'
+ '--wc:`wc` command parser'
+ '--who:`who` command parser'
+ '--x509-cert:X.509 PEM and DER certificate file parser'
+ '--xml:XML file parser'
+ '--xrandr:`xrandr` command parser'
+ '--yaml:YAML file parser'
+ '--zipinfo:`zipinfo` command parser'
+ )
+ jc_options=(--force-color -C --debug -d --monochrome -m --pretty -p --quiet -q --raw -r --unbuffer -u --yaml-out -y)
+ jc_options_describe=(
+ '--force-color:force color output even when using pipes (overrides -m)'
+ '-C:force color output even when using pipes (overrides -m)'
+ '--debug:debug (double for verbose debug)'
+ '-d:debug (double for verbose debug)'
+ '--monochrome:monochrome output'
+ '-m:monochrome output'
+ '--pretty:pretty print output'
+ '-p:pretty print output'
+ '--quiet:suppress warnings (double to ignore streaming errors)'
+ '-q:suppress warnings (double to ignore streaming errors)'
+ '--raw:raw output'
+ '-r:raw output'
+ '--unbuffer:unbuffer output'
+ '-u:unbuffer output'
+ '--yaml-out:YAML output'
+ '-y:YAML output'
+ )
+ jc_about_options=(--about -a)
+ jc_about_options_describe=(
+ '--about:about jc'
+ '-a:about jc'
+ )
+ jc_about_mod_options=(--pretty -p --yaml-out -y --monochrome -m --force-color -C)
+ jc_about_mod_options_describe=(
+ '--pretty:pretty print output'
+ '-p:pretty print output'
+ '--yaml-out:YAML output'
+ '-y:YAML output'
+ '--monochrome:monochrome output'
+ '-m:monochrome output'
+ '--force-color:force color output even when using pipes (overrides -m)'
+ '-C:force color output even when using pipes (overrides -m)'
+ )
+ jc_help_options=(--help -h)
+ jc_help_options_describe=(
+ '--help:help (--help --parser_name for parser documentation)'
+ '-h:help (--help --parser_name for parser documentation)'
+ )
+ jc_special_options=(--version -v --bash-comp -B --zsh-comp -Z)
+ jc_special_options_describe=(
+ '--version:version info'
+ '-v:version info'
+ '--bash-comp:gen Bash completion: jc -B > /etc/bash_completion.d/jc'
+ '-B:gen Bash completion: jc -B > /etc/bash_completion.d/jc'
+ '--zsh-comp:gen Zsh completion: jc -Z > "${fpath[1]}/_jc"'
+ '-Z:gen Zsh completion: jc -Z > "${fpath[1]}/_jc"'
+ )
+
+ # if jc_about_options are found anywhere in the line, then only complete from jc_about_mod_options
+ for i in ${words:0:-1}; do
+ if (( $jc_about_options[(Ie)${i}] )); then
+ _describe 'commands' jc_about_mod_options_describe
+ return 0
+ fi
+ done
+
+ # if jc_help_options and a parser are found anywhere in the line, then no more completions
+ if
+ (
+ for i in ${words:0:-1}; do
+ if (( $jc_help_options[(Ie)${i}] )); then
+ return 0
+ fi
+ done
+ return 1
+ ) && (
+ for i in ${words:0:-1}; do
+ if (( $jc_parsers[(Ie)${i}] )); then
+ return 0
+ fi
+ done
+ return 1
+ ); then
+ return 0
+ fi
+
+ # if jc_help_options are found anywhere in the line, then only complete with parsers
+ for i in ${words:0:-1}; do
+ if (( $jc_help_options[(Ie)${i}] )); then
+ _describe 'commands' jc_parsers_describe
+ return 0
+ fi
+ done
+
+ # if special options are found anywhere in the line, then no more completions
+ for i in ${words:0:-1}; do
+ if (( $jc_special_options[(Ie)${i}] )); then
+ return 0
+ fi
+ done
+
+ # if magic command is found anywhere in the line, use called command's autocompletion
+ for i in ${words:0:-1}; do
+ if (( $jc_commands[(Ie)${i}] )); then
+ # hack to remove options between jc and the magic command
+ shift $(( ${#words} - 2 )) words
+ words[1,0]=(jc)
+ CURRENT=${#words}
+
+ # run the magic command's completions
+ _arguments '*::arguments:_normal'
+ return 0
+ fi
+ done
+
+ # if a parser arg is found anywhere in the line, only show options and help options
+ for i in ${words:0:-1}; do
+ if (( $jc_parsers[(Ie)${i}] )); then
+ _describe 'commands' jc_options_describe -- jc_help_options_describe
+ return 0
+ fi
+ done
+
+ # default completion
+ _describe 'commands' jc_options_describe -- jc_about_options_describe -- jc_help_options_describe -- jc_special_options_describe -- jc_parsers_describe -- jc_commands_describe
+}
+
+_jc
+
diff --git a/docs/parsers/gpg.md b/docs/parsers/gpg.md
new file mode 100644
index 00000000..c3fba80b
--- /dev/null
+++ b/docs/parsers/gpg.md
@@ -0,0 +1,145 @@
+[Home](https://kellyjonbrazil.github.io/jc/)
+
+
+# jc.parsers.gpg
+
+jc - JSON Convert `gpg --with-colons` command output parser
+
+Usage (cli):
+
+ $ gpg --with-colons --show-keys file.gpg | jc --gpg
+
+ or
+
+ $ jc gpg --with-colons --show-keys file.gpg
+
+Usage (module):
+
+ import jc
+ result = jc.parse('gpg', gpg_command_output)
+
+Schema:
+
+Field definitions from https://git.gnupg.org/cgi-bin/gitweb.cgi?p=gnupg.git;a=blob_plain;f=doc/DETAILS
+
+> Note: Number values are not converted to integers because many field
+> specifications are overloaded and future augmentations are implied in the
+> documentation.
+
+ [
+ {
+ "type": string,
+ "validity": string,
+ "key_length": string,
+ "pub_key_alg": string,
+ "key_id": string,
+ "creation_date": string,
+ "expiration_date": string,
+ "certsn_uidhash_trustinfo": string,
+ "owner_trust": string,
+ "user_id": string,
+ "signature_class": string,
+ "key_capabilities": string,
+ "cert_fingerprint_other": string,
+ "flag": string,
+ "token_sn": string,
+ "hash_alg": string,
+ "curve_name": string,
+ "compliance_flags": string,
+ "last_update_date": string,
+ "origin": string,
+ "comment": string,
+ "index": string, # [0]
+ "bits": string, # [0]
+ "value": string, # [0]
+ "version": string, # [1], [4]
+ "signature_count": string, # [1]
+ "encryption_count": string, # [1]
+ "policy": string, # [1]
+ "signature_first_seen": string, # [1]
+ "signature_most_recent_seen": string, # [1]
+ "encryption_first_done": string, # [1]
+ "encryption_most_recent_done": string, # [1]
+ "staleness_reason": string, # [2]
+ "trust_model": string, # [2]
+ "trust_db_created": string, # [2]
+ "trust_db_expires": string, # [2]
+ "marginally_trusted_users": string, # [2]
+ "completely_trusted_users": string, # [2]
+ "cert_chain_max_depth": string, # [2]
+ "subpacket_number": string, # [3]
+ "hex_flags": string, # [3]
+ "subpacket_length": string, # [3]
+ "subpacket_data": string, # [3]
+ "pubkey": string, # [4]
+ "cipher": string, # [4]
+ "digest": string, # [4]
+ "compress": string, # [4]
+ "group": string, # [4]
+ "members": string, # [4]
+ "curve_names": string, # [4]
+ }
+ ]
+
+ All blank values are converted to null/None.
+
+ [0] for 'pkd' type
+ [1] for 'tfs' type
+ [2] for 'tru' type
+ [3] for 'skp' type
+ [4] for 'cfg' type
+
+Examples:
+
+ $ gpg --with-colons --show-keys file.gpg | jc --gpg -p
+ [
+ {
+ "type": "pub",
+ "validity": "f",
+ "key_length": "1024",
+ "pub_key_alg": "17",
+ "key_id": "6C7EE1B8621CC013",
+ "creation_date": "899817715",
+ "expiration_date": "1055898235",
+ "certsn_uidhash_trustinfo": null,
+ "owner_trust": "m",
+ "user_id": null,
+ "signature_class": null,
+ "key_capabilities": "scESC",
+ "cert_fingerprint_other": null,
+ "flag": null,
+ "token_sn": null,
+ "hash_alg": null,
+ "curve_name": null,
+ "compliance_flags": null,
+ "last_update_date": null,
+ "origin": null,
+ "comment": null
+ },
+ ...
+ ]
+
+
+
+### parse
+
+```python
+def parse(data: str, raw: bool = False, quiet: bool = False) -> List[Dict]
+```
+
+Main text parsing function
+
+Parameters:
+
+ data: (string) text data to parse
+ raw: (boolean) unprocessed output if True
+ quiet: (boolean) suppress warning messages if True
+
+Returns:
+
+ List of Dictionaries. Raw or processed structured data.
+
+### Parser Information
+Compatibility: linux
+
+Version 1.0 by Kelly Brazil (kellyjonbrazil@gmail.com)
diff --git a/docs/parsers/iptables.md b/docs/parsers/iptables.md
index 1bbcbb36..bc646347 100644
--- a/docs/parsers/iptables.md
+++ b/docs/parsers/iptables.md
@@ -186,4 +186,4 @@ Returns:
### Parser Information
Compatibility: linux
-Version 1.7 by Kelly Brazil (kellyjonbrazil@gmail.com)
+Version 1.8 by Kelly Brazil (kellyjonbrazil@gmail.com)
diff --git a/docs/parsers/x509_cert.md b/docs/parsers/x509_cert.md
new file mode 100644
index 00000000..f0c24bd2
--- /dev/null
+++ b/docs/parsers/x509_cert.md
@@ -0,0 +1,225 @@
+[Home](https://kellyjonbrazil.github.io/jc/)
+
+
+# jc.parsers.x509\_cert
+
+jc - JSON Convert X.509 Certificate format file parser
+
+This parser will convert DER and PEM encoded X.509 certificate files.
+
+Usage (cli):
+
+ $ cat certificate.pem | jc --x509-cert
+
+Usage (module):
+
+ import jc
+ result = jc.parse('x509_cert', x509_cert_file_output)
+
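+Since the parser accepts both string and bytes input (see the `parse`
+signature below), a DER-encoded certificate can be read in binary mode and
+passed in directly. A minimal sketch (`certificate.der` is a placeholder
+filename):
+
+    import jc
+    with open('certificate.der', 'rb') as f:
+        result = jc.parse('x509_cert', f.read())
+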
+Schema:
+
+ [
+ {
+ "tbs_certificate": {
+ "version": string,
+ "serial_number": string, # [0]
+ "signature": {
+ "algorithm": string,
+ "parameters": string/null,
+ },
+ "issuer": {
+ "country_name": string,
+        "state_or_province_name": string,
+ "locality_name": string,
+ "organization_name": array/string,
+ "organizational_unit_name": array/string,
+ "common_name": string,
+ "email_address": string
+ },
+ "validity": {
+ "not_before": integer, # [1]
+ "not_after": integer, # [1]
+ "not_before_iso": string,
+ "not_after_iso": string
+ },
+ "subject": {
+ "country_name": string,
+ "state_or_province_name": string,
+ "locality_name": string,
+ "organization_name": array/string,
+ "organizational_unit_name": array/string,
+ "common_name": string,
+ "email_address": string
+ },
+ "subject_public_key_info": {
+ "algorithm": {
+ "algorithm": string,
+ "parameters": string/null,
+ },
+ "public_key": {
+ "modulus": string, # [0]
+ "public_exponent": integer
+ }
+ },
+ "issuer_unique_id": string/null,
+ "subject_unique_id": string/null,
+ "extensions": [
+ {
+ "extn_id": string,
+ "critical": boolean,
+ "extn_value": array/object/string/integer # [2]
+ }
+ ]
+ },
+ "signature_algorithm": {
+ "algorithm": string,
+ "parameters": string/null
+ },
+ "signature_value": string # [0]
+ }
+ ]
+
+ [0] in colon-delimited hex notation
+ [1] time-zone-aware (UTC) epoch timestamp
+ [2] See below for well-known Extension schemas:
+
+ Basic Constraints:
+ {
+ "extn_id": "basic_constraints",
+ "critical": boolean,
+ "extn_value": {
+ "ca": boolean,
+ "path_len_constraint": string/null
+ }
+ }
+
+ Key Usage:
+ {
+ "extn_id": "key_usage",
+ "critical": boolean,
+ "extn_value": [
+ string
+ ]
+ }
+
+ Key Identifier:
+ {
+ "extn_id": "key_identifier",
+ "critical": boolean,
+ "extn_value": string # [0]
+ }
+
+ Authority Key Identifier:
+ {
+ "extn_id": "authority_key_identifier",
+ "critical": boolean,
+ "extn_value": {
+ "key_identifier": string, # [0]
+ "authority_cert_issuer": string/null,
+ "authority_cert_serial_number": string/null
+ }
+ }
+
+Examples:
+
+    $ cat entrust-ec1.pem | jc --x509-cert -p
+ [
+ {
+ "tbs_certificate": {
+ "version": "v3",
+ "serial_number": "a6:8b:79:29:00:00:00:00:50:d0:91:f9",
+ "signature": {
+ "algorithm": "sha384_ecdsa",
+ "parameters": null
+ },
+ "issuer": {
+ "country_name": "US",
+ "organization_name": "Entrust, Inc.",
+ "organizational_unit_name": [
+ "See www.entrust.net/legal-terms",
+ "(c) 2012 Entrust, Inc. - for authorized use only"
+ ],
+ "common_name": "Entrust Root Certification Authority - EC1"
+ },
+ "validity": {
+ "not_before": 1355844336,
+ "not_after": 2144764536,
+ "not_before_iso": "2012-12-18T15:25:36+00:00",
+ "not_after_iso": "2037-12-18T15:55:36+00:00"
+ },
+ "subject": {
+ "country_name": "US",
+ "organization_name": "Entrust, Inc.",
+ "organizational_unit_name": [
+ "See www.entrust.net/legal-terms",
+ "(c) 2012 Entrust, Inc. - for authorized use only"
+ ],
+ "common_name": "Entrust Root Certification Authority - EC1"
+ },
+ "subject_public_key_info": {
+ "algorithm": {
+ "algorithm": "ec",
+ "parameters": "secp384r1"
+ },
+ "public_key": "04:84:13:c9:d0:ba:6d:41:7b:e2:6c:d0:eb:55:..."
+ },
+ "issuer_unique_id": null,
+ "subject_unique_id": null,
+ "extensions": [
+ {
+ "extn_id": "key_usage",
+ "critical": true,
+ "extn_value": [
+ "crl_sign",
+ "key_cert_sign"
+ ]
+ },
+ {
+ "extn_id": "basic_constraints",
+ "critical": true,
+ "extn_value": {
+ "ca": true,
+ "path_len_constraint": null
+ }
+ },
+ {
+ "extn_id": "key_identifier",
+ "critical": false,
+ "extn_value": "b7:63:e7:1a:dd:8d:e9:08:a6:55:83:a4:e0:6a:..."
+ }
+ ]
+ },
+ "signature_algorithm": {
+ "algorithm": "sha384_ecdsa",
+ "parameters": null
+ },
+ "signature_value": "30:64:02:30:61:79:d8:e5:42:47:df:1c:ae:53:..."
+ }
+ ]
+
+
+
+### parse
+
+```python
+def parse(data: Union[str, bytes],
+ raw: bool = False,
+ quiet: bool = False) -> List[Dict]
+```
+
+Main text parsing function
+
+Parameters:
+
+ data: (string) text data to parse
+ raw: (boolean) unprocessed output if True
+ quiet: (boolean) suppress warning messages if True
+
+Returns:
+
+ List of Dictionaries. Raw or processed structured data.
+
+### Parser Information
+Compatibility: linux, darwin, cygwin, win32, aix, freebsd
+
+Version 1.0 by Kelly Brazil (kellyjonbrazil@gmail.com)
diff --git a/docs/utils.md b/docs/utils.md
index 7dac5f2e..0004f63f 100644
--- a/docs/utils.md
+++ b/docs/utils.md
@@ -102,20 +102,23 @@ Returns:
### has\_data
```python
-def has_data(data: str) -> bool
+def has_data(data: Union[str, bytes]) -> bool
```
-Checks if the input contains data. If there are any non-whitespace
-characters then return `True`, else return `False`.
+Checks if the string input contains data. If there are any
+non-whitespace characters then return `True`, else return `False`.
+
+For bytes, returns True if there is any data.
Parameters:
- data: (string) input to check whether it contains data
+ data: (string, bytes) input to check whether it contains data
Returns:
Boolean True if input string (data) contains non-whitespace
- characters, otherwise False
+ characters, otherwise False. For bytes data, returns
+ True if there is any data, otherwise False.
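+
+For example (a minimal sketch of the expected behavior):
+
+```python
+from jc.utils import has_data
+
+has_data('   \n  ')     # False - whitespace only
+has_data('some text')   # True
+has_data(b'')           # False - empty bytes
+has_data(b'\x00\xff')   # True - bytes only need to be non-empty
+```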
diff --git a/jc/cli.py b/jc/cli.py
index c74c73be..ac83863e 100644
--- a/jc/cli.py
+++ b/jc/cli.py
@@ -14,7 +14,7 @@ from .lib import (__version__, parser_info, all_parser_info, parsers,
_get_parser, _parser_is_streaming, standard_parser_mod_list,
plugin_parser_mod_list, streaming_parser_mod_list)
from . import utils
-from .cli_data import long_options_map
+from .cli_data import long_options_map, new_pygments_colors, old_pygments_colors
from .shell_completions import bash_completion, zsh_completion
from . import tracebackplus
from .exceptions import LibraryNotInstalled, ParseError
@@ -48,43 +48,9 @@ class info():
# startswith is sufficient and avoids potential exceptions from split and int.
if PYGMENTS_INSTALLED:
if pygments.__version__.startswith('2.3.'):
- PYGMENT_COLOR = {
- 'black': '#ansiblack',
- 'red': '#ansidarkred',
- 'green': '#ansidarkgreen',
- 'yellow': '#ansibrown',
- 'blue': '#ansidarkblue',
- 'magenta': '#ansipurple',
- 'cyan': '#ansiteal',
- 'gray': '#ansilightgray',
- 'brightblack': '#ansidarkgray',
- 'brightred': '#ansired',
- 'brightgreen': '#ansigreen',
- 'brightyellow': '#ansiyellow',
- 'brightblue': '#ansiblue',
- 'brightmagenta': '#ansifuchsia',
- 'brightcyan': '#ansiturquoise',
- 'white': '#ansiwhite',
- }
+ PYGMENT_COLOR = old_pygments_colors
else:
- PYGMENT_COLOR = {
- 'black': 'ansiblack',
- 'red': 'ansired',
- 'green': 'ansigreen',
- 'yellow': 'ansiyellow',
- 'blue': 'ansiblue',
- 'magenta': 'ansimagenta',
- 'cyan': 'ansicyan',
- 'gray': 'ansigray',
- 'brightblack': 'ansibrightblack',
- 'brightred': 'ansibrightred',
- 'brightgreen': 'ansibrightgreen',
- 'brightyellow': 'ansibrightyellow',
- 'brightblue': 'ansibrightblue',
- 'brightmagenta': 'ansibrightmagenta',
- 'brightcyan': 'ansibrightcyan',
- 'white': 'ansiwhite',
- }
+ PYGMENT_COLOR = new_pygments_colors
def set_env_colors(env_colors=None):
@@ -622,7 +588,7 @@ def main():
try:
# differentiate between regular and streaming parsers
- # streaming
+ # streaming (only supports UTF-8 string data for now)
if _parser_is_streaming(parser):
result = parser.parse(sys.stdin,
raw=raw,
@@ -639,9 +605,17 @@ def main():
sys.exit(combined_exit_code(magic_exit_code, 0))
- # regular
+ # regular (supports binary and UTF-8 string data)
else:
- data = magic_stdout or sys.stdin.read()
+ data = magic_stdout or sys.stdin.buffer.read()
+
+ # convert to UTF-8, if possible. Otherwise, leave as bytes
+ try:
+ if isinstance(data, bytes):
+ data = data.decode('utf-8')
+ except UnicodeDecodeError:
+ pass
+
result = parser.parse(data,
raw=raw,
quiet=quiet)
diff --git a/jc/cli_data.py b/jc/cli_data.py
index f9f6ed2f..2b0814f5 100644
--- a/jc/cli_data.py
+++ b/jc/cli_data.py
@@ -16,3 +16,41 @@ long_options_map: Dict[str, List[str]] = {
'--bash-comp': ['B', 'gen Bash completion: jc -B > /etc/bash_completion.d/jc'],
'--zsh-comp': ['Z', 'gen Zsh completion: jc -Z > "${fpath[1]}/_jc"']
}
+
+new_pygments_colors = {
+ 'black': 'ansiblack',
+ 'red': 'ansired',
+ 'green': 'ansigreen',
+ 'yellow': 'ansiyellow',
+ 'blue': 'ansiblue',
+ 'magenta': 'ansimagenta',
+ 'cyan': 'ansicyan',
+ 'gray': 'ansigray',
+ 'brightblack': 'ansibrightblack',
+ 'brightred': 'ansibrightred',
+ 'brightgreen': 'ansibrightgreen',
+ 'brightyellow': 'ansibrightyellow',
+ 'brightblue': 'ansibrightblue',
+ 'brightmagenta': 'ansibrightmagenta',
+ 'brightcyan': 'ansibrightcyan',
+ 'white': 'ansiwhite',
+}
+
+old_pygments_colors = {
+ 'black': '#ansiblack',
+ 'red': '#ansidarkred',
+ 'green': '#ansidarkgreen',
+ 'yellow': '#ansibrown',
+ 'blue': '#ansidarkblue',
+ 'magenta': '#ansipurple',
+ 'cyan': '#ansiteal',
+ 'gray': '#ansilightgray',
+ 'brightblack': '#ansidarkgray',
+ 'brightred': '#ansired',
+ 'brightgreen': '#ansigreen',
+ 'brightyellow': '#ansiyellow',
+ 'brightblue': '#ansiblue',
+ 'brightmagenta': '#ansifuchsia',
+ 'brightcyan': '#ansiturquoise',
+ 'white': '#ansiwhite',
+}
diff --git a/jc/lib.py b/jc/lib.py
index 1733a351..6fe0243c 100644
--- a/jc/lib.py
+++ b/jc/lib.py
@@ -6,7 +6,7 @@ import importlib
from typing import Dict, List, Iterable, Union, Iterator
from jc import appdirs
-__version__ = '1.20.1'
+__version__ = '1.20.2'
parsers = [
'acpi',
@@ -36,6 +36,7 @@ parsers = [
'fstab',
'git-log',
'git-log-s',
+ 'gpg',
'group',
'gshadow',
'hash',
@@ -108,6 +109,7 @@ parsers = [
'w',
'wc',
'who',
+ 'x509-cert',
'xml',
'xrandr',
'yaml',
diff --git a/jc/parsers/asn1crypto/__init__.py b/jc/parsers/asn1crypto/__init__.py
new file mode 100644
index 00000000..2c93f00e
--- /dev/null
+++ b/jc/parsers/asn1crypto/__init__.py
@@ -0,0 +1,47 @@
+# coding: utf-8
+from __future__ import unicode_literals, division, absolute_import, print_function
+
+from .version import __version__, __version_info__
+
+__all__ = [
+ '__version__',
+ '__version_info__',
+ 'load_order',
+]
+
+
+def load_order():
+ """
+ Returns a list of the module and sub-module names for asn1crypto in
+ dependency load order, for the sake of live reloading code
+
+ :return:
+ A list of unicode strings of module names, as they would appear in
+ sys.modules, ordered by which module should be reloaded first
+ """
+
+ return [
+ 'asn1crypto._errors',
+ 'asn1crypto._int',
+ 'asn1crypto._ordereddict',
+ 'asn1crypto._teletex_codec',
+ 'asn1crypto._types',
+ 'asn1crypto._inet',
+ 'asn1crypto._iri',
+ 'asn1crypto.version',
+ 'asn1crypto.pem',
+ 'asn1crypto.util',
+ 'asn1crypto.parser',
+ 'asn1crypto.core',
+ 'asn1crypto.algos',
+ 'asn1crypto.keys',
+ 'asn1crypto.x509',
+ 'asn1crypto.crl',
+ 'asn1crypto.csr',
+ 'asn1crypto.ocsp',
+ 'asn1crypto.cms',
+ 'asn1crypto.pdf',
+ 'asn1crypto.pkcs12',
+ 'asn1crypto.tsp',
+ 'asn1crypto',
+ ]
diff --git a/jc/parsers/asn1crypto/_errors.py b/jc/parsers/asn1crypto/_errors.py
new file mode 100644
index 00000000..d8797a2f
--- /dev/null
+++ b/jc/parsers/asn1crypto/_errors.py
@@ -0,0 +1,54 @@
+# coding: utf-8
+
+"""
+Exports the following items:
+
+ - unwrap()
+ - APIException()
+"""
+
+from __future__ import unicode_literals, division, absolute_import, print_function
+
+import re
+import textwrap
+
+
+class APIException(Exception):
+ """
+ An exception indicating an API has been removed from asn1crypto
+ """
+
+ pass
+
+
+def unwrap(string, *params):
+ """
+ Takes a multi-line string and does the following:
+
+ - dedents
+ - converts newlines with text before and after into a single line
+ - strips leading and trailing whitespace
+
+ :param string:
+ The string to format
+
+ :param *params:
+ Params to interpolate into the string
+
+ :return:
+ The formatted string
+ """
+
+ output = textwrap.dedent(string)
+
+ # Unwrap lines, taking into account bulleted lists, ordered lists and
+ # underlines consisting of = signs
+ if output.find('\n') != -1:
+ output = re.sub('(?<=\\S)\n(?=[^ \n\t\\d\\*\\-=])', ' ', output)
+
+ if params:
+ output = output % params
+
+ output = output.strip()
+
+ return output
diff --git a/jc/parsers/asn1crypto/_inet.py b/jc/parsers/asn1crypto/_inet.py
new file mode 100644
index 00000000..045ba561
--- /dev/null
+++ b/jc/parsers/asn1crypto/_inet.py
@@ -0,0 +1,170 @@
+# coding: utf-8
+from __future__ import unicode_literals, division, absolute_import, print_function
+
+import socket
+import struct
+
+from ._errors import unwrap
+from ._types import byte_cls, bytes_to_list, str_cls, type_name
+
+
+def inet_ntop(address_family, packed_ip):
+ """
+ Windows compatibility shim for socket.inet_ntop().
+
+ :param address_family:
+ socket.AF_INET for IPv4 or socket.AF_INET6 for IPv6
+
+ :param packed_ip:
+ A byte string of the network form of an IP address
+
+ :return:
+ A unicode string of the IP address
+ """
+
+ if address_family not in set([socket.AF_INET, socket.AF_INET6]):
+ raise ValueError(unwrap(
+ '''
+ address_family must be socket.AF_INET (%s) or socket.AF_INET6 (%s),
+ not %s
+ ''',
+ repr(socket.AF_INET),
+ repr(socket.AF_INET6),
+ repr(address_family)
+ ))
+
+ if not isinstance(packed_ip, byte_cls):
+ raise TypeError(unwrap(
+ '''
+ packed_ip must be a byte string, not %s
+ ''',
+ type_name(packed_ip)
+ ))
+
+ required_len = 4 if address_family == socket.AF_INET else 16
+ if len(packed_ip) != required_len:
+ raise ValueError(unwrap(
+ '''
+ packed_ip must be %d bytes long - is %d
+ ''',
+ required_len,
+ len(packed_ip)
+ ))
+
+ if address_family == socket.AF_INET:
+ return '%d.%d.%d.%d' % tuple(bytes_to_list(packed_ip))
+
+ octets = struct.unpack(b'!HHHHHHHH', packed_ip)
+
+ runs_of_zero = {}
+ longest_run = 0
+ zero_index = None
+ for i, octet in enumerate(octets + (-1,)):
+ if octet != 0:
+ if zero_index is not None:
+ length = i - zero_index
+ if length not in runs_of_zero:
+ runs_of_zero[length] = zero_index
+ longest_run = max(longest_run, length)
+ zero_index = None
+ elif zero_index is None:
+ zero_index = i
+
+ hexed = [hex(o)[2:] for o in octets]
+
+ if longest_run < 2:
+ return ':'.join(hexed)
+
+ zero_start = runs_of_zero[longest_run]
+ zero_end = zero_start + longest_run
+
+ return ':'.join(hexed[:zero_start]) + '::' + ':'.join(hexed[zero_end:])
+
+
+def inet_pton(address_family, ip_string):
+ """
+    Windows compatibility shim for socket.inet_pton().
+
+ :param address_family:
+ socket.AF_INET for IPv4 or socket.AF_INET6 for IPv6
+
+ :param ip_string:
+ A unicode string of an IP address
+
+ :return:
+ A byte string of the network form of the IP address
+ """
+
+ if address_family not in set([socket.AF_INET, socket.AF_INET6]):
+ raise ValueError(unwrap(
+ '''
+ address_family must be socket.AF_INET (%s) or socket.AF_INET6 (%s),
+ not %s
+ ''',
+ repr(socket.AF_INET),
+ repr(socket.AF_INET6),
+ repr(address_family)
+ ))
+
+ if not isinstance(ip_string, str_cls):
+ raise TypeError(unwrap(
+ '''
+ ip_string must be a unicode string, not %s
+ ''',
+ type_name(ip_string)
+ ))
+
+ if address_family == socket.AF_INET:
+ octets = ip_string.split('.')
+ error = len(octets) != 4
+ if not error:
+ ints = []
+ for o in octets:
+ o = int(o)
+ if o > 255 or o < 0:
+ error = True
+ break
+ ints.append(o)
+
+ if error:
+ raise ValueError(unwrap(
+ '''
+ ip_string must be a dotted string with four integers in the
+ range of 0 to 255, got %s
+ ''',
+ repr(ip_string)
+ ))
+
+ return struct.pack(b'!BBBB', *ints)
+
+ error = False
+ omitted = ip_string.count('::')
+ if omitted > 1:
+ error = True
+ elif omitted == 0:
+ octets = ip_string.split(':')
+ error = len(octets) != 8
+ else:
+ begin, end = ip_string.split('::')
+ begin_octets = begin.split(':')
+ end_octets = end.split(':')
+ missing = 8 - len(begin_octets) - len(end_octets)
+ octets = begin_octets + (['0'] * missing) + end_octets
+
+ if not error:
+ ints = []
+ for o in octets:
+ o = int(o, 16)
+ if o > 65535 or o < 0:
+ error = True
+ break
+ ints.append(o)
+
+ return struct.pack(b'!HHHHHHHH', *ints)
+
+ raise ValueError(unwrap(
+ '''
+ ip_string must be a valid ipv6 string, got %s
+ ''',
+ repr(ip_string)
+ ))
diff --git a/jc/parsers/asn1crypto/_int.py b/jc/parsers/asn1crypto/_int.py
new file mode 100644
index 00000000..094fc958
--- /dev/null
+++ b/jc/parsers/asn1crypto/_int.py
@@ -0,0 +1,22 @@
+# coding: utf-8
+from __future__ import unicode_literals, division, absolute_import, print_function
+
+
+def fill_width(bytes_, width):
+ """
+ Ensure a byte string representing a positive integer is a specific width
+ (in bytes)
+
+ :param bytes_:
+ The integer byte string
+
+ :param width:
+ The desired width as an integer
+
+ :return:
+ A byte string of the width specified
+ """
+
+ while len(bytes_) < width:
+ bytes_ = b'\x00' + bytes_
+ return bytes_
diff --git a/jc/parsers/asn1crypto/_iri.py b/jc/parsers/asn1crypto/_iri.py
new file mode 100644
index 00000000..7394b4d5
--- /dev/null
+++ b/jc/parsers/asn1crypto/_iri.py
@@ -0,0 +1,291 @@
+# coding: utf-8
+
+"""
+Functions to convert unicode IRIs into ASCII byte string URIs and back. Exports
+the following items:
+
+ - iri_to_uri()
+ - uri_to_iri()
+"""
+
+from __future__ import unicode_literals, division, absolute_import, print_function
+
+from encodings import idna # noqa
+import codecs
+import re
+import sys
+
+from ._errors import unwrap
+from ._types import byte_cls, str_cls, type_name, bytes_to_list, int_types
+
+if sys.version_info < (3,):
+ from urlparse import urlsplit, urlunsplit
+ from urllib import (
+ quote as urlquote,
+ unquote as unquote_to_bytes,
+ )
+
+else:
+ from urllib.parse import (
+ quote as urlquote,
+ unquote_to_bytes,
+ urlsplit,
+ urlunsplit,
+ )
+
+
+def iri_to_uri(value, normalize=False):
+ """
+ Encodes a unicode IRI into an ASCII byte string URI
+
+ :param value:
+ A unicode string of an IRI
+
+ :param normalize:
+ A bool that controls URI normalization
+
+ :return:
+ A byte string of the ASCII-encoded URI
+ """
+
+ if not isinstance(value, str_cls):
+ raise TypeError(unwrap(
+ '''
+ value must be a unicode string, not %s
+ ''',
+ type_name(value)
+ ))
+
+ scheme = None
+    # Python 2.6 doesn't split properly if the URL doesn't start with http:// or https://
+ if sys.version_info < (2, 7) and not value.startswith('http://') and not value.startswith('https://'):
+ real_prefix = None
+ prefix_match = re.match('^[^:]*://', value)
+ if prefix_match:
+ real_prefix = prefix_match.group(0)
+ value = 'http://' + value[len(real_prefix):]
+ parsed = urlsplit(value)
+ if real_prefix:
+ value = real_prefix + value[7:]
+ scheme = _urlquote(real_prefix[:-3])
+ else:
+ parsed = urlsplit(value)
+
+ if scheme is None:
+ scheme = _urlquote(parsed.scheme)
+ hostname = parsed.hostname
+ if hostname is not None:
+ hostname = hostname.encode('idna')
+ # RFC 3986 allows userinfo to contain sub-delims
+ username = _urlquote(parsed.username, safe='!$&\'()*+,;=')
+ password = _urlquote(parsed.password, safe='!$&\'()*+,;=')
+ port = parsed.port
+ if port is not None:
+ port = str_cls(port).encode('ascii')
+
+ netloc = b''
+ if username is not None:
+ netloc += username
+ if password:
+ netloc += b':' + password
+ netloc += b'@'
+ if hostname is not None:
+ netloc += hostname
+ if port is not None:
+ default_http = scheme == b'http' and port == b'80'
+ default_https = scheme == b'https' and port == b'443'
+ if not normalize or (not default_http and not default_https):
+ netloc += b':' + port
+
+ # RFC 3986 allows a path to contain sub-delims, plus "@" and ":"
+ path = _urlquote(parsed.path, safe='/!$&\'()*+,;=@:')
+ # RFC 3986 allows the query to contain sub-delims, plus "@", ":" , "/" and "?"
+ query = _urlquote(parsed.query, safe='/?!$&\'()*+,;=@:')
+ # RFC 3986 allows the fragment to contain sub-delims, plus "@", ":" , "/" and "?"
+ fragment = _urlquote(parsed.fragment, safe='/?!$&\'()*+,;=@:')
+
+ if normalize and query is None and fragment is None and path == b'/':
+ path = None
+
+ # Python 2.7 compat
+ if path is None:
+ path = ''
+
+ output = urlunsplit((scheme, netloc, path, query, fragment))
+ if isinstance(output, str_cls):
+ output = output.encode('latin1')
+ return output
+
+
+def uri_to_iri(value):
+ """
+ Converts an ASCII URI byte string into a unicode IRI
+
+ :param value:
+ An ASCII-encoded byte string of the URI
+
+ :return:
+ A unicode string of the IRI
+ """
+
+ if not isinstance(value, byte_cls):
+ raise TypeError(unwrap(
+ '''
+ value must be a byte string, not %s
+ ''',
+ type_name(value)
+ ))
+
+ parsed = urlsplit(value)
+
+ scheme = parsed.scheme
+ if scheme is not None:
+ scheme = scheme.decode('ascii')
+
+ username = _urlunquote(parsed.username, remap=[':', '@'])
+ password = _urlunquote(parsed.password, remap=[':', '@'])
+ hostname = parsed.hostname
+ if hostname:
+ hostname = hostname.decode('idna')
+ port = parsed.port
+ if port and not isinstance(port, int_types):
+ port = port.decode('ascii')
+
+ netloc = ''
+ if username is not None:
+ netloc += username
+ if password:
+ netloc += ':' + password
+ netloc += '@'
+ if hostname is not None:
+ netloc += hostname
+ if port is not None:
+ netloc += ':' + str_cls(port)
+
+ path = _urlunquote(parsed.path, remap=['/'], preserve=True)
+ query = _urlunquote(parsed.query, remap=['&', '='], preserve=True)
+ fragment = _urlunquote(parsed.fragment)
+
+ return urlunsplit((scheme, netloc, path, query, fragment))
+
+
+def _iri_utf8_errors_handler(exc):
+ """
+ Error handler for decoding UTF-8 parts of a URI into an IRI. Leaves byte
+ sequences encoded in %XX format, but as part of a unicode string.
+
+ :param exc:
+ The UnicodeDecodeError exception
+
+ :return:
+ A 2-element tuple of (replacement unicode string, integer index to
+ resume at)
+ """
+
+ bytes_as_ints = bytes_to_list(exc.object[exc.start:exc.end])
+ replacements = ['%%%02x' % num for num in bytes_as_ints]
+ return (''.join(replacements), exc.end)
+
+
+codecs.register_error('iriutf8', _iri_utf8_errors_handler)
+
+
+def _urlquote(string, safe=''):
+ """
+ Quotes a unicode string for use in a URL
+
+ :param string:
+ A unicode string
+
+ :param safe:
+        A unicode string of characters to not encode
+
+ :return:
+ None (if string is None) or an ASCII byte string of the quoted string
+ """
+
+ if string is None or string == '':
+ return None
+
+ # Anything already hex quoted is pulled out of the URL and unquoted if
+ # possible
+ escapes = []
+ if re.search('%[0-9a-fA-F]{2}', string):
+ # Try to unquote any percent values, restoring them if they are not
+ # valid UTF-8. Also, requote any safe chars since encoded versions of
+ # those are functionally different than the unquoted ones.
+ def _try_unescape(match):
+ byte_string = unquote_to_bytes(match.group(0))
+ unicode_string = byte_string.decode('utf-8', 'iriutf8')
+ for safe_char in list(safe):
+ unicode_string = unicode_string.replace(safe_char, '%%%02x' % ord(safe_char))
+ return unicode_string
+ string = re.sub('(?:%[0-9a-fA-F]{2})+', _try_unescape, string)
+
+        # Once we have the minimal set of hex quoted values, remove them from
+ # the string so that they are not double quoted
+ def _extract_escape(match):
+ escapes.append(match.group(0).encode('ascii'))
+ return '\x00'
+ string = re.sub('%[0-9a-fA-F]{2}', _extract_escape, string)
+
+ output = urlquote(string.encode('utf-8'), safe=safe.encode('utf-8'))
+ if not isinstance(output, byte_cls):
+ output = output.encode('ascii')
+
+ # Restore the existing quoted values that we extracted
+ if len(escapes) > 0:
+ def _return_escape(_):
+ return escapes.pop(0)
+ output = re.sub(b'%00', _return_escape, output)
+
+ return output
+
+
+def _urlunquote(byte_string, remap=None, preserve=None):
+ """
+ Unquotes a URI portion from a byte string into unicode using UTF-8
+
+ :param byte_string:
+ A byte string of the data to unquote
+
+ :param remap:
+ A list of characters (as unicode) that should be re-mapped to a
+ %XX encoding. This is used when characters are not valid in part of a
+ URL.
+
+ :param preserve:
+ A bool - indicates that the chars to be remapped if they occur in
+ non-hex form, should be preserved. E.g. / for URL path.
+
+ :return:
+ A unicode string
+ """
+
+ if byte_string is None:
+ return byte_string
+
+ if byte_string == b'':
+ return ''
+
+ if preserve:
+ replacements = ['\x1A', '\x1C', '\x1D', '\x1E', '\x1F']
+ preserve_unmap = {}
+ for char in remap:
+ replacement = replacements.pop(0)
+ preserve_unmap[replacement] = char
+ byte_string = byte_string.replace(char.encode('ascii'), replacement.encode('ascii'))
+
+ byte_string = unquote_to_bytes(byte_string)
+
+ if remap:
+ for char in remap:
+ byte_string = byte_string.replace(char.encode('ascii'), ('%%%02x' % ord(char)).encode('ascii'))
+
+ output = byte_string.decode('utf-8', 'iriutf8')
+
+ if preserve:
+ for replacement, original in preserve_unmap.items():
+ output = output.replace(replacement, original)
+
+ return output
diff --git a/jc/parsers/asn1crypto/_ordereddict.py b/jc/parsers/asn1crypto/_ordereddict.py
new file mode 100644
index 00000000..2f18ab5a
--- /dev/null
+++ b/jc/parsers/asn1crypto/_ordereddict.py
@@ -0,0 +1,135 @@
+# Copyright (c) 2009 Raymond Hettinger
+#
+# Permission is hereby granted, free of charge, to any person
+# obtaining a copy of this software and associated documentation files
+# (the "Software"), to deal in the Software without restriction,
+# including without limitation the rights to use, copy, modify, merge,
+# publish, distribute, sublicense, and/or sell copies of the Software,
+# and to permit persons to whom the Software is furnished to do so,
+# subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be
+# included in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+# OTHER DEALINGS IN THE SOFTWARE.
+
+import sys
+
+if not sys.version_info < (2, 7):
+
+ from collections import OrderedDict
+
+else:
+
+ from UserDict import DictMixin
+
+ class OrderedDict(dict, DictMixin):
+
+ def __init__(self, *args, **kwds):
+ if len(args) > 1:
+ raise TypeError('expected at most 1 arguments, got %d' % len(args))
+ try:
+ self.__end
+ except AttributeError:
+ self.clear()
+ self.update(*args, **kwds)
+
+ def clear(self):
+ self.__end = end = []
+ end += [None, end, end] # sentinel node for doubly linked list
+ self.__map = {} # key --> [key, prev, next]
+ dict.clear(self)
+
+ def __setitem__(self, key, value):
+ if key not in self:
+ end = self.__end
+ curr = end[1]
+ curr[2] = end[1] = self.__map[key] = [key, curr, end]
+ dict.__setitem__(self, key, value)
+
+ def __delitem__(self, key):
+ dict.__delitem__(self, key)
+ key, prev, next_ = self.__map.pop(key)
+ prev[2] = next_
+ next_[1] = prev
+
+ def __iter__(self):
+ end = self.__end
+ curr = end[2]
+ while curr is not end:
+ yield curr[0]
+ curr = curr[2]
+
+ def __reversed__(self):
+ end = self.__end
+ curr = end[1]
+ while curr is not end:
+ yield curr[0]
+ curr = curr[1]
+
+ def popitem(self, last=True):
+ if not self:
+ raise KeyError('dictionary is empty')
+ if last:
+ key = reversed(self).next()
+ else:
+ key = iter(self).next()
+ value = self.pop(key)
+ return key, value
+
+ def __reduce__(self):
+ items = [[k, self[k]] for k in self]
+ tmp = self.__map, self.__end
+ del self.__map, self.__end
+ inst_dict = vars(self).copy()
+ self.__map, self.__end = tmp
+ if inst_dict:
+ return (self.__class__, (items,), inst_dict)
+ return self.__class__, (items,)
+
+ def keys(self):
+ return list(self)
+
+ setdefault = DictMixin.setdefault
+ update = DictMixin.update
+ pop = DictMixin.pop
+ values = DictMixin.values
+ items = DictMixin.items
+ iterkeys = DictMixin.iterkeys
+ itervalues = DictMixin.itervalues
+ iteritems = DictMixin.iteritems
+
+ def __repr__(self):
+ if not self:
+ return '%s()' % (self.__class__.__name__,)
+ return '%s(%r)' % (self.__class__.__name__, self.items())
+
+ def copy(self):
+ return self.__class__(self)
+
+ @classmethod
+ def fromkeys(cls, iterable, value=None):
+ d = cls()
+ for key in iterable:
+ d[key] = value
+ return d
+
+ def __eq__(self, other):
+ if isinstance(other, OrderedDict):
+ if len(self) != len(other):
+ return False
+ for p, q in zip(self.items(), other.items()):
+ if p != q:
+ return False
+ return True
+ return dict.__eq__(self, other)
+
+ def __ne__(self, other):
+ return not self == other
diff --git a/jc/parsers/asn1crypto/_teletex_codec.py b/jc/parsers/asn1crypto/_teletex_codec.py
new file mode 100644
index 00000000..b5991aaf
--- /dev/null
+++ b/jc/parsers/asn1crypto/_teletex_codec.py
@@ -0,0 +1,331 @@
+# coding: utf-8
+
+"""
+Implementation of the teletex T.61 codec. Exports the following items:
+
+ - register()
+"""
+
+from __future__ import unicode_literals, division, absolute_import, print_function
+
+import codecs
+
+
+class TeletexCodec(codecs.Codec):
+
+ def encode(self, input_, errors='strict'):
+ return codecs.charmap_encode(input_, errors, ENCODING_TABLE)
+
+ def decode(self, input_, errors='strict'):
+ return codecs.charmap_decode(input_, errors, DECODING_TABLE)
+
+
+class TeletexIncrementalEncoder(codecs.IncrementalEncoder):
+
+ def encode(self, input_, final=False):
+ return codecs.charmap_encode(input_, self.errors, ENCODING_TABLE)[0]
+
+
+class TeletexIncrementalDecoder(codecs.IncrementalDecoder):
+
+ def decode(self, input_, final=False):
+ return codecs.charmap_decode(input_, self.errors, DECODING_TABLE)[0]
+
+
+class TeletexStreamWriter(TeletexCodec, codecs.StreamWriter):
+
+ pass
+
+
+class TeletexStreamReader(TeletexCodec, codecs.StreamReader):
+
+ pass
+
+
+def teletex_search_function(name):
+ """
+ Search function for teletex codec that is passed to codecs.register()
+ """
+
+ if name != 'teletex':
+ return None
+
+ return codecs.CodecInfo(
+ name='teletex',
+ encode=TeletexCodec().encode,
+ decode=TeletexCodec().decode,
+ incrementalencoder=TeletexIncrementalEncoder,
+ incrementaldecoder=TeletexIncrementalDecoder,
+ streamreader=TeletexStreamReader,
+ streamwriter=TeletexStreamWriter,
+ )
+
+
+def register():
+ """
+ Registers the teletex codec
+ """
+
+ codecs.register(teletex_search_function)
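+
+# Example usage: once register() has been called, the codec can be looked up
+# by name, e.g. b'abc'.decode('teletex') == 'abc' and
+# 'abc'.encode('teletex') == b'abc' for plain ASCII letters and digits.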
+
+
+# http://en.wikipedia.org/wiki/ITU_T.61
+DECODING_TABLE = (
+ '\u0000'
+ '\u0001'
+ '\u0002'
+ '\u0003'
+ '\u0004'
+ '\u0005'
+ '\u0006'
+ '\u0007'
+ '\u0008'
+ '\u0009'
+ '\u000A'
+ '\u000B'
+ '\u000C'
+ '\u000D'
+ '\u000E'
+ '\u000F'
+ '\u0010'
+ '\u0011'
+ '\u0012'
+ '\u0013'
+ '\u0014'
+ '\u0015'
+ '\u0016'
+ '\u0017'
+ '\u0018'
+ '\u0019'
+ '\u001A'
+ '\u001B'
+ '\u001C'
+ '\u001D'
+ '\u001E'
+ '\u001F'
+ '\u0020'
+ '\u0021'
+ '\u0022'
+ '\ufffe'
+ '\ufffe'
+ '\u0025'
+ '\u0026'
+ '\u0027'
+ '\u0028'
+ '\u0029'
+ '\u002A'
+ '\u002B'
+ '\u002C'
+ '\u002D'
+ '\u002E'
+ '\u002F'
+ '\u0030'
+ '\u0031'
+ '\u0032'
+ '\u0033'
+ '\u0034'
+ '\u0035'
+ '\u0036'
+ '\u0037'
+ '\u0038'
+ '\u0039'
+ '\u003A'
+ '\u003B'
+ '\u003C'
+ '\u003D'
+ '\u003E'
+ '\u003F'
+ '\u0040'
+ '\u0041'
+ '\u0042'
+ '\u0043'
+ '\u0044'
+ '\u0045'
+ '\u0046'
+ '\u0047'
+ '\u0048'
+ '\u0049'
+ '\u004A'
+ '\u004B'
+ '\u004C'
+ '\u004D'
+ '\u004E'
+ '\u004F'
+ '\u0050'
+ '\u0051'
+ '\u0052'
+ '\u0053'
+ '\u0054'
+ '\u0055'
+ '\u0056'
+ '\u0057'
+ '\u0058'
+ '\u0059'
+ '\u005A'
+ '\u005B'
+ '\ufffe'
+ '\u005D'
+ '\ufffe'
+ '\u005F'
+ '\ufffe'
+ '\u0061'
+ '\u0062'
+ '\u0063'
+ '\u0064'
+ '\u0065'
+ '\u0066'
+ '\u0067'
+ '\u0068'
+ '\u0069'
+ '\u006A'
+ '\u006B'
+ '\u006C'
+ '\u006D'
+ '\u006E'
+ '\u006F'
+ '\u0070'
+ '\u0071'
+ '\u0072'
+ '\u0073'
+ '\u0074'
+ '\u0075'
+ '\u0076'
+ '\u0077'
+ '\u0078'
+ '\u0079'
+ '\u007A'
+ '\ufffe'
+ '\u007C'
+ '\ufffe'
+ '\ufffe'
+ '\u007F'
+ '\u0080'
+ '\u0081'
+ '\u0082'
+ '\u0083'
+ '\u0084'
+ '\u0085'
+ '\u0086'
+ '\u0087'
+ '\u0088'
+ '\u0089'
+ '\u008A'
+ '\u008B'
+ '\u008C'
+ '\u008D'
+ '\u008E'
+ '\u008F'
+ '\u0090'
+ '\u0091'
+ '\u0092'
+ '\u0093'
+ '\u0094'
+ '\u0095'
+ '\u0096'
+ '\u0097'
+ '\u0098'
+ '\u0099'
+ '\u009A'
+ '\u009B'
+ '\u009C'
+ '\u009D'
+ '\u009E'
+ '\u009F'
+ '\u00A0'
+ '\u00A1'
+ '\u00A2'
+ '\u00A3'
+ '\u0024'
+ '\u00A5'
+ '\u0023'
+ '\u00A7'
+ '\u00A4'
+ '\ufffe'
+ '\ufffe'
+ '\u00AB'
+ '\ufffe'
+ '\ufffe'
+ '\ufffe'
+ '\ufffe'
+ '\u00B0'
+ '\u00B1'
+ '\u00B2'
+ '\u00B3'
+ '\u00D7'
+ '\u00B5'
+ '\u00B6'
+ '\u00B7'
+ '\u00F7'
+ '\ufffe'
+ '\ufffe'
+ '\u00BB'
+ '\u00BC'
+ '\u00BD'
+ '\u00BE'
+ '\u00BF'
+ '\ufffe'
+ '\u0300'
+ '\u0301'
+ '\u0302'
+ '\u0303'
+ '\u0304'
+ '\u0306'
+ '\u0307'
+ '\u0308'
+ '\ufffe'
+ '\u030A'
+ '\u0327'
+ '\u0332'
+ '\u030B'
+ '\u0328'
+ '\u030C'
+ '\ufffe'
+ '\ufffe'
+ '\ufffe'
+ '\ufffe'
+ '\ufffe'
+ '\ufffe'
+ '\ufffe'
+ '\ufffe'
+ '\ufffe'
+ '\ufffe'
+ '\ufffe'
+ '\ufffe'
+ '\ufffe'
+ '\ufffe'
+ '\ufffe'
+ '\ufffe'
+ '\u2126'
+ '\u00C6'
+ '\u00D0'
+ '\u00AA'
+ '\u0126'
+ '\ufffe'
+ '\u0132'
+ '\u013F'
+ '\u0141'
+ '\u00D8'
+ '\u0152'
+ '\u00BA'
+ '\u00DE'
+ '\u0166'
+ '\u014A'
+ '\u0149'
+ '\u0138'
+ '\u00E6'
+ '\u0111'
+ '\u00F0'
+ '\u0127'
+ '\u0131'
+ '\u0133'
+ '\u0140'
+ '\u0142'
+ '\u00F8'
+ '\u0153'
+ '\u00DF'
+ '\u00FE'
+ '\u0167'
+ '\u014B'
+ '\ufffe'
+)
+ENCODING_TABLE = codecs.charmap_build(DECODING_TABLE)
diff --git a/jc/parsers/asn1crypto/_types.py b/jc/parsers/asn1crypto/_types.py
new file mode 100644
index 00000000..b9ca8cc7
--- /dev/null
+++ b/jc/parsers/asn1crypto/_types.py
@@ -0,0 +1,46 @@
+# coding: utf-8
+from __future__ import unicode_literals, division, absolute_import, print_function
+
+import inspect
+import sys
+
+
+if sys.version_info < (3,):
+ str_cls = unicode # noqa
+ byte_cls = str
+ int_types = (int, long) # noqa
+
+ def bytes_to_list(byte_string):
+ return [ord(b) for b in byte_string]
+
+ chr_cls = chr
+
+else:
+ str_cls = str
+ byte_cls = bytes
+ int_types = int
+
+ bytes_to_list = list
+
+ def chr_cls(num):
+ return bytes([num])
+
+
+def type_name(value):
+ """
+ Returns a user-readable name for the type of an object
+
+ :param value:
+ A value to get the type name of
+
+ :return:
+ A unicode string of the object's type name
+ """
+
+ if inspect.isclass(value):
+ cls = value
+ else:
+ cls = value.__class__
+ if cls.__module__ in set(['builtins', '__builtin__']):
+ return cls.__name__
+ return '%s.%s' % (cls.__module__, cls.__name__)
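+
+# For example, type_name(1) returns 'int', while for a non-builtin value such
+# as a datetime.date instance it returns 'datetime.date'.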
diff --git a/jc/parsers/asn1crypto/algos.py b/jc/parsers/asn1crypto/algos.py
new file mode 100644
index 00000000..cdd0020a
--- /dev/null
+++ b/jc/parsers/asn1crypto/algos.py
@@ -0,0 +1,1189 @@
+# coding: utf-8
+
+"""
+ASN.1 type classes for various algorithms used in various aspects of public
+key cryptography. Exports the following items:
+
+ - AlgorithmIdentifier()
+ - AnyAlgorithmIdentifier()
+ - DigestAlgorithm()
+ - DigestInfo()
+ - DSASignature()
+ - EncryptionAlgorithm()
+ - HmacAlgorithm()
+ - KdfAlgorithm()
+ - Pkcs5MacAlgorithm()
+ - SignedDigestAlgorithm()
+
+Other type classes are defined that help compose the types listed above.
+"""
+
+from __future__ import unicode_literals, division, absolute_import, print_function
+
+from ._errors import unwrap
+from ._int import fill_width
+from .util import int_from_bytes, int_to_bytes
+from .core import (
+ Any,
+ Choice,
+ Integer,
+ Null,
+ ObjectIdentifier,
+ OctetString,
+ Sequence,
+ Void,
+)
+
+
+# Structures and OIDs in this file are pulled from
+# https://tools.ietf.org/html/rfc3279, https://tools.ietf.org/html/rfc4055,
+# https://tools.ietf.org/html/rfc5758, https://tools.ietf.org/html/rfc7292,
+# http://www.emc.com/collateral/white-papers/h11302-pkcs5v2-1-password-based-cryptography-standard-wp.pdf
+
+class AlgorithmIdentifier(Sequence):
+ _fields = [
+ ('algorithm', ObjectIdentifier),
+ ('parameters', Any, {'optional': True}),
+ ]
+
+
+class _ForceNullParameters(object):
+ """
+ Various structures based on AlgorithmIdentifier require that the parameters
+ field be core.Null() for certain OIDs. This mixin ensures that happens.
+ """
+
+ # The following attribute, plus the parameters spec callback and custom
+ # __setitem__ are all to handle a situation where parameters should not be
+ # optional and must be Null for certain OIDs. More info at
+ # https://tools.ietf.org/html/rfc4055#page-15 and
+ # https://tools.ietf.org/html/rfc4055#section-2.1
+ _null_algos = set([
+ '1.2.840.113549.1.1.1', # rsassa_pkcs1v15 / rsaes_pkcs1v15 / rsa
+ '1.2.840.113549.1.1.11', # sha256_rsa
+ '1.2.840.113549.1.1.12', # sha384_rsa
+ '1.2.840.113549.1.1.13', # sha512_rsa
+ '1.2.840.113549.1.1.14', # sha224_rsa
+ '1.3.14.3.2.26', # sha1
+ '2.16.840.1.101.3.4.2.4', # sha224
+ '2.16.840.1.101.3.4.2.1', # sha256
+ '2.16.840.1.101.3.4.2.2', # sha384
+ '2.16.840.1.101.3.4.2.3', # sha512
+ ])
+
+ def _parameters_spec(self):
+ if self._oid_pair == ('algorithm', 'parameters'):
+ algo = self['algorithm'].native
+ if algo in self._oid_specs:
+ return self._oid_specs[algo]
+
+ if self['algorithm'].dotted in self._null_algos:
+ return Null
+
+ return None
+
+ _spec_callbacks = {
+ 'parameters': _parameters_spec
+ }
+
+ # We have to override this since the spec callback uses the value of
+ # algorithm to determine the parameter spec, however default values are
+ # assigned before setting a field, so a default value can't be based on
+ # another field value (unless it is a default also). Thus we have to
+ # manually check to see if the algorithm was set and parameters is unset,
+ # and then fix the value as appropriate.
+ def __setitem__(self, key, value):
+ res = super(_ForceNullParameters, self).__setitem__(key, value)
+ if key != 'algorithm':
+ return res
+ if self['algorithm'].dotted not in self._null_algos:
+ return res
+ if self['parameters'].__class__ != Void:
+ return res
+ self['parameters'] = Null()
+ return res
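+
+# Illustrative effect: in a DigestAlgorithm (which mixes this class in),
+# assigning value['algorithm'] = 'sha256' also sets value['parameters'] to
+# Null(), because the sha256 OID is listed in _null_algos above.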
+
+
+class HmacAlgorithmId(ObjectIdentifier):
+ _map = {
+ '1.3.14.3.2.10': 'des_mac',
+ '1.2.840.113549.2.7': 'sha1',
+ '1.2.840.113549.2.8': 'sha224',
+ '1.2.840.113549.2.9': 'sha256',
+ '1.2.840.113549.2.10': 'sha384',
+ '1.2.840.113549.2.11': 'sha512',
+ '1.2.840.113549.2.12': 'sha512_224',
+ '1.2.840.113549.2.13': 'sha512_256',
+ '2.16.840.1.101.3.4.2.13': 'sha3_224',
+ '2.16.840.1.101.3.4.2.14': 'sha3_256',
+ '2.16.840.1.101.3.4.2.15': 'sha3_384',
+ '2.16.840.1.101.3.4.2.16': 'sha3_512',
+ }
+
+
+class HmacAlgorithm(Sequence):
+ _fields = [
+ ('algorithm', HmacAlgorithmId),
+ ('parameters', Any, {'optional': True}),
+ ]
+
+
+class DigestAlgorithmId(ObjectIdentifier):
+ _map = {
+ '1.2.840.113549.2.2': 'md2',
+ '1.2.840.113549.2.5': 'md5',
+ '1.3.14.3.2.26': 'sha1',
+ '2.16.840.1.101.3.4.2.4': 'sha224',
+ '2.16.840.1.101.3.4.2.1': 'sha256',
+ '2.16.840.1.101.3.4.2.2': 'sha384',
+ '2.16.840.1.101.3.4.2.3': 'sha512',
+ '2.16.840.1.101.3.4.2.5': 'sha512_224',
+ '2.16.840.1.101.3.4.2.6': 'sha512_256',
+ '2.16.840.1.101.3.4.2.7': 'sha3_224',
+ '2.16.840.1.101.3.4.2.8': 'sha3_256',
+ '2.16.840.1.101.3.4.2.9': 'sha3_384',
+ '2.16.840.1.101.3.4.2.10': 'sha3_512',
+ '2.16.840.1.101.3.4.2.11': 'shake128',
+ '2.16.840.1.101.3.4.2.12': 'shake256',
+ '2.16.840.1.101.3.4.2.17': 'shake128_len',
+ '2.16.840.1.101.3.4.2.18': 'shake256_len',
+ }
+
+
+class DigestAlgorithm(_ForceNullParameters, Sequence):
+ _fields = [
+ ('algorithm', DigestAlgorithmId),
+ ('parameters', Any, {'optional': True}),
+ ]
+
+
+# This structure is what is signed with a SignedDigestAlgorithm
+class DigestInfo(Sequence):
+ _fields = [
+ ('digest_algorithm', DigestAlgorithm),
+ ('digest', OctetString),
+ ]
+
+
+class MaskGenAlgorithmId(ObjectIdentifier):
+ _map = {
+ '1.2.840.113549.1.1.8': 'mgf1',
+ }
+
+
+class MaskGenAlgorithm(Sequence):
+ _fields = [
+ ('algorithm', MaskGenAlgorithmId),
+ ('parameters', Any, {'optional': True}),
+ ]
+
+ _oid_pair = ('algorithm', 'parameters')
+ _oid_specs = {
+ 'mgf1': DigestAlgorithm
+ }
+
+
+class TrailerField(Integer):
+ _map = {
+ 1: 'trailer_field_bc',
+ }
+
+
+class RSASSAPSSParams(Sequence):
+ _fields = [
+ (
+ 'hash_algorithm',
+ DigestAlgorithm,
+ {
+ 'explicit': 0,
+ 'default': {'algorithm': 'sha1'},
+ }
+ ),
+ (
+ 'mask_gen_algorithm',
+ MaskGenAlgorithm,
+ {
+ 'explicit': 1,
+ 'default': {
+ 'algorithm': 'mgf1',
+ 'parameters': {'algorithm': 'sha1'},
+ },
+ }
+ ),
+ (
+ 'salt_length',
+ Integer,
+ {
+ 'explicit': 2,
+ 'default': 20,
+ }
+ ),
+ (
+ 'trailer_field',
+ TrailerField,
+ {
+ 'explicit': 3,
+ 'default': 'trailer_field_bc',
+ }
+ ),
+ ]
+
+
+class SignedDigestAlgorithmId(ObjectIdentifier):
+ _map = {
+ '1.3.14.3.2.3': 'md5_rsa',
+ '1.3.14.3.2.29': 'sha1_rsa',
+ '1.3.14.7.2.3.1': 'md2_rsa',
+ '1.2.840.113549.1.1.2': 'md2_rsa',
+ '1.2.840.113549.1.1.4': 'md5_rsa',
+ '1.2.840.113549.1.1.5': 'sha1_rsa',
+ '1.2.840.113549.1.1.14': 'sha224_rsa',
+ '1.2.840.113549.1.1.11': 'sha256_rsa',
+ '1.2.840.113549.1.1.12': 'sha384_rsa',
+ '1.2.840.113549.1.1.13': 'sha512_rsa',
+ '1.2.840.113549.1.1.10': 'rsassa_pss',
+ '1.2.840.10040.4.3': 'sha1_dsa',
+ '1.3.14.3.2.13': 'sha1_dsa',
+ '1.3.14.3.2.27': 'sha1_dsa',
+ '2.16.840.1.101.3.4.3.1': 'sha224_dsa',
+ '2.16.840.1.101.3.4.3.2': 'sha256_dsa',
+ '1.2.840.10045.4.1': 'sha1_ecdsa',
+ '1.2.840.10045.4.3.1': 'sha224_ecdsa',
+ '1.2.840.10045.4.3.2': 'sha256_ecdsa',
+ '1.2.840.10045.4.3.3': 'sha384_ecdsa',
+ '1.2.840.10045.4.3.4': 'sha512_ecdsa',
+ '2.16.840.1.101.3.4.3.9': 'sha3_224_ecdsa',
+ '2.16.840.1.101.3.4.3.10': 'sha3_256_ecdsa',
+ '2.16.840.1.101.3.4.3.11': 'sha3_384_ecdsa',
+ '2.16.840.1.101.3.4.3.12': 'sha3_512_ecdsa',
+ # For when the digest is specified elsewhere in a Sequence
+ '1.2.840.113549.1.1.1': 'rsassa_pkcs1v15',
+ '1.2.840.10040.4.1': 'dsa',
+ '1.2.840.10045.4': 'ecdsa',
+ # RFC 8410 -- https://tools.ietf.org/html/rfc8410
+ '1.3.101.112': 'ed25519',
+ '1.3.101.113': 'ed448',
+ }
+
+ _reverse_map = {
+ 'dsa': '1.2.840.10040.4.1',
+ 'ecdsa': '1.2.840.10045.4',
+ 'md2_rsa': '1.2.840.113549.1.1.2',
+ 'md5_rsa': '1.2.840.113549.1.1.4',
+ 'rsassa_pkcs1v15': '1.2.840.113549.1.1.1',
+ 'rsassa_pss': '1.2.840.113549.1.1.10',
+ 'sha1_dsa': '1.2.840.10040.4.3',
+ 'sha1_ecdsa': '1.2.840.10045.4.1',
+ 'sha1_rsa': '1.2.840.113549.1.1.5',
+ 'sha224_dsa': '2.16.840.1.101.3.4.3.1',
+ 'sha224_ecdsa': '1.2.840.10045.4.3.1',
+ 'sha224_rsa': '1.2.840.113549.1.1.14',
+ 'sha256_dsa': '2.16.840.1.101.3.4.3.2',
+ 'sha256_ecdsa': '1.2.840.10045.4.3.2',
+ 'sha256_rsa': '1.2.840.113549.1.1.11',
+ 'sha384_ecdsa': '1.2.840.10045.4.3.3',
+ 'sha384_rsa': '1.2.840.113549.1.1.12',
+ 'sha512_ecdsa': '1.2.840.10045.4.3.4',
+ 'sha512_rsa': '1.2.840.113549.1.1.13',
+ 'sha3_224_ecdsa': '2.16.840.1.101.3.4.3.9',
+ 'sha3_256_ecdsa': '2.16.840.1.101.3.4.3.10',
+ 'sha3_384_ecdsa': '2.16.840.1.101.3.4.3.11',
+ 'sha3_512_ecdsa': '2.16.840.1.101.3.4.3.12',
+ 'ed25519': '1.3.101.112',
+ 'ed448': '1.3.101.113',
+ }
+
+
+class SignedDigestAlgorithm(_ForceNullParameters, Sequence):
+ _fields = [
+ ('algorithm', SignedDigestAlgorithmId),
+ ('parameters', Any, {'optional': True}),
+ ]
+
+ _oid_pair = ('algorithm', 'parameters')
+ _oid_specs = {
+ 'rsassa_pss': RSASSAPSSParams,
+ }
+
+ @property
+ def signature_algo(self):
+ """
+ :return:
+ A unicode string of "rsassa_pkcs1v15", "rsassa_pss", "dsa",
+ "ecdsa", "ed25519" or "ed448"
+ """
+
+ algorithm = self['algorithm'].native
+
+ algo_map = {
+ 'md2_rsa': 'rsassa_pkcs1v15',
+ 'md5_rsa': 'rsassa_pkcs1v15',
+ 'sha1_rsa': 'rsassa_pkcs1v15',
+ 'sha224_rsa': 'rsassa_pkcs1v15',
+ 'sha256_rsa': 'rsassa_pkcs1v15',
+ 'sha384_rsa': 'rsassa_pkcs1v15',
+ 'sha512_rsa': 'rsassa_pkcs1v15',
+ 'rsassa_pkcs1v15': 'rsassa_pkcs1v15',
+ 'rsassa_pss': 'rsassa_pss',
+ 'sha1_dsa': 'dsa',
+ 'sha224_dsa': 'dsa',
+ 'sha256_dsa': 'dsa',
+ 'dsa': 'dsa',
+ 'sha1_ecdsa': 'ecdsa',
+ 'sha224_ecdsa': 'ecdsa',
+ 'sha256_ecdsa': 'ecdsa',
+ 'sha384_ecdsa': 'ecdsa',
+ 'sha512_ecdsa': 'ecdsa',
+ 'sha3_224_ecdsa': 'ecdsa',
+ 'sha3_256_ecdsa': 'ecdsa',
+ 'sha3_384_ecdsa': 'ecdsa',
+ 'sha3_512_ecdsa': 'ecdsa',
+ 'ecdsa': 'ecdsa',
+ 'ed25519': 'ed25519',
+ 'ed448': 'ed448',
+ }
+ if algorithm in algo_map:
+ return algo_map[algorithm]
+
+ raise ValueError(unwrap(
+ '''
+ Signature algorithm not known for %s
+ ''',
+ algorithm
+ ))
+
+ @property
+ def hash_algo(self):
+ """
+ :return:
+ A unicode string of "md2", "md5", "sha1", "sha224", "sha256",
+ "sha384", "sha512", "sha512_224", "sha512_256" or "shake256"
+ """
+
+ algorithm = self['algorithm'].native
+
+ algo_map = {
+ 'md2_rsa': 'md2',
+ 'md5_rsa': 'md5',
+ 'sha1_rsa': 'sha1',
+ 'sha224_rsa': 'sha224',
+ 'sha256_rsa': 'sha256',
+ 'sha384_rsa': 'sha384',
+ 'sha512_rsa': 'sha512',
+ 'sha1_dsa': 'sha1',
+ 'sha224_dsa': 'sha224',
+ 'sha256_dsa': 'sha256',
+ 'sha1_ecdsa': 'sha1',
+ 'sha224_ecdsa': 'sha224',
+ 'sha256_ecdsa': 'sha256',
+ 'sha384_ecdsa': 'sha384',
+ 'sha512_ecdsa': 'sha512',
+ 'ed25519': 'sha512',
+ 'ed448': 'shake256',
+ }
+ if algorithm in algo_map:
+ return algo_map[algorithm]
+
+ if algorithm == 'rsassa_pss':
+ return self['parameters']['hash_algorithm']['algorithm'].native
+
+ raise ValueError(unwrap(
+ '''
+ Hash algorithm not known for %s
+ ''',
+ algorithm
+ ))
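+
+# Illustrative example: for an 'algorithm' of 'sha256_ecdsa', signature_algo
+# is 'ecdsa' and hash_algo is 'sha256'; for 'rsassa_pss' the hash is read from
+# the RSASSAPSSParams stored in 'parameters'.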
+
+
+class Pbkdf2Salt(Choice):
+ _alternatives = [
+ ('specified', OctetString),
+ ('other_source', AlgorithmIdentifier),
+ ]
+
+
+class Pbkdf2Params(Sequence):
+ _fields = [
+ ('salt', Pbkdf2Salt),
+ ('iteration_count', Integer),
+ ('key_length', Integer, {'optional': True}),
+ ('prf', HmacAlgorithm, {'default': {'algorithm': 'sha1'}}),
+ ]
+
+
+class KdfAlgorithmId(ObjectIdentifier):
+ _map = {
+ '1.2.840.113549.1.5.12': 'pbkdf2'
+ }
+
+
+class KdfAlgorithm(Sequence):
+ _fields = [
+ ('algorithm', KdfAlgorithmId),
+ ('parameters', Any, {'optional': True}),
+ ]
+ _oid_pair = ('algorithm', 'parameters')
+ _oid_specs = {
+ 'pbkdf2': Pbkdf2Params
+ }
+
+
+class DHParameters(Sequence):
+ """
+ Original Name: DHParameter
+ Source: ftp://ftp.rsasecurity.com/pub/pkcs/ascii/pkcs-3.asc section 9
+ """
+
+ _fields = [
+ ('p', Integer),
+ ('g', Integer),
+ ('private_value_length', Integer, {'optional': True}),
+ ]
+
+
+class KeyExchangeAlgorithmId(ObjectIdentifier):
+ _map = {
+ '1.2.840.113549.1.3.1': 'dh',
+ }
+
+
+class KeyExchangeAlgorithm(Sequence):
+ _fields = [
+ ('algorithm', KeyExchangeAlgorithmId),
+ ('parameters', Any, {'optional': True}),
+ ]
+ _oid_pair = ('algorithm', 'parameters')
+ _oid_specs = {
+ 'dh': DHParameters,
+ }
+
+
+class Rc2Params(Sequence):
+ _fields = [
+ ('rc2_parameter_version', Integer, {'optional': True}),
+ ('iv', OctetString),
+ ]
+
+
+class Rc5ParamVersion(Integer):
+ _map = {
+ 16: 'v1-0'
+ }
+
+
+class Rc5Params(Sequence):
+ _fields = [
+ ('version', Rc5ParamVersion),
+ ('rounds', Integer),
+ ('block_size_in_bits', Integer),
+ ('iv', OctetString, {'optional': True}),
+ ]
+
+
+class Pbes1Params(Sequence):
+ _fields = [
+ ('salt', OctetString),
+ ('iterations', Integer),
+ ]
+
+
+class CcmParams(Sequence):
+ # https://tools.ietf.org/html/rfc5084
+ # aes_ICVlen: 4 | 6 | 8 | 10 | 12 | 14 | 16
+ _fields = [
+ ('aes_nonce', OctetString),
+ ('aes_icvlen', Integer),
+ ]
+
+
+class PSourceAlgorithmId(ObjectIdentifier):
+ _map = {
+ '1.2.840.113549.1.1.9': 'p_specified',
+ }
+
+
+class PSourceAlgorithm(Sequence):
+ _fields = [
+ ('algorithm', PSourceAlgorithmId),
+ ('parameters', Any, {'optional': True}),
+ ]
+
+ _oid_pair = ('algorithm', 'parameters')
+ _oid_specs = {
+ 'p_specified': OctetString
+ }
+
+
+class RSAESOAEPParams(Sequence):
+ _fields = [
+ (
+ 'hash_algorithm',
+ DigestAlgorithm,
+ {
+ 'explicit': 0,
+ 'default': {'algorithm': 'sha1'}
+ }
+ ),
+ (
+ 'mask_gen_algorithm',
+ MaskGenAlgorithm,
+ {
+ 'explicit': 1,
+ 'default': {
+ 'algorithm': 'mgf1',
+ 'parameters': {'algorithm': 'sha1'}
+ }
+ }
+ ),
+ (
+ 'p_source_algorithm',
+ PSourceAlgorithm,
+ {
+ 'explicit': 2,
+ 'default': {
+ 'algorithm': 'p_specified',
+ 'parameters': b''
+ }
+ }
+ ),
+ ]
+
+
+class DSASignature(Sequence):
+ """
+ An ASN.1 class for translating between the OS crypto library's
+ representation of an (EC)DSA signature and the ASN.1 structure that is part
+ of various RFCs.
+
+ Original Name: DSS-Sig-Value
+ Source: https://tools.ietf.org/html/rfc3279#section-2.2.2
+ """
+
+ _fields = [
+ ('r', Integer),
+ ('s', Integer),
+ ]
+
+ @classmethod
+ def from_p1363(cls, data):
+ """
+ Reads a signature from a byte string encoded according to IEEE P1363,
+ which is used by Microsoft's BCryptSignHash() function.
+
+ :param data:
+ A byte string from BCryptSignHash()
+
+ :return:
+ A DSASignature object
+ """
+
+ r = int_from_bytes(data[0:len(data) // 2])
+ s = int_from_bytes(data[len(data) // 2:])
+ return cls({'r': r, 's': s})
+
+ def to_p1363(self):
+ """
+ Dumps a signature to a byte string compatible with Microsoft's
+ BCryptVerifySignature() function.
+
+ :return:
+ A byte string compatible with BCryptVerifySignature()
+ """
+
+ r_bytes = int_to_bytes(self['r'].native)
+ s_bytes = int_to_bytes(self['s'].native)
+
+ int_byte_length = max(len(r_bytes), len(s_bytes))
+ r_bytes = fill_width(r_bytes, int_byte_length)
+ s_bytes = fill_width(s_bytes, int_byte_length)
+
+ return r_bytes + s_bytes
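+
+# Usage sketch: DSASignature.from_p1363(data) interprets the two halves of
+# `data` as big-endian unsigned integers r and s; to_p1363() re-emits them
+# zero-padded to a common width, as expected by BCryptVerifySignature().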
+
+
+class EncryptionAlgorithmId(ObjectIdentifier):
+ _map = {
+ '1.3.14.3.2.7': 'des',
+ '1.2.840.113549.3.7': 'tripledes_3key',
+ '1.2.840.113549.3.2': 'rc2',
+ '1.2.840.113549.3.4': 'rc4',
+ '1.2.840.113549.3.9': 'rc5',
+ # From http://csrc.nist.gov/groups/ST/crypto_apps_infra/csor/algorithms.html#AES
+ '2.16.840.1.101.3.4.1.1': 'aes128_ecb',
+ '2.16.840.1.101.3.4.1.2': 'aes128_cbc',
+ '2.16.840.1.101.3.4.1.3': 'aes128_ofb',
+ '2.16.840.1.101.3.4.1.4': 'aes128_cfb',
+ '2.16.840.1.101.3.4.1.5': 'aes128_wrap',
+ '2.16.840.1.101.3.4.1.6': 'aes128_gcm',
+ '2.16.840.1.101.3.4.1.7': 'aes128_ccm',
+ '2.16.840.1.101.3.4.1.8': 'aes128_wrap_pad',
+ '2.16.840.1.101.3.4.1.21': 'aes192_ecb',
+ '2.16.840.1.101.3.4.1.22': 'aes192_cbc',
+ '2.16.840.1.101.3.4.1.23': 'aes192_ofb',
+ '2.16.840.1.101.3.4.1.24': 'aes192_cfb',
+ '2.16.840.1.101.3.4.1.25': 'aes192_wrap',
+ '2.16.840.1.101.3.4.1.26': 'aes192_gcm',
+ '2.16.840.1.101.3.4.1.27': 'aes192_ccm',
+ '2.16.840.1.101.3.4.1.28': 'aes192_wrap_pad',
+ '2.16.840.1.101.3.4.1.41': 'aes256_ecb',
+ '2.16.840.1.101.3.4.1.42': 'aes256_cbc',
+ '2.16.840.1.101.3.4.1.43': 'aes256_ofb',
+ '2.16.840.1.101.3.4.1.44': 'aes256_cfb',
+ '2.16.840.1.101.3.4.1.45': 'aes256_wrap',
+ '2.16.840.1.101.3.4.1.46': 'aes256_gcm',
+ '2.16.840.1.101.3.4.1.47': 'aes256_ccm',
+ '2.16.840.1.101.3.4.1.48': 'aes256_wrap_pad',
+ # From PKCS#5
+ '1.2.840.113549.1.5.13': 'pbes2',
+ '1.2.840.113549.1.5.1': 'pbes1_md2_des',
+ '1.2.840.113549.1.5.3': 'pbes1_md5_des',
+ '1.2.840.113549.1.5.4': 'pbes1_md2_rc2',
+ '1.2.840.113549.1.5.6': 'pbes1_md5_rc2',
+ '1.2.840.113549.1.5.10': 'pbes1_sha1_des',
+ '1.2.840.113549.1.5.11': 'pbes1_sha1_rc2',
+ # From PKCS#12
+ '1.2.840.113549.1.12.1.1': 'pkcs12_sha1_rc4_128',
+ '1.2.840.113549.1.12.1.2': 'pkcs12_sha1_rc4_40',
+ '1.2.840.113549.1.12.1.3': 'pkcs12_sha1_tripledes_3key',
+ '1.2.840.113549.1.12.1.4': 'pkcs12_sha1_tripledes_2key',
+ '1.2.840.113549.1.12.1.5': 'pkcs12_sha1_rc2_128',
+ '1.2.840.113549.1.12.1.6': 'pkcs12_sha1_rc2_40',
+ # PKCS#1 v2.2
+ '1.2.840.113549.1.1.1': 'rsaes_pkcs1v15',
+ '1.2.840.113549.1.1.7': 'rsaes_oaep',
+ }
+
+
+class EncryptionAlgorithm(_ForceNullParameters, Sequence):
+ _fields = [
+ ('algorithm', EncryptionAlgorithmId),
+ ('parameters', Any, {'optional': True}),
+ ]
+
+ _oid_pair = ('algorithm', 'parameters')
+ _oid_specs = {
+ 'des': OctetString,
+ 'tripledes_3key': OctetString,
+ 'rc2': Rc2Params,
+ 'rc5': Rc5Params,
+ 'aes128_cbc': OctetString,
+ 'aes192_cbc': OctetString,
+ 'aes256_cbc': OctetString,
+ 'aes128_ofb': OctetString,
+ 'aes192_ofb': OctetString,
+ 'aes256_ofb': OctetString,
+ # From RFC5084
+ 'aes128_ccm': CcmParams,
+ 'aes192_ccm': CcmParams,
+ 'aes256_ccm': CcmParams,
+ # From PKCS#5
+ 'pbes1_md2_des': Pbes1Params,
+ 'pbes1_md5_des': Pbes1Params,
+ 'pbes1_md2_rc2': Pbes1Params,
+ 'pbes1_md5_rc2': Pbes1Params,
+ 'pbes1_sha1_des': Pbes1Params,
+ 'pbes1_sha1_rc2': Pbes1Params,
+ # From PKCS#12
+ 'pkcs12_sha1_rc4_128': Pbes1Params,
+ 'pkcs12_sha1_rc4_40': Pbes1Params,
+ 'pkcs12_sha1_tripledes_3key': Pbes1Params,
+ 'pkcs12_sha1_tripledes_2key': Pbes1Params,
+ 'pkcs12_sha1_rc2_128': Pbes1Params,
+ 'pkcs12_sha1_rc2_40': Pbes1Params,
+ # PKCS#1 v2.2
+ 'rsaes_oaep': RSAESOAEPParams,
+ }
+
+ @property
+ def kdf(self):
+ """
+ Returns the name of the key derivation function to use.
+
+ :return:
+ A unicode string of one of the following: "pbkdf1", "pbkdf2",
+ "pkcs12_kdf"
+ """
+
+ encryption_algo = self['algorithm'].native
+
+ if encryption_algo == 'pbes2':
+ return self['parameters']['key_derivation_func']['algorithm'].native
+
+ if encryption_algo.find('.') == -1:
+ if encryption_algo.find('_') != -1:
+ encryption_algo, _ = encryption_algo.split('_', 1)
+
+ if encryption_algo == 'pbes1':
+ return 'pbkdf1'
+
+ if encryption_algo == 'pkcs12':
+ return 'pkcs12_kdf'
+
+ raise ValueError(unwrap(
+ '''
+ Encryption algorithm "%s" does not have a registered key
+ derivation function
+ ''',
+ encryption_algo
+ ))
+
+ raise ValueError(unwrap(
+ '''
+ Unrecognized encryption algorithm "%s", can not determine key
+ derivation function
+ ''',
+ encryption_algo
+ ))
+
+ @property
+ def kdf_hmac(self):
+ """
+ Returns the HMAC algorithm to use with the KDF.
+
+ :return:
+ A unicode string of one of the following: "md2", "md5", "sha1",
+ "sha224", "sha256", "sha384", "sha512"
+ """
+
+ encryption_algo = self['algorithm'].native
+
+ if encryption_algo == 'pbes2':
+ return self['parameters']['key_derivation_func']['parameters']['prf']['algorithm'].native
+
+ if encryption_algo.find('.') == -1:
+ if encryption_algo.find('_') != -1:
+ _, hmac_algo, _ = encryption_algo.split('_', 2)
+ return hmac_algo
+
+ raise ValueError(unwrap(
+ '''
+ Encryption algorithm "%s" does not have a registered key
+ derivation function
+ ''',
+ encryption_algo
+ ))
+
+ raise ValueError(unwrap(
+ '''
+ Unrecognized encryption algorithm "%s", can not determine key
+ derivation hmac algorithm
+ ''',
+ encryption_algo
+ ))
+
+ @property
+ def kdf_salt(self):
+ """
+ Returns the byte string to use as the salt for the KDF.
+
+ :return:
+ A byte string
+ """
+
+ encryption_algo = self['algorithm'].native
+
+ if encryption_algo == 'pbes2':
+ salt = self['parameters']['key_derivation_func']['parameters']['salt']
+
+ if salt.name == 'other_source':
+ raise ValueError(unwrap(
+ '''
+ Can not determine key derivation salt - the
+ reserved-for-future-use other source salt choice was
+ specified in the PBKDF2 params structure
+ '''
+ ))
+
+ return salt.native
+
+ if encryption_algo.find('.') == -1:
+ if encryption_algo.find('_') != -1:
+ return self['parameters']['salt'].native
+
+ raise ValueError(unwrap(
+ '''
+ Encryption algorithm "%s" does not have a registered key
+ derivation function
+ ''',
+ encryption_algo
+ ))
+
+ raise ValueError(unwrap(
+ '''
+ Unrecognized encryption algorithm "%s", can not determine key
+ derivation salt
+ ''',
+ encryption_algo
+ ))
+
+ @property
+ def kdf_iterations(self):
+ """
+ Returns the number of iterations that should be run via the KDF.
+
+ :return:
+ An integer
+ """
+
+ encryption_algo = self['algorithm'].native
+
+ if encryption_algo == 'pbes2':
+ return self['parameters']['key_derivation_func']['parameters']['iteration_count'].native
+
+ if encryption_algo.find('.') == -1:
+ if encryption_algo.find('_') != -1:
+ return self['parameters']['iterations'].native
+
+ raise ValueError(unwrap(
+ '''
+ Encryption algorithm "%s" does not have a registered key
+ derivation function
+ ''',
+ encryption_algo
+ ))
+
+ raise ValueError(unwrap(
+ '''
+ Unrecognized encryption algorithm "%s", can not determine key
+ derivation iterations
+ ''',
+ encryption_algo
+ ))
+
+ @property
+ def key_length(self):
+ """
+ Returns the key length to pass to the cipher/kdf. The PKCS#5 spec does
+ not specify a way to store the RC5 key length; however, this tends not
+ to be a problem since OpenSSL does not support RC5 in PKCS#8 and OS X
+ does not provide an RC5 cipher for use in the Security Transforms
+ library.
+
+ :raises:
+ ValueError - when the key length can not be determined
+
+ :return:
+ An integer representing the length in bytes
+ """
+
+ encryption_algo = self['algorithm'].native
+
+ if encryption_algo[0:3] == 'aes':
+ return {
+ 'aes128_': 16,
+ 'aes192_': 24,
+ 'aes256_': 32,
+ }[encryption_algo[0:7]]
+
+ cipher_lengths = {
+ 'des': 8,
+ 'tripledes_3key': 24,
+ }
+
+ if encryption_algo in cipher_lengths:
+ return cipher_lengths[encryption_algo]
+
+ if encryption_algo == 'rc2':
+ rc2_parameter_version = self['parameters']['rc2_parameter_version'].native
+
+ # See page 24 of
+ # http://www.emc.com/collateral/white-papers/h11302-pkcs5v2-1-password-based-cryptography-standard-wp.pdf
+ encoded_key_bits_map = {
+ 160: 5, # 40-bit
+ 120: 8, # 64-bit
+ 58: 16, # 128-bit
+ }
+
+ if rc2_parameter_version in encoded_key_bits_map:
+ return encoded_key_bits_map[rc2_parameter_version]
+
+ if rc2_parameter_version >= 256:
+ return rc2_parameter_version
+
+ if rc2_parameter_version is None:
+ return 4 # 32-bit default
+
+ raise ValueError(unwrap(
+ '''
+ Invalid RC2 parameter version found in EncryptionAlgorithm
+ parameters
+ '''
+ ))
+
+ if encryption_algo == 'pbes2':
+ key_length = self['parameters']['key_derivation_func']['parameters']['key_length'].native
+ if key_length is not None:
+ return key_length
+
+ # If the KDF params don't specify the key size, we can infer it from
+ # the encryption scheme for all schemes except for RC5. However, in
+ # practical terms, neither OpenSSL nor OS X supports RC5 for PKCS#8,
+ # so this is rarely an issue.
+
+ return self['parameters']['encryption_scheme'].key_length
+
+ if encryption_algo.find('.') == -1:
+ return {
+ 'pbes1_md2_des': 8,
+ 'pbes1_md5_des': 8,
+ 'pbes1_md2_rc2': 8,
+ 'pbes1_md5_rc2': 8,
+ 'pbes1_sha1_des': 8,
+ 'pbes1_sha1_rc2': 8,
+ 'pkcs12_sha1_rc4_128': 16,
+ 'pkcs12_sha1_rc4_40': 5,
+ 'pkcs12_sha1_tripledes_3key': 24,
+ 'pkcs12_sha1_tripledes_2key': 16,
+ 'pkcs12_sha1_rc2_128': 16,
+ 'pkcs12_sha1_rc2_40': 5,
+ }[encryption_algo]
+
+ raise ValueError(unwrap(
+ '''
+ Unrecognized encryption algorithm "%s"
+ ''',
+ encryption_algo
+ ))
+
+ @property
+ def encryption_mode(self):
+ """
+ Returns the name of the encryption mode to use.
+
+ :return:
+ A unicode string from one of the following: "cbc", "ecb", "ofb",
+ "cfb", "wrap", "gcm", "ccm", "wrap_pad"
+ """
+
+ encryption_algo = self['algorithm'].native
+
+ if encryption_algo[0:7] in set(['aes128_', 'aes192_', 'aes256_']):
+ return encryption_algo[7:]
+
+ if encryption_algo[0:6] == 'pbes1_':
+ return 'cbc'
+
+ if encryption_algo[0:7] == 'pkcs12_':
+ return 'cbc'
+
+ if encryption_algo in set(['des', 'tripledes_3key', 'rc2', 'rc5']):
+ return 'cbc'
+
+ if encryption_algo == 'pbes2':
+ return self['parameters']['encryption_scheme'].encryption_mode
+
+ raise ValueError(unwrap(
+ '''
+ Unrecognized encryption algorithm "%s"
+ ''',
+ encryption_algo
+ ))
+
+ @property
+ def encryption_cipher(self):
+ """
+ Returns the name of the symmetric encryption cipher to use. The key
+ length can be retrieved via the .key_length property to disambiguate
+ between different variations of TripleDES, AES, and the RC* ciphers.
+
+ :return:
+ A unicode string from one of the following: "rc2", "rc5", "des",
+ "tripledes", "aes"
+ """
+
+ encryption_algo = self['algorithm'].native
+
+ if encryption_algo[0:7] in set(['aes128_', 'aes192_', 'aes256_']):
+ return 'aes'
+
+ if encryption_algo in set(['des', 'rc2', 'rc5']):
+ return encryption_algo
+
+ if encryption_algo == 'tripledes_3key':
+ return 'tripledes'
+
+ if encryption_algo == 'pbes2':
+ return self['parameters']['encryption_scheme'].encryption_cipher
+
+ if encryption_algo.find('.') == -1:
+ return {
+ 'pbes1_md2_des': 'des',
+ 'pbes1_md5_des': 'des',
+ 'pbes1_md2_rc2': 'rc2',
+ 'pbes1_md5_rc2': 'rc2',
+ 'pbes1_sha1_des': 'des',
+ 'pbes1_sha1_rc2': 'rc2',
+ 'pkcs12_sha1_rc4_128': 'rc4',
+ 'pkcs12_sha1_rc4_40': 'rc4',
+ 'pkcs12_sha1_tripledes_3key': 'tripledes',
+ 'pkcs12_sha1_tripledes_2key': 'tripledes',
+ 'pkcs12_sha1_rc2_128': 'rc2',
+ 'pkcs12_sha1_rc2_40': 'rc2',
+ }[encryption_algo]
+
+ raise ValueError(unwrap(
+ '''
+ Unrecognized encryption algorithm "%s"
+ ''',
+ encryption_algo
+ ))
+
+ @property
+ def encryption_block_size(self):
+ """
+ Returns the block size of the encryption cipher, in bytes.
+
+ :return:
+ An integer that is the block size in bytes
+ """
+
+ encryption_algo = self['algorithm'].native
+
+ if encryption_algo[0:7] in set(['aes128_', 'aes192_', 'aes256_']):
+ return 16
+
+ cipher_map = {
+ 'des': 8,
+ 'tripledes_3key': 8,
+ 'rc2': 8,
+ }
+ if encryption_algo in cipher_map:
+ return cipher_map[encryption_algo]
+
+ if encryption_algo == 'rc5':
+ return self['parameters']['block_size_in_bits'].native // 8
+
+ if encryption_algo == 'pbes2':
+ return self['parameters']['encryption_scheme'].encryption_block_size
+
+ if encryption_algo.find('.') == -1:
+ return {
+ 'pbes1_md2_des': 8,
+ 'pbes1_md5_des': 8,
+ 'pbes1_md2_rc2': 8,
+ 'pbes1_md5_rc2': 8,
+ 'pbes1_sha1_des': 8,
+ 'pbes1_sha1_rc2': 8,
+ 'pkcs12_sha1_rc4_128': 0,
+ 'pkcs12_sha1_rc4_40': 0,
+ 'pkcs12_sha1_tripledes_3key': 8,
+ 'pkcs12_sha1_tripledes_2key': 8,
+ 'pkcs12_sha1_rc2_128': 8,
+ 'pkcs12_sha1_rc2_40': 8,
+ }[encryption_algo]
+
+ raise ValueError(unwrap(
+ '''
+ Unrecognized encryption algorithm "%s"
+ ''',
+ encryption_algo
+ ))
+
+ @property
+ def encryption_iv(self):
+ """
+ Returns the byte string of the initialization vector for the encryption
+ scheme. Only PBES2 stores the IV in the params. For PBES1, the IV
+ is derived from the KDF and this property will return None.
+
+ :return:
+ A byte string or None
+ """
+
+ encryption_algo = self['algorithm'].native
+
+ if encryption_algo in set(['rc2', 'rc5']):
+ return self['parameters']['iv'].native
+
+ # For DES/Triple DES and AES the IV is the entirety of the parameters
+ octet_string_iv_oids = set([
+ 'des',
+ 'tripledes_3key',
+ 'aes128_cbc',
+ 'aes192_cbc',
+ 'aes256_cbc',
+ 'aes128_ofb',
+ 'aes192_ofb',
+ 'aes256_ofb',
+ ])
+ if encryption_algo in octet_string_iv_oids:
+ return self['parameters'].native
+
+ if encryption_algo == 'pbes2':
+ return self['parameters']['encryption_scheme'].encryption_iv
+
+ # All of the PBES1 algos use their KDF to create the IV. For pbkdf1,
+ # the KDF is told to generate a key that is an extra 8 bytes long, and
+ # that is used for the IV. For the PKCS#12 KDF, it is called with an id
+ # of 2 to generate the IV. In either case, we can't return the IV
+ # without knowing the user's password.
+ if encryption_algo.find('.') == -1:
+ return None
+
+ raise ValueError(unwrap(
+ '''
+ Unrecognized encryption algorithm "%s"
+ ''',
+ encryption_algo
+ ))
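+
+# Illustrative example: for an 'algorithm' of 'pbes1_sha1_des', the properties
+# above give kdf='pbkdf1', kdf_hmac='sha1', encryption_cipher='des',
+# encryption_block_size=8, key_length=8 and encryption_iv=None (the IV is
+# derived via the KDF rather than stored in the parameters).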
+
+
+class Pbes2Params(Sequence):
+ _fields = [
+ ('key_derivation_func', KdfAlgorithm),
+ ('encryption_scheme', EncryptionAlgorithm),
+ ]
+
+
+class Pbmac1Params(Sequence):
+ _fields = [
+ ('key_derivation_func', KdfAlgorithm),
+ ('message_auth_scheme', HmacAlgorithm),
+ ]
+
+
+class Pkcs5MacId(ObjectIdentifier):
+ _map = {
+ '1.2.840.113549.1.5.14': 'pbmac1',
+ }
+
+
+class Pkcs5MacAlgorithm(Sequence):
+ _fields = [
+ ('algorithm', Pkcs5MacId),
+ ('parameters', Any),
+ ]
+
+ _oid_pair = ('algorithm', 'parameters')
+ _oid_specs = {
+ 'pbmac1': Pbmac1Params,
+ }
+
+
+EncryptionAlgorithm._oid_specs['pbes2'] = Pbes2Params
+
+
+class AnyAlgorithmId(ObjectIdentifier):
+ _map = {}
+
+ def _setup(self):
+ _map = self.__class__._map
+ for other_cls in (EncryptionAlgorithmId, SignedDigestAlgorithmId, DigestAlgorithmId):
+ for oid, name in other_cls._map.items():
+ _map[oid] = name
+
+
+class AnyAlgorithmIdentifier(_ForceNullParameters, Sequence):
+ _fields = [
+ ('algorithm', AnyAlgorithmId),
+ ('parameters', Any, {'optional': True}),
+ ]
+
+ _oid_pair = ('algorithm', 'parameters')
+ _oid_specs = {}
+
+ def _setup(self):
+ Sequence._setup(self)
+ specs = self.__class__._oid_specs
+ for other_cls in (EncryptionAlgorithm, SignedDigestAlgorithm):
+ for oid, spec in other_cls._oid_specs.items():
+ specs[oid] = spec
diff --git a/jc/parsers/asn1crypto/cms.py b/jc/parsers/asn1crypto/cms.py
new file mode 100644
index 00000000..c395b227
--- /dev/null
+++ b/jc/parsers/asn1crypto/cms.py
@@ -0,0 +1,1003 @@
+# coding: utf-8
+
+"""
+ASN.1 type classes for cryptographic message syntax (CMS). Structures are also
+compatible with PKCS#7. Exports the following items:
+
+ - AuthenticatedData()
+ - AuthEnvelopedData()
+ - CompressedData()
+ - ContentInfo()
+ - DigestedData()
+ - EncryptedData()
+ - EnvelopedData()
+ - SignedAndEnvelopedData()
+ - SignedData()
+
+Other type classes are defined that help compose the types listed above.
+
+Most CMS structures in the wild are formatted as ContentInfo encapsulating one of the other types.
+"""
+
+from __future__ import unicode_literals, division, absolute_import, print_function
+
+try:
+ import zlib
+except (ImportError):
+ zlib = None
+
+from .algos import (
+ _ForceNullParameters,
+ DigestAlgorithm,
+ EncryptionAlgorithm,
+ EncryptionAlgorithmId,
+ HmacAlgorithm,
+ KdfAlgorithm,
+ RSAESOAEPParams,
+ SignedDigestAlgorithm,
+)
+from .core import (
+ Any,
+ BitString,
+ Choice,
+ Enumerated,
+ GeneralizedTime,
+ Integer,
+ ObjectIdentifier,
+ OctetBitString,
+ OctetString,
+ ParsableOctetString,
+ Sequence,
+ SequenceOf,
+ SetOf,
+ UTCTime,
+ UTF8String,
+)
+from .crl import CertificateList
+from .keys import PublicKeyInfo
+from .ocsp import OCSPResponse
+from .x509 import Attributes, Certificate, Extensions, GeneralName, GeneralNames, Name
+
+
+# These structures are taken from
+# ftp://ftp.rsasecurity.com/pub/pkcs/ascii/pkcs-6.asc
+
+class ExtendedCertificateInfo(Sequence):
+ _fields = [
+ ('version', Integer),
+ ('certificate', Certificate),
+ ('attributes', Attributes),
+ ]
+
+
+class ExtendedCertificate(Sequence):
+ _fields = [
+ ('extended_certificate_info', ExtendedCertificateInfo),
+ ('signature_algorithm', SignedDigestAlgorithm),
+ ('signature', OctetBitString),
+ ]
+
+
+# These structures are taken from https://tools.ietf.org/html/rfc5652,
+# https://tools.ietf.org/html/rfc5083, http://tools.ietf.org/html/rfc2315,
+# https://tools.ietf.org/html/rfc5940, https://tools.ietf.org/html/rfc3274,
+# https://tools.ietf.org/html/rfc3281
+
+
+class CMSVersion(Integer):
+ _map = {
+ 0: 'v0',
+ 1: 'v1',
+ 2: 'v2',
+ 3: 'v3',
+ 4: 'v4',
+ 5: 'v5',
+ }
+
+
+class CMSAttributeType(ObjectIdentifier):
+ _map = {
+ '1.2.840.113549.1.9.3': 'content_type',
+ '1.2.840.113549.1.9.4': 'message_digest',
+ '1.2.840.113549.1.9.5': 'signing_time',
+ '1.2.840.113549.1.9.6': 'counter_signature',
+ # https://datatracker.ietf.org/doc/html/rfc2633#section-2.5.2
+ '1.2.840.113549.1.9.15': 'smime_capabilities',
+ # https://tools.ietf.org/html/rfc2633#page-26
+ '1.2.840.113549.1.9.16.2.11': 'encrypt_key_pref',
+ # https://tools.ietf.org/html/rfc3161#page-20
+ '1.2.840.113549.1.9.16.2.14': 'signature_time_stamp_token',
+ # https://tools.ietf.org/html/rfc6211#page-5
+ '1.2.840.113549.1.9.52': 'cms_algorithm_protection',
+ # https://docs.microsoft.com/en-us/previous-versions/hh968145(v%3Dvs.85)
+ '1.3.6.1.4.1.311.2.4.1': 'microsoft_nested_signature',
+ # Some places refer to this as SPC_RFC3161_OBJID, others szOID_RFC3161_counterSign.
+ # https://docs.microsoft.com/en-us/windows/win32/api/wincrypt/ns-wincrypt-crypt_algorithm_identifier
+ # refers to szOID_RFC3161_counterSign as "1.2.840.113549.1.9.16.1.4",
+ # but that OID is also called szOID_TIMESTAMP_TOKEN. Because there is no
+ # canonical source for this OID, we give it our own name
+ '1.3.6.1.4.1.311.3.3.1': 'microsoft_time_stamp_token',
+ }
+
+
+class Time(Choice):
+ _alternatives = [
+ ('utc_time', UTCTime),
+ ('generalized_time', GeneralizedTime),
+ ]
+
+
+class ContentType(ObjectIdentifier):
+ _map = {
+ '1.2.840.113549.1.7.1': 'data',
+ '1.2.840.113549.1.7.2': 'signed_data',
+ '1.2.840.113549.1.7.3': 'enveloped_data',
+ '1.2.840.113549.1.7.4': 'signed_and_enveloped_data',
+ '1.2.840.113549.1.7.5': 'digested_data',
+ '1.2.840.113549.1.7.6': 'encrypted_data',
+ '1.2.840.113549.1.9.16.1.2': 'authenticated_data',
+ '1.2.840.113549.1.9.16.1.9': 'compressed_data',
+ '1.2.840.113549.1.9.16.1.23': 'authenticated_enveloped_data',
+ }
+
+
+class CMSAlgorithmProtection(Sequence):
+ _fields = [
+ ('digest_algorithm', DigestAlgorithm),
+ ('signature_algorithm', SignedDigestAlgorithm, {'implicit': 1, 'optional': True}),
+ ('mac_algorithm', HmacAlgorithm, {'implicit': 2, 'optional': True}),
+ ]
+
+
+class SetOfContentType(SetOf):
+ _child_spec = ContentType
+
+
+class SetOfOctetString(SetOf):
+ _child_spec = OctetString
+
+
+class SetOfTime(SetOf):
+ _child_spec = Time
+
+
+class SetOfAny(SetOf):
+ _child_spec = Any
+
+
+class SetOfCMSAlgorithmProtection(SetOf):
+ _child_spec = CMSAlgorithmProtection
+
+
+class CMSAttribute(Sequence):
+ _fields = [
+ ('type', CMSAttributeType),
+ ('values', None),
+ ]
+
+ _oid_specs = {}
+
+ def _values_spec(self):
+ return self._oid_specs.get(self['type'].native, SetOfAny)
+
+ _spec_callbacks = {
+ 'values': _values_spec
+ }
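+
+# The callback above picks the parsing spec for 'values' based on the
+# attribute 'type'; OIDs with no registered spec fall back to SetOfAny.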
+
+
+class CMSAttributes(SetOf):
+ _child_spec = CMSAttribute
+
+
+class IssuerSerial(Sequence):
+ _fields = [
+ ('issuer', GeneralNames),
+ ('serial', Integer),
+ ('issuer_uid', OctetBitString, {'optional': True}),
+ ]
+
+
+class AttCertVersion(Integer):
+ _map = {
+ 0: 'v1',
+ 1: 'v2',
+ }
+
+
+class AttCertSubject(Choice):
+ _alternatives = [
+ ('base_certificate_id', IssuerSerial, {'explicit': 0}),
+ ('subject_name', GeneralNames, {'explicit': 1}),
+ ]
+
+
+class AttCertValidityPeriod(Sequence):
+ _fields = [
+ ('not_before_time', GeneralizedTime),
+ ('not_after_time', GeneralizedTime),
+ ]
+
+
+class AttributeCertificateInfoV1(Sequence):
+ _fields = [
+ ('version', AttCertVersion, {'default': 'v1'}),
+ ('subject', AttCertSubject),
+ ('issuer', GeneralNames),
+ ('signature', SignedDigestAlgorithm),
+ ('serial_number', Integer),
+ ('att_cert_validity_period', AttCertValidityPeriod),
+ ('attributes', Attributes),
+ ('issuer_unique_id', OctetBitString, {'optional': True}),
+ ('extensions', Extensions, {'optional': True}),
+ ]
+
+
+class AttributeCertificateV1(Sequence):
+ _fields = [
+ ('ac_info', AttributeCertificateInfoV1),
+ ('signature_algorithm', SignedDigestAlgorithm),
+ ('signature', OctetBitString),
+ ]
+
+
+class DigestedObjectType(Enumerated):
+ _map = {
+ 0: 'public_key',
+ 1: 'public_key_cert',
+ 2: 'other_objy_types',
+ }
+
+
+class ObjectDigestInfo(Sequence):
+ _fields = [
+ ('digested_object_type', DigestedObjectType),
+ ('other_object_type_id', ObjectIdentifier, {'optional': True}),
+ ('digest_algorithm', DigestAlgorithm),
+ ('object_digest', OctetBitString),
+ ]
+
+
+class Holder(Sequence):
+ _fields = [
+ ('base_certificate_id', IssuerSerial, {'implicit': 0, 'optional': True}),
+ ('entity_name', GeneralNames, {'implicit': 1, 'optional': True}),
+ ('object_digest_info', ObjectDigestInfo, {'implicit': 2, 'optional': True}),
+ ]
+
+
+class V2Form(Sequence):
+ _fields = [
+ ('issuer_name', GeneralNames, {'optional': True}),
+ ('base_certificate_id', IssuerSerial, {'explicit': 0, 'optional': True}),
+ ('object_digest_info', ObjectDigestInfo, {'explicit': 1, 'optional': True}),
+ ]
+
+
+class AttCertIssuer(Choice):
+ _alternatives = [
+ ('v1_form', GeneralNames),
+ ('v2_form', V2Form, {'implicit': 0}),
+ ]
+
+
+class IetfAttrValue(Choice):
+ _alternatives = [
+ ('octets', OctetString),
+ ('oid', ObjectIdentifier),
+ ('string', UTF8String),
+ ]
+
+
+class IetfAttrValues(SequenceOf):
+ _child_spec = IetfAttrValue
+
+
+class IetfAttrSyntax(Sequence):
+ _fields = [
+ ('policy_authority', GeneralNames, {'implicit': 0, 'optional': True}),
+ ('values', IetfAttrValues),
+ ]
+
+
+class SetOfIetfAttrSyntax(SetOf):
+ _child_spec = IetfAttrSyntax
+
+
+class SvceAuthInfo(Sequence):
+ _fields = [
+ ('service', GeneralName),
+ ('ident', GeneralName),
+ ('auth_info', OctetString, {'optional': True}),
+ ]
+
+
+class SetOfSvceAuthInfo(SetOf):
+ _child_spec = SvceAuthInfo
+
+
+class RoleSyntax(Sequence):
+ _fields = [
+ ('role_authority', GeneralNames, {'implicit': 0, 'optional': True}),
+ ('role_name', GeneralName, {'explicit': 1}),
+ ]
+
+
+class SetOfRoleSyntax(SetOf):
+ _child_spec = RoleSyntax
+
+
+class ClassList(BitString):
+ _map = {
+ 0: 'unmarked',
+ 1: 'unclassified',
+ 2: 'restricted',
+ 3: 'confidential',
+ 4: 'secret',
+ 5: 'top_secret',
+ }
+
+
+class SecurityCategory(Sequence):
+ _fields = [
+ ('type', ObjectIdentifier, {'implicit': 0}),
+ ('value', Any, {'explicit': 1}),
+ ]
+
+
+class SetOfSecurityCategory(SetOf):
+ _child_spec = SecurityCategory
+
+
+class Clearance(Sequence):
+ _fields = [
+ ('policy_id', ObjectIdentifier),
+ ('class_list', ClassList, {'default': set(['unclassified'])}),
+ ('security_categories', SetOfSecurityCategory, {'optional': True}),
+ ]
+
+
+class SetOfClearance(SetOf):
+ _child_spec = Clearance
+
+
+class BigTime(Sequence):
+ _fields = [
+ ('major', Integer),
+ ('fractional_seconds', Integer),
+ ('sign', Integer, {'optional': True}),
+ ]
+
+
+class LeapData(Sequence):
+ _fields = [
+ ('leap_time', BigTime),
+ ('action', Integer),
+ ]
+
+
+class SetOfLeapData(SetOf):
+ _child_spec = LeapData
+
+
+class TimingMetrics(Sequence):
+ _fields = [
+ ('ntp_time', BigTime),
+ ('offset', BigTime),
+ ('delay', BigTime),
+ ('expiration', BigTime),
+ ('leap_event', SetOfLeapData, {'optional': True}),
+ ]
+
+
+class SetOfTimingMetrics(SetOf):
+ _child_spec = TimingMetrics
+
+
+class TimingPolicy(Sequence):
+ _fields = [
+ ('policy_id', SequenceOf, {'spec': ObjectIdentifier}),
+ ('max_offset', BigTime, {'explicit': 0, 'optional': True}),
+ ('max_delay', BigTime, {'explicit': 1, 'optional': True}),
+ ]
+
+
+class SetOfTimingPolicy(SetOf):
+ _child_spec = TimingPolicy
+
+
+class AttCertAttributeType(ObjectIdentifier):
+ _map = {
+ '1.3.6.1.5.5.7.10.1': 'authentication_info',
+ '1.3.6.1.5.5.7.10.2': 'access_identity',
+ '1.3.6.1.5.5.7.10.3': 'charging_identity',
+ '1.3.6.1.5.5.7.10.4': 'group',
+ '2.5.4.72': 'role',
+ '2.5.4.55': 'clearance',
+ '1.3.6.1.4.1.601.10.4.1': 'timing_metrics',
+ '1.3.6.1.4.1.601.10.4.2': 'timing_policy',
+ }
+
+
+class AttCertAttribute(Sequence):
+ _fields = [
+ ('type', AttCertAttributeType),
+ ('values', None),
+ ]
+
+ _oid_specs = {
+ 'authentication_info': SetOfSvceAuthInfo,
+ 'access_identity': SetOfSvceAuthInfo,
+ 'charging_identity': SetOfIetfAttrSyntax,
+ 'group': SetOfIetfAttrSyntax,
+ 'role': SetOfRoleSyntax,
+ 'clearance': SetOfClearance,
+ 'timing_metrics': SetOfTimingMetrics,
+ 'timing_policy': SetOfTimingPolicy,
+ }
+
+ def _values_spec(self):
+ return self._oid_specs.get(self['type'].native, SetOfAny)
+
+ _spec_callbacks = {
+ 'values': _values_spec
+ }
+
+
+class AttCertAttributes(SequenceOf):
+ _child_spec = AttCertAttribute
+
+
+class AttributeCertificateInfoV2(Sequence):
+ _fields = [
+ ('version', AttCertVersion),
+ ('holder', Holder),
+ ('issuer', AttCertIssuer),
+ ('signature', SignedDigestAlgorithm),
+ ('serial_number', Integer),
+ ('att_cert_validity_period', AttCertValidityPeriod),
+ ('attributes', AttCertAttributes),
+ ('issuer_unique_id', OctetBitString, {'optional': True}),
+ ('extensions', Extensions, {'optional': True}),
+ ]
+
+
+class AttributeCertificateV2(Sequence):
+ # Handle the situation where a V2 cert is encoded as V1
+ _bad_tag = 1
+
+ _fields = [
+ ('ac_info', AttributeCertificateInfoV2),
+ ('signature_algorithm', SignedDigestAlgorithm),
+ ('signature', OctetBitString),
+ ]
+
+
+class OtherCertificateFormat(Sequence):
+ _fields = [
+ ('other_cert_format', ObjectIdentifier),
+ ('other_cert', Any),
+ ]
+
+
+class CertificateChoices(Choice):
+ _alternatives = [
+ ('certificate', Certificate),
+ ('extended_certificate', ExtendedCertificate, {'implicit': 0}),
+ ('v1_attr_cert', AttributeCertificateV1, {'implicit': 1}),
+ ('v2_attr_cert', AttributeCertificateV2, {'implicit': 2}),
+ ('other', OtherCertificateFormat, {'implicit': 3}),
+ ]
+
+ def validate(self, class_, tag, contents):
+ """
+ Ensures that the class and tag specified exist as an alternative. This
+ custom version fixes parsing of broken encodings where a V2 attribute
+ certificate is encoded as a V1
+
+ :param class_:
+ The integer class_ from the encoded value header
+
+ :param tag:
+ The integer tag from the encoded value header
+
+ :param contents:
+ A byte string of the contents of the value - used when the object
+ is explicitly tagged
+
+ :raises:
+ ValueError - when value is not a valid alternative
+ """
+
+ super(CertificateChoices, self).validate(class_, tag, contents)
+ if self._choice == 2:
+ if AttCertVersion.load(Sequence.load(contents)[0].dump()).native == 'v2':
+ self._choice = 3
+
+
+class CertificateSet(SetOf):
+ _child_spec = CertificateChoices
+
+
+class ContentInfo(Sequence):
+ _fields = [
+ ('content_type', ContentType),
+ ('content', Any, {'explicit': 0, 'optional': True}),
+ ]
+
+ _oid_pair = ('content_type', 'content')
+ _oid_specs = {}
+
+
+class SetOfContentInfo(SetOf):
+ _child_spec = ContentInfo
+
+
+class EncapsulatedContentInfo(Sequence):
+ _fields = [
+ ('content_type', ContentType),
+ ('content', ParsableOctetString, {'explicit': 0, 'optional': True}),
+ ]
+
+ _oid_pair = ('content_type', 'content')
+ _oid_specs = {}
+
+
+class IssuerAndSerialNumber(Sequence):
+ _fields = [
+ ('issuer', Name),
+ ('serial_number', Integer),
+ ]
+
+
+class SignerIdentifier(Choice):
+ _alternatives = [
+ ('issuer_and_serial_number', IssuerAndSerialNumber),
+ ('subject_key_identifier', OctetString, {'implicit': 0}),
+ ]
+
+
+class DigestAlgorithms(SetOf):
+ _child_spec = DigestAlgorithm
+
+
+class CertificateRevocationLists(SetOf):
+ _child_spec = CertificateList
+
+
+class SCVPReqRes(Sequence):
+ _fields = [
+ ('request', ContentInfo, {'explicit': 0, 'optional': True}),
+ ('response', ContentInfo),
+ ]
+
+
+class OtherRevInfoFormatId(ObjectIdentifier):
+ _map = {
+ '1.3.6.1.5.5.7.16.2': 'ocsp_response',
+ '1.3.6.1.5.5.7.16.4': 'scvp',
+ }
+
+
+class OtherRevocationInfoFormat(Sequence):
+ _fields = [
+ ('other_rev_info_format', OtherRevInfoFormatId),
+ ('other_rev_info', Any),
+ ]
+
+ _oid_pair = ('other_rev_info_format', 'other_rev_info')
+ _oid_specs = {
+ 'ocsp_response': OCSPResponse,
+ 'scvp': SCVPReqRes,
+ }
+
+
+class RevocationInfoChoice(Choice):
+ _alternatives = [
+ ('crl', CertificateList),
+ ('other', OtherRevocationInfoFormat, {'implicit': 1}),
+ ]
+
+
+class RevocationInfoChoices(SetOf):
+ _child_spec = RevocationInfoChoice
+
+
+class SignerInfo(Sequence):
+ _fields = [
+ ('version', CMSVersion),
+ ('sid', SignerIdentifier),
+ ('digest_algorithm', DigestAlgorithm),
+ ('signed_attrs', CMSAttributes, {'implicit': 0, 'optional': True}),
+ ('signature_algorithm', SignedDigestAlgorithm),
+ ('signature', OctetString),
+ ('unsigned_attrs', CMSAttributes, {'implicit': 1, 'optional': True}),
+ ]
+
+
+class SignerInfos(SetOf):
+ _child_spec = SignerInfo
+
+
+class SignedData(Sequence):
+ _fields = [
+ ('version', CMSVersion),
+ ('digest_algorithms', DigestAlgorithms),
+ ('encap_content_info', None),
+ ('certificates', CertificateSet, {'implicit': 0, 'optional': True}),
+ ('crls', RevocationInfoChoices, {'implicit': 1, 'optional': True}),
+ ('signer_infos', SignerInfos),
+ ]
+
+ def _encap_content_info_spec(self):
+ # If the version is v1, then this could be a PKCS#7
+ # structure, or a CMS structure. CMS wraps the encoded value in an
+ # Octet String tag.
+
+ # If the version is greater than 1, it is definitely CMS
+ if self['version'].native != 'v1':
+ return EncapsulatedContentInfo
+
+ # Otherwise, the ContentInfo spec from PKCS#7 will be compatible with
+ # CMS v1 (which only allows Data, an Octet String) and PKCS#7, which
+ # allows Any
+ return ContentInfo
+
+ _spec_callbacks = {
+ 'encap_content_info': _encap_content_info_spec
+ }
+
+
+class OriginatorInfo(Sequence):
+ _fields = [
+ ('certs', CertificateSet, {'implicit': 0, 'optional': True}),
+ ('crls', RevocationInfoChoices, {'implicit': 1, 'optional': True}),
+ ]
+
+
+class RecipientIdentifier(Choice):
+ _alternatives = [
+ ('issuer_and_serial_number', IssuerAndSerialNumber),
+ ('subject_key_identifier', OctetString, {'implicit': 0}),
+ ]
+
+
+class KeyEncryptionAlgorithmId(ObjectIdentifier):
+ _map = {
+ '1.2.840.113549.1.1.1': 'rsaes_pkcs1v15',
+ '1.2.840.113549.1.1.7': 'rsaes_oaep',
+ '2.16.840.1.101.3.4.1.5': 'aes128_wrap',
+ '2.16.840.1.101.3.4.1.8': 'aes128_wrap_pad',
+ '2.16.840.1.101.3.4.1.25': 'aes192_wrap',
+ '2.16.840.1.101.3.4.1.28': 'aes192_wrap_pad',
+ '2.16.840.1.101.3.4.1.45': 'aes256_wrap',
+ '2.16.840.1.101.3.4.1.48': 'aes256_wrap_pad',
+ }
+
+ _reverse_map = {
+ 'rsa': '1.2.840.113549.1.1.1',
+ 'rsaes_pkcs1v15': '1.2.840.113549.1.1.1',
+ 'rsaes_oaep': '1.2.840.113549.1.1.7',
+ 'aes128_wrap': '2.16.840.1.101.3.4.1.5',
+ 'aes128_wrap_pad': '2.16.840.1.101.3.4.1.8',
+ 'aes192_wrap': '2.16.840.1.101.3.4.1.25',
+ 'aes192_wrap_pad': '2.16.840.1.101.3.4.1.28',
+ 'aes256_wrap': '2.16.840.1.101.3.4.1.45',
+ 'aes256_wrap_pad': '2.16.840.1.101.3.4.1.48',
+ }
+
+
+class KeyEncryptionAlgorithm(_ForceNullParameters, Sequence):
+ _fields = [
+ ('algorithm', KeyEncryptionAlgorithmId),
+ ('parameters', Any, {'optional': True}),
+ ]
+
+ _oid_pair = ('algorithm', 'parameters')
+ _oid_specs = {
+ 'rsaes_oaep': RSAESOAEPParams,
+ }
+
+
+class KeyTransRecipientInfo(Sequence):
+ _fields = [
+ ('version', CMSVersion),
+ ('rid', RecipientIdentifier),
+ ('key_encryption_algorithm', KeyEncryptionAlgorithm),
+ ('encrypted_key', OctetString),
+ ]
+
+
+class OriginatorIdentifierOrKey(Choice):
+ _alternatives = [
+ ('issuer_and_serial_number', IssuerAndSerialNumber),
+ ('subject_key_identifier', OctetString, {'implicit': 0}),
+ ('originator_key', PublicKeyInfo, {'implicit': 1}),
+ ]
+
+
+class OtherKeyAttribute(Sequence):
+ _fields = [
+ ('key_attr_id', ObjectIdentifier),
+ ('key_attr', Any),
+ ]
+
+
+class RecipientKeyIdentifier(Sequence):
+ _fields = [
+ ('subject_key_identifier', OctetString),
+ ('date', GeneralizedTime, {'optional': True}),
+ ('other', OtherKeyAttribute, {'optional': True}),
+ ]
+
+
+class KeyAgreementRecipientIdentifier(Choice):
+ _alternatives = [
+ ('issuer_and_serial_number', IssuerAndSerialNumber),
+ ('r_key_id', RecipientKeyIdentifier, {'implicit': 0}),
+ ]
+
+
+class RecipientEncryptedKey(Sequence):
+ _fields = [
+ ('rid', KeyAgreementRecipientIdentifier),
+ ('encrypted_key', OctetString),
+ ]
+
+
+class RecipientEncryptedKeys(SequenceOf):
+ _child_spec = RecipientEncryptedKey
+
+
+class KeyAgreeRecipientInfo(Sequence):
+ _fields = [
+ ('version', CMSVersion),
+ ('originator', OriginatorIdentifierOrKey, {'explicit': 0}),
+ ('ukm', OctetString, {'explicit': 1, 'optional': True}),
+ ('key_encryption_algorithm', KeyEncryptionAlgorithm),
+ ('recipient_encrypted_keys', RecipientEncryptedKeys),
+ ]
+
+
+class KEKIdentifier(Sequence):
+ _fields = [
+ ('key_identifier', OctetString),
+ ('date', GeneralizedTime, {'optional': True}),
+ ('other', OtherKeyAttribute, {'optional': True}),
+ ]
+
+
+class KEKRecipientInfo(Sequence):
+ _fields = [
+ ('version', CMSVersion),
+ ('kekid', KEKIdentifier),
+ ('key_encryption_algorithm', KeyEncryptionAlgorithm),
+ ('encrypted_key', OctetString),
+ ]
+
+
+class PasswordRecipientInfo(Sequence):
+ _fields = [
+ ('version', CMSVersion),
+ ('key_derivation_algorithm', KdfAlgorithm, {'implicit': 0, 'optional': True}),
+ ('key_encryption_algorithm', KeyEncryptionAlgorithm),
+ ('encrypted_key', OctetString),
+ ]
+
+
+class OtherRecipientInfo(Sequence):
+ _fields = [
+ ('ori_type', ObjectIdentifier),
+ ('ori_value', Any),
+ ]
+
+
+class RecipientInfo(Choice):
+ _alternatives = [
+ ('ktri', KeyTransRecipientInfo),
+ ('kari', KeyAgreeRecipientInfo, {'implicit': 1}),
+ ('kekri', KEKRecipientInfo, {'implicit': 2}),
+ ('pwri', PasswordRecipientInfo, {'implicit': 3}),
+ ('ori', OtherRecipientInfo, {'implicit': 4}),
+ ]
+
+
+class RecipientInfos(SetOf):
+ _child_spec = RecipientInfo
+
+
+class EncryptedContentInfo(Sequence):
+ _fields = [
+ ('content_type', ContentType),
+ ('content_encryption_algorithm', EncryptionAlgorithm),
+ ('encrypted_content', OctetString, {'implicit': 0, 'optional': True}),
+ ]
+
+
+class EnvelopedData(Sequence):
+ _fields = [
+ ('version', CMSVersion),
+ ('originator_info', OriginatorInfo, {'implicit': 0, 'optional': True}),
+ ('recipient_infos', RecipientInfos),
+ ('encrypted_content_info', EncryptedContentInfo),
+ ('unprotected_attrs', CMSAttributes, {'implicit': 1, 'optional': True}),
+ ]
+
+
+class SignedAndEnvelopedData(Sequence):
+ _fields = [
+ ('version', CMSVersion),
+ ('recipient_infos', RecipientInfos),
+ ('digest_algorithms', DigestAlgorithms),
+ ('encrypted_content_info', EncryptedContentInfo),
+ ('certificates', CertificateSet, {'implicit': 0, 'optional': True}),
+ ('crls', CertificateRevocationLists, {'implicit': 1, 'optional': True}),
+ ('signer_infos', SignerInfos),
+ ]
+
+
+class DigestedData(Sequence):
+ _fields = [
+ ('version', CMSVersion),
+ ('digest_algorithm', DigestAlgorithm),
+ ('encap_content_info', None),
+ ('digest', OctetString),
+ ]
+
+ def _encap_content_info_spec(self):
+ # If the version is v1, then this could be a PKCS#7
+ # structure, or a CMS structure. CMS wraps the encoded value in an
+ # Octet String tag.
+
+ # If the version is greater than 1, it is definitely CMS
+ if self['version'].native != 'v1':
+ return EncapsulatedContentInfo
+
+ # Otherwise, the ContentInfo spec from PKCS#7 will be compatible with
+ # CMS v1 (which only allows Data, an Octet String) and PKCS#7, which
+ # allows Any
+ return ContentInfo
+
+ _spec_callbacks = {
+ 'encap_content_info': _encap_content_info_spec
+ }
+
+
+class EncryptedData(Sequence):
+ _fields = [
+ ('version', CMSVersion),
+ ('encrypted_content_info', EncryptedContentInfo),
+ ('unprotected_attrs', CMSAttributes, {'implicit': 1, 'optional': True}),
+ ]
+
+
+class AuthenticatedData(Sequence):
+ _fields = [
+ ('version', CMSVersion),
+ ('originator_info', OriginatorInfo, {'implicit': 0, 'optional': True}),
+ ('recipient_infos', RecipientInfos),
+ ('mac_algorithm', HmacAlgorithm),
+ ('digest_algorithm', DigestAlgorithm, {'implicit': 1, 'optional': True}),
+ # This does not require the _spec_callbacks approach of SignedData and
+ # DigestedData since AuthenticatedData was not part of PKCS#7
+ ('encap_content_info', EncapsulatedContentInfo),
+ ('auth_attrs', CMSAttributes, {'implicit': 2, 'optional': True}),
+ ('mac', OctetString),
+ ('unauth_attrs', CMSAttributes, {'implicit': 3, 'optional': True}),
+ ]
+
+
+class AuthEnvelopedData(Sequence):
+ _fields = [
+ ('version', CMSVersion),
+ ('originator_info', OriginatorInfo, {'implicit': 0, 'optional': True}),
+ ('recipient_infos', RecipientInfos),
+ ('auth_encrypted_content_info', EncryptedContentInfo),
+ ('auth_attrs', CMSAttributes, {'implicit': 1, 'optional': True}),
+ ('mac', OctetString),
+ ('unauth_attrs', CMSAttributes, {'implicit': 2, 'optional': True}),
+ ]
+
+
+class CompressionAlgorithmId(ObjectIdentifier):
+ _map = {
+ '1.2.840.113549.1.9.16.3.8': 'zlib',
+ }
+
+
+class CompressionAlgorithm(Sequence):
+ _fields = [
+ ('algorithm', CompressionAlgorithmId),
+ ('parameters', Any, {'optional': True}),
+ ]
+
+
+class CompressedData(Sequence):
+ _fields = [
+ ('version', CMSVersion),
+ ('compression_algorithm', CompressionAlgorithm),
+ ('encap_content_info', EncapsulatedContentInfo),
+ ]
+
+ _decompressed = None
+
+ @property
+ def decompressed(self):
+ if self._decompressed is None:
+ if zlib is None:
+ raise SystemError('The zlib module is not available')
+ self._decompressed = zlib.decompress(self['encap_content_info']['content'].native)
+ return self._decompressed
+
+
+class RecipientKeyIdentifier(Sequence):
+ _fields = [
+ ('subjectKeyIdentifier', OctetString),
+ ('date', GeneralizedTime, {'optional': True}),
+ ('other', OtherKeyAttribute, {'optional': True}),
+ ]
+
+
+class SMIMEEncryptionKeyPreference(Choice):
+ _alternatives = [
+ ('issuer_and_serial_number', IssuerAndSerialNumber, {'implicit': 0}),
+ ('recipientKeyId', RecipientKeyIdentifier, {'implicit': 1}),
+ ('subjectAltKeyIdentifier', PublicKeyInfo, {'implicit': 2}),
+ ]
+
+
+class SMIMEEncryptionKeyPreferences(SetOf):
+ _child_spec = SMIMEEncryptionKeyPreference
+
+
+class SMIMECapabilityIdentifier(Sequence):
+ _fields = [
+ ('capability_id', EncryptionAlgorithmId),
+ ('parameters', Any, {'optional': True}),
+ ]
+
+
+class SMIMECapabilites(SequenceOf):
+ _child_spec = SMIMECapabilityIdentifier
+
+
+class SetOfSMIMECapabilites(SetOf):
+ _child_spec = SMIMECapabilites
+
+
+ContentInfo._oid_specs = {
+ 'data': OctetString,
+ 'signed_data': SignedData,
+ 'enveloped_data': EnvelopedData,
+ 'signed_and_enveloped_data': SignedAndEnvelopedData,
+ 'digested_data': DigestedData,
+ 'encrypted_data': EncryptedData,
+ 'authenticated_data': AuthenticatedData,
+ 'compressed_data': CompressedData,
+ 'authenticated_enveloped_data': AuthEnvelopedData,
+}
+
+
+EncapsulatedContentInfo._oid_specs = {
+ 'signed_data': SignedData,
+ 'enveloped_data': EnvelopedData,
+ 'signed_and_enveloped_data': SignedAndEnvelopedData,
+ 'digested_data': DigestedData,
+ 'encrypted_data': EncryptedData,
+ 'authenticated_data': AuthenticatedData,
+ 'compressed_data': CompressedData,
+ 'authenticated_enveloped_data': AuthEnvelopedData,
+}
+
+
+CMSAttribute._oid_specs = {
+ 'content_type': SetOfContentType,
+ 'message_digest': SetOfOctetString,
+ 'signing_time': SetOfTime,
+ 'counter_signature': SignerInfos,
+ 'signature_time_stamp_token': SetOfContentInfo,
+ 'cms_algorithm_protection': SetOfCMSAlgorithmProtection,
+ 'microsoft_nested_signature': SetOfContentInfo,
+ 'microsoft_time_stamp_token': SetOfContentInfo,
+ 'encrypt_key_pref': SMIMEEncryptionKeyPreferences,
+ 'smime_capabilities': SetOfSMIMECapabilites,
+}
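+
+# Example (illustrative sketch, not part of the original module): with the
+# _oid_specs wiring above, a DER-encoded CMS/PKCS#7 structure can be loaded
+# and its content parsed by OID. The filename below is a placeholder.
+#
+#   with open('signed.der', 'rb') as f:
+#       info = ContentInfo.load(f.read())
+#   info['content_type'].native  # e.g. 'signed_data'
+#   info['content']              # parsed with the matching spec, e.g. SignedData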
diff --git a/jc/parsers/asn1crypto/core.py b/jc/parsers/asn1crypto/core.py
new file mode 100644
index 00000000..364c6b5c
--- /dev/null
+++ b/jc/parsers/asn1crypto/core.py
@@ -0,0 +1,5676 @@
+# coding: utf-8
+
+"""
+ASN.1 type classes for universal types. Exports the following items:
+
+ - load()
+ - Any()
+ - Asn1Value()
+ - BitString()
+ - BMPString()
+ - Boolean()
+ - CharacterString()
+ - Choice()
+ - EmbeddedPdv()
+ - Enumerated()
+ - GeneralizedTime()
+ - GeneralString()
+ - GraphicString()
+ - IA5String()
+ - InstanceOf()
+ - Integer()
+ - IntegerBitString()
+ - IntegerOctetString()
+ - Null()
+ - NumericString()
+ - ObjectDescriptor()
+ - ObjectIdentifier()
+ - OctetBitString()
+ - OctetString()
+ - PrintableString()
+ - Real()
+ - RelativeOid()
+ - Sequence()
+ - SequenceOf()
+ - Set()
+ - SetOf()
+ - TeletexString()
+ - UniversalString()
+ - UTCTime()
+ - UTF8String()
+ - VideotexString()
+ - VisibleString()
+ - VOID
+ - Void()
+
+Other type classes are defined that help compose the types listed above.
+"""
+
+from __future__ import unicode_literals, division, absolute_import, print_function
+
+from datetime import datetime, timedelta
+from fractions import Fraction
+import binascii
+import copy
+import math
+import re
+import sys
+
+from . import _teletex_codec
+from ._errors import unwrap
+from ._ordereddict import OrderedDict
+from ._types import type_name, str_cls, byte_cls, int_types, chr_cls
+from .parser import _parse, _dump_header
+from .util import int_to_bytes, int_from_bytes, timezone, extended_datetime, create_timezone, utc_with_dst
+
+if sys.version_info <= (3,):
+ from cStringIO import StringIO as BytesIO
+
+ range = xrange # noqa
+ _PY2 = True
+
+else:
+ from io import BytesIO
+
+ _PY2 = False
+
+
+_teletex_codec.register()
+
+
+CLASS_NUM_TO_NAME_MAP = {
+ 0: 'universal',
+ 1: 'application',
+ 2: 'context',
+ 3: 'private',
+}
+
+CLASS_NAME_TO_NUM_MAP = {
+ 'universal': 0,
+ 'application': 1,
+ 'context': 2,
+ 'private': 3,
+ 0: 0,
+ 1: 1,
+ 2: 2,
+ 3: 3,
+}
+
+METHOD_NUM_TO_NAME_MAP = {
+ 0: 'primitive',
+ 1: 'constructed',
+}
+
+
+_OID_RE = re.compile(r'^\d+(\.\d+)*$')
+
+
+# A global tracker to ensure that _setup() is called for every class, even
+# if it has been called for a parent class. This allows different _fields
+# definitions for child classes. Without such a construct, the child classes
+# would just see the parent class attributes and would use them.
+_SETUP_CLASSES = {}
+
+
+def load(encoded_data, strict=False):
+ """
+ Loads a BER/DER-encoded byte string and constructs a universal object based
+ on the tag value:
+
+ - 1: Boolean
+ - 2: Integer
+ - 3: BitString
+ - 4: OctetString
+ - 5: Null
+ - 6: ObjectIdentifier
+ - 7: ObjectDescriptor
+ - 8: InstanceOf
+ - 9: Real
+ - 10: Enumerated
+ - 11: EmbeddedPdv
+ - 12: UTF8String
+ - 13: RelativeOid
+ - 16: Sequence
+ - 17: Set
+ - 18: NumericString
+ - 19: PrintableString
+ - 20: TeletexString
+ - 21: VideotexString
+ - 22: IA5String
+ - 23: UTCTime
+ - 24: GeneralizedTime
+ - 25: GraphicString
+ - 26: VisibleString
+ - 27: GeneralString
+ - 28: UniversalString
+ - 29: CharacterString
+ - 30: BMPString
+
+ :param encoded_data:
+ A byte string of BER or DER-encoded data
+
+ :param strict:
+ A boolean indicating if trailing data should be forbidden - if so, a
+ ValueError will be raised when trailing data exists
+
+ :raises:
+ ValueError - when strict is True and trailing data is present
+ ValueError - when the encoded value has a tag other than those listed above
+ ValueError - when the ASN.1 header length is longer than the data
+ TypeError - when encoded_data is not a byte string
+
+ :return:
+ An instance of the one of the universal classes
+ """
+
+ return Asn1Value.load(encoded_data, strict=strict)
+
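+# Example (illustrative, not part of the library): decoding universal values
+# with load(). The byte strings are the standard DER encodings of
+# BOOLEAN TRUE and INTEGER 5.
+#
+#   load(b'\x01\x01\xff').native  # -> True (tag 1: Boolean)
+#   load(b'\x02\x01\x05').native  # -> 5    (tag 2: Integer)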
+
+class Asn1Value(object):
+ """
+ The basis of all ASN.1 values
+ """
+
+ # The integer 0 for primitive, 1 for constructed
+ method = None
+
+ # An integer 0 through 3 - see CLASS_NUM_TO_NAME_MAP for value
+ class_ = None
+
+ # An integer 1 or greater indicating the tag number
+ tag = None
+
+ # An alternate tag allowed for this type - used for handling broken
+ # structures where a string value is encoded using an incorrect tag
+ _bad_tag = None
+
+ # If the value has been implicitly tagged
+ implicit = False
+
+ # If explicitly tagged, a tuple of 2-element tuples containing the
+ # class int and tag int, from innermost to outermost
+ explicit = None
+
+ # The BER/DER header bytes
+ _header = None
+
+ # Raw encoded value bytes not including class, method, tag, length header
+ contents = None
+
+ # The BER/DER trailer bytes
+ _trailer = b''
+
+ # The native python representation of the value - this is not used by
+ # some classes since they utilize _bytes or _unicode
+ _native = None
+
+ @classmethod
+ def load(cls, encoded_data, strict=False, **kwargs):
+ """
+ Loads a BER/DER-encoded byte string using the current class as the spec
+
+ :param encoded_data:
+ A byte string of BER or DER-encoded data
+
+ :param strict:
+ A boolean indicating if trailing data should be forbidden - if so, a
+ ValueError will be raised when trailing data exists
+
+ :return:
+ An instance of the current class
+ """
+
+ if not isinstance(encoded_data, byte_cls):
+ raise TypeError('encoded_data must be a byte string, not %s' % type_name(encoded_data))
+
+ spec = None
+ if cls.tag is not None:
+ spec = cls
+
+ value, _ = _parse_build(encoded_data, spec=spec, spec_params=kwargs, strict=strict)
+ return value
+
+ def __init__(self, explicit=None, implicit=None, no_explicit=False, tag_type=None, class_=None, tag=None,
+ optional=None, default=None, contents=None, method=None):
+ """
+ The optional parameter is not used, but rather included so we don't
+ have to delete it from the parameter dictionary when passing as keyword
+ args
+
+ :param explicit:
+ An int tag number for explicit tagging, or a 2-element tuple of
+ class and tag.
+
+ :param implicit:
+ An int tag number for implicit tagging, or a 2-element tuple of
+ class and tag.
+
+ :param no_explicit:
+ If explicit tagging info should be removed from this instance.
+ Used internally to allow constructing the underlying value that
+ has been wrapped in an explicit tag.
+
+ :param tag_type:
+ None for normal values, or one of "implicit", "explicit" for tagged
+ values. Deprecated in favor of explicit and implicit params.
+
+ :param class_:
+ The class for the value - defaults to "universal" if tag_type is
+ None, otherwise defaults to "context". Valid values include:
+ - "universal"
+ - "application"
+ - "context"
+ - "private"
+ Deprecated in favor of explicit and implicit params.
+
+ :param tag:
+ The integer tag to override - usually this is used with tag_type or
+ class_. Deprecated in favor of explicit and implicit params.
+
+ :param optional:
+ Dummy parameter that allows "optional" key in spec param dicts
+
+ :param default:
+ The default value to use if the value is currently None
+
+ :param contents:
+ A byte string of the encoded contents of the value
+
+ :param method:
+ The method for the value - no default value since this is
+ normally set on a class. Valid values include:
+ - "primitive" or 0
+ - "constructed" or 1
+
+ :raises:
+ ValueError - when implicit, explicit, tag_type, class_ or tag are invalid values
+ """
+
+ try:
+ if self.__class__ not in _SETUP_CLASSES:
+ cls = self.__class__
+ # Allow explicit to be specified as a simple 2-element tuple
+ # instead of requiring the user make a nested tuple
+ if cls.explicit is not None and isinstance(cls.explicit[0], int_types):
+ cls.explicit = (cls.explicit, )
+ if hasattr(cls, '_setup'):
+ self._setup()
+ _SETUP_CLASSES[cls] = True
+
+ # Normalize tagging values
+ if explicit is not None:
+ if isinstance(explicit, int_types):
+ if class_ is None:
+ class_ = 'context'
+ explicit = (class_, explicit)
+ # Prevent both explicit and tag_type == 'explicit'
+ if tag_type == 'explicit':
+ tag_type = None
+ tag = None
+
+ if implicit is not None:
+ if isinstance(implicit, int_types):
+ if class_ is None:
+ class_ = 'context'
+ implicit = (class_, implicit)
+ # Prevent both implicit and tag_type == 'implicit'
+ if tag_type == 'implicit':
+ tag_type = None
+ tag = None
+
+ # Convert old tag_type API to explicit/implicit params
+ if tag_type is not None:
+ if class_ is None:
+ class_ = 'context'
+ if tag_type == 'explicit':
+ explicit = (class_, tag)
+ elif tag_type == 'implicit':
+ implicit = (class_, tag)
+ else:
+ raise ValueError(unwrap(
+ '''
+ tag_type must be one of "implicit", "explicit", not %s
+ ''',
+ repr(tag_type)
+ ))
+
+ if explicit is not None:
+ # Ensure we have a tuple of 2-element tuples
+ if len(explicit) == 2 and isinstance(explicit[1], int_types):
+ explicit = (explicit, )
+ for class_, tag in explicit:
+ invalid_class = None
+ if isinstance(class_, int_types):
+ if class_ not in CLASS_NUM_TO_NAME_MAP:
+ invalid_class = class_
+ else:
+ if class_ not in CLASS_NAME_TO_NUM_MAP:
+ invalid_class = class_
+ class_ = CLASS_NAME_TO_NUM_MAP[class_]
+ if invalid_class is not None:
+ raise ValueError(unwrap(
+ '''
+ explicit class must be one of "universal", "application",
+ "context", "private", not %s
+ ''',
+ repr(invalid_class)
+ ))
+ if tag is not None:
+ if not isinstance(tag, int_types):
+ raise TypeError(unwrap(
+ '''
+ explicit tag must be an integer, not %s
+ ''',
+ type_name(tag)
+ ))
+ if self.explicit is None:
+ self.explicit = ((class_, tag), )
+ else:
+ self.explicit = self.explicit + ((class_, tag), )
+
+ elif implicit is not None:
+ class_, tag = implicit
+ if class_ not in CLASS_NAME_TO_NUM_MAP:
+ raise ValueError(unwrap(
+ '''
+ implicit class must be one of "universal", "application",
+ "context", "private", not %s
+ ''',
+ repr(class_)
+ ))
+ if tag is not None:
+ if not isinstance(tag, int_types):
+ raise TypeError(unwrap(
+ '''
+ implicit tag must be an integer, not %s
+ ''',
+ type_name(tag)
+ ))
+ self.class_ = CLASS_NAME_TO_NUM_MAP[class_]
+ self.tag = tag
+ self.implicit = True
+ else:
+ if class_ is not None:
+ if class_ not in CLASS_NAME_TO_NUM_MAP:
+ raise ValueError(unwrap(
+ '''
+ class_ must be one of "universal", "application",
+ "context", "private", not %s
+ ''',
+ repr(class_)
+ ))
+ self.class_ = CLASS_NAME_TO_NUM_MAP[class_]
+
+ if self.class_ is None:
+ self.class_ = 0
+
+ if tag is not None:
+ self.tag = tag
+
+ if method is not None:
+ if method not in set(["primitive", 0, "constructed", 1]):
+ raise ValueError(unwrap(
+ '''
+ method must be one of "primitive" or "constructed",
+ not %s
+ ''',
+ repr(method)
+ ))
+ if method == "primitive":
+ method = 0
+ elif method == "constructed":
+ method = 1
+ self.method = method
+
+ if no_explicit:
+ self.explicit = None
+
+ if contents is not None:
+ self.contents = contents
+
+ elif default is not None:
+ self.set(default)
+
+ except (ValueError, TypeError) as e:
+ args = e.args[1:]
+ e.args = (e.args[0] + '\n while constructing %s' % type_name(self),) + args
+ raise e
+
+ def __str__(self):
+ """
+ Since str is different in Python 2 and 3, this calls the appropriate
+ method, __unicode__() or __bytes__()
+
+ :return:
+ A unicode string
+ """
+
+ if _PY2:
+ return self.__bytes__()
+ else:
+ return self.__unicode__()
+
+ def __repr__(self):
+ """
+ :return:
+ A unicode string
+ """
+
+ if _PY2:
+ return '<%s %s b%s>' % (type_name(self), id(self), repr(self.dump()))
+ else:
+ return '<%s %s %s>' % (type_name(self), id(self), repr(self.dump()))
+
+ def __bytes__(self):
+ """
+ A fall-back method for print() in Python 2
+
+ :return:
+ A byte string of the output of repr()
+ """
+
+ return self.__repr__().encode('utf-8')
+
+ def __unicode__(self):
+ """
+ A fall-back method for print() in Python 3
+
+ :return:
+ A unicode string of the output of repr()
+ """
+
+ return self.__repr__()
+
+ def _new_instance(self):
+ """
+ Constructs a new copy of the current object, preserving any tagging
+
+ :return:
+ An Asn1Value object
+ """
+
+ new_obj = self.__class__()
+ new_obj.class_ = self.class_
+ new_obj.tag = self.tag
+ new_obj.implicit = self.implicit
+ new_obj.explicit = self.explicit
+ return new_obj
+
+ def __copy__(self):
+ """
+ Implements the copy.copy() interface
+
+ :return:
+ A new shallow copy of the current Asn1Value object
+ """
+
+ new_obj = self._new_instance()
+ new_obj._copy(self, copy.copy)
+ return new_obj
+
+ def __deepcopy__(self, memo):
+ """
+ Implements the copy.deepcopy() interface
+
+ :param memo:
+ A dict for memoization
+
+ :return:
+ A new deep copy of the current Asn1Value object
+ """
+
+ new_obj = self._new_instance()
+ memo[id(self)] = new_obj
+ new_obj._copy(self, copy.deepcopy)
+ return new_obj
+
+ def copy(self):
+ """
+ Copies the object, preserving any special tagging from it
+
+ :return:
+ An Asn1Value object
+ """
+
+ return copy.deepcopy(self)
+
+ def retag(self, tagging, tag=None):
+ """
+ Copies the object, applying a new tagging to it
+
+ :param tagging:
+ A dict containing the keys "explicit" and "implicit". Legacy
+ API allows a unicode string of "implicit" or "explicit".
+
+ :param tag:
+ An integer tag number. Only used when tagging is a unicode string.
+
+ :return:
+ An Asn1Value object
+ """
+
+ # This is required to preserve the old API
+ if not isinstance(tagging, dict):
+ tagging = {tagging: tag}
+ new_obj = self.__class__(explicit=tagging.get('explicit'), implicit=tagging.get('implicit'))
+ new_obj._copy(self, copy.deepcopy)
+ return new_obj
+
+ def untag(self):
+ """
+ Copies the object, removing any special tagging from it
+
+ :return:
+ An Asn1Value object
+ """
+
+ new_obj = self.__class__()
+ new_obj._copy(self, copy.deepcopy)
+ return new_obj
+
+ def _copy(self, other, copy_func):
+ """
+ Copies the contents of another Asn1Value object to itself
+
+ :param other:
+ Another instance of the same class
+
+ :param copy_func:
+ A reference to copy.copy() or copy.deepcopy() to use when copying
+ lists, dicts and objects
+ """
+
+ if self.__class__ != other.__class__:
+ raise TypeError(unwrap(
+ '''
+ Can not copy values from %s object to %s object
+ ''',
+ type_name(other),
+ type_name(self)
+ ))
+
+ self.contents = other.contents
+ self._native = copy_func(other._native)
+
+ def debug(self, nest_level=1):
+ """
+ Show the binary data and parsed data in a tree structure
+ """
+
+ prefix = ' ' * nest_level
+
+ # This interacts with Any and moves the tag, implicit, explicit, _header,
+ # contents, _footer to the parsed value so duplicate data isn't present
+ has_parsed = hasattr(self, 'parsed')
+
+ _basic_debug(prefix, self)
+ if has_parsed:
+ self.parsed.debug(nest_level + 2)
+ elif hasattr(self, 'chosen'):
+ self.chosen.debug(nest_level + 2)
+ else:
+ if _PY2 and isinstance(self.native, byte_cls):
+ print('%s Native: b%s' % (prefix, repr(self.native)))
+ else:
+ print('%s Native: %s' % (prefix, self.native))
+
+ def dump(self, force=False):
+ """
+ Encodes the value using DER
+
+ :param force:
+ If the encoded contents already exist, clear them and regenerate
+ to ensure they are in DER format instead of BER format
+
+ :return:
+ A byte string of the DER-encoded value
+ """
+
+ contents = self.contents
+
+ # If the length is indefinite, force the re-encoding
+ if self._header is not None and self._header[-1:] == b'\x80':
+ force = True
+
+ if self._header is None or force:
+ if isinstance(self, Constructable) and self._indefinite:
+ self.method = 0
+
+ header = _dump_header(self.class_, self.method, self.tag, self.contents)
+
+ if self.explicit is not None:
+ for class_, tag in self.explicit:
+ header = _dump_header(class_, 1, tag, header + self.contents) + header
+
+ self._header = header
+ self._trailer = b''
+
+ return self._header + contents + self._trailer
+
+
+class ValueMap():
+ """
+ Basic functionality that allows for mapping values from ints or OIDs to
+ python unicode strings
+ """
+
+ # A dict from primitive value (int or OID) to unicode string. This needs
+ # to be defined in the source code
+ _map = None
+
+ # A dict from unicode string to int/OID. This is automatically generated
+ # from _map the first time it is needed
+ _reverse_map = None
+
+ def _setup(self):
+ """
+ Generates _reverse_map from _map
+ """
+
+ cls = self.__class__
+ if cls._map is None or cls._reverse_map is not None:
+ return
+ cls._reverse_map = {}
+ for key, value in cls._map.items():
+ cls._reverse_map[value] = key
+
+
+class Castable(object):
+ """
+ A mixin to handle converting an object between different classes that
+ represent the same encoded value, but with different rules for converting
+ to and from native Python values
+ """
+
+ def cast(self, other_class):
+ """
+ Converts the current object into an object of a different class. The
+ new class must use the ASN.1 encoding for the value.
+
+ :param other_class:
+ The class to instantiate the new object from
+
+ :return:
+ An instance of the type other_class
+ """
+
+ if other_class.tag != self.__class__.tag:
+ raise TypeError(unwrap(
+ '''
+ Can not convert a value from %s object to %s object since they
+ use different tags: %d versus %d
+ ''',
+ type_name(other_class),
+ type_name(self),
+ other_class.tag,
+ self.__class__.tag
+ ))
+
+ new_obj = other_class()
+ new_obj.class_ = self.class_
+ new_obj.implicit = self.implicit
+ new_obj.explicit = self.explicit
+ new_obj._header = self._header
+ new_obj.contents = self.contents
+ new_obj._trailer = self._trailer
+ if isinstance(self, Constructable):
+ new_obj.method = self.method
+ new_obj._indefinite = self._indefinite
+ return new_obj
+
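+# Example (illustrative): Castable.cast() re-interprets the same encoded
+# contents under a sibling class that shares the tag, e.g. viewing an
+# OctetString (tag 4) as an IntegerOctetString.
+#
+#   OctetString(b'\x01\x02').cast(IntegerOctetString).native  # -> 258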
+
+class Constructable(object):
+ """
+ A mixin to handle string types that may be constructed from chunks
+ contained within an indefinite length BER-encoded container
+ """
+
+ # Instance attribute indicating if an object was indefinite
+ # length when parsed - affects parsing and dumping
+ _indefinite = False
+
+ def _merge_chunks(self):
+ """
+ :return:
+ A concatenation of the native values of the contained chunks
+ """
+
+ if not self._indefinite:
+ return self._as_chunk()
+
+ pointer = 0
+ contents_len = len(self.contents)
+ output = None
+
+ while pointer < contents_len:
+ # We pass the current class as the spec so content semantics are preserved
+ sub_value, pointer = _parse_build(self.contents, pointer, spec=self.__class__)
+ if output is None:
+ output = sub_value._merge_chunks()
+ else:
+ output += sub_value._merge_chunks()
+
+ if output is None:
+ return self._as_chunk()
+
+ return output
+
+ def _as_chunk(self):
+ """
+ A method to return a chunk of data that can be combined for
+ constructed method values
+
+ :return:
+ A native Python value that can be added together. Examples include
+ byte strings, unicode strings or tuples.
+ """
+
+ return self.contents
+
+ def _setable_native(self):
+ """
+ Returns a native value that can be round-tripped into .set(), to
+ result in a DER encoding. This differs from .native in that .native
+ is designed for the end use, and may account for the fact that the
+ merged value is further parsed as ASN.1, such as in the case of
+ ParsableOctetString() and ParsableOctetBitString().
+
+ :return:
+ A python value that is valid to pass to .set()
+ """
+
+ return self.native
+
+ def _copy(self, other, copy_func):
+ """
+ Copies the contents of another Constructable object to itself
+
+ :param other:
+ Another instance of the same class
+
+ :param copy_func:
+ A reference to copy.copy() or copy.deepcopy() to use when copying
+ lists, dicts and objects
+ """
+
+ super(Constructable, self)._copy(other, copy_func)
+ # We really don't want to dump BER encodings, so if we see an
+ # indefinite encoding, let's re-encode it
+ if other._indefinite:
+ self.set(other._setable_native())
+
+
+class Void(Asn1Value):
+ """
+ A representation of an optional value that is not present. Has .native
+ property and .dump() method to be compatible with other value classes.
+ """
+
+ contents = b''
+
+ def __eq__(self, other):
+ """
+ :param other:
+ The other Primitive to compare to
+
+ :return:
+ A boolean
+ """
+
+ return other.__class__ == self.__class__
+
+ def __nonzero__(self):
+ return False
+
+ def __len__(self):
+ return 0
+
+ def __iter__(self):
+ return iter(())
+
+ @property
+ def native(self):
+ """
+ The native Python datatype representation of this value
+
+ :return:
+ None
+ """
+
+ return None
+
+ def dump(self, force=False):
+ """
+ Encodes the value using DER
+
+ :param force:
+ If the encoded contents already exist, clear them and regenerate
+ to ensure they are in DER format instead of BER format
+
+ :return:
+ A byte string of the DER-encoded value
+ """
+
+ return b''
+
+
+VOID = Void()
+
+
+class Any(Asn1Value):
+ """
+ A value class that can contain any value, and allows for easy parsing of
+ the underlying encoded value using a spec. This is normally contained in
+ a Structure that has an ObjectIdentifier field and _oid_pair and _oid_specs
+ defined.
+ """
+
+ # The parsed value object
+ _parsed = None
+
+ def __init__(self, value=None, **kwargs):
+ """
+ Sets the value of the object before passing to Asn1Value.__init__()
+
+ :param value:
+ An Asn1Value object that will be set as the parsed value
+ """
+
+ Asn1Value.__init__(self, **kwargs)
+
+ try:
+ if value is not None:
+ if not isinstance(value, Asn1Value):
+ raise TypeError(unwrap(
+ '''
+ value must be an instance of Asn1Value, not %s
+ ''',
+ type_name(value)
+ ))
+
+ self._parsed = (value, value.__class__, None)
+ self.contents = value.dump()
+
+ except (ValueError, TypeError) as e:
+ args = e.args[1:]
+ e.args = (e.args[0] + '\n while constructing %s' % type_name(self),) + args
+ raise e
+
+ @property
+ def native(self):
+ """
+ The native Python datatype representation of this value
+
+ :return:
+ The .native value from the parsed value object
+ """
+
+ if self._parsed is None:
+ self.parse()
+
+ return self._parsed[0].native
+
+ @property
+ def parsed(self):
+ """
+ Returns the parsed object from .parse()
+
+ :return:
+ The object returned by .parse()
+ """
+
+ if self._parsed is None:
+ self.parse()
+
+ return self._parsed[0]
+
+ def parse(self, spec=None, spec_params=None):
+ """
+ Parses the contents generically, or using a spec with optional params
+
+ :param spec:
+ A class derived from Asn1Value that defines what class_ and tag the
+ value should have, and the semantics of the encoded value. The
+ return value will be of this type. If omitted, the encoded value
+ will be decoded using the standard universal tag based on the
+ encoded tag number.
+
+ :param spec_params:
+ A dict of params to pass to the spec object
+
+ :return:
+ An object of the type spec, or if not present, a child of Asn1Value
+ """
+
+ if self._parsed is None or self._parsed[1:3] != (spec, spec_params):
+ try:
+ passed_params = spec_params or {}
+ _tag_type_to_explicit_implicit(passed_params)
+ if self.explicit is not None:
+ if 'explicit' in passed_params:
+ passed_params['explicit'] = self.explicit + passed_params['explicit']
+ else:
+ passed_params['explicit'] = self.explicit
+ contents = self._header + self.contents + self._trailer
+ parsed_value, _ = _parse_build(
+ contents,
+ spec=spec,
+ spec_params=passed_params
+ )
+ self._parsed = (parsed_value, spec, spec_params)
+
+ # Once we've parsed the Any value, clear any attributes from this object
+ # since they are now duplicate
+ self.tag = None
+ self.explicit = None
+ self.implicit = False
+ self._header = b''
+ self.contents = contents
+ self._trailer = b''
+
+ except (ValueError, TypeError) as e:
+ args = e.args[1:]
+ e.args = (e.args[0] + '\n while parsing %s' % type_name(self),) + args
+ raise e
+ return self._parsed[0]
+
+ def _copy(self, other, copy_func):
+ """
+ Copies the contents of another Any object to itself
+
+ :param other:
+ Another instance of the same class
+
+ :param copy_func:
+ A reference to copy.copy() or copy.deepcopy() to use when copying
+ lists, dicts and objects
+ """
+
+ super(Any, self)._copy(other, copy_func)
+ self._parsed = copy_func(other._parsed)
+
+ def dump(self, force=False):
+ """
+ Encodes the value using DER
+
+ :param force:
+ If the encoded contents already exist, clear them and regenerate
+ to ensure they are in DER format instead of BER format
+
+ :return:
+ A byte string of the DER-encoded value
+ """
+
+ if self._parsed is None:
+ self.parse()
+
+ return self._parsed[0].dump(force=force)
+
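+# Example (illustrative): Any wraps another value and parses lazily; .parse()
+# accepts a spec to decode the inner contents as a specific type.
+#
+#   Any(Integer(5)).parse(spec=Integer).native  # -> 5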
+
+class Choice(Asn1Value):
+ """
+ A class to handle when a value may be one of several options
+ """
+
+ # The index in _alternatives of the validated alternative
+ _choice = None
+
+ # The name of the chosen alternative
+ _name = None
+
+ # The Asn1Value object for the chosen alternative
+ _parsed = None
+
+ # Choice overrides .contents to be a property so that the code expecting
+ # the .contents attribute will get the .contents of the chosen alternative
+ _contents = None
+
+ # A list of tuples in one of the following forms.
+ #
+ # Option 1, a unicode string field name and a value class
+ #
+ # ("name", Asn1ValueClass)
+ #
+ # Option 2, same as Option 1, but with a dict of class params
+ #
+ # ("name", Asn1ValueClass, {'explicit': 5})
+ _alternatives = None
+
+ # A dict that maps tuples of (class_, tag) to an index in _alternatives
+ _id_map = None
+
+ # A dict that maps alternative names to an index in _alternatives
+ _name_map = None
+
+ @classmethod
+ def load(cls, encoded_data, strict=False, **kwargs):
+ """
+ Loads a BER/DER-encoded byte string using the current class as the spec
+
+ :param encoded_data:
+ A byte string of BER or DER encoded data
+
+ :param strict:
+ A boolean indicating if trailing data should be forbidden - if so, a
+ ValueError will be raised when trailing data exists
+
+ :return:
+ An instance of the current class
+ """
+
+ if not isinstance(encoded_data, byte_cls):
+ raise TypeError('encoded_data must be a byte string, not %s' % type_name(encoded_data))
+
+ value, _ = _parse_build(encoded_data, spec=cls, spec_params=kwargs, strict=strict)
+ return value
+
+ def _setup(self):
+ """
+ Generates _id_map from _alternatives to allow validating contents
+ """
+
+ cls = self.__class__
+ cls._id_map = {}
+ cls._name_map = {}
+ for index, info in enumerate(cls._alternatives):
+ if len(info) < 3:
+ info = info + ({},)
+ cls._alternatives[index] = info
+ id_ = _build_id_tuple(info[2], info[1])
+ cls._id_map[id_] = index
+ cls._name_map[info[0]] = index
+
+ def __init__(self, name=None, value=None, **kwargs):
+ """
+ Checks to ensure implicit tagging is not being used since it is
+ incompatible with Choice, then forwards on to Asn1Value.__init__()
+
+ :param name:
+ The name of the alternative to be set - used with value.
+ Alternatively this may be a dict with a single key being the name
+ and the value being the value, or a two-element tuple of the name
+ and the value.
+
+ :param value:
+ The alternative value to set - used with name
+
+ :raises:
+ ValueError - when implicit param is passed (or legacy tag_type param is "implicit")
+ """
+
+ _tag_type_to_explicit_implicit(kwargs)
+
+ Asn1Value.__init__(self, **kwargs)
+
+ try:
+ if kwargs.get('implicit') is not None:
+ raise ValueError(unwrap(
+ '''
+ The Choice type can not be implicitly tagged even if in an
+ implicit module - due to its nature any tagging must be
+ explicit
+ '''
+ ))
+
+ if name is not None:
+ if isinstance(name, dict):
+ if len(name) != 1:
+ raise ValueError(unwrap(
+ '''
+ When passing a dict as the "name" argument to %s,
+ it must have a single key/value - however %d were
+ present
+ ''',
+ type_name(self),
+ len(name)
+ ))
+ name, value = list(name.items())[0]
+
+ if isinstance(name, tuple):
+ if len(name) != 2:
+ raise ValueError(unwrap(
+ '''
+ When passing a tuple as the "name" argument to %s,
+ it must have two elements, the name and value -
+ however %d were present
+ ''',
+ type_name(self),
+ len(name)
+ ))
+ value = name[1]
+ name = name[0]
+
+ if name not in self._name_map:
+ raise ValueError(unwrap(
+ '''
+ The name specified, "%s", is not a valid alternative
+ for %s
+ ''',
+ name,
+ type_name(self)
+ ))
+
+ self._choice = self._name_map[name]
+ _, spec, params = self._alternatives[self._choice]
+
+ if not isinstance(value, spec):
+ value = spec(value, **params)
+ else:
+ value = _fix_tagging(value, params)
+ self._parsed = value
+
+ except (ValueError, TypeError) as e:
+ args = e.args[1:]
+ e.args = (e.args[0] + '\n while constructing %s' % type_name(self),) + args
+ raise e
+
+ @property
+ def contents(self):
+ """
+ :return:
+ A byte string of the DER-encoded contents of the chosen alternative
+ """
+
+ if self._parsed is not None:
+ return self._parsed.contents
+
+ return self._contents
+
+ @contents.setter
+ def contents(self, value):
+ """
+ :param value:
+ A byte string of the DER-encoded contents of the chosen alternative
+ """
+
+ self._contents = value
+
+ @property
+ def name(self):
+ """
+ :return:
+ A unicode string of the field name of the chosen alternative
+ """
+ if not self._name:
+ self._name = self._alternatives[self._choice][0]
+ return self._name
+
+ def parse(self):
+ """
+ Parses the detected alternative
+
+ :return:
+ An Asn1Value object of the chosen alternative
+ """
+
+ if self._parsed is None:
+ try:
+ _, spec, params = self._alternatives[self._choice]
+ self._parsed, _ = _parse_build(self._contents, spec=spec, spec_params=params)
+ except (ValueError, TypeError) as e:
+ args = e.args[1:]
+ e.args = (e.args[0] + '\n while parsing %s' % type_name(self),) + args
+ raise e
+ return self._parsed
+
+ @property
+ def chosen(self):
+ """
+ :return:
+ An Asn1Value object of the chosen alternative
+ """
+
+ return self.parse()
+
+ @property
+ def native(self):
+ """
+ The native Python datatype representation of this value
+
+ :return:
+ The .native value from the contained value object
+ """
+
+ return self.chosen.native
+
+ def validate(self, class_, tag, contents):
+ """
+ Ensures that the class and tag specified exist as an alternative
+
+ :param class_:
+ The integer class_ from the encoded value header
+
+ :param tag:
+ The integer tag from the encoded value header
+
+ :param contents:
+ A byte string of the contents of the value - used when the object
+ is explicitly tagged
+
+ :raises:
+ ValueError - when value is not a valid alternative
+ """
+
+ id_ = (class_, tag)
+
+ if self.explicit is not None:
+ if self.explicit[-1] != id_:
+ raise ValueError(unwrap(
+ '''
+ %s was explicitly tagged, but the value provided does not
+ match the class and tag
+ ''',
+ type_name(self)
+ ))
+
+ ((class_, _, tag, _, _, _), _) = _parse(contents, len(contents))
+ id_ = (class_, tag)
+
+ if id_ in self._id_map:
+ self._choice = self._id_map[id_]
+ return
+
+ # This means the Choice was implicitly tagged
+ if self.class_ is not None and self.tag is not None:
+ if len(self._alternatives) > 1:
+ raise ValueError(unwrap(
+ '''
+ %s was implicitly tagged, but more than one alternative
+ exists
+ ''',
+ type_name(self)
+ ))
+ if id_ == (self.class_, self.tag):
+ self._choice = 0
+ return
+
+ asn1 = self._format_class_tag(class_, tag)
+ asn1s = [self._format_class_tag(pair[0], pair[1]) for pair in self._id_map]
+
+ raise ValueError(unwrap(
+ '''
+ Value %s did not match the class and tag of any of the alternatives
+ in %s: %s
+ ''',
+ asn1,
+ type_name(self),
+ ', '.join(asn1s)
+ ))
+
+ def _format_class_tag(self, class_, tag):
+ """
+ :return:
+ A unicode string of a human-friendly representation of the class and tag
+ """
+
+ return '[%s %s]' % (CLASS_NUM_TO_NAME_MAP[class_].upper(), tag)
+
+ def _copy(self, other, copy_func):
+ """
+ Copies the contents of another Choice object to itself
+
+ :param other:
+ Another instance of the same class
+
+ :param copy_func:
+ A reference to copy.copy() or copy.deepcopy() to use when copying
+ lists, dicts and objects
+ """
+
+ super(Choice, self)._copy(other, copy_func)
+ self._choice = other._choice
+ self._name = other._name
+ self._parsed = copy_func(other._parsed)
+
+ def dump(self, force=False):
+ """
+ Encodes the value using DER
+
+ :param force:
+ If the encoded contents already exist, clear them and regenerate
+ to ensure they are in DER format instead of BER format
+
+ :return:
+ A byte string of the DER-encoded value
+ """
+
+ # If the length is indefinite, force the re-encoding
+ if self._header is not None and self._header[-1:] == b'\x80':
+ force = True
+
+ self._contents = self.chosen.dump(force=force)
+ if self._header is None or force:
+ self._header = b''
+ if self.explicit is not None:
+ for class_, tag in self.explicit:
+ self._header = _dump_header(class_, 1, tag, self._header + self._contents) + self._header
+ return self._header + self._contents
+
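+# Example (illustrative): a minimal Choice subclass, constructed by alternative
+# name. The class and field names below are invented for the sketch.
+#
+#   class Time(Choice):
+#       _alternatives = [
+#           ('utc_time', UTCTime),
+#           ('general_time', GeneralizedTime),
+#       ]
+#
+#   value = Time(name='general_time', value=datetime(2037, 12, 18, tzinfo=timezone.utc))
+#   value.name    # -> 'general_time'
+#   value.chosen  # -> the underlying GeneralizedTime object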
+
+class Concat(object):
+ """
+ A class that contains two or more encoded child values concatenated
+ together. THIS IS NOT PART OF THE ASN.1 SPECIFICATION! This exists to handle
+ the x509.TrustedCertificate() class for OpenSSL certificates containing
+ extra information.
+ """
+
+ # A list of the specs of the concatenated values
+ _child_specs = None
+
+ _children = None
+
+ @classmethod
+ def load(cls, encoded_data, strict=False):
+ """
+ Loads a BER/DER-encoded byte string using the current class as the spec
+
+ :param encoded_data:
+ A byte string of BER or DER encoded data
+
+ :param strict:
+ A boolean indicating if trailing data should be forbidden - if so, a
+ ValueError will be raised when trailing data exists
+
+ :return:
+ A Concat object
+ """
+
+ return cls(contents=encoded_data, strict=strict)
+
+ def __init__(self, value=None, contents=None, strict=False):
+ """
+ :param value:
+ A native Python datatype to initialize the object value with
+
+ :param contents:
+ A byte string of the encoded contents of the value
+
+ :param strict:
+ A boolean indicating if trailing data should be forbidden - if so, a
+ ValueError will be raised when trailing data exists in contents
+
+ :raises:
+ ValueError - when an error occurs with one of the children
+ TypeError - when an error occurs with one of the children
+ """
+
+ if contents is not None:
+ try:
+ contents_len = len(contents)
+ self._children = []
+
+ offset = 0
+ for spec in self._child_specs:
+ if offset < contents_len:
+ child_value, offset = _parse_build(contents, pointer=offset, spec=spec)
+ else:
+ child_value = spec()
+ self._children.append(child_value)
+
+ if strict and offset != contents_len:
+ extra_bytes = contents_len - offset
+ raise ValueError('Extra data - %d bytes of trailing data were provided' % extra_bytes)
+
+ except (ValueError, TypeError) as e:
+ args = e.args[1:]
+ e.args = (e.args[0] + '\n while constructing %s' % type_name(self),) + args
+ raise e
+
+ if value is not None:
+ if self._children is None:
+ self._children = [None] * len(self._child_specs)
+ for index, data in enumerate(value):
+ self.__setitem__(index, data)
+
+ def __str__(self):
+ """
+ Since str is different in Python 2 and 3, this calls the appropriate
+ method, __unicode__() or __bytes__()
+
+ :return:
+ A unicode string
+ """
+
+ if _PY2:
+ return self.__bytes__()
+ else:
+ return self.__unicode__()
+
+ def __bytes__(self):
+ """
+ A byte string of the DER-encoded contents
+ """
+
+ return self.dump()
+
+ def __unicode__(self):
+ """
+ :return:
+ A unicode string
+ """
+
+ return repr(self)
+
+ def __repr__(self):
+ """
+ :return:
+ A unicode string
+ """
+
+ return '<%s %s %s>' % (type_name(self), id(self), repr(self.dump()))
+
+ def __copy__(self):
+ """
+ Implements the copy.copy() interface
+
+ :return:
+ A new shallow copy of the Concat object
+ """
+
+ new_obj = self.__class__()
+ new_obj._copy(self, copy.copy)
+ return new_obj
+
+ def __deepcopy__(self, memo):
+ """
+ Implements the copy.deepcopy() interface
+
+ :param memo:
+ A dict for memoization
+
+ :return:
+ A new deep copy of the Concat object and all child objects
+ """
+
+ new_obj = self.__class__()
+ memo[id(self)] = new_obj
+ new_obj._copy(self, copy.deepcopy)
+ return new_obj
+
+ def copy(self):
+ """
+ Copies the object
+
+ :return:
+ A Concat object
+ """
+
+ return copy.deepcopy(self)
+
+ def _copy(self, other, copy_func):
+ """
+ Copies the contents of another Concat object to itself
+
+ :param other:
+ Another instance of the same class
+
+ :param copy_func:
+ A reference to copy.copy() or copy.deepcopy() to use when copying
+ lists, dicts and objects
+ """
+
+ if self.__class__ != other.__class__:
+ raise TypeError(unwrap(
+ '''
+ Can not copy values from %s object to %s object
+ ''',
+ type_name(other),
+ type_name(self)
+ ))
+
+ self._children = copy_func(other._children)
+
+ def debug(self, nest_level=1):
+ """
+ Show the binary data and parsed data in a tree structure
+ """
+
+ prefix = ' ' * nest_level
+ print('%s%s Object #%s' % (prefix, type_name(self), id(self)))
+ print('%s Children:' % (prefix,))
+ for child in self._children:
+ child.debug(nest_level + 2)
+
+ def dump(self, force=False):
+ """
+ Encodes the value using DER
+
+ :param force:
+ If the encoded contents already exist, clear them and regenerate
+ to ensure they are in DER format instead of BER format
+
+ :return:
+ A byte string of the DER-encoded value
+ """
+
+ contents = b''
+ for child in self._children:
+ contents += child.dump(force=force)
+ return contents
+
+ @property
+ def contents(self):
+ """
+ :return:
+ A byte string of the DER-encoded contents of the children
+ """
+
+ return self.dump()
+
+ def __len__(self):
+ """
+ :return:
+ Integer
+ """
+
+ return len(self._children)
+
+ def __getitem__(self, key):
+ """
+ Allows accessing children by index
+
+ :param key:
+ An integer of the child index
+
+ :raises:
+ KeyError - when an index is invalid
+
+ :return:
+ The Asn1Value object of the child specified
+ """
+
+ if key > len(self._child_specs) - 1 or key < 0:
+ raise KeyError(unwrap(
+ '''
+ No child is defined for position %d of %s
+ ''',
+ key,
+ type_name(self)
+ ))
+
+ return self._children[key]
+
+ def __setitem__(self, key, value):
+ """
+ Allows setting children by index
+
+ :param key:
+ An integer of the child index
+
+ :param value:
+ An Asn1Value object to set the child to
+
+ :raises:
+ KeyError - when an index is invalid
+ ValueError - when the value is not an instance of Asn1Value
+ """
+
+ if key > len(self._child_specs) - 1 or key < 0:
+ raise KeyError(unwrap(
+ '''
+ No child is defined for position %d of %s
+ ''',
+ key,
+ type_name(self)
+ ))
+
+ if not isinstance(value, Asn1Value):
+ raise ValueError(unwrap(
+ '''
+ Value for child %s of %s is not an instance of
+ asn1crypto.core.Asn1Value
+ ''',
+ key,
+ type_name(self)
+ ))
+
+ self._children[key] = value
+
+ def __iter__(self):
+ """
+ :return:
+ An iterator of child values
+ """
+
+ return iter(self._children)
+
+
+class Primitive(Asn1Value):
+ """
+ Sets the class_ and method attributes for primitive, universal values
+ """
+
+ class_ = 0
+
+ method = 0
+
+ def __init__(self, value=None, default=None, contents=None, **kwargs):
+ """
+ Sets the value of the object before passing to Asn1Value.__init__()
+
+ :param value:
+ A native Python datatype to initialize the object value with
+
+ :param default:
+ The default value if no value is specified
+
+ :param contents:
+ A byte string of the encoded contents of the value
+ """
+
+ Asn1Value.__init__(self, **kwargs)
+
+ try:
+ if contents is not None:
+ self.contents = contents
+
+ elif value is not None:
+ self.set(value)
+
+ elif default is not None:
+ self.set(default)
+
+ except (ValueError, TypeError) as e:
+ args = e.args[1:]
+ e.args = (e.args[0] + '\n while constructing %s' % type_name(self),) + args
+ raise e
+
+ def set(self, value):
+ """
+ Sets the value of the object
+
+ :param value:
+ A byte string
+ """
+
+ if not isinstance(value, byte_cls):
+ raise TypeError(unwrap(
+ '''
+ %s value must be a byte string, not %s
+ ''',
+ type_name(self),
+ type_name(value)
+ ))
+
+ self._native = value
+ self.contents = value
+ self._header = None
+ if self._trailer != b'':
+ self._trailer = b''
+
+ def dump(self, force=False):
+ """
+ Encodes the value using DER
+
+ :param force:
+ If the encoded contents already exist, clear them and regenerate
+ to ensure they are in DER format instead of BER format
+
+ :return:
+ A byte string of the DER-encoded value
+ """
+
+ # If the length is indefinite, force the re-encoding
+ if self._header is not None and self._header[-1:] == b'\x80':
+ force = True
+
+ if force:
+ native = self.native
+ self.contents = None
+ self.set(native)
+
+ return Asn1Value.dump(self)
+
+ def __ne__(self, other):
+ return not self == other
+
+ def __eq__(self, other):
+ """
+ :param other:
+ The other Primitive to compare to
+
+ :return:
+ A boolean
+ """
+
+ if not isinstance(other, Primitive):
+ return False
+
+ if self.contents != other.contents:
+ return False
+
+ # We compare class tag numbers since object tag numbers could be
+ # different due to implicit or explicit tagging
+ if self.__class__.tag != other.__class__.tag:
+ return False
+
+ if self.__class__ == other.__class__ and self.contents == other.contents:
+ return True
+
+ # If the objects share a common base class that is not too low-level
+ # then we can compare the contents
+ self_bases = (set(self.__class__.__bases__) | set([self.__class__])) - set([Asn1Value, Primitive, ValueMap])
+ other_bases = (set(other.__class__.__bases__) | set([other.__class__])) - set([Asn1Value, Primitive, ValueMap])
+ if self_bases | other_bases:
+ return self.contents == other.contents
+
+ # When tagging is going on, do the extra work of constructing new
+ # objects to see if the dumped representation are the same
+ if self.implicit or self.explicit or other.implicit or other.explicit:
+ return self.untag().dump() == other.untag().dump()
+
+ return self.dump() == other.dump()
+
+
+class AbstractString(Constructable, Primitive):
+ """
+ A base class for all strings that have a known encoding. In general, we do
+ not worry ourselves with confirming that the decoded values match a specific
+ set of characters, only that they are decoded into a Python unicode string
+ """
+
+ # The Python encoding name to use when decoding or encoding the contents
+ _encoding = 'latin1'
+
+ # Instance attribute of (possibly-merged) unicode string
+ _unicode = None
+
+ def set(self, value):
+ """
+ Sets the value of the string
+
+ :param value:
+ A unicode string
+ """
+
+ if not isinstance(value, str_cls):
+ raise TypeError(unwrap(
+ '''
+ %s value must be a unicode string, not %s
+ ''',
+ type_name(self),
+ type_name(value)
+ ))
+
+ self._unicode = value
+ self.contents = value.encode(self._encoding)
+ self._header = None
+ if self._indefinite:
+ self._indefinite = False
+ self.method = 0
+ if self._trailer != b'':
+ self._trailer = b''
+
+ def __unicode__(self):
+ """
+ :return:
+ A unicode string
+ """
+
+ if self.contents is None:
+ return ''
+ if self._unicode is None:
+ self._unicode = self._merge_chunks().decode(self._encoding)
+ return self._unicode
+
+ def _copy(self, other, copy_func):
+ """
+ Copies the contents of another AbstractString object to itself
+
+ :param other:
+ Another instance of the same class
+
+ :param copy_func:
+ A reference to copy.copy() or copy.deepcopy() to use when copying
+ lists, dicts and objects
+ """
+
+ super(AbstractString, self)._copy(other, copy_func)
+ self._unicode = other._unicode
+
+ @property
+ def native(self):
+ """
+ The native Python datatype representation of this value
+
+ :return:
+ A unicode string or None
+ """
+
+ if self.contents is None:
+ return None
+
+ return self.__unicode__()
+
+
+class Boolean(Primitive):
+ """
+ Represents a boolean in both ASN.1 and Python
+ """
+
+ tag = 1
+
+ def set(self, value):
+ """
+ Sets the value of the object
+
+ :param value:
+ True, False or another value that works with bool()
+ """
+
+ self._native = bool(value)
+ self.contents = b'\x00' if not value else b'\xff'
+ self._header = None
+ if self._trailer != b'':
+ self._trailer = b''
+
+ # Python 2
+ def __nonzero__(self):
+ """
+ :return:
+ True or False
+ """
+ return self.__bool__()
+
+ def __bool__(self):
+ """
+ :return:
+ True or False
+ """
+ return self.contents != b'\x00'
+
+ @property
+ def native(self):
+ """
+ The native Python datatype representation of this value
+
+ :return:
+ True, False or None
+ """
+
+ if self.contents is None:
+ return None
+
+ if self._native is None:
+ self._native = self.__bool__()
+ return self._native
+
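+# Example (illustrative): round-tripping a Boolean through DER.
+#
+#   Boolean(True).dump()                   # -> b'\x01\x01\xff'
+#   Boolean.load(b'\x01\x01\x00').native   # -> False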
+
+class Integer(Primitive, ValueMap):
+ """
+ Represents an integer in both ASN.1 and Python
+ """
+
+ tag = 2
+
+ def set(self, value):
+ """
+ Sets the value of the object
+
+ :param value:
+ An integer, or a unicode string if _map is set
+
+ :raises:
+ ValueError - when an invalid value is passed
+ """
+
+ if isinstance(value, str_cls):
+ if self._map is None:
+ raise ValueError(unwrap(
+ '''
+ %s value is a unicode string, but no _map provided
+ ''',
+ type_name(self)
+ ))
+
+ if value not in self._reverse_map:
+ raise ValueError(unwrap(
+ '''
+ %s value, %s, is not present in the _map
+ ''',
+ type_name(self),
+ value
+ ))
+
+ value = self._reverse_map[value]
+
+ elif not isinstance(value, int_types):
+ raise TypeError(unwrap(
+ '''
+ %s value must be an integer or unicode string when a name_map
+ is provided, not %s
+ ''',
+ type_name(self),
+ type_name(value)
+ ))
+
+ self._native = self._map[value] if self._map and value in self._map else value
+
+ self.contents = int_to_bytes(value, signed=True)
+ self._header = None
+ if self._trailer != b'':
+ self._trailer = b''
+
+ def __int__(self):
+ """
+ :return:
+ An integer
+ """
+ return int_from_bytes(self.contents, signed=True)
+
+ @property
+ def native(self):
+ """
+ The native Python datatype representation of this value
+
+ :return:
+ An integer or None
+ """
+
+ if self.contents is None:
+ return None
+
+ if self._native is None:
+ self._native = self.__int__()
+ if self._map is not None and self._native in self._map:
+ self._native = self._map[self._native]
+ return self._native
+
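+# Example (illustrative): an Integer subclass with a _map reports the mapped
+# name via .native; the class below is invented for the sketch.
+#
+#   class Version(Integer):
+#       _map = {0: 'v0', 1: 'v1', 2: 'v2'}
+#
+#   Version(2).native     # -> 'v2'
+#   Version('v2').dump()  # -> b'\x02\x01\x02'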
+
+class _IntegerBitString(object):
+ """
+ A mixin for IntegerBitString and BitString to parse the contents as an integer.
+ """
+
+ # Tuple of 1s and 0s; set through native
+ _unused_bits = ()
+
+ def _as_chunk(self):
+ """
+ Parse the contents of a primitive BitString encoding as an integer value.
+ Allows reconstructing indefinite length values.
+
+ :raises:
+ ValueError - when an invalid value is passed
+
+ :return:
+ A list with one tuple (value, bits, unused_bits) where value is an integer
+ with the value of the BitString, bits is the bit count of value and
+ unused_bits is a tuple of 1s and 0s.
+ """
+
+ if self._indefinite:
+ # return an empty chunk, for cases like \x23\x80\x00\x00
+ return []
+
+ unused_bits_len = ord(self.contents[0]) if _PY2 else self.contents[0]
+ value = int_from_bytes(self.contents[1:])
+ bits = (len(self.contents) - 1) * 8
+
+ if not unused_bits_len:
+ return [(value, bits, ())]
+
+ if len(self.contents) == 1:
+ # Disallowed by X.690 §8.6.2.3
+ raise ValueError('Empty bit string has {0} unused bits'.format(unused_bits_len))
+
+ if unused_bits_len > 7:
+ # Disallowed by X.690 §8.6.2.2
+ raise ValueError('Bit string has {0} unused bits'.format(unused_bits_len))
+
+ unused_bits = _int_to_bit_tuple(value & ((1 << unused_bits_len) - 1), unused_bits_len)
+ value >>= unused_bits_len
+ bits -= unused_bits_len
+
+ return [(value, bits, unused_bits)]
+
+ def _chunks_to_int(self):
+ """
+ Combines the chunks into a single value.
+
+ :raises:
+ ValueError - when an invalid value is passed
+
+ :return:
+ A tuple (value, bits, unused_bits) where value is an integer with the
+ value of the BitString, bits is the bit count of value and unused_bits
+ is a tuple of 1s and 0s.
+ """
+
+ if not self._indefinite:
+ # Fast path
+ return self._as_chunk()[0]
+
+ value = 0
+ total_bits = 0
+ unused_bits = ()
+
+ # X.690 §8.6.3 allows empty indefinite encodings
+ for chunk, bits, unused_bits in self._merge_chunks():
+ if total_bits & 7:
+ # Disallowed by X.690 §8.6.4
+ raise ValueError('Only last chunk in a bit string may have unused bits')
+ total_bits += bits
+ value = (value << bits) | chunk
+
+ return value, total_bits, unused_bits
+
+ def _copy(self, other, copy_func):
+ """
+ Copies the contents of another _IntegerBitString object to itself
+
+ :param other:
+ Another instance of the same class
+
+ :param copy_func:
+ A reference to copy.copy() or copy.deepcopy() to use when copying
+ lists, dicts and objects
+ """
+
+ super(_IntegerBitString, self)._copy(other, copy_func)
+ self._unused_bits = other._unused_bits
+
+ @property
+ def unused_bits(self):
+ """
+ The unused bits of the bit string encoding.
+
+ :return:
+ A tuple of 1s and 0s
+ """
+
+ # call native to set _unused_bits
+ self.native
+
+ return self._unused_bits
+
+
+class BitString(_IntegerBitString, Constructable, Castable, Primitive, ValueMap):
+ """
+ Represents a bit string from ASN.1 as a Python tuple of 1s and 0s
+ """
+
+ tag = 3
+
+ _size = None
+
+ def _setup(self):
+ """
+ Generates _reverse_map from _map
+ """
+
+ ValueMap._setup(self)
+
+ cls = self.__class__
+ if cls._map is not None:
+ cls._size = max(self._map.keys()) + 1
+
+ def set(self, value):
+ """
+ Sets the value of the object
+
+ :param value:
+ An integer or a tuple of integers 0 and 1
+
+ :raises:
+ ValueError - when an invalid value is passed
+ """
+
+ if isinstance(value, set):
+ if self._map is None:
+ raise ValueError(unwrap(
+ '''
+ %s._map has not been defined
+ ''',
+ type_name(self)
+ ))
+
+ bits = [0] * self._size
+ self._native = value
+ for index in range(0, self._size):
+ key = self._map.get(index)
+ if key is None:
+ continue
+ if key in value:
+ bits[index] = 1
+
+ value = ''.join(map(str_cls, bits))
+
+ elif value.__class__ == tuple:
+ if self._map is None:
+ self._native = value
+ else:
+ self._native = set()
+ for index, bit in enumerate(value):
+ if bit:
+ name = self._map.get(index, index)
+ self._native.add(name)
+ value = ''.join(map(str_cls, value))
+
+ else:
+ raise TypeError(unwrap(
+ '''
+ %s value must be a tuple of ones and zeros or a set of unicode
+ strings, not %s
+ ''',
+ type_name(self),
+ type_name(value)
+ ))
+
+ if self._map is not None:
+ if len(value) > self._size:
+ raise ValueError(unwrap(
+ '''
+ %s value must be at most %s bits long, specified was %s long
+ ''',
+ type_name(self),
+ self._size,
+ len(value)
+ ))
+ # A NamedBitList must have trailing zero bits truncated. See
+ # https://www.itu.int/ITU-T/studygroups/com17/languages/X.690-0207.pdf
+ # section 11.2,
+ # https://tools.ietf.org/html/rfc5280#page-134 and
+ # https://www.ietf.org/mail-archive/web/pkix/current/msg10443.html
+ value = value.rstrip('0')
+ size = len(value)
+
+ size_mod = size % 8
+ extra_bits = 0
+ if size_mod != 0:
+ extra_bits = 8 - size_mod
+ value += '0' * extra_bits
+
+ size_in_bytes = int(math.ceil(size / 8))
+
+ if extra_bits:
+ extra_bits_byte = int_to_bytes(extra_bits)
+ else:
+ extra_bits_byte = b'\x00'
+
+ if value == '':
+ value_bytes = b''
+ else:
+ value_bytes = int_to_bytes(int(value, 2))
+ if len(value_bytes) != size_in_bytes:
+ value_bytes = (b'\x00' * (size_in_bytes - len(value_bytes))) + value_bytes
+
+ self.contents = extra_bits_byte + value_bytes
+ self._unused_bits = (0,) * extra_bits
+ self._header = None
+ if self._indefinite:
+ self._indefinite = False
+ self.method = 0
+ if self._trailer != b'':
+ self._trailer = b''
+
+ def __getitem__(self, key):
+ """
+ Retrieves a boolean version of one of the bits based on a name from the
+ _map
+
+ :param key:
+ The unicode string of one of the bit names
+
+ :raises:
+ ValueError - when _map is not set or the key name is invalid
+
+ :return:
+ A boolean if the bit is set
+ """
+
+ is_int = isinstance(key, int_types)
+ if not is_int:
+ if not isinstance(self._map, dict):
+ raise ValueError(unwrap(
+ '''
+ %s._map has not been defined
+ ''',
+ type_name(self)
+ ))
+
+ if key not in self._reverse_map:
+ raise ValueError(unwrap(
+ '''
+ %s._map does not contain an entry for "%s"
+ ''',
+ type_name(self),
+ key
+ ))
+
+ if self._native is None:
+ self.native
+
+ if self._map is None:
+ if len(self._native) >= key + 1:
+ return bool(self._native[key])
+ return False
+
+ if is_int:
+ key = self._map.get(key, key)
+
+ return key in self._native
+
+ def __setitem__(self, key, value):
+ """
+ Sets one of the bits based on a name from the _map
+
+ :param key:
+ The unicode string of one of the bit names
+
+ :param value:
+ A boolean value
+
+ :raises:
+ ValueError - when _map is not set or the key name is invalid
+ """
+
+ is_int = isinstance(key, int_types)
+ if not is_int:
+ if self._map is None:
+ raise ValueError(unwrap(
+ '''
+ %s._map has not been defined
+ ''',
+ type_name(self)
+ ))
+
+ if key not in self._reverse_map:
+ raise ValueError(unwrap(
+ '''
+ %s._map does not contain an entry for "%s"
+ ''',
+ type_name(self),
+ key
+ ))
+
+ if self._native is None:
+ self.native
+
+ if self._map is None:
+ new_native = list(self._native)
+ max_key = len(new_native) - 1
+ if key > max_key:
+ new_native.extend([0] * (key - max_key))
+ new_native[key] = 1 if value else 0
+ self._native = tuple(new_native)
+
+ else:
+ if is_int:
+ key = self._map.get(key, key)
+
+ if value:
+ if key not in self._native:
+ self._native.add(key)
+ else:
+ if key in self._native:
+ self._native.remove(key)
+
+ self.set(self._native)
+
+ @property
+ def native(self):
+ """
+ The native Python datatype representation of this value
+
+ :return:
+ If a _map is set, a set of names, or if no _map is set, a tuple of
+ integers 1 and 0. None if no value.
+ """
+
+ # For BitString we default the value to be all zeros
+ if self.contents is None:
+ if self._map is None:
+ self.set(())
+ else:
+ self.set(set())
+
+ if self._native is None:
+ int_value, bit_count, self._unused_bits = self._chunks_to_int()
+ bits = _int_to_bit_tuple(int_value, bit_count)
+
+ if self._map:
+ self._native = set()
+ for index, bit in enumerate(bits):
+ if bit:
+ name = self._map.get(index, index)
+ self._native.add(name)
+ else:
+ self._native = bits
+ return self._native
+
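+# Editor's note: the sketch below is an illustrative usage of the BitString
+# class above; it is not part of the upstream asn1crypto source and the
+# "Permissions" subclass is invented for this example.
+#
+#     class Permissions(BitString):
+#         _map = {0: 'read', 1: 'write', 2: 'execute'}
+#
+#     perms = Permissions({'read', 'execute'})   # or Permissions((1, 0, 1))
+#     perms.native                               # {'read', 'execute'}
+#     perms.dump()                               # b'\x03\x02\x05\xa0' - first content
+#                                                # byte records the 5 unused padding bits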
+
+class OctetBitString(Constructable, Castable, Primitive):
+ """
+ Represents a bit string in ASN.1 as a Python byte string
+ """
+
+ tag = 3
+
+ # Instance attribute of (possibly-merged) byte string
+ _bytes = None
+
+ # Tuple of 1s and 0s; set through native
+ _unused_bits = ()
+
+ def set(self, value):
+ """
+ Sets the value of the object
+
+ :param value:
+ A byte string
+
+ :raises:
+ ValueError - when an invalid value is passed
+ """
+
+ if not isinstance(value, byte_cls):
+ raise TypeError(unwrap(
+ '''
+ %s value must be a byte string, not %s
+ ''',
+ type_name(self),
+ type_name(value)
+ ))
+
+ self._bytes = value
+ # Set the unused bits to 0
+ self.contents = b'\x00' + value
+ self._unused_bits = ()
+ self._header = None
+ if self._indefinite:
+ self._indefinite = False
+ self.method = 0
+ if self._trailer != b'':
+ self._trailer = b''
+
+ def __bytes__(self):
+ """
+ :return:
+ A byte string
+ """
+
+ if self.contents is None:
+ return b''
+ if self._bytes is None:
+ if not self._indefinite:
+ self._bytes, self._unused_bits = self._as_chunk()[0]
+ else:
+ chunks = self._merge_chunks()
+ self._unused_bits = ()
+ for chunk in chunks:
+ if self._unused_bits:
+ # Disallowed by X.690 §8.6.4
+ raise ValueError('Only last chunk in a bit string may have unused bits')
+ self._unused_bits = chunk[1]
+ self._bytes = b''.join(chunk[0] for chunk in chunks)
+
+ return self._bytes
+
+ def _copy(self, other, copy_func):
+ """
+ Copies the contents of another OctetBitString object to itself
+
+        :param other:
+ Another instance of the same class
+
+ :param copy_func:
+            A reference to copy.copy() or copy.deepcopy() to use when copying
+ lists, dicts and objects
+ """
+
+ super(OctetBitString, self)._copy(other, copy_func)
+ self._bytes = other._bytes
+ self._unused_bits = other._unused_bits
+
+ def _as_chunk(self):
+ """
+ Allows reconstructing indefinite length values
+
+ :raises:
+ ValueError - when an invalid value is passed
+
+ :return:
+            List with one tuple, consisting of a byte string and a tuple of unused bits
+ """
+
+ unused_bits_len = ord(self.contents[0]) if _PY2 else self.contents[0]
+ if not unused_bits_len:
+ return [(self.contents[1:], ())]
+
+ if len(self.contents) == 1:
+ # Disallowed by X.690 §8.6.2.3
+ raise ValueError('Empty bit string has {0} unused bits'.format(unused_bits_len))
+
+ if unused_bits_len > 7:
+ # Disallowed by X.690 §8.6.2.2
+ raise ValueError('Bit string has {0} unused bits'.format(unused_bits_len))
+
+ mask = (1 << unused_bits_len) - 1
+ last_byte = ord(self.contents[-1]) if _PY2 else self.contents[-1]
+
+ # zero out the unused bits in the last byte.
+ zeroed_byte = last_byte & ~mask
+ value = self.contents[1:-1] + (chr(zeroed_byte) if _PY2 else bytes((zeroed_byte,)))
+
+ unused_bits = _int_to_bit_tuple(last_byte & mask, unused_bits_len)
+
+ return [(value, unused_bits)]
+
+ @property
+ def native(self):
+ """
+ The native Python datatype representation of this value
+
+ :return:
+ A byte string or None
+ """
+
+ if self.contents is None:
+ return None
+
+ return self.__bytes__()
+
+ @property
+ def unused_bits(self):
+ """
+ The unused bits of the bit string encoding.
+
+ :return:
+ A tuple of 1s and 0s
+ """
+
+ # call native to set _unused_bits
+ self.native
+
+ return self._unused_bits
+
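+# Editor's note: illustrative usage of the OctetBitString class above; not
+# part of the upstream asn1crypto source. The encoded contents are the raw
+# bytes prefixed with a single byte giving the count of unused bits, which is
+# always zero when the value is set from a byte string:
+#
+#     obs = OctetBitString(b'\x02\x03')
+#     obs.native          # b'\x02\x03'
+#     obs.dump()          # b'\x03\x03\x00\x02\x03'
+#     obs.unused_bits     # ()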
+
+class IntegerBitString(_IntegerBitString, Constructable, Castable, Primitive):
+ """
+ Represents a bit string in ASN.1 as a Python integer
+ """
+
+ tag = 3
+
+ def set(self, value):
+ """
+ Sets the value of the object
+
+ :param value:
+ An integer
+
+ :raises:
+ ValueError - when an invalid value is passed
+ """
+
+ if not isinstance(value, int_types):
+ raise TypeError(unwrap(
+ '''
+ %s value must be a positive integer, not %s
+ ''',
+ type_name(self),
+ type_name(value)
+ ))
+
+ if value < 0:
+ raise ValueError(unwrap(
+ '''
+ %s value must be a positive integer, not %d
+ ''',
+ type_name(self),
+ value
+ ))
+
+ self._native = value
+ # Set the unused bits to 0
+ self.contents = b'\x00' + int_to_bytes(value, signed=True)
+ self._unused_bits = ()
+ self._header = None
+ if self._indefinite:
+ self._indefinite = False
+ self.method = 0
+ if self._trailer != b'':
+ self._trailer = b''
+
+ @property
+ def native(self):
+ """
+ The native Python datatype representation of this value
+
+ :return:
+ An integer or None
+ """
+
+ if self.contents is None:
+ return None
+
+ if self._native is None:
+ self._native, __, self._unused_bits = self._chunks_to_int()
+
+ return self._native
+
+
+class OctetString(Constructable, Castable, Primitive):
+ """
+ Represents a byte string in both ASN.1 and Python
+ """
+
+ tag = 4
+
+ # Instance attribute of (possibly-merged) byte string
+ _bytes = None
+
+ def set(self, value):
+ """
+ Sets the value of the object
+
+ :param value:
+ A byte string
+ """
+
+ if not isinstance(value, byte_cls):
+ raise TypeError(unwrap(
+ '''
+ %s value must be a byte string, not %s
+ ''',
+ type_name(self),
+ type_name(value)
+ ))
+
+ self._bytes = value
+ self.contents = value
+ self._header = None
+ if self._indefinite:
+ self._indefinite = False
+ self.method = 0
+ if self._trailer != b'':
+ self._trailer = b''
+
+ def __bytes__(self):
+ """
+ :return:
+ A byte string
+ """
+
+ if self.contents is None:
+ return b''
+ if self._bytes is None:
+ self._bytes = self._merge_chunks()
+ return self._bytes
+
+ def _copy(self, other, copy_func):
+ """
+ Copies the contents of another OctetString object to itself
+
+        :param other:
+ Another instance of the same class
+
+ :param copy_func:
+            A reference to copy.copy() or copy.deepcopy() to use when copying
+ lists, dicts and objects
+ """
+
+ super(OctetString, self)._copy(other, copy_func)
+ self._bytes = other._bytes
+
+ @property
+ def native(self):
+ """
+ The native Python datatype representation of this value
+
+ :return:
+ A byte string or None
+ """
+
+ if self.contents is None:
+ return None
+
+ return self.__bytes__()
+
+
+class IntegerOctetString(Constructable, Castable, Primitive):
+ """
+ Represents a byte string in ASN.1 as a Python integer
+ """
+
+ tag = 4
+
+ # An explicit length in bytes the integer should be encoded to. This should
+ # generally not be used since DER defines a canonical encoding, however some
+ # use of this, such as when storing elliptic curve private keys, requires an
+ # exact number of bytes, even if the leading bytes are null.
+ _encoded_width = None
+
+ def set(self, value):
+ """
+ Sets the value of the object
+
+ :param value:
+ An integer
+
+ :raises:
+ ValueError - when an invalid value is passed
+ """
+
+ if not isinstance(value, int_types):
+ raise TypeError(unwrap(
+ '''
+ %s value must be a positive integer, not %s
+ ''',
+ type_name(self),
+ type_name(value)
+ ))
+
+ if value < 0:
+ raise ValueError(unwrap(
+ '''
+ %s value must be a positive integer, not %d
+ ''',
+ type_name(self),
+ value
+ ))
+
+ self._native = value
+ self.contents = int_to_bytes(value, signed=False, width=self._encoded_width)
+ self._header = None
+ if self._indefinite:
+ self._indefinite = False
+ self.method = 0
+ if self._trailer != b'':
+ self._trailer = b''
+
+ @property
+ def native(self):
+ """
+ The native Python datatype representation of this value
+
+ :return:
+ An integer or None
+ """
+
+ if self.contents is None:
+ return None
+
+ if self._native is None:
+ self._native = int_from_bytes(self._merge_chunks())
+ return self._native
+
+ def set_encoded_width(self, width):
+ """
+        Set the explicit encoding width for the integer
+
+ :param width:
+ An integer byte width to encode the integer to
+ """
+
+ self._encoded_width = width
+ # Make sure the encoded value is up-to-date with the proper width
+ if self.contents is not None and len(self.contents) != width:
+ self.set(self.native)
+
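+# Editor's note: illustrative usage of IntegerOctetString.set_encoded_width()
+# above; not part of the upstream asn1crypto source. Forcing a fixed width
+# preserves leading null bytes, which matters for values such as elliptic
+# curve private keys:
+#
+#     val = IntegerOctetString(1)
+#     val.dump()                  # b'\x04\x01\x01'
+#     val.set_encoded_width(4)
+#     val.dump()                  # b'\x04\x04\x00\x00\x00\x01'
+#     val.native                  # still 1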
+
+class ParsableOctetString(Constructable, Castable, Primitive):
+
+ tag = 4
+
+ _parsed = None
+
+ # Instance attribute of (possibly-merged) byte string
+ _bytes = None
+
+ def __init__(self, value=None, parsed=None, **kwargs):
+ """
+ Allows providing a parsed object that will be serialized to get the
+ byte string value
+
+ :param value:
+ A native Python datatype to initialize the object value with
+
+ :param parsed:
+ If value is None and this is an Asn1Value object, this will be
+ set as the parsed value, and the value will be obtained by calling
+ .dump() on this object.
+ """
+
+ set_parsed = False
+ if value is None and parsed is not None and isinstance(parsed, Asn1Value):
+ value = parsed.dump()
+ set_parsed = True
+
+ Primitive.__init__(self, value=value, **kwargs)
+
+ if set_parsed:
+ self._parsed = (parsed, parsed.__class__, None)
+
+ def set(self, value):
+ """
+ Sets the value of the object
+
+ :param value:
+ A byte string
+ """
+
+ if not isinstance(value, byte_cls):
+ raise TypeError(unwrap(
+ '''
+ %s value must be a byte string, not %s
+ ''',
+ type_name(self),
+ type_name(value)
+ ))
+
+ self._bytes = value
+ self.contents = value
+ self._header = None
+ if self._indefinite:
+ self._indefinite = False
+ self.method = 0
+ if self._trailer != b'':
+ self._trailer = b''
+
+ def parse(self, spec=None, spec_params=None):
+ """
+ Parses the contents generically, or using a spec with optional params
+
+ :param spec:
+ A class derived from Asn1Value that defines what class_ and tag the
+ value should have, and the semantics of the encoded value. The
+ return value will be of this type. If omitted, the encoded value
+ will be decoded using the standard universal tag based on the
+ encoded tag number.
+
+ :param spec_params:
+ A dict of params to pass to the spec object
+
+ :return:
+ An object of the type spec, or if not present, a child of Asn1Value
+ """
+
+ if self._parsed is None or self._parsed[1:3] != (spec, spec_params):
+ parsed_value, _ = _parse_build(self.__bytes__(), spec=spec, spec_params=spec_params)
+ self._parsed = (parsed_value, spec, spec_params)
+ return self._parsed[0]
+
+ def __bytes__(self):
+ """
+ :return:
+ A byte string
+ """
+
+ if self.contents is None:
+ return b''
+ if self._bytes is None:
+ self._bytes = self._merge_chunks()
+ return self._bytes
+
+ def _setable_native(self):
+ """
+ Returns a byte string that can be passed into .set()
+
+ :return:
+ A python value that is valid to pass to .set()
+ """
+
+ return self.__bytes__()
+
+ def _copy(self, other, copy_func):
+ """
+ Copies the contents of another ParsableOctetString object to itself
+
+        :param other:
+ Another instance of the same class
+
+ :param copy_func:
+            A reference to copy.copy() or copy.deepcopy() to use when copying
+ lists, dicts and objects
+ """
+
+ super(ParsableOctetString, self)._copy(other, copy_func)
+ self._bytes = other._bytes
+ self._parsed = copy_func(other._parsed)
+
+ @property
+ def native(self):
+ """
+ The native Python datatype representation of this value
+
+ :return:
+ A byte string or None
+ """
+
+ if self.contents is None:
+ return None
+
+ if self._parsed is not None:
+ return self._parsed[0].native
+ else:
+ return self.__bytes__()
+
+ @property
+ def parsed(self):
+ """
+ Returns the parsed object from .parse()
+
+ :return:
+ The object returned by .parse()
+ """
+
+ if self._parsed is None:
+ self.parse()
+
+ return self._parsed[0]
+
+ def dump(self, force=False):
+ """
+ Encodes the value using DER
+
+ :param force:
+ If the encoded contents already exist, clear them and regenerate
+ to ensure they are in DER format instead of BER format
+
+ :return:
+ A byte string of the DER-encoded value
+ """
+
+ # If the length is indefinite, force the re-encoding
+ if self._indefinite:
+ force = True
+
+ if force:
+ if self._parsed is not None:
+ native = self.parsed.dump(force=force)
+ else:
+ native = self.native
+ self.contents = None
+ self.set(native)
+
+ return Asn1Value.dump(self)
+
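+# Editor's note: illustrative usage of the ParsableOctetString class above;
+# not part of the upstream asn1crypto source. The octet string wraps another
+# DER-encoded value that can be pulled back out with .parse():
+#
+#     wrapped = ParsableOctetString(Integer(5).dump())
+#     wrapped.native                    # b'\x02\x01\x05' before parsing
+#     wrapped.parse(Integer).native     # 5
+#     wrapped.native                    # 5 once parsed
+#     wrapped.dump()                    # b'\x04\x03\x02\x01\x05'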
+
+class ParsableOctetBitString(ParsableOctetString):
+
+ tag = 3
+
+ def set(self, value):
+ """
+ Sets the value of the object
+
+ :param value:
+ A byte string
+
+ :raises:
+ ValueError - when an invalid value is passed
+ """
+
+ if not isinstance(value, byte_cls):
+ raise TypeError(unwrap(
+ '''
+ %s value must be a byte string, not %s
+ ''',
+ type_name(self),
+ type_name(value)
+ ))
+
+ self._bytes = value
+ # Set the unused bits to 0
+ self.contents = b'\x00' + value
+ self._header = None
+ if self._indefinite:
+ self._indefinite = False
+ self.method = 0
+ if self._trailer != b'':
+ self._trailer = b''
+
+ def _as_chunk(self):
+ """
+ Allows reconstructing indefinite length values
+
+ :raises:
+ ValueError - when an invalid value is passed
+
+ :return:
+ A byte string
+ """
+
+ unused_bits_len = ord(self.contents[0]) if _PY2 else self.contents[0]
+ if unused_bits_len:
+ raise ValueError('ParsableOctetBitString should have no unused bits')
+
+ return self.contents[1:]
+
+
+class Null(Primitive):
+ """
+ Represents a null value in ASN.1 as None in Python
+ """
+
+ tag = 5
+
+ contents = b''
+
+ def set(self, value):
+ """
+ Sets the value of the object
+
+ :param value:
+ None
+ """
+
+ self.contents = b''
+
+ @property
+ def native(self):
+ """
+ The native Python datatype representation of this value
+
+ :return:
+ None
+ """
+
+ return None
+
+
+class ObjectIdentifier(Primitive, ValueMap):
+ """
+ Represents an object identifier in ASN.1 as a Python unicode dotted
+ integer string
+ """
+
+ tag = 6
+
+ # A unicode string of the dotted form of the object identifier
+ _dotted = None
+
+ @classmethod
+ def map(cls, value):
+ """
+ Converts a dotted unicode string OID into a mapped unicode string
+
+ :param value:
+ A dotted unicode string OID
+
+ :raises:
+ ValueError - when no _map dict has been defined on the class
+ TypeError - when value is not a unicode string
+
+ :return:
+ A mapped unicode string
+ """
+
+ if cls._map is None:
+ raise ValueError(unwrap(
+ '''
+ %s._map has not been defined
+ ''',
+ type_name(cls)
+ ))
+
+ if not isinstance(value, str_cls):
+ raise TypeError(unwrap(
+ '''
+ value must be a unicode string, not %s
+ ''',
+ type_name(value)
+ ))
+
+ return cls._map.get(value, value)
+
+ @classmethod
+ def unmap(cls, value):
+ """
+ Converts a mapped unicode string value into a dotted unicode string OID
+
+ :param value:
+ A mapped unicode string OR dotted unicode string OID
+
+ :raises:
+ ValueError - when no _map dict has been defined on the class or the value can't be unmapped
+ TypeError - when value is not a unicode string
+
+ :return:
+ A dotted unicode string OID
+ """
+
+ if cls not in _SETUP_CLASSES:
+ cls()._setup()
+ _SETUP_CLASSES[cls] = True
+
+ if cls._map is None:
+ raise ValueError(unwrap(
+ '''
+ %s._map has not been defined
+ ''',
+ type_name(cls)
+ ))
+
+ if not isinstance(value, str_cls):
+ raise TypeError(unwrap(
+ '''
+ value must be a unicode string, not %s
+ ''',
+ type_name(value)
+ ))
+
+ if value in cls._reverse_map:
+ return cls._reverse_map[value]
+
+ if not _OID_RE.match(value):
+ raise ValueError(unwrap(
+ '''
+ %s._map does not contain an entry for "%s"
+ ''',
+ type_name(cls),
+ value
+ ))
+
+ return value
+
+ def set(self, value):
+ """
+ Sets the value of the object
+
+ :param value:
+ A unicode string. May be a dotted integer string, or if _map is
+ provided, one of the mapped values.
+
+ :raises:
+ ValueError - when an invalid value is passed
+ """
+
+ if not isinstance(value, str_cls):
+ raise TypeError(unwrap(
+ '''
+ %s value must be a unicode string, not %s
+ ''',
+ type_name(self),
+ type_name(value)
+ ))
+
+ self._native = value
+
+ if self._map is not None:
+ if value in self._reverse_map:
+ value = self._reverse_map[value]
+
+ self.contents = b''
+ first = None
+ for index, part in enumerate(value.split('.')):
+ part = int(part)
+
+ # The first two parts are merged into a single byte
+ if index == 0:
+ first = part
+ continue
+ elif index == 1:
+ if first > 2:
+ raise ValueError(unwrap(
+ '''
+ First arc must be one of 0, 1 or 2, not %s
+ ''',
+ repr(first)
+ ))
+ elif first < 2 and part >= 40:
+ raise ValueError(unwrap(
+ '''
+ Second arc must be less than 40 if first arc is 0 or
+ 1, not %s
+ ''',
+ repr(part)
+ ))
+ part = (first * 40) + part
+
+ encoded_part = chr_cls(0x7F & part)
+ part = part >> 7
+ while part > 0:
+ encoded_part = chr_cls(0x80 | (0x7F & part)) + encoded_part
+ part = part >> 7
+ self.contents += encoded_part
+
+ self._header = None
+ if self._trailer != b'':
+ self._trailer = b''
+
+ def __unicode__(self):
+ """
+ :return:
+ A unicode string
+ """
+
+ return self.dotted
+
+ @property
+ def dotted(self):
+ """
+ :return:
+ A unicode string of the object identifier in dotted notation, thus
+ ignoring any mapped value
+ """
+
+ if self._dotted is None:
+ output = []
+
+ part = 0
+ for byte in self.contents:
+ if _PY2:
+ byte = ord(byte)
+ part = part * 128
+ part += byte & 127
+ # Last byte in subidentifier has the eighth bit set to 0
+ if byte & 0x80 == 0:
+ if len(output) == 0:
+ if part >= 80:
+ output.append(str_cls(2))
+ output.append(str_cls(part - 80))
+ elif part >= 40:
+ output.append(str_cls(1))
+ output.append(str_cls(part - 40))
+ else:
+ output.append(str_cls(0))
+ output.append(str_cls(part))
+ else:
+ output.append(str_cls(part))
+ part = 0
+
+ self._dotted = '.'.join(output)
+ return self._dotted
+
+ @property
+ def native(self):
+ """
+ The native Python datatype representation of this value
+
+ :return:
+ A unicode string or None. If _map is not defined, the unicode string
+ is a string of dotted integers. If _map is defined and the dotted
+ string is present in the _map, the mapped value is returned.
+ """
+
+ if self.contents is None:
+ return None
+
+ if self._native is None:
+ self._native = self.dotted
+ if self._map is not None and self._native in self._map:
+ self._native = self._map[self._native]
+ return self._native
+
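+# Editor's note: illustrative usage of the ObjectIdentifier class above; not
+# part of the upstream asn1crypto source. The first two arcs are merged into
+# one byte (1*40 + 2 = 0x2a) and larger arcs use base-128 with the high bit
+# set on every byte except the last:
+#
+#     oid = ObjectIdentifier('1.2.840.113549.1.1.1')
+#     oid.dump()     # b'\x06\x09\x2a\x86\x48\x86\xf7\x0d\x01\x01\x01'
+#     oid.dotted     # '1.2.840.113549.1.1.1'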
+
+class ObjectDescriptor(Primitive):
+ """
+ Represents an object descriptor from ASN.1 - no Python implementation
+ """
+
+ tag = 7
+
+
+class InstanceOf(Primitive):
+ """
+ Represents an instance from ASN.1 - no Python implementation
+ """
+
+ tag = 8
+
+
+class Real(Primitive):
+ """
+ Represents a real number from ASN.1 - no Python implementation
+ """
+
+ tag = 9
+
+
+class Enumerated(Integer):
+ """
+    Represents an enumerated list of integers from ASN.1 as a Python
+ unicode string
+ """
+
+ tag = 10
+
+ def set(self, value):
+ """
+ Sets the value of the object
+
+ :param value:
+ An integer or a unicode string from _map
+
+ :raises:
+ ValueError - when an invalid value is passed
+ """
+
+ if not isinstance(value, int_types) and not isinstance(value, str_cls):
+ raise TypeError(unwrap(
+ '''
+ %s value must be an integer or a unicode string, not %s
+ ''',
+ type_name(self),
+ type_name(value)
+ ))
+
+ if isinstance(value, str_cls):
+ if value not in self._reverse_map:
+ raise ValueError(unwrap(
+ '''
+ %s value "%s" is not a valid value
+ ''',
+ type_name(self),
+ value
+ ))
+
+ value = self._reverse_map[value]
+
+ elif value not in self._map:
+ raise ValueError(unwrap(
+ '''
+ %s value %s is not a valid value
+ ''',
+ type_name(self),
+ value
+ ))
+
+ Integer.set(self, value)
+
+ @property
+ def native(self):
+ """
+ The native Python datatype representation of this value
+
+ :return:
+ A unicode string or None
+ """
+
+ if self.contents is None:
+ return None
+
+ if self._native is None:
+ self._native = self._map[self.__int__()]
+ return self._native
+
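+# Editor's note: illustrative usage of the Enumerated class above; not part
+# of the upstream asn1crypto source. The hypothetical "Status" subclass maps
+# the encoded integers to unicode names:
+#
+#     class Status(Enumerated):
+#         _map = {0: 'ok', 1: 'revoked'}
+#
+#     Status('revoked').dump()    # b'\x0a\x01\x01'
+#     Status(0).native            # 'ok'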
+
+class UTF8String(AbstractString):
+ """
+ Represents a UTF-8 string from ASN.1 as a Python unicode string
+ """
+
+ tag = 12
+ _encoding = 'utf-8'
+
+
+class RelativeOid(ObjectIdentifier):
+ """
+    Represents a relative object identifier in ASN.1 as a Python unicode dotted
+ integer string
+ """
+
+ tag = 13
+
+
+class Sequence(Asn1Value):
+ """
+ Represents a sequence of fields from ASN.1 as a Python object with a
+ dict-like interface
+ """
+
+ tag = 16
+
+ class_ = 0
+ method = 1
+
+ # A list of child objects, in order of _fields
+ children = None
+
+ # Sequence overrides .contents to be a property so that the mutated state
+ # of child objects can be checked to ensure everything is up-to-date
+ _contents = None
+
+ # Variable to track if the object has been mutated
+ _mutated = False
+
+ # A list of tuples in one of the following forms.
+ #
+ # Option 1, a unicode string field name and a value class
+ #
+ # ("name", Asn1ValueClass)
+ #
+ # Option 2, same as Option 1, but with a dict of class params
+ #
+ # ("name", Asn1ValueClass, {'explicit': 5})
+ _fields = []
+
+ # A dict with keys being the name of a field and the value being a unicode
+ # string of the method name on self to call to get the spec for that field
+ _spec_callbacks = None
+
+ # A dict that maps unicode string field names to an index in _fields
+ _field_map = None
+
+ # A list in the same order as _fields that has tuples in the form (class_, tag)
+ _field_ids = None
+
+ # An optional 2-element tuple that defines the field names of an OID field
+ # and the field that the OID should be used to help decode. Works with the
+ # _oid_specs attribute.
+ _oid_pair = None
+
+ # A dict with keys that are unicode string OID values and values that are
+ # Asn1Value classes to use for decoding a variable-type field.
+ _oid_specs = None
+
+ # A 2-element tuple of the indexes in _fields of the OID and value fields
+ _oid_nums = None
+
+ # Predetermined field specs to optimize away calls to _determine_spec()
+ _precomputed_specs = None
+
+ def __init__(self, value=None, default=None, **kwargs):
+ """
+ Allows setting field values before passing everything else along to
+ Asn1Value.__init__()
+
+ :param value:
+ A native Python datatype to initialize the object value with
+
+ :param default:
+ The default value if no value is specified
+ """
+
+ Asn1Value.__init__(self, **kwargs)
+
+ check_existing = False
+ if value is None and default is not None:
+ check_existing = True
+ if self.children is None:
+ if self.contents is None:
+ check_existing = False
+ else:
+ self._parse_children()
+ value = default
+
+ if value is not None:
+ try:
+ # Fields are iterated in definition order to allow things like
+ # OID-based specs. Otherwise sometimes the value would be processed
+ # before the OID field, resulting in invalid value object creation.
+ if self._fields:
+ keys = [info[0] for info in self._fields]
+ unused_keys = set(value.keys())
+ else:
+ keys = value.keys()
+ unused_keys = set(keys)
+
+ for key in keys:
+ # If we are setting defaults, but a real value has already
+ # been set for the field, then skip it
+ if check_existing:
+ index = self._field_map[key]
+ if index < len(self.children) and self.children[index] is not VOID:
+ if key in unused_keys:
+ unused_keys.remove(key)
+ continue
+
+ if key in value:
+ self.__setitem__(key, value[key])
+ unused_keys.remove(key)
+
+ if len(unused_keys):
+ raise ValueError(unwrap(
+ '''
+ One or more unknown fields was passed to the constructor
+ of %s: %s
+ ''',
+ type_name(self),
+ ', '.join(sorted(list(unused_keys)))
+ ))
+
+ except (ValueError, TypeError) as e:
+ args = e.args[1:]
+ e.args = (e.args[0] + '\n while constructing %s' % type_name(self),) + args
+ raise e
+
+ @property
+ def contents(self):
+ """
+ :return:
+ A byte string of the DER-encoded contents of the sequence
+ """
+
+ if self.children is None:
+ return self._contents
+
+ if self._is_mutated():
+ self._set_contents()
+
+ return self._contents
+
+ @contents.setter
+ def contents(self, value):
+ """
+ :param value:
+ A byte string of the DER-encoded contents of the sequence
+ """
+
+ self._contents = value
+
+ def _is_mutated(self):
+ """
+ :return:
+ A boolean - if the sequence or any children (recursively) have been
+ mutated
+ """
+
+ mutated = self._mutated
+ if self.children is not None:
+ for child in self.children:
+ if isinstance(child, Sequence) or isinstance(child, SequenceOf):
+ mutated = mutated or child._is_mutated()
+
+ return mutated
+
+ def _lazy_child(self, index):
+ """
+ Builds a child object if the child has only been parsed into a tuple so far
+ """
+
+ child = self.children[index]
+ if child.__class__ == tuple:
+ child = self.children[index] = _build(*child)
+ return child
+
+ def __len__(self):
+ """
+ :return:
+ Integer
+ """
+ # We inline this check to prevent method invocation each time
+ if self.children is None:
+ self._parse_children()
+
+ return len(self.children)
+
+ def __getitem__(self, key):
+ """
+ Allows accessing fields by name or index
+
+ :param key:
+ A unicode string of the field name, or an integer of the field index
+
+ :raises:
+ KeyError - when a field name or index is invalid
+
+ :return:
+ The Asn1Value object of the field specified
+ """
+
+ # We inline this check to prevent method invocation each time
+ if self.children is None:
+ self._parse_children()
+
+ if not isinstance(key, int_types):
+ if key not in self._field_map:
+ raise KeyError(unwrap(
+ '''
+ No field named "%s" defined for %s
+ ''',
+ key,
+ type_name(self)
+ ))
+ key = self._field_map[key]
+
+ if key >= len(self.children):
+ raise KeyError(unwrap(
+ '''
+ No field numbered %s is present in this %s
+ ''',
+ key,
+ type_name(self)
+ ))
+
+ try:
+ return self._lazy_child(key)
+
+ except (ValueError, TypeError) as e:
+ args = e.args[1:]
+ e.args = (e.args[0] + '\n while parsing %s' % type_name(self),) + args
+ raise e
+
+ def __setitem__(self, key, value):
+ """
+        Allows setting fields by name or index
+
+ :param key:
+ A unicode string of the field name, or an integer of the field index
+
+ :param value:
+ A native Python datatype to set the field value to. This method will
+ construct the appropriate Asn1Value object from _fields.
+
+ :raises:
+ ValueError - when a field name or index is invalid
+ """
+
+ # We inline this check to prevent method invocation each time
+ if self.children is None:
+ self._parse_children()
+
+ if not isinstance(key, int_types):
+ if key not in self._field_map:
+ raise KeyError(unwrap(
+ '''
+ No field named "%s" defined for %s
+ ''',
+ key,
+ type_name(self)
+ ))
+ key = self._field_map[key]
+
+ field_name, field_spec, value_spec, field_params, _ = self._determine_spec(key)
+
+ new_value = self._make_value(field_name, field_spec, value_spec, field_params, value)
+
+ invalid_value = False
+ if isinstance(new_value, Any):
+ invalid_value = new_value.parsed is None
+ else:
+ invalid_value = new_value.contents is None
+
+ if invalid_value:
+ raise ValueError(unwrap(
+ '''
+ Value for field "%s" of %s is not set
+ ''',
+ field_name,
+ type_name(self)
+ ))
+
+ self.children[key] = new_value
+
+ if self._native is not None:
+ self._native[self._fields[key][0]] = self.children[key].native
+ self._mutated = True
+
+ def __delitem__(self, key):
+ """
+ Allows deleting optional or default fields by name or index
+
+ :param key:
+ A unicode string of the field name, or an integer of the field index
+
+ :raises:
+ ValueError - when a field name or index is invalid, or the field is not optional or defaulted
+ """
+
+ # We inline this check to prevent method invocation each time
+ if self.children is None:
+ self._parse_children()
+
+ if not isinstance(key, int_types):
+ if key not in self._field_map:
+ raise KeyError(unwrap(
+ '''
+ No field named "%s" defined for %s
+ ''',
+ key,
+ type_name(self)
+ ))
+ key = self._field_map[key]
+
+ name, _, params = self._fields[key]
+ if not params or ('default' not in params and 'optional' not in params):
+ raise ValueError(unwrap(
+ '''
+ Can not delete the value for the field "%s" of %s since it is
+ not optional or defaulted
+ ''',
+ name,
+ type_name(self)
+ ))
+
+ if 'optional' in params:
+ self.children[key] = VOID
+ if self._native is not None:
+ self._native[name] = None
+ else:
+ self.__setitem__(key, None)
+ self._mutated = True
+
+ def __iter__(self):
+ """
+ :return:
+ An iterator of field key names
+ """
+
+ for info in self._fields:
+ yield info[0]
+
+ def _set_contents(self, force=False):
+ """
+ Updates the .contents attribute of the value with the encoded value of
+ all of the child objects
+
+ :param force:
+ Ensure all contents are in DER format instead of possibly using
+ cached BER-encoded data
+ """
+
+ if self.children is None:
+ self._parse_children()
+
+ contents = BytesIO()
+ for index, info in enumerate(self._fields):
+ child = self.children[index]
+ if child is None:
+ child_dump = b''
+ elif child.__class__ == tuple:
+ if force:
+ child_dump = self._lazy_child(index).dump(force=force)
+ else:
+ child_dump = child[3] + child[4] + child[5]
+ else:
+ child_dump = child.dump(force=force)
+ # Skip values that are the same as the default
+ if info[2] and 'default' in info[2]:
+ default_value = info[1](**info[2])
+ if default_value.dump() == child_dump:
+ continue
+ contents.write(child_dump)
+ self._contents = contents.getvalue()
+
+ self._header = None
+ if self._trailer != b'':
+ self._trailer = b''
+
+ def _setup(self):
+ """
+ Generates _field_map, _field_ids and _oid_nums for use in parsing
+ """
+
+ cls = self.__class__
+ cls._field_map = {}
+ cls._field_ids = []
+ cls._precomputed_specs = []
+ for index, field in enumerate(cls._fields):
+ if len(field) < 3:
+ field = field + ({},)
+ cls._fields[index] = field
+ cls._field_map[field[0]] = index
+ cls._field_ids.append(_build_id_tuple(field[2], field[1]))
+
+ if cls._oid_pair is not None:
+ cls._oid_nums = (cls._field_map[cls._oid_pair[0]], cls._field_map[cls._oid_pair[1]])
+
+ for index, field in enumerate(cls._fields):
+ has_callback = cls._spec_callbacks is not None and field[0] in cls._spec_callbacks
+ is_mapped_oid = cls._oid_nums is not None and cls._oid_nums[1] == index
+ if has_callback or is_mapped_oid:
+ cls._precomputed_specs.append(None)
+ else:
+ cls._precomputed_specs.append((field[0], field[1], field[1], field[2], None))
+
+ def _determine_spec(self, index):
+ """
+ Determine how a value for a field should be constructed
+
+ :param index:
+ The field number
+
+ :return:
+ A tuple containing the following elements:
+ - unicode string of the field name
+ - Asn1Value class of the field spec
+ - Asn1Value class of the value spec
+ - None or dict of params to pass to the field spec
+ - None or Asn1Value class indicating the value spec was derived from an OID or a spec callback
+ """
+
+ name, field_spec, field_params = self._fields[index]
+ value_spec = field_spec
+ spec_override = None
+
+ if self._spec_callbacks is not None and name in self._spec_callbacks:
+ callback = self._spec_callbacks[name]
+ spec_override = callback(self)
+ if spec_override:
+ # Allow a spec callback to specify both the base spec and
+ # the override, for situations such as OctetString and parse_as
+ if spec_override.__class__ == tuple and len(spec_override) == 2:
+ field_spec, value_spec = spec_override
+ if value_spec is None:
+ value_spec = field_spec
+ spec_override = None
+ # When no field spec is specified, use a single return value as that
+ elif field_spec is None:
+ field_spec = spec_override
+ value_spec = field_spec
+ spec_override = None
+ else:
+ value_spec = spec_override
+
+ elif self._oid_nums is not None and self._oid_nums[1] == index:
+ oid = self._lazy_child(self._oid_nums[0]).native
+ if oid in self._oid_specs:
+ spec_override = self._oid_specs[oid]
+ value_spec = spec_override
+
+ return (name, field_spec, value_spec, field_params, spec_override)
+
+ def _make_value(self, field_name, field_spec, value_spec, field_params, value):
+ """
+        Constructs an appropriate Asn1Value object for a field
+
+ :param field_name:
+ A unicode string of the field name
+
+ :param field_spec:
+ An Asn1Value class that is the field spec
+
+ :param value_spec:
+            An Asn1Value class that is the value spec
+
+ :param field_params:
+ None or a dict of params for the field spec
+
+ :param value:
+ The value to construct an Asn1Value object from
+
+ :return:
+ An instance of a child class of Asn1Value
+ """
+
+ if value is None and 'optional' in field_params:
+ return VOID
+
+ specs_different = field_spec != value_spec
+ is_any = issubclass(field_spec, Any)
+
+ if issubclass(value_spec, Choice):
+ is_asn1value = isinstance(value, Asn1Value)
+ is_tuple = isinstance(value, tuple) and len(value) == 2
+ is_dict = isinstance(value, dict) and len(value) == 1
+ if not is_asn1value and not is_tuple and not is_dict:
+ raise ValueError(unwrap(
+ '''
+ Can not set a native python value to %s, which has the
+ choice type of %s - value must be an instance of Asn1Value
+ ''',
+ field_name,
+ type_name(value_spec)
+ ))
+ if is_tuple or is_dict:
+ value = value_spec(value)
+ if not isinstance(value, value_spec):
+ wrapper = value_spec()
+ wrapper.validate(value.class_, value.tag, value.contents)
+ wrapper._parsed = value
+ new_value = wrapper
+ else:
+ new_value = value
+
+ elif isinstance(value, field_spec):
+ new_value = value
+ if specs_different:
+ new_value.parse(value_spec)
+
+ elif (not specs_different or is_any) and not isinstance(value, value_spec):
+ if (not is_any or specs_different) and isinstance(value, Asn1Value):
+ raise TypeError(unwrap(
+ '''
+ %s value must be %s, not %s
+ ''',
+ field_name,
+ type_name(value_spec),
+ type_name(value)
+ ))
+ new_value = value_spec(value, **field_params)
+
+ else:
+ if isinstance(value, value_spec):
+ new_value = value
+ else:
+ if isinstance(value, Asn1Value):
+ raise TypeError(unwrap(
+ '''
+ %s value must be %s, not %s
+ ''',
+ field_name,
+ type_name(value_spec),
+ type_name(value)
+ ))
+ new_value = value_spec(value)
+
+ # For when the field is OctetString or OctetBitString with embedded
+ # values we need to wrap the value in the field spec to get the
+ # appropriate encoded value.
+ if specs_different and not is_any:
+ wrapper = field_spec(value=new_value.dump(), **field_params)
+ wrapper._parsed = (new_value, new_value.__class__, None)
+ new_value = wrapper
+
+ new_value = _fix_tagging(new_value, field_params)
+
+ return new_value
+
+ def _parse_children(self, recurse=False):
+ """
+ Parses the contents and generates Asn1Value objects based on the
+ definitions from _fields.
+
+ :param recurse:
+ If child objects that are Sequence or SequenceOf objects should
+ be recursively parsed
+
+ :raises:
+ ValueError - when an error occurs parsing child objects
+ """
+
+ cls = self.__class__
+ if self._contents is None:
+ if self._fields:
+ self.children = [VOID] * len(self._fields)
+ for index, (_, _, params) in enumerate(self._fields):
+ if 'default' in params:
+ if cls._precomputed_specs[index]:
+ field_name, field_spec, value_spec, field_params, _ = cls._precomputed_specs[index]
+ else:
+ field_name, field_spec, value_spec, field_params, _ = self._determine_spec(index)
+ self.children[index] = self._make_value(field_name, field_spec, value_spec, field_params, None)
+ return
+
+ try:
+ self.children = []
+ contents_length = len(self._contents)
+ child_pointer = 0
+ field = 0
+ field_len = len(self._fields)
+ parts = None
+ again = child_pointer < contents_length
+ while again:
+ if parts is None:
+ parts, child_pointer = _parse(self._contents, contents_length, pointer=child_pointer)
+ again = child_pointer < contents_length
+
+ if field < field_len:
+ _, field_spec, value_spec, field_params, spec_override = (
+ cls._precomputed_specs[field] or self._determine_spec(field))
+
+ # If the next value is optional or default, allow it to be absent
+ if field_params and ('optional' in field_params or 'default' in field_params):
+ if self._field_ids[field] != (parts[0], parts[2]) and field_spec != Any:
+
+ # See if the value is a valid choice before assuming
+ # that we have a missing optional or default value
+ choice_match = False
+ if issubclass(field_spec, Choice):
+ try:
+ tester = field_spec(**field_params)
+ tester.validate(parts[0], parts[2], parts[4])
+ choice_match = True
+ except (ValueError):
+ pass
+
+ if not choice_match:
+ if 'optional' in field_params:
+ self.children.append(VOID)
+ else:
+ self.children.append(field_spec(**field_params))
+ field += 1
+ again = True
+ continue
+
+ if field_spec is None or (spec_override and issubclass(field_spec, Any)):
+ field_spec = value_spec
+ spec_override = None
+
+ if spec_override:
+ child = parts + (field_spec, field_params, value_spec)
+ else:
+ child = parts + (field_spec, field_params)
+
+ # Handle situations where an optional or defaulted field definition is incorrect
+ elif field_len > 0 and field + 1 <= field_len:
+ missed_fields = []
+ prev_field = field - 1
+ while prev_field >= 0:
+ prev_field_info = self._fields[prev_field]
+ if len(prev_field_info) < 3:
+ break
+ if 'optional' in prev_field_info[2] or 'default' in prev_field_info[2]:
+ missed_fields.append(prev_field_info[0])
+ prev_field -= 1
+ plural = 's' if len(missed_fields) > 1 else ''
+ missed_field_names = ', '.join(missed_fields)
+ raise ValueError(unwrap(
+ '''
+ Data for field %s (%s class, %s method, tag %s) does
+ not match the field definition%s of %s
+ ''',
+ field + 1,
+ CLASS_NUM_TO_NAME_MAP.get(parts[0]),
+ METHOD_NUM_TO_NAME_MAP.get(parts[1]),
+ parts[2],
+ plural,
+ missed_field_names
+ ))
+
+ else:
+ child = parts
+
+ if recurse:
+ child = _build(*child)
+ if isinstance(child, (Sequence, SequenceOf)):
+ child._parse_children(recurse=True)
+
+ self.children.append(child)
+ field += 1
+ parts = None
+
+ index = len(self.children)
+ while index < field_len:
+ name, field_spec, field_params = self._fields[index]
+ if 'default' in field_params:
+ self.children.append(field_spec(**field_params))
+ elif 'optional' in field_params:
+ self.children.append(VOID)
+ else:
+ raise ValueError(unwrap(
+ '''
+ Field "%s" is missing from structure
+ ''',
+ name
+ ))
+ index += 1
+
+ except (ValueError, TypeError) as e:
+ self.children = None
+ args = e.args[1:]
+ e.args = (e.args[0] + '\n while parsing %s' % type_name(self),) + args
+ raise e
+
+ def spec(self, field_name):
+ """
+ Determines the spec to use for the field specified. Depending on how
+ the spec is determined (_oid_pair or _spec_callbacks), it may be
+ necessary to set preceding field values before calling this. Usually
+ specs, if dynamic, are controlled by a preceding ObjectIdentifier
+ field.
+
+ :param field_name:
+ A unicode string of the field name to get the spec for
+
+ :return:
+ A child class of asn1crypto.core.Asn1Value that the field must be
+ encoded using
+ """
+
+ if not isinstance(field_name, str_cls):
+ raise TypeError(unwrap(
+ '''
+ field_name must be a unicode string, not %s
+ ''',
+ type_name(field_name)
+ ))
+
+ if self._fields is None:
+ raise ValueError(unwrap(
+ '''
+ Unable to retrieve spec for field %s in the class %s because
+ _fields has not been set
+ ''',
+ repr(field_name),
+ type_name(self)
+ ))
+
+ index = self._field_map[field_name]
+ info = self._determine_spec(index)
+
+ return info[2]
+
+ @property
+ def native(self):
+ """
+ The native Python datatype representation of this value
+
+ :return:
+ An OrderedDict or None. If an OrderedDict, all child values are
+ recursively converted to native representation also.
+ """
+
+ if self.contents is None:
+ return None
+
+ if self._native is None:
+ if self.children is None:
+ self._parse_children(recurse=True)
+ try:
+ self._native = OrderedDict()
+ for index, child in enumerate(self.children):
+ if child.__class__ == tuple:
+ child = _build(*child)
+ self.children[index] = child
+ try:
+ name = self._fields[index][0]
+ except (IndexError):
+ name = str_cls(index)
+ self._native[name] = child.native
+ except (ValueError, TypeError) as e:
+ self._native = None
+ args = e.args[1:]
+ e.args = (e.args[0] + '\n while parsing %s' % type_name(self),) + args
+ raise e
+ return self._native
+
+ def _copy(self, other, copy_func):
+ """
+ Copies the contents of another Sequence object to itself
+
+        :param other:
+ Another instance of the same class
+
+ :param copy_func:
+            A reference to copy.copy() or copy.deepcopy() to use when copying
+ lists, dicts and objects
+ """
+
+ super(Sequence, self)._copy(other, copy_func)
+ if self.children is not None:
+ self.children = []
+ for child in other.children:
+ if child.__class__ == tuple:
+ self.children.append(child)
+ else:
+ self.children.append(child.copy())
+
+ def debug(self, nest_level=1):
+ """
+ Show the binary data and parsed data in a tree structure
+ """
+
+ if self.children is None:
+ self._parse_children()
+
+ prefix = ' ' * nest_level
+ _basic_debug(prefix, self)
+ for field_name in self:
+ child = self._lazy_child(self._field_map[field_name])
+ if child is not VOID:
+ print('%s Field "%s"' % (prefix, field_name))
+ child.debug(nest_level + 3)
+
+ def dump(self, force=False):
+ """
+ Encodes the value using DER
+
+ :param force:
+ If the encoded contents already exist, clear them and regenerate
+ to ensure they are in DER format instead of BER format
+
+ :return:
+ A byte string of the DER-encoded value
+ """
+
+ # If the length is indefinite, force the re-encoding
+ if self._header is not None and self._header[-1:] == b'\x80':
+ force = True
+
+ # We can't force encoding if we don't have a spec
+ if force and self._fields == [] and self.__class__ is Sequence:
+ force = False
+
+ if force:
+ self._set_contents(force=force)
+
+ if self._fields and self.children is not None:
+ for index, (field_name, _, params) in enumerate(self._fields):
+ if self.children[index] is not VOID:
+ continue
+ if 'default' in params or 'optional' in params:
+ continue
+ raise ValueError(unwrap(
+ '''
+ Field "%s" is missing from structure
+ ''',
+ field_name
+ ))
+
+ return Asn1Value.dump(self)
+
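+# Editor's note: illustrative usage of the Sequence class above; not part of
+# the upstream asn1crypto source. The hypothetical "Pair" subclass shows how
+# _fields drives both construction from native values and DER encoding:
+#
+#     class Pair(Sequence):
+#         _fields = [
+#             ('first', Integer),
+#             ('second', OctetString),
+#         ]
+#
+#     pair = Pair({'first': 1, 'second': b'\x00'})
+#     pair['first'].native    # 1
+#     pair.native             # OrderedDict([('first', 1), ('second', b'\x00')])
+#     pair.dump()             # b'\x30\x06\x02\x01\x01\x04\x01\x00'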
+
+class SequenceOf(Asn1Value):
+ """
+ Represents a sequence (ordered) of a single type of values from ASN.1 as a
+ Python object with a list-like interface
+ """
+
+ tag = 16
+
+ class_ = 0
+ method = 1
+
+ # A list of child objects
+ children = None
+
+ # SequenceOf overrides .contents to be a property so that the mutated state
+ # of child objects can be checked to ensure everything is up-to-date
+ _contents = None
+
+ # Variable to track if the object has been mutated
+ _mutated = False
+
+ # An Asn1Value class to use when parsing children
+ _child_spec = None
+
+ def __init__(self, value=None, default=None, contents=None, spec=None, **kwargs):
+ """
+ Allows setting child objects and the _child_spec via the spec parameter
+ before passing everything else along to Asn1Value.__init__()
+
+ :param value:
+ A native Python datatype to initialize the object value with
+
+ :param default:
+ The default value if no value is specified
+
+ :param contents:
+ A byte string of the encoded contents of the value
+
+ :param spec:
+ A class derived from Asn1Value to use to parse children
+ """
+
+ if spec:
+ self._child_spec = spec
+
+ Asn1Value.__init__(self, **kwargs)
+
+ try:
+ if contents is not None:
+ self.contents = contents
+ else:
+ if value is None and default is not None:
+ value = default
+
+ if value is not None:
+ for index, child in enumerate(value):
+ self.__setitem__(index, child)
+
+ # Make sure a blank list is serialized
+ if self.contents is None:
+ self._set_contents()
+
+ except (ValueError, TypeError) as e:
+ args = e.args[1:]
+ e.args = (e.args[0] + '\n while constructing %s' % type_name(self),) + args
+ raise e
+
+ @property
+ def contents(self):
+ """
+ :return:
+ A byte string of the DER-encoded contents of the sequence
+ """
+
+ if self.children is None:
+ return self._contents
+
+ if self._is_mutated():
+ self._set_contents()
+
+ return self._contents
+
+ @contents.setter
+ def contents(self, value):
+ """
+ :param value:
+ A byte string of the DER-encoded contents of the sequence
+ """
+
+ self._contents = value
+
+ def _is_mutated(self):
+ """
+ :return:
+ A boolean - if the sequence or any children (recursively) have been
+ mutated
+ """
+
+ mutated = self._mutated
+ if self.children is not None:
+ for child in self.children:
+ if isinstance(child, Sequence) or isinstance(child, SequenceOf):
+ mutated = mutated or child._is_mutated()
+
+ return mutated
+
+ def _lazy_child(self, index):
+ """
+ Builds a child object if the child has only been parsed into a tuple so far
+ """
+
+ child = self.children[index]
+ if child.__class__ == tuple:
+ child = _build(*child)
+ self.children[index] = child
+ return child
+
+ def _make_value(self, value):
+ """
+ Constructs a _child_spec value from a native Python data type, or
+ an appropriate Asn1Value object
+
+ :param value:
+ A native Python value, or some child of Asn1Value
+
+ :return:
+ An object of type _child_spec
+ """
+
+ if isinstance(value, self._child_spec):
+ new_value = value
+
+ elif issubclass(self._child_spec, Any):
+ if isinstance(value, Asn1Value):
+ new_value = value
+ else:
+ raise ValueError(unwrap(
+ '''
+ Can not set a native python value to %s where the
+ _child_spec is Any - value must be an instance of Asn1Value
+ ''',
+ type_name(self)
+ ))
+
+ elif issubclass(self._child_spec, Choice):
+ if not isinstance(value, Asn1Value):
+ raise ValueError(unwrap(
+ '''
+ Can not set a native python value to %s where the
+ _child_spec is the choice type %s - value must be an
+ instance of Asn1Value
+ ''',
+ type_name(self),
+ self._child_spec.__name__
+ ))
+ if not isinstance(value, self._child_spec):
+ wrapper = self._child_spec()
+ wrapper.validate(value.class_, value.tag, value.contents)
+ wrapper._parsed = value
+ value = wrapper
+ new_value = value
+
+ else:
+ return self._child_spec(value=value)
+
+ params = {}
+ if self._child_spec.explicit:
+ params['explicit'] = self._child_spec.explicit
+ if self._child_spec.implicit:
+ params['implicit'] = (self._child_spec.class_, self._child_spec.tag)
+ return _fix_tagging(new_value, params)
+
+ def __len__(self):
+ """
+ :return:
+ An integer
+ """
+        # We inline this check to prevent method invocation each time
+ if self.children is None:
+ self._parse_children()
+
+ return len(self.children)
+
+ def __getitem__(self, key):
+ """
+ Allows accessing children via index
+
+ :param key:
+ Integer index of child
+ """
+
+        # We inline this check to prevent method invocation each time
+ if self.children is None:
+ self._parse_children()
+
+ return self._lazy_child(key)
+
+ def __setitem__(self, key, value):
+ """
+ Allows overriding a child via index
+
+ :param key:
+ Integer index of child
+
+ :param value:
+ Native python datatype that will be passed to _child_spec to create
+ new child object
+ """
+
+        # We inline this check to prevent method invocation each time
+ if self.children is None:
+ self._parse_children()
+
+ new_value = self._make_value(value)
+
+ # If adding at the end, create a space for the new value
+ if key == len(self.children):
+ self.children.append(None)
+ if self._native is not None:
+ self._native.append(None)
+
+ self.children[key] = new_value
+
+ if self._native is not None:
+ self._native[key] = self.children[key].native
+
+ self._mutated = True
+
+ def __delitem__(self, key):
+ """
+ Allows removing a child via index
+
+ :param key:
+ Integer index of child
+ """
+
+        # We inline this check to prevent method invocation each time
+ if self.children is None:
+ self._parse_children()
+
+ self.children.pop(key)
+ if self._native is not None:
+ self._native.pop(key)
+
+ self._mutated = True
+
+ def __iter__(self):
+ """
+ :return:
+ An iter() of child objects
+ """
+
+        # We inline this check to prevent method invocation each time
+ if self.children is None:
+ self._parse_children()
+
+ for index in range(0, len(self.children)):
+ yield self._lazy_child(index)
+
+ def __contains__(self, item):
+ """
+ :param item:
+ An object of the type cls._child_spec
+
+ :return:
+ A boolean if the item is contained in this SequenceOf
+ """
+
+ if item is None or item is VOID:
+ return False
+
+ if not isinstance(item, self._child_spec):
+ raise TypeError(unwrap(
+ '''
+ Checking membership in %s is only available for instances of
+ %s, not %s
+ ''',
+ type_name(self),
+ type_name(self._child_spec),
+ type_name(item)
+ ))
+
+ for child in self:
+ if child == item:
+ return True
+
+ return False
+
+ def append(self, value):
+ """
+ Allows adding a child to the end of the sequence
+
+ :param value:
+ Native python datatype that will be passed to _child_spec to create
+ new child object
+ """
+
+        # We inline this check to prevent method invocation each time
+ if self.children is None:
+ self._parse_children()
+
+ self.children.append(self._make_value(value))
+
+ if self._native is not None:
+ self._native.append(self.children[-1].native)
+
+ self._mutated = True
+
+ def _set_contents(self, force=False):
+ """
+ Encodes all child objects into the contents for this object
+
+ :param force:
+ Ensure all contents are in DER format instead of possibly using
+ cached BER-encoded data
+ """
+
+ if self.children is None:
+ self._parse_children()
+
+ contents = BytesIO()
+ for child in self:
+ contents.write(child.dump(force=force))
+ self._contents = contents.getvalue()
+ self._header = None
+ if self._trailer != b'':
+ self._trailer = b''
+
+ def _parse_children(self, recurse=False):
+ """
+ Parses the contents and generates Asn1Value objects based on the
+ definitions from _child_spec.
+
+ :param recurse:
+ If child objects that are Sequence or SequenceOf objects should
+ be recursively parsed
+
+ :raises:
+ ValueError - when an error occurs parsing child objects
+ """
+
+ try:
+ self.children = []
+ if self._contents is None:
+ return
+ contents_length = len(self._contents)
+ child_pointer = 0
+ while child_pointer < contents_length:
+ parts, child_pointer = _parse(self._contents, contents_length, pointer=child_pointer)
+ if self._child_spec:
+ child = parts + (self._child_spec,)
+ else:
+ child = parts
+ if recurse:
+ child = _build(*child)
+ if isinstance(child, (Sequence, SequenceOf)):
+ child._parse_children(recurse=True)
+ self.children.append(child)
+ except (ValueError, TypeError) as e:
+ self.children = None
+ args = e.args[1:]
+ e.args = (e.args[0] + '\n while parsing %s' % type_name(self),) + args
+ raise e
+
+ def spec(self):
+ """
+ Determines the spec to use for child values.
+
+ :return:
+ A child class of asn1crypto.core.Asn1Value that child values must be
+ encoded using
+ """
+
+ return self._child_spec
+
+ @property
+ def native(self):
+ """
+ The native Python datatype representation of this value
+
+ :return:
+ A list or None. If a list, all child values are recursively
+ converted to native representation also.
+ """
+
+ if self.contents is None:
+ return None
+
+ if self._native is None:
+ if self.children is None:
+ self._parse_children(recurse=True)
+ try:
+ self._native = [child.native for child in self]
+ except (ValueError, TypeError) as e:
+ args = e.args[1:]
+ e.args = (e.args[0] + '\n while parsing %s' % type_name(self),) + args
+ raise e
+ return self._native
+
+ def _copy(self, other, copy_func):
+ """
+ Copies the contents of another SequenceOf object to itself
+
+        :param other:
+ Another instance of the same class
+
+ :param copy_func:
+            A reference to copy.copy() or copy.deepcopy() to use when copying
+ lists, dicts and objects
+ """
+
+ super(SequenceOf, self)._copy(other, copy_func)
+ if self.children is not None:
+ self.children = []
+ for child in other.children:
+ if child.__class__ == tuple:
+ self.children.append(child)
+ else:
+ self.children.append(child.copy())
+
+ def debug(self, nest_level=1):
+ """
+ Show the binary data and parsed data in a tree structure
+ """
+
+ if self.children is None:
+ self._parse_children()
+
+ prefix = ' ' * nest_level
+ _basic_debug(prefix, self)
+ for child in self:
+ child.debug(nest_level + 1)
+
+ def dump(self, force=False):
+ """
+ Encodes the value using DER
+
+ :param force:
+ If the encoded contents already exist, clear them and regenerate
+ to ensure they are in DER format instead of BER format
+
+ :return:
+ A byte string of the DER-encoded value
+ """
+
+ # If the length is indefinite, force the re-encoding
+ if self._header is not None and self._header[-1:] == b'\x80':
+ force = True
+
+ if force:
+ self._set_contents(force=force)
+
+ return Asn1Value.dump(self)
+
+
+class Set(Sequence):
+ """
+ Represents a set of fields (unordered) from ASN.1 as a Python object with a
+ dict-like interface
+ """
+
+ method = 1
+ class_ = 0
+ tag = 17
+
+ # A dict of 2-element tuples in the form (class_, tag) as keys and integers
+ # as values that are the index of the field in _fields
+ _field_ids = None
+
+ def _setup(self):
+ """
+ Generates _field_map, _field_ids and _oid_nums for use in parsing
+ """
+
+ cls = self.__class__
+ cls._field_map = {}
+ cls._field_ids = {}
+ cls._precomputed_specs = []
+ for index, field in enumerate(cls._fields):
+ if len(field) < 3:
+ field = field + ({},)
+ cls._fields[index] = field
+ cls._field_map[field[0]] = index
+ cls._field_ids[_build_id_tuple(field[2], field[1])] = index
+
+ if cls._oid_pair is not None:
+ cls._oid_nums = (cls._field_map[cls._oid_pair[0]], cls._field_map[cls._oid_pair[1]])
+
+ for index, field in enumerate(cls._fields):
+ has_callback = cls._spec_callbacks is not None and field[0] in cls._spec_callbacks
+ is_mapped_oid = cls._oid_nums is not None and cls._oid_nums[1] == index
+ if has_callback or is_mapped_oid:
+ cls._precomputed_specs.append(None)
+ else:
+ cls._precomputed_specs.append((field[0], field[1], field[1], field[2], None))
+
+ def _parse_children(self, recurse=False):
+ """
+ Parses the contents and generates Asn1Value objects based on the
+ definitions from _fields.
+
+ :param recurse:
+ If child objects that are Sequence or SequenceOf objects should
+ be recursively parsed
+
+ :raises:
+ ValueError - when an error occurs parsing child objects
+ """
+
+ cls = self.__class__
+ if self._contents is None:
+ if self._fields:
+ self.children = [VOID] * len(self._fields)
+ for index, (_, _, params) in enumerate(self._fields):
+ if 'default' in params:
+ if cls._precomputed_specs[index]:
+ field_name, field_spec, value_spec, field_params, _ = cls._precomputed_specs[index]
+ else:
+ field_name, field_spec, value_spec, field_params, _ = self._determine_spec(index)
+ self.children[index] = self._make_value(field_name, field_spec, value_spec, field_params, None)
+ return
+
+ try:
+ child_map = {}
+ contents_length = len(self.contents)
+ child_pointer = 0
+ seen_field = 0
+ while child_pointer < contents_length:
+ parts, child_pointer = _parse(self.contents, contents_length, pointer=child_pointer)
+
+ id_ = (parts[0], parts[2])
+
+ field = self._field_ids.get(id_)
+ if field is None:
+ raise ValueError(unwrap(
+ '''
+ Data for field %s (%s class, %s method, tag %s) does
+ not match any of the field definitions
+ ''',
+ seen_field,
+ CLASS_NUM_TO_NAME_MAP.get(parts[0]),
+ METHOD_NUM_TO_NAME_MAP.get(parts[1]),
+ parts[2],
+ ))
+
+ _, field_spec, value_spec, field_params, spec_override = (
+ cls._precomputed_specs[field] or self._determine_spec(field))
+
+ if field_spec is None or (spec_override and issubclass(field_spec, Any)):
+ field_spec = value_spec
+ spec_override = None
+
+ if spec_override:
+ child = parts + (field_spec, field_params, value_spec)
+ else:
+ child = parts + (field_spec, field_params)
+
+ if recurse:
+ child = _build(*child)
+ if isinstance(child, (Sequence, SequenceOf)):
+ child._parse_children(recurse=True)
+
+ child_map[field] = child
+ seen_field += 1
+
+ total_fields = len(self._fields)
+
+ for index in range(0, total_fields):
+ if index in child_map:
+ continue
+
+ name, field_spec, value_spec, field_params, spec_override = (
+ cls._precomputed_specs[index] or self._determine_spec(index))
+
+ if field_spec is None or (spec_override and issubclass(field_spec, Any)):
+ field_spec = value_spec
+ spec_override = None
+
+ missing = False
+
+ if not field_params:
+ missing = True
+ elif 'optional' not in field_params and 'default' not in field_params:
+ missing = True
+ elif 'optional' in field_params:
+ child_map[index] = VOID
+ elif 'default' in field_params:
+ child_map[index] = field_spec(**field_params)
+
+ if missing:
+ raise ValueError(unwrap(
+ '''
+ Missing required field "%s" from %s
+ ''',
+ name,
+ type_name(self)
+ ))
+
+ self.children = []
+ for index in range(0, total_fields):
+ self.children.append(child_map[index])
+
+ except (ValueError, TypeError) as e:
+ args = e.args[1:]
+ e.args = (e.args[0] + '\n while parsing %s' % type_name(self),) + args
+ raise e
+
+ def _set_contents(self, force=False):
+ """
+ Encodes all child objects into the contents for this object.
+
+ This method is overridden because a Set needs to be encoded by
+ removing defaulted fields and then sorting the fields by tag.
+
+ :param force:
+ Ensure all contents are in DER format instead of possibly using
+ cached BER-encoded data
+ """
+
+ if self.children is None:
+ self._parse_children()
+
+ child_tag_encodings = []
+ for index, child in enumerate(self.children):
+ child_encoding = child.dump(force=force)
+
+ # Skip encoding defaulted children
+ name, spec, field_params = self._fields[index]
+ if 'default' in field_params:
+ if spec(**field_params).dump() == child_encoding:
+ continue
+
+ child_tag_encodings.append((child.tag, child_encoding))
+ child_tag_encodings.sort(key=lambda ct: ct[0])
+
+ self._contents = b''.join([ct[1] for ct in child_tag_encodings])
+ self._header = None
+ if self._trailer != b'':
+ self._trailer = b''
+
+
+class SetOf(SequenceOf):
+ """
+ Represents a set (unordered) of a single type of values from ASN.1 as a
+ Python object with a list-like interface
+ """
+
+ tag = 17
+
+ def _set_contents(self, force=False):
+ """
+ Encodes all child objects into the contents for this object.
+
+ This method is overridden because a SetOf needs to be encoded by
+ sorting the child encodings.
+
+ :param force:
+ Ensure all contents are in DER format instead of possibly using
+ cached BER-encoded data
+ """
+
+ if self.children is None:
+ self._parse_children()
+
+ child_encodings = []
+ for child in self:
+ child_encodings.append(child.dump(force=force))
+
+ self._contents = b''.join(sorted(child_encodings))
+ self._header = None
+ if self._trailer != b'':
+ self._trailer = b''
+
+
+class EmbeddedPdv(Sequence):
+ """
+ A sequence structure
+ """
+
+ tag = 11
+
+
+class NumericString(AbstractString):
+ """
+ Represents a numeric string from ASN.1 as a Python unicode string
+ """
+
+ tag = 18
+ _encoding = 'latin1'
+
+
+class PrintableString(AbstractString):
+ """
+ Represents a printable string from ASN.1 as a Python unicode string
+ """
+
+ tag = 19
+ _encoding = 'latin1'
+
+
+class TeletexString(AbstractString):
+ """
+ Represents a teletex string from ASN.1 as a Python unicode string
+ """
+
+ tag = 20
+ _encoding = 'teletex'
+
+
+class VideotexString(OctetString):
+ """
+ Represents a videotex string from ASN.1 as a Python byte string
+ """
+
+ tag = 21
+
+
+class IA5String(AbstractString):
+ """
+ Represents an IA5 string from ASN.1 as a Python unicode string
+ """
+
+ tag = 22
+ _encoding = 'ascii'
+
+
+class AbstractTime(AbstractString):
+ """
+ Represents a time from ASN.1 as a Python datetime.datetime object
+ """
+
+ @property
+ def _parsed_time(self):
+ """
+ The parsed datetime string.
+
+ :raises:
+ ValueError - when an invalid value is passed
+
+ :return:
+ A dict with the parsed values
+ """
+
+ string = str_cls(self)
+
+ m = self._TIMESTRING_RE.match(string)
+ if not m:
+ raise ValueError(unwrap(
+ '''
+ Error parsing %s to a %s
+ ''',
+ string,
+ type_name(self),
+ ))
+
+ groups = m.groupdict()
+
+ tz = None
+ if groups['zulu']:
+ tz = timezone.utc
+ elif groups['dsign']:
+ sign = 1 if groups['dsign'] == '+' else -1
+ tz = create_timezone(sign * timedelta(
+ hours=int(groups['dhour']),
+ minutes=int(groups['dminute'] or 0)
+ ))
+
+ if groups['fraction']:
+ # Compute fraction in microseconds
+ fract = Fraction(
+ int(groups['fraction']),
+ 10 ** len(groups['fraction'])
+ ) * 1000000
+
+ if groups['minute'] is None:
+ fract *= 3600
+ elif groups['second'] is None:
+ fract *= 60
+
+ fract_usec = int(fract.limit_denominator(1))
+
+ else:
+ fract_usec = 0
+
+ return {
+ 'year': int(groups['year']),
+ 'month': int(groups['month']),
+ 'day': int(groups['day']),
+ 'hour': int(groups['hour']),
+ 'minute': int(groups['minute'] or 0),
+ 'second': int(groups['second'] or 0),
+ 'tzinfo': tz,
+ 'fraction': fract_usec,
+ }
+
+ @property
+ def native(self):
+ """
+ The native Python datatype representation of this value
+
+ :return:
+ A datetime.datetime object, asn1crypto.util.extended_datetime object or
+ None. The datetime object is usually timezone aware. If it's naive, then
+ it's in the sender's local time; see X.680 sect. 42.3
+ """
+
+ if self.contents is None:
+ return None
+
+ if self._native is None:
+ parsed = self._parsed_time
+
+ fraction = parsed.pop('fraction', 0)
+
+ value = self._get_datetime(parsed)
+
+ if fraction:
+ value += timedelta(microseconds=fraction)
+
+ self._native = value
+
+ return self._native
+
+
+class UTCTime(AbstractTime):
+ """
+ Represents a UTC time from ASN.1 as a timezone aware Python datetime.datetime object
+ """
+
+ tag = 23
+
+ # Regular expression for UTCTime as described in X.680 sect. 43 and ISO 8601
+ _TIMESTRING_RE = re.compile(r'''
+ ^
+ # YYMMDD
+ (?P<year>\d{2})
+ (?P<month>\d{2})
+ (?P<day>\d{2})
+
+ # hhmm or hhmmss
+ (?P<hour>\d{2})
+ (?P<minute>\d{2})
+ (?P<second>\d{2})?
+
+ # Matches nothing, needed because GeneralizedTime uses this.
+ (?P<fraction>)
+
+ # Z or [-+]hhmm
+ (?:
+ (?P<zulu>Z)
+ |
+ (?:
+ (?P<dsign>[-+])
+ (?P<dhour>\d{2})
+ (?P<dminute>\d{2})
+ )
+ )
+ $
+ ''', re.X)
+
+ def set(self, value):
+ """
+ Sets the value of the object
+
+ :param value:
+ A unicode string or a datetime.datetime object
+
+ :raises:
+ ValueError - when an invalid value is passed
+ """
+
+ if isinstance(value, datetime):
+ if not value.tzinfo:
+ raise ValueError('Must be timezone aware')
+
+ # Convert value to UTC.
+ value = value.astimezone(utc_with_dst)
+
+ if not 1950 <= value.year <= 2049:
+ raise ValueError('Year of the UTCTime is not in range [1950, 2049], use GeneralizedTime instead')
+
+ value = value.strftime('%y%m%d%H%M%SZ')
+ if _PY2:
+ value = value.decode('ascii')
+
+ AbstractString.set(self, value)
+ # Set it to None and let the class take care of converting the next
+ # time that .native is called
+ self._native = None
+
+ def _get_datetime(self, parsed):
+ """
+ Create a datetime object from the parsed time.
+
+ :return:
+ An aware datetime.datetime object
+ """
+
+ # X.680 only specifies that UTCTime is not using a century.
+ # So "18" could as well mean 2118 or 1318.
+ # X.509 and CMS specify to use UTCTime for years earlier than 2050.
+ # Assume that UTCTime is only used for years [1950, 2049].
+ if parsed['year'] < 50:
+ parsed['year'] += 2000
+ else:
+ parsed['year'] += 1900
+
+ return datetime(**parsed)
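+
+ # For example, under the sliding-window rule above the encoded value
+ # "180102030405Z" parses to
+ # datetime.datetime(2018, 1, 2, 3, 4, 5, tzinfo=timezone.utc).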
+
+
+class GeneralizedTime(AbstractTime):
+ """
+ Represents a generalized time from ASN.1 as a Python datetime.datetime
+ object or asn1crypto.util.extended_datetime object in UTC
+ """
+
+ tag = 24
+
+ # Regular expression for GeneralizedTime as described in X.680 sect. 42 and ISO 8601
+ _TIMESTRING_RE = re.compile(r'''
+ ^
+ # YYYYMMDD
+ (?P<year>\d{4})
+ (?P<month>\d{2})
+ (?P<day>\d{2})
+
+ # hh or hhmm or hhmmss
+ (?P<hour>\d{2})
+ (?:
+ (?P<minute>\d{2})
+ (?P<second>\d{2})?
+ )?
+
+ # Optional fraction; [.,]dddd (one or more decimals)
+ # If Seconds are given, it's fractions of Seconds.
+ # Else if Minutes are given, it's fractions of Minutes.
+ # Else it's fractions of Hours.
+ (?:
+ [,.]
+ (?P<fraction>\d+)
+ )?
+
+ # Optional timezone. If left out, the time is in local time.
+ # Z or [-+]hh or [-+]hhmm
+ (?:
+ (?P<zulu>Z)
+ |
+ (?:
+ (?P<dsign>[-+])
+ (?P<dhour>\d{2})
+ (?P<dminute>\d{2})?
+ )
+ )?
+ $
+ ''', re.X)
+
+ def set(self, value):
+ """
+ Sets the value of the object
+
+ :param value:
+ A unicode string, a datetime.datetime object or an
+ asn1crypto.util.extended_datetime object
+
+ :raises:
+ ValueError - when an invalid value is passed
+ """
+
+ if isinstance(value, (datetime, extended_datetime)):
+ if not value.tzinfo:
+ raise ValueError('Must be timezone aware')
+
+ # Convert value to UTC.
+ value = value.astimezone(utc_with_dst)
+
+ if value.microsecond:
+ fraction = '.' + str(value.microsecond).zfill(6).rstrip('0')
+ else:
+ fraction = ''
+
+ value = value.strftime('%Y%m%d%H%M%S') + fraction + 'Z'
+ if _PY2:
+ value = value.decode('ascii')
+
+ AbstractString.set(self, value)
+ # Set it to None and let the class take care of converting the next
+ # time that .native is called
+ self._native = None
+
+ def _get_datetime(self, parsed):
+ """
+ Create a datetime object from the parsed time.
+
+ :return:
+ A datetime.datetime object or asn1crypto.util.extended_datetime object.
+ It may or may not be aware.
+ """
+
+ if parsed['year'] == 0:
+ # datetime does not support year 0. Use extended_datetime instead.
+ return extended_datetime(**parsed)
+ else:
+ return datetime(**parsed)
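+
+ # For example, "20180102030405.123456Z" parses to
+ # datetime.datetime(2018, 1, 2, 3, 4, 5, 123456, tzinfo=timezone.utc),
+ # while a value without a "Z" or offset suffix yields a naive datetime
+ # in the sender's local time.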
+
+
+class GraphicString(AbstractString):
+ """
+ Represents a graphic string from ASN.1 as a Python unicode string
+ """
+
+ tag = 25
+ # This is technically not correct since this type can contain any charset
+ _encoding = 'latin1'
+
+
+class VisibleString(AbstractString):
+ """
+ Represents a visible string from ASN.1 as a Python unicode string
+ """
+
+ tag = 26
+ _encoding = 'latin1'
+
+
+class GeneralString(AbstractString):
+ """
+ Represents a general string from ASN.1 as a Python unicode string
+ """
+
+ tag = 27
+ # This is technically not correct since this type can contain any charset
+ _encoding = 'latin1'
+
+
+class UniversalString(AbstractString):
+ """
+ Represents a universal string from ASN.1 as a Python unicode string
+ """
+
+ tag = 28
+ _encoding = 'utf-32-be'
+
+
+class CharacterString(AbstractString):
+ """
+ Represents a character string from ASN.1 as a Python unicode string
+ """
+
+ tag = 29
+ # This is technically not correct since this type can contain any charset
+ _encoding = 'latin1'
+
+
+class BMPString(AbstractString):
+ """
+ Represents a BMP string from ASN.1 as a Python unicode string
+ """
+
+ tag = 30
+ _encoding = 'utf-16-be'
+
+
+def _basic_debug(prefix, self):
+ """
+ Prints out basic information about an Asn1Value object. Extracted for reuse
+ among different classes that customize the debug information.
+
+ :param prefix:
+ A unicode string of spaces to prefix output line with
+
+ :param self:
+ The object to print the debugging information about
+ """
+
+ print('%s%s Object #%s' % (prefix, type_name(self), id(self)))
+ if self._header:
+ print('%s Header: 0x%s' % (prefix, binascii.hexlify(self._header or b'').decode('utf-8')))
+
+ has_header = self.method is not None and self.class_ is not None and self.tag is not None
+ if has_header:
+ method_name = METHOD_NUM_TO_NAME_MAP.get(self.method)
+ class_name = CLASS_NUM_TO_NAME_MAP.get(self.class_)
+
+ if self.explicit is not None:
+ for class_, tag in self.explicit:
+ print(
+ '%s %s tag %s (explicitly tagged)' %
+ (
+ prefix,
+ CLASS_NUM_TO_NAME_MAP.get(class_),
+ tag
+ )
+ )
+ if has_header:
+ print('%s %s %s %s' % (prefix, method_name, class_name, self.tag))
+
+ elif self.implicit:
+ if has_header:
+ print('%s %s %s tag %s (implicitly tagged)' % (prefix, method_name, class_name, self.tag))
+
+ elif has_header:
+ print('%s %s %s tag %s' % (prefix, method_name, class_name, self.tag))
+
+ if self._trailer:
+ print('%s Trailer: 0x%s' % (prefix, binascii.hexlify(self._trailer or b'').decode('utf-8')))
+
+ print('%s Data: 0x%s' % (prefix, binascii.hexlify(self.contents or b'').decode('utf-8')))
+
+
+def _tag_type_to_explicit_implicit(params):
+ """
+ Converts old-style "tag_type" and "tag" params to "explicit" and "implicit"
+
+ :param params:
+ A dict of parameters to convert from tag_type/tag to explicit/implicit
+ """
+
+ if 'tag_type' in params:
+ if params['tag_type'] == 'explicit':
+ params['explicit'] = (params.get('class', 2), params['tag'])
+ elif params['tag_type'] == 'implicit':
+ params['implicit'] = (params.get('class', 2), params['tag'])
+ del params['tag_type']
+ del params['tag']
+ if 'class' in params:
+ del params['class']
+
+
+def _fix_tagging(value, params):
+ """
+ Checks if a value is properly tagged based on the spec, and re/untags as
+ necessary
+
+ :param value:
+ An Asn1Value object
+
+ :param params:
+ A dict of spec params
+
+ :return:
+ An Asn1Value that is properly tagged
+ """
+
+ _tag_type_to_explicit_implicit(params)
+
+ retag = False
+ if 'implicit' not in params:
+ if value.implicit is not False:
+ retag = True
+ else:
+ if isinstance(params['implicit'], tuple):
+ class_, tag = params['implicit']
+ else:
+ tag = params['implicit']
+ class_ = 'context'
+ if value.implicit is False:
+ retag = True
+ elif value.class_ != CLASS_NAME_TO_NUM_MAP[class_] or value.tag != tag:
+ retag = True
+
+ if params.get('explicit') != value.explicit:
+ retag = True
+
+ if retag:
+ return value.retag(params)
+ return value
+
+
+def _build_id_tuple(params, spec):
+ """
+ Builds a 2-element tuple used to identify fields by grabbing the class_
+ and tag from an Asn1Value class and the params dict being passed to it
+
+ :param params:
+ A dict of params to pass to spec
+
+ :param spec:
+ An Asn1Value class
+
+ :return:
+ A 2-element integer tuple in the form (class_, tag)
+ """
+
+ # Handle situations where the spec is not known at setup time
+ if spec is None:
+ return (None, None)
+
+ required_class = spec.class_
+ required_tag = spec.tag
+
+ _tag_type_to_explicit_implicit(params)
+
+ if 'explicit' in params:
+ if isinstance(params['explicit'], tuple):
+ required_class, required_tag = params['explicit']
+ else:
+ required_class = 2
+ required_tag = params['explicit']
+ elif 'implicit' in params:
+ if isinstance(params['implicit'], tuple):
+ required_class, required_tag = params['implicit']
+ else:
+ required_class = 2
+ required_tag = params['implicit']
+ if required_class is not None and not isinstance(required_class, int_types):
+ required_class = CLASS_NAME_TO_NUM_MAP[required_class]
+
+ required_class = params.get('class_', required_class)
+ required_tag = params.get('tag', required_tag)
+
+ return (required_class, required_tag)
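+
+# For example, _build_id_tuple({'implicit': 3}, Integer) returns (2, 3),
+# the context class with tag 3, since an implicit tag replaces the universal
+# class and tag of the Integer spec.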
+
+
+def _int_to_bit_tuple(value, bits):
+ """
+ Format value as a tuple of 1s and 0s.
+
+ :param value:
+ A non-negative integer to format
+
+ :param bits:
+ Number of bits in the output
+
+ :return:
+ A tuple of 1s and 0s with bits members.
+ """
+
+ if not value and not bits:
+ return ()
+
+ result = tuple(map(int, format(value, '0{0}b'.format(bits))))
+ if len(result) != bits:
+ raise ValueError('Result too large: {0} > {1}'.format(len(result), bits))
+
+ return result
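+
+# For example, _int_to_bit_tuple(5, 8) returns (0, 0, 0, 0, 0, 1, 0, 1).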
+
+
+_UNIVERSAL_SPECS = {
+ 1: Boolean,
+ 2: Integer,
+ 3: BitString,
+ 4: OctetString,
+ 5: Null,
+ 6: ObjectIdentifier,
+ 7: ObjectDescriptor,
+ 8: InstanceOf,
+ 9: Real,
+ 10: Enumerated,
+ 11: EmbeddedPdv,
+ 12: UTF8String,
+ 13: RelativeOid,
+ 16: Sequence,
+ 17: Set,
+ 18: NumericString,
+ 19: PrintableString,
+ 20: TeletexString,
+ 21: VideotexString,
+ 22: IA5String,
+ 23: UTCTime,
+ 24: GeneralizedTime,
+ 25: GraphicString,
+ 26: VisibleString,
+ 27: GeneralString,
+ 28: UniversalString,
+ 29: CharacterString,
+ 30: BMPString
+}
+
+
+def _build(class_, method, tag, header, contents, trailer, spec=None, spec_params=None, nested_spec=None):
+ """
+ Builds an Asn1Value object generically, or using a spec with optional params
+
+ :param class_:
+ An integer representing the ASN.1 class
+
+ :param method:
+ An integer representing the ASN.1 method
+
+ :param tag:
+ An integer representing the ASN.1 tag
+
+ :param header:
+ A byte string of the ASN.1 header (class, method, tag, length)
+
+ :param contents:
+ A byte string of the ASN.1 value
+
+ :param trailer:
+ A byte string of any ASN.1 trailer (only used by indefinite length encodings)
+
+ :param spec:
+ A class derived from Asn1Value that defines what class_ and tag the
+ value should have, and the semantics of the encoded value. The
+ return value will be of this type. If omitted, the encoded value
+ will be decoded using the standard universal tag based on the
+ encoded tag number.
+
+ :param spec_params:
+ A dict of params to pass to the spec object
+
+ :param nested_spec:
+ For certain Asn1Value classes (such as OctetString and BitString), the
+ contents can be further parsed and interpreted as another Asn1Value.
+ This parameter controls the spec for that sub-parsing.
+
+ :return:
+ An object of the type spec, or if not specified, a child of Asn1Value
+ """
+
+ if spec_params is not None:
+ _tag_type_to_explicit_implicit(spec_params)
+
+ if header is None:
+ return VOID
+
+ header_set = False
+
+ # If an explicit specification was passed in, make sure it matches
+ if spec is not None:
+ # If there is explicit tagging and contents, we have to split
+ # the header and trailer off before we do the parsing
+ no_explicit = spec_params and 'no_explicit' in spec_params
+ if not no_explicit and (spec.explicit or (spec_params and 'explicit' in spec_params)):
+ if spec_params:
+ value = spec(**spec_params)
+ else:
+ value = spec()
+ original_explicit = value.explicit
+ explicit_info = reversed(original_explicit)
+ parsed_class = class_
+ parsed_method = method
+ parsed_tag = tag
+ to_parse = contents
+ explicit_header = header
+ explicit_trailer = trailer or b''
+ for expected_class, expected_tag in explicit_info:
+ if parsed_class != expected_class:
+ raise ValueError(unwrap(
+ '''
+ Error parsing %s - explicitly-tagged class should have been
+ %s, but %s was found
+ ''',
+ type_name(value),
+ CLASS_NUM_TO_NAME_MAP.get(expected_class),
+ CLASS_NUM_TO_NAME_MAP.get(parsed_class, parsed_class)
+ ))
+ if parsed_method != 1:
+ raise ValueError(unwrap(
+ '''
+ Error parsing %s - explicitly-tagged method should have
+ been %s, but %s was found
+ ''',
+ type_name(value),
+ METHOD_NUM_TO_NAME_MAP.get(1),
+ METHOD_NUM_TO_NAME_MAP.get(parsed_method, parsed_method)
+ ))
+ if parsed_tag != expected_tag:
+ raise ValueError(unwrap(
+ '''
+ Error parsing %s - explicitly-tagged tag should have been
+ %s, but %s was found
+ ''',
+ type_name(value),
+ expected_tag,
+ parsed_tag
+ ))
+ info, _ = _parse(to_parse, len(to_parse))
+ parsed_class, parsed_method, parsed_tag, parsed_header, to_parse, parsed_trailer = info
+
+ if not isinstance(value, Choice):
+ explicit_header += parsed_header
+ explicit_trailer = parsed_trailer + explicit_trailer
+
+ value = _build(*info, spec=spec, spec_params={'no_explicit': True})
+ value._header = explicit_header
+ value._trailer = explicit_trailer
+ value.explicit = original_explicit
+ header_set = True
+ else:
+ if spec_params:
+ value = spec(contents=contents, **spec_params)
+ else:
+ value = spec(contents=contents)
+
+ if spec is Any:
+ pass
+
+ elif isinstance(value, Choice):
+ value.validate(class_, tag, contents)
+ try:
+ # Force parsing the Choice now
+ value.contents = header + value.contents
+ header = b''
+ value.parse()
+ except (ValueError, TypeError) as e:
+ args = e.args[1:]
+ e.args = (e.args[0] + '\n while parsing %s' % type_name(value),) + args
+ raise e
+
+ else:
+ if class_ != value.class_:
+ raise ValueError(unwrap(
+ '''
+ Error parsing %s - class should have been %s, but %s was
+ found
+ ''',
+ type_name(value),
+ CLASS_NUM_TO_NAME_MAP.get(value.class_),
+ CLASS_NUM_TO_NAME_MAP.get(class_, class_)
+ ))
+ if method != value.method:
+ # Allow parsing a primitive method as constructed if the value
+ # is indefinite length. This is to allow parsing BER.
+ ber_indef = method == 1 and value.method == 0 and trailer == b'\x00\x00'
+ if not ber_indef or not isinstance(value, Constructable):
+ raise ValueError(unwrap(
+ '''
+ Error parsing %s - method should have been %s, but %s was found
+ ''',
+ type_name(value),
+ METHOD_NUM_TO_NAME_MAP.get(value.method),
+ METHOD_NUM_TO_NAME_MAP.get(method, method)
+ ))
+ else:
+ value.method = method
+ value._indefinite = True
+ if tag != value.tag:
+ if isinstance(value._bad_tag, tuple):
+ is_bad_tag = tag in value._bad_tag
+ else:
+ is_bad_tag = tag == value._bad_tag
+ if not is_bad_tag:
+ raise ValueError(unwrap(
+ '''
+ Error parsing %s - tag should have been %s, but %s was found
+ ''',
+ type_name(value),
+ value.tag,
+ tag
+ ))
+
+ # For explicitly tagged, un-speced parsings, we use a generic container
+ # since we will be parsing the contents and discarding the outer object
+ # anyway a little further on
+ elif spec_params and 'explicit' in spec_params:
+ original_value = Asn1Value(contents=contents, **spec_params)
+ original_explicit = original_value.explicit
+
+ to_parse = contents
+ explicit_header = header
+ explicit_trailer = trailer or b''
+ for expected_class, expected_tag in reversed(original_explicit):
+ info, _ = _parse(to_parse, len(to_parse))
+ _, _, _, parsed_header, to_parse, parsed_trailer = info
+ explicit_header += parsed_header
+ explicit_trailer = parsed_trailer + explicit_trailer
+ value = _build(*info, spec=spec, spec_params={'no_explicit': True})
+ value._header = header + value._header
+ value._trailer += trailer or b''
+ value.explicit = original_explicit
+ header_set = True
+
+ # If no spec was specified, allow anything and just process what
+ # is in the input data
+ else:
+ if tag not in _UNIVERSAL_SPECS:
+ raise ValueError(unwrap(
+ '''
+ Unknown element - %s class, %s method, tag %s
+ ''',
+ CLASS_NUM_TO_NAME_MAP.get(class_),
+ METHOD_NUM_TO_NAME_MAP.get(method),
+ tag
+ ))
+
+ spec = _UNIVERSAL_SPECS[tag]
+
+ value = spec(contents=contents, class_=class_)
+ ber_indef = method == 1 and value.method == 0 and trailer == b'\x00\x00'
+ if ber_indef and isinstance(value, Constructable):
+ value._indefinite = True
+ value.method = method
+
+ if not header_set:
+ value._header = header
+ value._trailer = trailer or b''
+
+ # Destroy any default value that our contents have overwritten
+ value._native = None
+
+ if nested_spec:
+ try:
+ value.parse(nested_spec)
+ except (ValueError, TypeError) as e:
+ args = e.args[1:]
+ e.args = (e.args[0] + '\n while parsing %s' % type_name(value),) + args
+ raise e
+
+ return value
+
+
+def _parse_build(encoded_data, pointer=0, spec=None, spec_params=None, strict=False):
+ """
+ Parses a byte string generically, or using a spec with optional params
+
+ :param encoded_data:
+ A byte string that contains BER-encoded data
+
+ :param pointer:
+ The index in the byte string to parse from
+
+ :param spec:
+ A class derived from Asn1Value that defines what class_ and tag the
+ value should have, and the semantics of the encoded value. The
+ return value will be of this type. If omitted, the encoded value
+ will be decoded using the standard universal tag based on the
+ encoded tag number.
+
+ :param spec_params:
+ A dict of params to pass to the spec object
+
+ :param strict:
+ A boolean indicating if trailing data should be forbidden - if so, a
+ ValueError will be raised when trailing data exists
+
+ :return:
+ A 2-element tuple:
+ - 0: An object of the type spec, or if not specified, a child of Asn1Value
+ - 1: An integer indicating how many bytes were consumed
+ """
+
+ encoded_len = len(encoded_data)
+ info, new_pointer = _parse(encoded_data, encoded_len, pointer)
+ if strict and new_pointer != pointer + encoded_len:
+ extra_bytes = pointer + encoded_len - new_pointer
+ raise ValueError('Extra data - %d bytes of trailing data were provided' % extra_bytes)
+ return (_build(*info, spec=spec, spec_params=spec_params), new_pointer)
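+
+
+# A minimal usage sketch, assuming der_bytes holds the DER encoding of a
+# SEQUENCE value:
+#
+#     value, consumed = _parse_build(der_bytes, spec=Sequence)
+#
+# value is the parsed Sequence object and consumed is the number of bytes read.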
diff --git a/jc/parsers/asn1crypto/crl.py b/jc/parsers/asn1crypto/crl.py
new file mode 100644
index 00000000..84cb1683
--- /dev/null
+++ b/jc/parsers/asn1crypto/crl.py
@@ -0,0 +1,536 @@
+# coding: utf-8
+
+"""
+ASN.1 type classes for certificate revocation lists (CRL). Exports the
+following items:
+
+ - CertificateList()
+
+Other type classes are defined that help compose the types listed above.
+"""
+
+from __future__ import unicode_literals, division, absolute_import, print_function
+
+import hashlib
+
+from .algos import SignedDigestAlgorithm
+from .core import (
+ Boolean,
+ Enumerated,
+ GeneralizedTime,
+ Integer,
+ ObjectIdentifier,
+ OctetBitString,
+ ParsableOctetString,
+ Sequence,
+ SequenceOf,
+)
+from .x509 import (
+ AuthorityInfoAccessSyntax,
+ AuthorityKeyIdentifier,
+ CRLDistributionPoints,
+ DistributionPointName,
+ GeneralNames,
+ Name,
+ ReasonFlags,
+ Time,
+)
+
+
+# The structures in this file are taken from https://tools.ietf.org/html/rfc5280
+
+
+class Version(Integer):
+ _map = {
+ 0: 'v1',
+ 1: 'v2',
+ 2: 'v3',
+ }
+
+
+class IssuingDistributionPoint(Sequence):
+ _fields = [
+ ('distribution_point', DistributionPointName, {'explicit': 0, 'optional': True}),
+ ('only_contains_user_certs', Boolean, {'implicit': 1, 'default': False}),
+ ('only_contains_ca_certs', Boolean, {'implicit': 2, 'default': False}),
+ ('only_some_reasons', ReasonFlags, {'implicit': 3, 'optional': True}),
+ ('indirect_crl', Boolean, {'implicit': 4, 'default': False}),
+ ('only_contains_attribute_certs', Boolean, {'implicit': 5, 'default': False}),
+ ]
+
+
+class TBSCertListExtensionId(ObjectIdentifier):
+ _map = {
+ '2.5.29.18': 'issuer_alt_name',
+ '2.5.29.20': 'crl_number',
+ '2.5.29.27': 'delta_crl_indicator',
+ '2.5.29.28': 'issuing_distribution_point',
+ '2.5.29.35': 'authority_key_identifier',
+ '2.5.29.46': 'freshest_crl',
+ '1.3.6.1.5.5.7.1.1': 'authority_information_access',
+ }
+
+
+class TBSCertListExtension(Sequence):
+ _fields = [
+ ('extn_id', TBSCertListExtensionId),
+ ('critical', Boolean, {'default': False}),
+ ('extn_value', ParsableOctetString),
+ ]
+
+ _oid_pair = ('extn_id', 'extn_value')
+ _oid_specs = {
+ 'issuer_alt_name': GeneralNames,
+ 'crl_number': Integer,
+ 'delta_crl_indicator': Integer,
+ 'issuing_distribution_point': IssuingDistributionPoint,
+ 'authority_key_identifier': AuthorityKeyIdentifier,
+ 'freshest_crl': CRLDistributionPoints,
+ 'authority_information_access': AuthorityInfoAccessSyntax,
+ }
+
+
+class TBSCertListExtensions(SequenceOf):
+ _child_spec = TBSCertListExtension
+
+
+class CRLReason(Enumerated):
+ _map = {
+ 0: 'unspecified',
+ 1: 'key_compromise',
+ 2: 'ca_compromise',
+ 3: 'affiliation_changed',
+ 4: 'superseded',
+ 5: 'cessation_of_operation',
+ 6: 'certificate_hold',
+ 8: 'remove_from_crl',
+ 9: 'privilege_withdrawn',
+ 10: 'aa_compromise',
+ }
+
+ @property
+ def human_friendly(self):
+ """
+ :return:
+ A unicode string with revocation description that is suitable to
+ show to end-users. Starts with a lower case letter and phrased in
+ such a way that it makes sense after the phrase "because of" or
+ "due to".
+ """
+
+ return {
+ 'unspecified': 'an unspecified reason',
+ 'key_compromise': 'a compromised key',
+ 'ca_compromise': 'the CA being compromised',
+ 'affiliation_changed': 'an affiliation change',
+ 'superseded': 'certificate supersession',
+ 'cessation_of_operation': 'a cessation of operation',
+ 'certificate_hold': 'a certificate hold',
+ 'remove_from_crl': 'removal from the CRL',
+ 'privilege_withdrawn': 'privilege withdrawal',
+ 'aa_compromise': 'the AA being compromised',
+ }[self.native]
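+
+ # For example, CRLReason('key_compromise').human_friendly evaluates to
+ # 'a compromised key', which reads naturally after the phrase "due to".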
+
+
+class CRLEntryExtensionId(ObjectIdentifier):
+ _map = {
+ '2.5.29.21': 'crl_reason',
+ '2.5.29.23': 'hold_instruction_code',
+ '2.5.29.24': 'invalidity_date',
+ '2.5.29.29': 'certificate_issuer',
+ }
+
+
+class CRLEntryExtension(Sequence):
+ _fields = [
+ ('extn_id', CRLEntryExtensionId),
+ ('critical', Boolean, {'default': False}),
+ ('extn_value', ParsableOctetString),
+ ]
+
+ _oid_pair = ('extn_id', 'extn_value')
+ _oid_specs = {
+ 'crl_reason': CRLReason,
+ 'hold_instruction_code': ObjectIdentifier,
+ 'invalidity_date': GeneralizedTime,
+ 'certificate_issuer': GeneralNames,
+ }
+
+
+class CRLEntryExtensions(SequenceOf):
+ _child_spec = CRLEntryExtension
+
+
+class RevokedCertificate(Sequence):
+ _fields = [
+ ('user_certificate', Integer),
+ ('revocation_date', Time),
+ ('crl_entry_extensions', CRLEntryExtensions, {'optional': True}),
+ ]
+
+ _processed_extensions = False
+ _critical_extensions = None
+ _crl_reason_value = None
+ _invalidity_date_value = None
+ _certificate_issuer_value = None
+ _issuer_name = False
+
+ def _set_extensions(self):
+ """
+ Sets common named extensions to private attributes and creates a list
+ of critical extensions
+ """
+
+ self._critical_extensions = set()
+
+ for extension in self['crl_entry_extensions']:
+ name = extension['extn_id'].native
+ attribute_name = '_%s_value' % name
+ if hasattr(self, attribute_name):
+ setattr(self, attribute_name, extension['extn_value'].parsed)
+ if extension['critical'].native:
+ self._critical_extensions.add(name)
+
+ self._processed_extensions = True
+
+ @property
+ def critical_extensions(self):
+ """
+ Returns a set of the names (or OID if not a known extension) of the
+ extensions marked as critical
+
+ :return:
+ A set of unicode strings
+ """
+
+ if not self._processed_extensions:
+ self._set_extensions()
+ return self._critical_extensions
+
+ @property
+ def crl_reason_value(self):
+ """
+ This extension indicates the reason that a certificate was revoked.
+
+ :return:
+ None or a CRLReason object
+ """
+
+ if self._processed_extensions is False:
+ self._set_extensions()
+ return self._crl_reason_value
+
+ @property
+ def invalidity_date_value(self):
+ """
+ This extension indicates the suspected date/time the private key was
+ compromised or the certificate became invalid. This would usually be
+ before the revocation date, which is when the CA processed the
+ revocation.
+
+ :return:
+ None or a GeneralizedTime object
+ """
+
+ if self._processed_extensions is False:
+ self._set_extensions()
+ return self._invalidity_date_value
+
+ @property
+ def certificate_issuer_value(self):
+ """
+ This extension indicates the issuer of the certificate in question,
+ and is used in indirect CRLs. CRL entries without this extension are
+ for certificates issued from the last seen issuer.
+
+ :return:
+ None or an x509.GeneralNames object
+ """
+
+ if self._processed_extensions is False:
+ self._set_extensions()
+ return self._certificate_issuer_value
+
+ @property
+ def issuer_name(self):
+ """
+ :return:
+ None, or an asn1crypto.x509.Name object for the issuer of the cert
+ """
+
+ if self._issuer_name is False:
+ self._issuer_name = None
+ if self.certificate_issuer_value:
+ for general_name in self.certificate_issuer_value:
+ if general_name.name == 'directory_name':
+ self._issuer_name = general_name.chosen
+ break
+ return self._issuer_name
+
+
+class RevokedCertificates(SequenceOf):
+ _child_spec = RevokedCertificate
+
+
+class TbsCertList(Sequence):
+ _fields = [
+ ('version', Version, {'optional': True}),
+ ('signature', SignedDigestAlgorithm),
+ ('issuer', Name),
+ ('this_update', Time),
+ ('next_update', Time, {'optional': True}),
+ ('revoked_certificates', RevokedCertificates, {'optional': True}),
+ ('crl_extensions', TBSCertListExtensions, {'explicit': 0, 'optional': True}),
+ ]
+
+
+class CertificateList(Sequence):
+ _fields = [
+ ('tbs_cert_list', TbsCertList),
+ ('signature_algorithm', SignedDigestAlgorithm),
+ ('signature', OctetBitString),
+ ]
+
+ _processed_extensions = False
+ _critical_extensions = None
+ _issuer_alt_name_value = None
+ _crl_number_value = None
+ _delta_crl_indicator_value = None
+ _issuing_distribution_point_value = None
+ _authority_key_identifier_value = None
+ _freshest_crl_value = None
+ _authority_information_access_value = None
+ _issuer_cert_urls = None
+ _delta_crl_distribution_points = None
+ _sha1 = None
+ _sha256 = None
+
+ def _set_extensions(self):
+ """
+ Sets common named extensions to private attributes and creates a list
+ of critical extensions
+ """
+
+ self._critical_extensions = set()
+
+ for extension in self['tbs_cert_list']['crl_extensions']:
+ name = extension['extn_id'].native
+ attribute_name = '_%s_value' % name
+ if hasattr(self, attribute_name):
+ setattr(self, attribute_name, extension['extn_value'].parsed)
+ if extension['critical'].native:
+ self._critical_extensions.add(name)
+
+ self._processed_extensions = True
+
+ @property
+ def critical_extensions(self):
+ """
+ Returns a set of the names (or OID if not a known extension) of the
+ extensions marked as critical
+
+ :return:
+ A set of unicode strings
+ """
+
+ if not self._processed_extensions:
+ self._set_extensions()
+ return self._critical_extensions
+
+ @property
+ def issuer_alt_name_value(self):
+ """
+ This extension allows associating one or more alternative names with
+ the issuer of the CRL.
+
+ :return:
+ None or an x509.GeneralNames object
+ """
+
+ if self._processed_extensions is False:
+ self._set_extensions()
+ return self._issuer_alt_name_value
+
+ @property
+ def crl_number_value(self):
+ """
+ This extension adds a monotonically increasing number to the CRL and is
+ used to distinguish different versions of the CRL.
+
+ :return:
+ None or an Integer object
+ """
+
+ if self._processed_extensions is False:
+ self._set_extensions()
+ return self._crl_number_value
+
+ @property
+ def delta_crl_indicator_value(self):
+ """
+ This extension indicates a CRL is a delta CRL, and contains the CRL
+ number of the base CRL that it is a delta from.
+
+ :return:
+ None or an Integer object
+ """
+
+ if self._processed_extensions is False:
+ self._set_extensions()
+ return self._delta_crl_indicator_value
+
+ @property
+ def issuing_distribution_point_value(self):
+ """
+ This extension includes information about what types of revocations
+ and certificates are part of the CRL.
+
+ :return:
+ None or an IssuingDistributionPoint object
+ """
+
+ if self._processed_extensions is False:
+ self._set_extensions()
+ return self._issuing_distribution_point_value
+
+ @property
+ def authority_key_identifier_value(self):
+ """
+ This extension helps in identifying the public key with which to
+ validate the authenticity of the CRL.
+
+ :return:
+ None or an AuthorityKeyIdentifier object
+ """
+
+ if self._processed_extensions is False:
+ self._set_extensions()
+ return self._authority_key_identifier_value
+
+ @property
+ def freshest_crl_value(self):
+ """
+ This extension is used in complete CRLs to indicate where a delta CRL
+ may be located.
+
+ :return:
+ None or a CRLDistributionPoints object
+ """
+
+ if self._processed_extensions is False:
+ self._set_extensions()
+ return self._freshest_crl_value
+
+ @property
+ def authority_information_access_value(self):
+ """
+ This extension is used to provide a URL with which to download the
+ certificate used to sign this CRL.
+
+ :return:
+ None or an AuthorityInfoAccessSyntax object
+ """
+
+ if self._processed_extensions is False:
+ self._set_extensions()
+ return self._authority_information_access_value
+
+ @property
+ def issuer(self):
+ """
+ :return:
+ An asn1crypto.x509.Name object for the issuer of the CRL
+ """
+
+ return self['tbs_cert_list']['issuer']
+
+ @property
+ def authority_key_identifier(self):
+ """
+ :return:
+ None or a byte string of the key_identifier from the authority key
+ identifier extension
+ """
+
+ if not self.authority_key_identifier_value:
+ return None
+
+ return self.authority_key_identifier_value['key_identifier'].native
+
+ @property
+ def issuer_cert_urls(self):
+ """
+ :return:
+ A list of unicode strings that are URLs that should contain either
+ an individual DER-encoded X.509 certificate, or a DER-encoded CMS
+ message containing multiple certificates
+ """
+
+ if self._issuer_cert_urls is None:
+ self._issuer_cert_urls = []
+ if self.authority_information_access_value:
+ for entry in self.authority_information_access_value:
+ if entry['access_method'].native == 'ca_issuers':
+ location = entry['access_location']
+ if location.name != 'uniform_resource_identifier':
+ continue
+ url = location.native
+ if url.lower()[0:7] == 'http://':
+ self._issuer_cert_urls.append(url)
+ return self._issuer_cert_urls
+
+ @property
+ def delta_crl_distribution_points(self):
+ """
+ Returns delta CRL URLs - only applies to complete CRLs
+
+ :return:
+ A list of zero or more DistributionPoint objects
+ """
+
+ if self._delta_crl_distribution_points is None:
+ self._delta_crl_distribution_points = []
+
+ if self.freshest_crl_value is not None:
+ for distribution_point in self.freshest_crl_value:
+ distribution_point_name = distribution_point['distribution_point']
+ # RFC 5280 indicates conforming CA should not use the relative form
+ if distribution_point_name.name == 'name_relative_to_crl_issuer':
+ continue
+ # This library is currently only concerned with HTTP-based CRLs
+ for general_name in distribution_point_name.chosen:
+ if general_name.name == 'uniform_resource_identifier':
+ self._delta_crl_distribution_points.append(distribution_point)
+
+ return self._delta_crl_distribution_points
+
+ @property
+ def signature(self):
+ """
+ :return:
+ A byte string of the signature
+ """
+
+ return self['signature'].native
+
+ @property
+ def sha1(self):
+ """
+ :return:
+ The SHA1 hash of the DER-encoded bytes of this certificate list
+ """
+
+ if self._sha1 is None:
+ self._sha1 = hashlib.sha1(self.dump()).digest()
+ return self._sha1
+
+ @property
+ def sha256(self):
+ """
+ :return:
+ The SHA-256 hash of the DER-encoded bytes of this certificate list
+ """
+
+ if self._sha256 is None:
+ self._sha256 = hashlib.sha256(self.dump()).digest()
+ return self._sha256
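+
+
+# A brief usage sketch, assuming der_bytes holds a DER-encoded CRL:
+#
+#     crl = CertificateList.load(der_bytes)
+#     crl.issuer.native                          # issuer name components
+#     crl['tbs_cert_list']['this_update'].native # last publication time
+#     crl.sha256                                 # SHA-256 of the DER encoding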
diff --git a/jc/parsers/asn1crypto/csr.py b/jc/parsers/asn1crypto/csr.py
new file mode 100644
index 00000000..7d5ba447
--- /dev/null
+++ b/jc/parsers/asn1crypto/csr.py
@@ -0,0 +1,133 @@
+# coding: utf-8
+
+"""
+ASN.1 type classes for certificate signing requests (CSR). Exports the
+following items:
+
+ - CertificationRequest()
+
+Other type classes are defined that help compose the types listed above.
+"""
+
+from __future__ import unicode_literals, division, absolute_import, print_function
+
+from .algos import SignedDigestAlgorithm
+from .core import (
+ Any,
+ BitString,
+ BMPString,
+ Integer,
+ ObjectIdentifier,
+ OctetBitString,
+ Sequence,
+ SetOf,
+ UTF8String
+)
+from .keys import PublicKeyInfo
+from .x509 import DirectoryString, Extensions, Name
+
+
+# The structures in this file are taken from https://tools.ietf.org/html/rfc2986
+# and https://tools.ietf.org/html/rfc2985
+
+
+class Version(Integer):
+ _map = {
+ 0: 'v1',
+ }
+
+
+class CSRAttributeType(ObjectIdentifier):
+ _map = {
+ '1.2.840.113549.1.9.7': 'challenge_password',
+ '1.2.840.113549.1.9.9': 'extended_certificate_attributes',
+ '1.2.840.113549.1.9.14': 'extension_request',
+ # https://docs.microsoft.com/en-us/openspecs/windows_protocols/ms-wcce/a5eaae36-e9f3-4dc5-a687-bfa7115954f1
+ '1.3.6.1.4.1.311.13.2.2': 'microsoft_enrollment_csp_provider',
+ # https://docs.microsoft.com/en-us/openspecs/windows_protocols/ms-wcce/7c677cba-030d-48be-ba2b-01e407705f34
+ '1.3.6.1.4.1.311.13.2.3': 'microsoft_os_version',
+ # https://docs.microsoft.com/en-us/openspecs/windows_protocols/ms-wcce/64e5ff6d-c6dd-4578-92f7-b3d895f9b9c7
+ '1.3.6.1.4.1.311.21.20': 'microsoft_request_client_info',
+ }
+
+
+class SetOfDirectoryString(SetOf):
+ _child_spec = DirectoryString
+
+
+class Attribute(Sequence):
+ _fields = [
+ ('type', ObjectIdentifier),
+ ('values', SetOf, {'spec': Any}),
+ ]
+
+
+class SetOfAttributes(SetOf):
+ _child_spec = Attribute
+
+
+class SetOfExtensions(SetOf):
+ _child_spec = Extensions
+
+
+class MicrosoftEnrollmentCSProvider(Sequence):
+ _fields = [
+ ('keyspec', Integer),
+ ('cspname', BMPString), # cryptographic service provider name
+ ('signature', BitString),
+ ]
+
+
+class SetOfMicrosoftEnrollmentCSProvider(SetOf):
+ _child_spec = MicrosoftEnrollmentCSProvider
+
+
+class MicrosoftRequestClientInfo(Sequence):
+ _fields = [
+ ('clientid', Integer),
+ ('machinename', UTF8String),
+ ('username', UTF8String),
+ ('processname', UTF8String),
+ ]
+
+
+class SetOfMicrosoftRequestClientInfo(SetOf):
+ _child_spec = MicrosoftRequestClientInfo
+
+
+class CRIAttribute(Sequence):
+ _fields = [
+ ('type', CSRAttributeType),
+ ('values', Any),
+ ]
+
+ _oid_pair = ('type', 'values')
+ _oid_specs = {
+ 'challenge_password': SetOfDirectoryString,
+ 'extended_certificate_attributes': SetOfAttributes,
+ 'extension_request': SetOfExtensions,
+ 'microsoft_enrollment_csp_provider': SetOfMicrosoftEnrollmentCSProvider,
+ 'microsoft_os_version': SetOfDirectoryString,
+ 'microsoft_request_client_info': SetOfMicrosoftRequestClientInfo,
+ }
+
+
+class CRIAttributes(SetOf):
+ _child_spec = CRIAttribute
+
+
+class CertificationRequestInfo(Sequence):
+ _fields = [
+ ('version', Version),
+ ('subject', Name),
+ ('subject_pk_info', PublicKeyInfo),
+ ('attributes', CRIAttributes, {'implicit': 0, 'optional': True}),
+ ]
+
+
+class CertificationRequest(Sequence):
+ _fields = [
+ ('certification_request_info', CertificationRequestInfo),
+ ('signature_algorithm', SignedDigestAlgorithm),
+ ('signature', OctetBitString),
+ ]
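+
+
+# A brief usage sketch, assuming der_bytes holds a DER-encoded PKCS#10 request:
+#
+#     csr = CertificationRequest.load(der_bytes)
+#     info = csr['certification_request_info']
+#     info['subject'].native          # requested subject name
+#     info['subject_pk_info'].native  # public key details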
diff --git a/jc/parsers/asn1crypto/keys.py b/jc/parsers/asn1crypto/keys.py
new file mode 100644
index 00000000..b4a87aea
--- /dev/null
+++ b/jc/parsers/asn1crypto/keys.py
@@ -0,0 +1,1301 @@
+# coding: utf-8
+
+"""
+ASN.1 type classes for public and private keys. Exports the following items:
+
+ - DSAPrivateKey()
+ - ECPrivateKey()
+ - EncryptedPrivateKeyInfo()
+ - PrivateKeyInfo()
+ - PublicKeyInfo()
+ - RSAPrivateKey()
+ - RSAPublicKey()
+
+Other type classes are defined that help compose the types listed above.
+"""
+
+from __future__ import unicode_literals, division, absolute_import, print_function
+
+import hashlib
+import math
+
+from ._errors import unwrap, APIException
+from ._types import type_name, byte_cls
+from .algos import _ForceNullParameters, DigestAlgorithm, EncryptionAlgorithm, RSAESOAEPParams, RSASSAPSSParams
+from .core import (
+ Any,
+ Asn1Value,
+ BitString,
+ Choice,
+ Integer,
+ IntegerOctetString,
+ Null,
+ ObjectIdentifier,
+ OctetBitString,
+ OctetString,
+ ParsableOctetString,
+ ParsableOctetBitString,
+ Sequence,
+ SequenceOf,
+ SetOf,
+)
+from .util import int_from_bytes, int_to_bytes
+
+
+class OtherPrimeInfo(Sequence):
+ """
+ Source: https://tools.ietf.org/html/rfc3447#page-46
+ """
+
+ _fields = [
+ ('prime', Integer),
+ ('exponent', Integer),
+ ('coefficient', Integer),
+ ]
+
+
+class OtherPrimeInfos(SequenceOf):
+ """
+ Source: https://tools.ietf.org/html/rfc3447#page-46
+ """
+
+ _child_spec = OtherPrimeInfo
+
+
+class RSAPrivateKeyVersion(Integer):
+ """
+ Original Name: Version
+ Source: https://tools.ietf.org/html/rfc3447#page-45
+ """
+
+ _map = {
+ 0: 'two-prime',
+ 1: 'multi',
+ }
+
+
+class RSAPrivateKey(Sequence):
+ """
+ Source: https://tools.ietf.org/html/rfc3447#page-45
+ """
+
+ _fields = [
+ ('version', RSAPrivateKeyVersion),
+ ('modulus', Integer),
+ ('public_exponent', Integer),
+ ('private_exponent', Integer),
+ ('prime1', Integer),
+ ('prime2', Integer),
+ ('exponent1', Integer),
+ ('exponent2', Integer),
+ ('coefficient', Integer),
+ ('other_prime_infos', OtherPrimeInfos, {'optional': True})
+ ]
+
+
+class RSAPublicKey(Sequence):
+ """
+ Source: https://tools.ietf.org/html/rfc3447#page-44
+ """
+
+ _fields = [
+ ('modulus', Integer),
+ ('public_exponent', Integer)
+ ]
+
+
+class DSAPrivateKey(Sequence):
+ """
+ The ASN.1 structure that OpenSSL uses to store a DSA private key that is
+ not part of a PKCS#8 structure. Reverse engineered from the English-language
+ description on the linked OpenSSL documentation page.
+
+ Original Name: None
+ Source: https://www.openssl.org/docs/apps/dsa.html
+ """
+
+ _fields = [
+ ('version', Integer),
+ ('p', Integer),
+ ('q', Integer),
+ ('g', Integer),
+ ('public_key', Integer),
+ ('private_key', Integer),
+ ]
+
+
+class _ECPoint():
+ """
+ In both PublicKeyInfo and PrivateKeyInfo, the EC public key is a byte
+ string that is encoded as a bit string. This class adds convenience
+ methods for converting to and from the byte string to a pair of integers
+ that are the X and Y coordinates.
+ """
+
+ @classmethod
+ def from_coords(cls, x, y):
+ """
+ Creates an ECPoint object from the X and Y integer coordinates of the
+ point
+
+ :param x:
+ The X coordinate, as an integer
+
+ :param y:
+ The Y coordinate, as an integer
+
+ :return:
+ An ECPoint object
+ """
+
+ x_bytes = int(math.ceil(math.log(x, 2) / 8.0))
+ y_bytes = int(math.ceil(math.log(y, 2) / 8.0))
+
+ num_bytes = max(x_bytes, y_bytes)
+
+ byte_string = b'\x04'
+ byte_string += int_to_bytes(x, width=num_bytes)
+ byte_string += int_to_bytes(y, width=num_bytes)
+
+ return cls(byte_string)
+
+ def to_coords(self):
+ """
+ Returns the X and Y coordinates for this EC point, as native Python
+ integers
+
+ :return:
+ A 2-element tuple containing integers (X, Y)
+ """
+
+ data = self.native
+ first_byte = data[0:1]
+
+ # Uncompressed
+ if first_byte == b'\x04':
+ remaining = data[1:]
+ field_len = len(remaining) // 2
+ x = int_from_bytes(remaining[0:field_len])
+ y = int_from_bytes(remaining[field_len:])
+ return (x, y)
+
+ if first_byte not in set([b'\x02', b'\x03']):
+ raise ValueError(unwrap(
+ '''
+ Invalid EC public key - first byte is incorrect
+ '''
+ ))
+
+ raise ValueError(unwrap(
+ '''
+ Compressed representations of EC public keys are not supported due
+ to patent US6252960
+ '''
+ ))
+
+
+class ECPoint(OctetString, _ECPoint):
+
+ pass
+
+
+class ECPointBitString(OctetBitString, _ECPoint):
+
+ pass
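+
+
+# For example, a point can be round-tripped through the uncompressed byte
+# encoding (the small coordinates below are illustrative, not a real curve
+# point):
+#
+#     point = ECPoint.from_coords(10, 20)
+#     point.to_coords()    # (10, 20)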
+
+
+class SpecifiedECDomainVersion(Integer):
+ """
+ Source: http://www.secg.org/sec1-v2.pdf page 104
+ """
+ _map = {
+ 1: 'ecdpVer1',
+ 2: 'ecdpVer2',
+ 3: 'ecdpVer3',
+ }
+
+
+class FieldType(ObjectIdentifier):
+ """
+ Original Name: None
+ Source: http://www.secg.org/sec1-v2.pdf page 101
+ """
+
+ _map = {
+ '1.2.840.10045.1.1': 'prime_field',
+ '1.2.840.10045.1.2': 'characteristic_two_field',
+ }
+
+
+class CharacteristicTwoBasis(ObjectIdentifier):
+ """
+ Original Name: None
+ Source: http://www.secg.org/sec1-v2.pdf page 102
+ """
+
+ _map = {
+ '1.2.840.10045.1.2.1.1': 'gn_basis',
+ '1.2.840.10045.1.2.1.2': 'tp_basis',
+ '1.2.840.10045.1.2.1.3': 'pp_basis',
+ }
+
+
+class Pentanomial(Sequence):
+ """
+ Source: http://www.secg.org/sec1-v2.pdf page 102
+ """
+
+ _fields = [
+ ('k1', Integer),
+ ('k2', Integer),
+ ('k3', Integer),
+ ]
+
+
+class CharacteristicTwo(Sequence):
+ """
+ Original Name: Characteristic-two
+ Source: http://www.secg.org/sec1-v2.pdf page 101
+ """
+
+ _fields = [
+ ('m', Integer),
+ ('basis', CharacteristicTwoBasis),
+ ('parameters', Any),
+ ]
+
+ _oid_pair = ('basis', 'parameters')
+ _oid_specs = {
+ 'gn_basis': Null,
+ 'tp_basis': Integer,
+ 'pp_basis': Pentanomial,
+ }
+
+
+class FieldID(Sequence):
+ """
+ Source: http://www.secg.org/sec1-v2.pdf page 100
+ """
+
+ _fields = [
+ ('field_type', FieldType),
+ ('parameters', Any),
+ ]
+
+ _oid_pair = ('field_type', 'parameters')
+ _oid_specs = {
+ 'prime_field': Integer,
+ 'characteristic_two_field': CharacteristicTwo,
+ }
+
+
+class Curve(Sequence):
+ """
+ Source: http://www.secg.org/sec1-v2.pdf page 104
+ """
+
+ _fields = [
+ ('a', OctetString),
+ ('b', OctetString),
+ ('seed', OctetBitString, {'optional': True}),
+ ]
+
+
+class SpecifiedECDomain(Sequence):
+ """
+ Source: http://www.secg.org/sec1-v2.pdf page 103
+ """
+
+ _fields = [
+ ('version', SpecifiedECDomainVersion),
+ ('field_id', FieldID),
+ ('curve', Curve),
+ ('base', ECPoint),
+ ('order', Integer),
+ ('cofactor', Integer, {'optional': True}),
+ ('hash', DigestAlgorithm, {'optional': True}),
+ ]
+
+
+class NamedCurve(ObjectIdentifier):
+ """
+ Various named curves
+
+ Original Name: None
+ Source: https://tools.ietf.org/html/rfc3279#page-23,
+ https://tools.ietf.org/html/rfc5480#page-5
+ """
+
+ _map = {
+ # https://tools.ietf.org/html/rfc3279#page-23
+ '1.2.840.10045.3.0.1': 'c2pnb163v1',
+ '1.2.840.10045.3.0.2': 'c2pnb163v2',
+ '1.2.840.10045.3.0.3': 'c2pnb163v3',
+ '1.2.840.10045.3.0.4': 'c2pnb176w1',
+ '1.2.840.10045.3.0.5': 'c2tnb191v1',
+ '1.2.840.10045.3.0.6': 'c2tnb191v2',
+ '1.2.840.10045.3.0.7': 'c2tnb191v3',
+ '1.2.840.10045.3.0.8': 'c2onb191v4',
+ '1.2.840.10045.3.0.9': 'c2onb191v5',
+ '1.2.840.10045.3.0.10': 'c2pnb208w1',
+ '1.2.840.10045.3.0.11': 'c2tnb239v1',
+ '1.2.840.10045.3.0.12': 'c2tnb239v2',
+ '1.2.840.10045.3.0.13': 'c2tnb239v3',
+ '1.2.840.10045.3.0.14': 'c2onb239v4',
+ '1.2.840.10045.3.0.15': 'c2onb239v5',
+ '1.2.840.10045.3.0.16': 'c2pnb272w1',
+ '1.2.840.10045.3.0.17': 'c2pnb304w1',
+ '1.2.840.10045.3.0.18': 'c2tnb359v1',
+ '1.2.840.10045.3.0.19': 'c2pnb368w1',
+ '1.2.840.10045.3.0.20': 'c2tnb431r1',
+ '1.2.840.10045.3.1.2': 'prime192v2',
+ '1.2.840.10045.3.1.3': 'prime192v3',
+ '1.2.840.10045.3.1.4': 'prime239v1',
+ '1.2.840.10045.3.1.5': 'prime239v2',
+ '1.2.840.10045.3.1.6': 'prime239v3',
+ # https://tools.ietf.org/html/rfc5480#page-5
+ # http://www.secg.org/SEC2-Ver-1.0.pdf
+ '1.2.840.10045.3.1.1': 'secp192r1',
+ '1.2.840.10045.3.1.7': 'secp256r1',
+ '1.3.132.0.1': 'sect163k1',
+ '1.3.132.0.2': 'sect163r1',
+ '1.3.132.0.3': 'sect239k1',
+ '1.3.132.0.4': 'sect113r1',
+ '1.3.132.0.5': 'sect113r2',
+ '1.3.132.0.6': 'secp112r1',
+ '1.3.132.0.7': 'secp112r2',
+ '1.3.132.0.8': 'secp160r1',
+ '1.3.132.0.9': 'secp160k1',
+ '1.3.132.0.10': 'secp256k1',
+ '1.3.132.0.15': 'sect163r2',
+ '1.3.132.0.16': 'sect283k1',
+ '1.3.132.0.17': 'sect283r1',
+ '1.3.132.0.22': 'sect131r1',
+ '1.3.132.0.23': 'sect131r2',
+ '1.3.132.0.24': 'sect193r1',
+ '1.3.132.0.25': 'sect193r2',
+ '1.3.132.0.26': 'sect233k1',
+ '1.3.132.0.27': 'sect233r1',
+ '1.3.132.0.28': 'secp128r1',
+ '1.3.132.0.29': 'secp128r2',
+ '1.3.132.0.30': 'secp160r2',
+ '1.3.132.0.31': 'secp192k1',
+ '1.3.132.0.32': 'secp224k1',
+ '1.3.132.0.33': 'secp224r1',
+ '1.3.132.0.34': 'secp384r1',
+ '1.3.132.0.35': 'secp521r1',
+ '1.3.132.0.36': 'sect409k1',
+ '1.3.132.0.37': 'sect409r1',
+ '1.3.132.0.38': 'sect571k1',
+ '1.3.132.0.39': 'sect571r1',
+ # https://tools.ietf.org/html/rfc5639#section-4.1
+ '1.3.36.3.3.2.8.1.1.1': 'brainpoolp160r1',
+ '1.3.36.3.3.2.8.1.1.2': 'brainpoolp160t1',
+ '1.3.36.3.3.2.8.1.1.3': 'brainpoolp192r1',
+ '1.3.36.3.3.2.8.1.1.4': 'brainpoolp192t1',
+ '1.3.36.3.3.2.8.1.1.5': 'brainpoolp224r1',
+ '1.3.36.3.3.2.8.1.1.6': 'brainpoolp224t1',
+ '1.3.36.3.3.2.8.1.1.7': 'brainpoolp256r1',
+ '1.3.36.3.3.2.8.1.1.8': 'brainpoolp256t1',
+ '1.3.36.3.3.2.8.1.1.9': 'brainpoolp320r1',
+ '1.3.36.3.3.2.8.1.1.10': 'brainpoolp320t1',
+ '1.3.36.3.3.2.8.1.1.11': 'brainpoolp384r1',
+ '1.3.36.3.3.2.8.1.1.12': 'brainpoolp384t1',
+ '1.3.36.3.3.2.8.1.1.13': 'brainpoolp512r1',
+ '1.3.36.3.3.2.8.1.1.14': 'brainpoolp512t1',
+ }
+
+ _key_sizes = {
+ # Order values used to compute these sourced from
+ # http://cr.openjdk.java.net/~vinnie/7194075/webrev-3/src/share/classes/sun/security/ec/CurveDB.java.html
+ '1.2.840.10045.3.0.1': 21,
+ '1.2.840.10045.3.0.2': 21,
+ '1.2.840.10045.3.0.3': 21,
+ '1.2.840.10045.3.0.4': 21,
+ '1.2.840.10045.3.0.5': 24,
+ '1.2.840.10045.3.0.6': 24,
+ '1.2.840.10045.3.0.7': 24,
+ '1.2.840.10045.3.0.8': 24,
+ '1.2.840.10045.3.0.9': 24,
+ '1.2.840.10045.3.0.10': 25,
+ '1.2.840.10045.3.0.11': 30,
+ '1.2.840.10045.3.0.12': 30,
+ '1.2.840.10045.3.0.13': 30,
+ '1.2.840.10045.3.0.14': 30,
+ '1.2.840.10045.3.0.15': 30,
+ '1.2.840.10045.3.0.16': 33,
+ '1.2.840.10045.3.0.17': 37,
+ '1.2.840.10045.3.0.18': 45,
+ '1.2.840.10045.3.0.19': 45,
+ '1.2.840.10045.3.0.20': 53,
+ '1.2.840.10045.3.1.2': 24,
+ '1.2.840.10045.3.1.3': 24,
+ '1.2.840.10045.3.1.4': 30,
+ '1.2.840.10045.3.1.5': 30,
+ '1.2.840.10045.3.1.6': 30,
+ # Order values used to compute these sourced from
+ # http://www.secg.org/SEC2-Ver-1.0.pdf
+ # ceil(n.bit_length() / 8)
+ '1.2.840.10045.3.1.1': 24,
+ '1.2.840.10045.3.1.7': 32,
+ '1.3.132.0.1': 21,
+ '1.3.132.0.2': 21,
+ '1.3.132.0.3': 30,
+ '1.3.132.0.4': 15,
+ '1.3.132.0.5': 15,
+ '1.3.132.0.6': 14,
+ '1.3.132.0.7': 14,
+ '1.3.132.0.8': 21,
+ '1.3.132.0.9': 21,
+ '1.3.132.0.10': 32,
+ '1.3.132.0.15': 21,
+ '1.3.132.0.16': 36,
+ '1.3.132.0.17': 36,
+ '1.3.132.0.22': 17,
+ '1.3.132.0.23': 17,
+ '1.3.132.0.24': 25,
+ '1.3.132.0.25': 25,
+ '1.3.132.0.26': 29,
+ '1.3.132.0.27': 30,
+ '1.3.132.0.28': 16,
+ '1.3.132.0.29': 16,
+ '1.3.132.0.30': 21,
+ '1.3.132.0.31': 24,
+ '1.3.132.0.32': 29,
+ '1.3.132.0.33': 28,
+ '1.3.132.0.34': 48,
+ '1.3.132.0.35': 66,
+ '1.3.132.0.36': 51,
+ '1.3.132.0.37': 52,
+ '1.3.132.0.38': 72,
+ '1.3.132.0.39': 72,
+ # Order values used to compute these sourced from
+ # https://tools.ietf.org/html/rfc5639#section-3
+ # ceil(q.bit_length() / 8)
+ '1.3.36.3.3.2.8.1.1.1': 20,
+ '1.3.36.3.3.2.8.1.1.2': 20,
+ '1.3.36.3.3.2.8.1.1.3': 24,
+ '1.3.36.3.3.2.8.1.1.4': 24,
+ '1.3.36.3.3.2.8.1.1.5': 28,
+ '1.3.36.3.3.2.8.1.1.6': 28,
+ '1.3.36.3.3.2.8.1.1.7': 32,
+ '1.3.36.3.3.2.8.1.1.8': 32,
+ '1.3.36.3.3.2.8.1.1.9': 40,
+ '1.3.36.3.3.2.8.1.1.10': 40,
+ '1.3.36.3.3.2.8.1.1.11': 48,
+ '1.3.36.3.3.2.8.1.1.12': 48,
+ '1.3.36.3.3.2.8.1.1.13': 64,
+ '1.3.36.3.3.2.8.1.1.14': 64,
+ }
+
+ @classmethod
+ def register(cls, name, oid, key_size):
+ """
+ Registers a new named elliptic curve that is not included in the
+ default list of named curves
+
+ :param name:
+ A unicode string of the curve name
+
+ :param oid:
+ A unicode string of the dotted format OID
+
+ :param key_size:
+ An integer of the number of bytes the private key should be
+ encoded to
+ """
+
+ cls._map[oid] = name
+ if cls._reverse_map is not None:
+ cls._reverse_map[name] = oid
+ cls._key_sizes[oid] = key_size
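+
+ # For example, a curve missing from the map above could be added with a
+ # call like the following (the name, OID and key size are illustrative):
+ #
+ #     NamedCurve.register('examplecurve', '1.2.3.4.5.6', 32)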
+
+
+class ECDomainParameters(Choice):
+ """
+ Source: http://www.secg.org/sec1-v2.pdf page 102
+ """
+
+ _alternatives = [
+ ('specified', SpecifiedECDomain),
+ ('named', NamedCurve),
+ ('implicit_ca', Null),
+ ]
+
+ @property
+ def key_size(self):
+ if self.name == 'implicit_ca':
+ raise ValueError(unwrap(
+ '''
+ Unable to calculate key_size from ECDomainParameters
+ that are implicitly defined by the CA key
+ '''
+ ))
+
+ if self.name == 'specified':
+ order = self.chosen['order'].native
+ return math.ceil(math.log(order, 2.0) / 8.0)
+
+ oid = self.chosen.dotted
+ if oid not in NamedCurve._key_sizes:
+ raise ValueError(unwrap(
+ '''
+ The asn1crypto.keys.NamedCurve %s does not have a registered key length,
+ please call asn1crypto.keys.NamedCurve.register()
+ ''',
+ repr(oid)
+ ))
+ return NamedCurve._key_sizes[oid]
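+
+ # For example, ECDomainParameters(name='named', value='secp256r1').key_size
+ # evaluates to 32, the byte length used when encoding private keys for
+ # that curve.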
+
+
+class ECPrivateKeyVersion(Integer):
+ """
+ Original Name: None
+ Source: http://www.secg.org/sec1-v2.pdf page 108
+ """
+
+ _map = {
+ 1: 'ecPrivkeyVer1',
+ }
+
+
+class ECPrivateKey(Sequence):
+ """
+ Source: http://www.secg.org/sec1-v2.pdf page 108
+ """
+
+ _fields = [
+ ('version', ECPrivateKeyVersion),
+ ('private_key', IntegerOctetString),
+ ('parameters', ECDomainParameters, {'explicit': 0, 'optional': True}),
+ ('public_key', ECPointBitString, {'explicit': 1, 'optional': True}),
+ ]
+
+ # Ensures the key is set to the correct length when encoding
+ _key_size = None
+
+ # This is necessary to ensure the private_key IntegerOctetString is encoded properly
+ def __setitem__(self, key, value):
+ res = super(ECPrivateKey, self).__setitem__(key, value)
+
+ if key == 'private_key':
+ if self._key_size is None:
+ # Infer the key_size from the existing private key if possible
+ pkey_contents = self['private_key'].contents
+ if isinstance(pkey_contents, byte_cls) and len(pkey_contents) > 1:
+ self.set_key_size(len(self['private_key'].contents))
+
+ elif self._key_size is not None:
+ self._update_key_size()
+
+ elif key == 'parameters' and isinstance(self['parameters'], ECDomainParameters) and \
+ self['parameters'].name != 'implicit_ca':
+ self.set_key_size(self['parameters'].key_size)
+
+ return res
+
+ def set_key_size(self, key_size):
+ """
+ Sets the key_size to ensure the private key is encoded to the proper length
+
+ :param key_size:
+ An integer byte length to encode the private_key to
+ """
+
+ self._key_size = key_size
+ self._update_key_size()
+
+ def _update_key_size(self):
+ """
+ Ensure the private_key explicit encoding width is set
+ """
+
+ if self._key_size is not None and isinstance(self['private_key'], IntegerOctetString):
+ self['private_key'].set_encoded_width(self._key_size)
+
+
+class DSAParams(Sequence):
+ """
+ Parameters for a DSA public or private key
+
+ Original Name: Dss-Parms
+ Source: https://tools.ietf.org/html/rfc3279#page-9
+ """
+
+ _fields = [
+ ('p', Integer),
+ ('q', Integer),
+ ('g', Integer),
+ ]
+
+
+class Attribute(Sequence):
+ """
+ Source: https://www.itu.int/rec/dologin_pub.asp?lang=e&id=T-REC-X.501-198811-S!!PDF-E&type=items page 8
+ """
+
+ _fields = [
+ ('type', ObjectIdentifier),
+ ('values', SetOf, {'spec': Any}),
+ ]
+
+
+class Attributes(SetOf):
+ """
+ Source: https://tools.ietf.org/html/rfc5208#page-3
+ """
+
+ _child_spec = Attribute
+
+
+class PrivateKeyAlgorithmId(ObjectIdentifier):
+ """
+ These OIDs for various public keys are reused when storing private keys
+ inside of a PKCS#8 structure
+
+ Original Name: None
+ Source: https://tools.ietf.org/html/rfc3279
+ """
+
+ _map = {
+ # https://tools.ietf.org/html/rfc3279#page-19
+ '1.2.840.113549.1.1.1': 'rsa',
+ # https://tools.ietf.org/html/rfc4055#page-8
+ '1.2.840.113549.1.1.10': 'rsassa_pss',
+ # https://tools.ietf.org/html/rfc3279#page-18
+ '1.2.840.10040.4.1': 'dsa',
+ # https://tools.ietf.org/html/rfc3279#page-13
+ '1.2.840.10045.2.1': 'ec',
+ # https://tools.ietf.org/html/rfc8410#section-9
+ '1.3.101.110': 'x25519',
+ '1.3.101.111': 'x448',
+ '1.3.101.112': 'ed25519',
+ '1.3.101.113': 'ed448',
+ }
+
+
+class PrivateKeyAlgorithm(_ForceNullParameters, Sequence):
+ """
+ Original Name: PrivateKeyAlgorithmIdentifier
+ Source: https://tools.ietf.org/html/rfc5208#page-3
+ """
+
+ _fields = [
+ ('algorithm', PrivateKeyAlgorithmId),
+ ('parameters', Any, {'optional': True}),
+ ]
+
+ _oid_pair = ('algorithm', 'parameters')
+ _oid_specs = {
+ 'dsa': DSAParams,
+ 'ec': ECDomainParameters,
+ 'rsassa_pss': RSASSAPSSParams,
+ }
+
+
+class PrivateKeyInfo(Sequence):
+ """
+ Source: https://tools.ietf.org/html/rfc5208#page-3
+ """
+
+ _fields = [
+ ('version', Integer),
+ ('private_key_algorithm', PrivateKeyAlgorithm),
+ ('private_key', ParsableOctetString),
+ ('attributes', Attributes, {'implicit': 0, 'optional': True}),
+ ]
+
+ def _private_key_spec(self):
+ algorithm = self['private_key_algorithm']['algorithm'].native
+ return {
+ 'rsa': RSAPrivateKey,
+ 'rsassa_pss': RSAPrivateKey,
+ 'dsa': Integer,
+ 'ec': ECPrivateKey,
+ # These should be treated as opaque octet strings according
+ # to RFC 8410
+ 'x25519': OctetString,
+ 'x448': OctetString,
+ 'ed25519': OctetString,
+ 'ed448': OctetString,
+ }[algorithm]
+
+ _spec_callbacks = {
+ 'private_key': _private_key_spec
+ }
+
+ _algorithm = None
+ _bit_size = None
+ _public_key = None
+ _fingerprint = None
+
+ @classmethod
+ def wrap(cls, private_key, algorithm):
+ """
+ Wraps a private key in a PrivateKeyInfo structure
+
+ :param private_key:
+ A byte string or Asn1Value object of the private key
+
+ :param algorithm:
+            A unicode string of "rsa", "rsassa_pss", "dsa" or "ec"
+
+ :return:
+ A PrivateKeyInfo object
+ """
+
+ if not isinstance(private_key, byte_cls) and not isinstance(private_key, Asn1Value):
+ raise TypeError(unwrap(
+ '''
+ private_key must be a byte string or Asn1Value, not %s
+ ''',
+ type_name(private_key)
+ ))
+
+ if algorithm == 'rsa' or algorithm == 'rsassa_pss':
+ if not isinstance(private_key, RSAPrivateKey):
+ private_key = RSAPrivateKey.load(private_key)
+ params = Null()
+ elif algorithm == 'dsa':
+ if not isinstance(private_key, DSAPrivateKey):
+ private_key = DSAPrivateKey.load(private_key)
+ params = DSAParams()
+ params['p'] = private_key['p']
+ params['q'] = private_key['q']
+ params['g'] = private_key['g']
+ public_key = private_key['public_key']
+ private_key = private_key['private_key']
+ elif algorithm == 'ec':
+ if not isinstance(private_key, ECPrivateKey):
+ private_key = ECPrivateKey.load(private_key)
+ else:
+ private_key = private_key.copy()
+ params = private_key['parameters']
+ del private_key['parameters']
+ else:
+ raise ValueError(unwrap(
+ '''
+                algorithm must be one of "rsa", "rsassa_pss", "dsa", "ec", not %s
+ ''',
+ repr(algorithm)
+ ))
+
+ private_key_algo = PrivateKeyAlgorithm()
+ private_key_algo['algorithm'] = PrivateKeyAlgorithmId(algorithm)
+ private_key_algo['parameters'] = params
+
+ container = cls()
+ container._algorithm = algorithm
+ container['version'] = Integer(0)
+ container['private_key_algorithm'] = private_key_algo
+ container['private_key'] = private_key
+
+ # Here we save the DSA public key if possible since it is not contained
+ # within the PKCS#8 structure for a DSA key
+ if algorithm == 'dsa':
+ container._public_key = public_key
+
+ return container
+
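+    # Illustrative use of wrap() (a sketch): `rsa_der` is assumed to hold the
+    # DER bytes of an RSAPrivateKey.
+    #
+    #     key_info = PrivateKeyInfo.wrap(rsa_der, 'rsa')
+    #     pkcs8_der = key_info.dump()
+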
+ # This is necessary to ensure any contained ECPrivateKey is the
+ # correct size
+ def __setitem__(self, key, value):
+ res = super(PrivateKeyInfo, self).__setitem__(key, value)
+
+ algorithm = self['private_key_algorithm']
+
+ # When possible, use the parameter info to make sure the private key encoding
+ # retains any necessary leading bytes, instead of them being dropped
+ if (key == 'private_key_algorithm' or key == 'private_key') and \
+ algorithm['algorithm'].native == 'ec' and \
+ isinstance(algorithm['parameters'], ECDomainParameters) and \
+ algorithm['parameters'].name != 'implicit_ca' and \
+ isinstance(self['private_key'], ParsableOctetString) and \
+ isinstance(self['private_key'].parsed, ECPrivateKey):
+ self['private_key'].parsed.set_key_size(algorithm['parameters'].key_size)
+
+ return res
+
+ def unwrap(self):
+ """
+ Unwraps the private key into an RSAPrivateKey, DSAPrivateKey or
+ ECPrivateKey object
+
+ :return:
+ An RSAPrivateKey, DSAPrivateKey or ECPrivateKey object
+ """
+
+ raise APIException(
+ 'asn1crypto.keys.PrivateKeyInfo().unwrap() has been removed, '
+ 'please use oscrypto.asymmetric.PrivateKey().unwrap() instead')
+
+ @property
+ def curve(self):
+ """
+ Returns information about the curve used for an EC key
+
+ :raises:
+ ValueError - when the key is not an EC key
+
+ :return:
+ A two-element tuple, with the first element being a unicode string
+ of "implicit_ca", "specified" or "named". If the first element is
+ "implicit_ca", the second is None. If "specified", the second is
+ an OrderedDict that is the native version of SpecifiedECDomain. If
+ "named", the second is a unicode string of the curve name.
+ """
+
+ if self.algorithm != 'ec':
+ raise ValueError(unwrap(
+ '''
+ Only EC keys have a curve, this key is %s
+ ''',
+ self.algorithm.upper()
+ ))
+
+ params = self['private_key_algorithm']['parameters']
+ chosen = params.chosen
+
+ if params.name == 'implicit_ca':
+ value = None
+ else:
+ value = chosen.native
+
+ return (params.name, value)
+
+ @property
+ def hash_algo(self):
+ """
+ Returns the name of the family of hash algorithms used to generate a
+ DSA key
+
+ :raises:
+ ValueError - when the key is not a DSA key
+
+ :return:
+ A unicode string of "sha1" or "sha2"
+ """
+
+ if self.algorithm != 'dsa':
+ raise ValueError(unwrap(
+ '''
+ Only DSA keys are generated using a hash algorithm, this key is
+ %s
+ ''',
+ self.algorithm.upper()
+ ))
+
+ byte_len = math.log(self['private_key_algorithm']['parameters']['q'].native, 2) / 8
+
+ return 'sha1' if byte_len <= 20 else 'sha2'
+
+ @property
+ def algorithm(self):
+ """
+ :return:
+ A unicode string of "rsa", "rsassa_pss", "dsa" or "ec"
+ """
+
+ if self._algorithm is None:
+ self._algorithm = self['private_key_algorithm']['algorithm'].native
+ return self._algorithm
+
+ @property
+ def bit_size(self):
+ """
+ :return:
+ The bit size of the private key, as an integer
+ """
+
+ if self._bit_size is None:
+ if self.algorithm == 'rsa' or self.algorithm == 'rsassa_pss':
+ prime = self['private_key'].parsed['modulus'].native
+ elif self.algorithm == 'dsa':
+ prime = self['private_key_algorithm']['parameters']['p'].native
+ elif self.algorithm == 'ec':
+ prime = self['private_key'].parsed['private_key'].native
+ self._bit_size = int(math.ceil(math.log(prime, 2)))
+ modulus = self._bit_size % 8
+ if modulus != 0:
+ self._bit_size += 8 - modulus
+ return self._bit_size
+
+ @property
+ def byte_size(self):
+ """
+ :return:
+ The byte size of the private key, as an integer
+ """
+
+ return int(math.ceil(self.bit_size / 8))
+
+ @property
+ def public_key(self):
+ """
+ :return:
+ If an RSA key, an RSAPublicKey object. If a DSA key, an Integer
+ object. If an EC key, an ECPointBitString object.
+ """
+
+ raise APIException(
+ 'asn1crypto.keys.PrivateKeyInfo().public_key has been removed, '
+ 'please use oscrypto.asymmetric.PrivateKey().public_key.unwrap() instead')
+
+ @property
+ def public_key_info(self):
+ """
+ :return:
+ A PublicKeyInfo object derived from this private key.
+ """
+
+ raise APIException(
+ 'asn1crypto.keys.PrivateKeyInfo().public_key_info has been removed, '
+ 'please use oscrypto.asymmetric.PrivateKey().public_key.asn1 instead')
+
+ @property
+ def fingerprint(self):
+ """
+ Creates a fingerprint that can be compared with a public key to see if
+ the two form a pair.
+
+ This fingerprint is not compatible with fingerprints generated by any
+ other software.
+
+ :return:
+ A byte string that is a sha256 hash of selected components (based
+ on the key type)
+ """
+
+ raise APIException(
+ 'asn1crypto.keys.PrivateKeyInfo().fingerprint has been removed, '
+ 'please use oscrypto.asymmetric.PrivateKey().fingerprint instead')
+
+
+class EncryptedPrivateKeyInfo(Sequence):
+ """
+ Source: https://tools.ietf.org/html/rfc5208#page-4
+ """
+
+ _fields = [
+ ('encryption_algorithm', EncryptionAlgorithm),
+ ('encrypted_data', OctetString),
+ ]
+
+
+# These structures are from https://tools.ietf.org/html/rfc3279
+
+class ValidationParms(Sequence):
+ """
+ Source: https://tools.ietf.org/html/rfc3279#page-10
+ """
+
+ _fields = [
+ ('seed', BitString),
+ ('pgen_counter', Integer),
+ ]
+
+
+class DomainParameters(Sequence):
+ """
+ Source: https://tools.ietf.org/html/rfc3279#page-10
+ """
+
+ _fields = [
+ ('p', Integer),
+ ('g', Integer),
+ ('q', Integer),
+ ('j', Integer, {'optional': True}),
+ ('validation_params', ValidationParms, {'optional': True}),
+ ]
+
+
+class PublicKeyAlgorithmId(ObjectIdentifier):
+ """
+ Original Name: None
+ Source: https://tools.ietf.org/html/rfc3279
+ """
+
+ _map = {
+ # https://tools.ietf.org/html/rfc3279#page-19
+ '1.2.840.113549.1.1.1': 'rsa',
+ # https://tools.ietf.org/html/rfc3447#page-47
+ '1.2.840.113549.1.1.7': 'rsaes_oaep',
+ # https://tools.ietf.org/html/rfc4055#page-8
+ '1.2.840.113549.1.1.10': 'rsassa_pss',
+ # https://tools.ietf.org/html/rfc3279#page-18
+ '1.2.840.10040.4.1': 'dsa',
+ # https://tools.ietf.org/html/rfc3279#page-13
+ '1.2.840.10045.2.1': 'ec',
+ # https://tools.ietf.org/html/rfc3279#page-10
+ '1.2.840.10046.2.1': 'dh',
+ # https://tools.ietf.org/html/rfc8410#section-9
+ '1.3.101.110': 'x25519',
+ '1.3.101.111': 'x448',
+ '1.3.101.112': 'ed25519',
+ '1.3.101.113': 'ed448',
+ }
+
+
+class PublicKeyAlgorithm(_ForceNullParameters, Sequence):
+ """
+ Original Name: AlgorithmIdentifier
+ Source: https://tools.ietf.org/html/rfc5280#page-18
+ """
+
+ _fields = [
+ ('algorithm', PublicKeyAlgorithmId),
+ ('parameters', Any, {'optional': True}),
+ ]
+
+ _oid_pair = ('algorithm', 'parameters')
+ _oid_specs = {
+ 'dsa': DSAParams,
+ 'ec': ECDomainParameters,
+ 'dh': DomainParameters,
+ 'rsaes_oaep': RSAESOAEPParams,
+ 'rsassa_pss': RSASSAPSSParams,
+ }
+
+
+class PublicKeyInfo(Sequence):
+ """
+ Original Name: SubjectPublicKeyInfo
+ Source: https://tools.ietf.org/html/rfc5280#page-17
+ """
+
+ _fields = [
+ ('algorithm', PublicKeyAlgorithm),
+ ('public_key', ParsableOctetBitString),
+ ]
+
+ def _public_key_spec(self):
+ algorithm = self['algorithm']['algorithm'].native
+ return {
+ 'rsa': RSAPublicKey,
+ 'rsaes_oaep': RSAPublicKey,
+ 'rsassa_pss': RSAPublicKey,
+ 'dsa': Integer,
+ # We override the field spec with ECPoint so that users can easily
+ # decompose the byte string into the constituent X and Y coords
+ 'ec': (ECPointBitString, None),
+ 'dh': Integer,
+ # These should be treated as opaque bit strings according
+ # to RFC 8410, and need not even be valid ASN.1
+ 'x25519': (OctetBitString, None),
+ 'x448': (OctetBitString, None),
+ 'ed25519': (OctetBitString, None),
+ 'ed448': (OctetBitString, None),
+ }[algorithm]
+
+ _spec_callbacks = {
+ 'public_key': _public_key_spec
+ }
+
+ _algorithm = None
+ _bit_size = None
+ _fingerprint = None
+ _sha1 = None
+ _sha256 = None
+
+ @classmethod
+ def wrap(cls, public_key, algorithm):
+ """
+ Wraps a public key in a PublicKeyInfo structure
+
+ :param public_key:
+ A byte string or Asn1Value object of the public key
+
+ :param algorithm:
+            A unicode string of "rsa" or "rsassa_pss"
+
+ :return:
+ A PublicKeyInfo object
+ """
+
+ if not isinstance(public_key, byte_cls) and not isinstance(public_key, Asn1Value):
+ raise TypeError(unwrap(
+ '''
+ public_key must be a byte string or Asn1Value, not %s
+ ''',
+ type_name(public_key)
+ ))
+
+ if algorithm != 'rsa' and algorithm != 'rsassa_pss':
+ raise ValueError(unwrap(
+ '''
+                algorithm must be one of "rsa", "rsassa_pss", not %s
+ ''',
+ repr(algorithm)
+ ))
+
+ algo = PublicKeyAlgorithm()
+ algo['algorithm'] = PublicKeyAlgorithmId(algorithm)
+ algo['parameters'] = Null()
+
+ container = cls()
+ container['algorithm'] = algo
+ if isinstance(public_key, Asn1Value):
+ public_key = public_key.untag().dump()
+ container['public_key'] = ParsableOctetBitString(public_key)
+
+ return container
+
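+    # Illustrative use of wrap() (a sketch): `rsa_pub_der` is assumed to hold
+    # the DER bytes of an RSAPublicKey.
+    #
+    #     pub_info = PublicKeyInfo.wrap(rsa_pub_der, 'rsa')
+    #     spki_der = pub_info.dump()
+    #     pub_info.bit_size    # e.g. 2048
+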
+ def unwrap(self):
+ """
+ Unwraps an RSA public key into an RSAPublicKey object. Does not support
+ DSA or EC public keys since they do not have an unwrapped form.
+
+ :return:
+ An RSAPublicKey object
+ """
+
+ raise APIException(
+ 'asn1crypto.keys.PublicKeyInfo().unwrap() has been removed, '
+ 'please use oscrypto.asymmetric.PublicKey().unwrap() instead')
+
+ @property
+ def curve(self):
+ """
+ Returns information about the curve used for an EC key
+
+ :raises:
+ ValueError - when the key is not an EC key
+
+ :return:
+ A two-element tuple, with the first element being a unicode string
+ of "implicit_ca", "specified" or "named". If the first element is
+ "implicit_ca", the second is None. If "specified", the second is
+ an OrderedDict that is the native version of SpecifiedECDomain. If
+ "named", the second is a unicode string of the curve name.
+ """
+
+ if self.algorithm != 'ec':
+ raise ValueError(unwrap(
+ '''
+ Only EC keys have a curve, this key is %s
+ ''',
+ self.algorithm.upper()
+ ))
+
+ params = self['algorithm']['parameters']
+ chosen = params.chosen
+
+ if params.name == 'implicit_ca':
+ value = None
+ else:
+ value = chosen.native
+
+ return (params.name, value)
+
+ @property
+ def hash_algo(self):
+ """
+ Returns the name of the family of hash algorithms used to generate a
+ DSA key
+
+ :raises:
+ ValueError - when the key is not a DSA key
+
+ :return:
+ A unicode string of "sha1" or "sha2" or None if no parameters are
+ present
+ """
+
+ if self.algorithm != 'dsa':
+ raise ValueError(unwrap(
+ '''
+ Only DSA keys are generated using a hash algorithm, this key is
+ %s
+ ''',
+ self.algorithm.upper()
+ ))
+
+ parameters = self['algorithm']['parameters']
+ if parameters.native is None:
+ return None
+
+ byte_len = math.log(parameters['q'].native, 2) / 8
+
+ return 'sha1' if byte_len <= 20 else 'sha2'
+
+ @property
+ def algorithm(self):
+ """
+ :return:
+ A unicode string of "rsa", "rsassa_pss", "dsa" or "ec"
+ """
+
+ if self._algorithm is None:
+ self._algorithm = self['algorithm']['algorithm'].native
+ return self._algorithm
+
+ @property
+ def bit_size(self):
+ """
+ :return:
+ The bit size of the public key, as an integer
+ """
+
+ if self._bit_size is None:
+ if self.algorithm == 'ec':
+ self._bit_size = int(((len(self['public_key'].native) - 1) / 2) * 8)
+ else:
+ if self.algorithm == 'rsa' or self.algorithm == 'rsassa_pss':
+ prime = self['public_key'].parsed['modulus'].native
+ elif self.algorithm == 'dsa':
+ prime = self['algorithm']['parameters']['p'].native
+ self._bit_size = int(math.ceil(math.log(prime, 2)))
+ modulus = self._bit_size % 8
+ if modulus != 0:
+ self._bit_size += 8 - modulus
+
+ return self._bit_size
+
+ @property
+ def byte_size(self):
+ """
+ :return:
+ The byte size of the public key, as an integer
+ """
+
+ return int(math.ceil(self.bit_size / 8))
+
+ @property
+ def sha1(self):
+ """
+ :return:
+ The SHA1 hash of the DER-encoded bytes of this public key info
+ """
+
+ if self._sha1 is None:
+ self._sha1 = hashlib.sha1(byte_cls(self['public_key'])).digest()
+ return self._sha1
+
+ @property
+ def sha256(self):
+ """
+ :return:
+ The SHA-256 hash of the DER-encoded bytes of this public key info
+ """
+
+ if self._sha256 is None:
+ self._sha256 = hashlib.sha256(byte_cls(self['public_key'])).digest()
+ return self._sha256
+
+ @property
+ def fingerprint(self):
+ """
+ Creates a fingerprint that can be compared with a private key to see if
+ the two form a pair.
+
+ This fingerprint is not compatible with fingerprints generated by any
+ other software.
+
+ :return:
+ A byte string that is a sha256 hash of selected components (based
+ on the key type)
+ """
+
+ raise APIException(
+ 'asn1crypto.keys.PublicKeyInfo().fingerprint has been removed, '
+ 'please use oscrypto.asymmetric.PublicKey().fingerprint instead')
diff --git a/jc/parsers/asn1crypto/ocsp.py b/jc/parsers/asn1crypto/ocsp.py
new file mode 100644
index 00000000..91c7fbf3
--- /dev/null
+++ b/jc/parsers/asn1crypto/ocsp.py
@@ -0,0 +1,703 @@
+# coding: utf-8
+
+"""
+ASN.1 type classes for the online certificate status protocol (OCSP). Exports
+the following items:
+
+ - OCSPRequest()
+ - OCSPResponse()
+
+Other type classes are defined that help compose the types listed above.
+"""
+
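+# Illustrative use (a sketch): `der_bytes` is assumed to hold a DER-encoded
+# OCSP response, e.g. the body returned by an OCSP responder.
+#
+#     ocsp_resp = OCSPResponse.load(der_bytes)
+#     if ocsp_resp['response_status'].native == 'successful':
+#         for single in ocsp_resp.response_data['responses']:
+#             print(single['cert_status'].name, single['this_update'].native)
+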
+from __future__ import unicode_literals, division, absolute_import, print_function
+
+from ._errors import unwrap
+from .algos import DigestAlgorithm, SignedDigestAlgorithm
+from .core import (
+ Boolean,
+ Choice,
+ Enumerated,
+ GeneralizedTime,
+ IA5String,
+ Integer,
+ Null,
+ ObjectIdentifier,
+ OctetBitString,
+ OctetString,
+ ParsableOctetString,
+ Sequence,
+ SequenceOf,
+)
+from .crl import AuthorityInfoAccessSyntax, CRLReason
+from .keys import PublicKeyAlgorithm
+from .x509 import Certificate, GeneralName, GeneralNames, Name
+
+
+# The structures in this file are taken from https://tools.ietf.org/html/rfc6960
+
+
+class Version(Integer):
+ _map = {
+ 0: 'v1'
+ }
+
+
+class CertId(Sequence):
+ _fields = [
+ ('hash_algorithm', DigestAlgorithm),
+ ('issuer_name_hash', OctetString),
+ ('issuer_key_hash', OctetString),
+ ('serial_number', Integer),
+ ]
+
+
+class ServiceLocator(Sequence):
+ _fields = [
+ ('issuer', Name),
+ ('locator', AuthorityInfoAccessSyntax),
+ ]
+
+
+class RequestExtensionId(ObjectIdentifier):
+ _map = {
+ '1.3.6.1.5.5.7.48.1.7': 'service_locator',
+ }
+
+
+class RequestExtension(Sequence):
+ _fields = [
+ ('extn_id', RequestExtensionId),
+ ('critical', Boolean, {'default': False}),
+ ('extn_value', ParsableOctetString),
+ ]
+
+ _oid_pair = ('extn_id', 'extn_value')
+ _oid_specs = {
+ 'service_locator': ServiceLocator,
+ }
+
+
+class RequestExtensions(SequenceOf):
+ _child_spec = RequestExtension
+
+
+class Request(Sequence):
+ _fields = [
+ ('req_cert', CertId),
+ ('single_request_extensions', RequestExtensions, {'explicit': 0, 'optional': True}),
+ ]
+
+ _processed_extensions = False
+ _critical_extensions = None
+ _service_locator_value = None
+
+ def _set_extensions(self):
+ """
+ Sets common named extensions to private attributes and creates a list
+ of critical extensions
+ """
+
+ self._critical_extensions = set()
+
+ for extension in self['single_request_extensions']:
+ name = extension['extn_id'].native
+ attribute_name = '_%s_value' % name
+ if hasattr(self, attribute_name):
+ setattr(self, attribute_name, extension['extn_value'].parsed)
+ if extension['critical'].native:
+ self._critical_extensions.add(name)
+
+ self._processed_extensions = True
+
+ @property
+ def critical_extensions(self):
+ """
+ Returns a set of the names (or OID if not a known extension) of the
+ extensions marked as critical
+
+ :return:
+ A set of unicode strings
+ """
+
+ if not self._processed_extensions:
+ self._set_extensions()
+ return self._critical_extensions
+
+ @property
+ def service_locator_value(self):
+ """
+ This extension is used when communicating with an OCSP responder that
+ acts as a proxy for OCSP requests
+
+ :return:
+ None or a ServiceLocator object
+ """
+
+ if self._processed_extensions is False:
+ self._set_extensions()
+ return self._service_locator_value
+
+
+class Requests(SequenceOf):
+ _child_spec = Request
+
+
+class ResponseType(ObjectIdentifier):
+ _map = {
+ '1.3.6.1.5.5.7.48.1.1': 'basic_ocsp_response',
+ }
+
+
+class AcceptableResponses(SequenceOf):
+ _child_spec = ResponseType
+
+
+class PreferredSignatureAlgorithm(Sequence):
+ _fields = [
+ ('sig_identifier', SignedDigestAlgorithm),
+ ('cert_identifier', PublicKeyAlgorithm, {'optional': True}),
+ ]
+
+
+class PreferredSignatureAlgorithms(SequenceOf):
+ _child_spec = PreferredSignatureAlgorithm
+
+
+class TBSRequestExtensionId(ObjectIdentifier):
+ _map = {
+ '1.3.6.1.5.5.7.48.1.2': 'nonce',
+ '1.3.6.1.5.5.7.48.1.4': 'acceptable_responses',
+ '1.3.6.1.5.5.7.48.1.8': 'preferred_signature_algorithms',
+ }
+
+
+class TBSRequestExtension(Sequence):
+ _fields = [
+ ('extn_id', TBSRequestExtensionId),
+ ('critical', Boolean, {'default': False}),
+ ('extn_value', ParsableOctetString),
+ ]
+
+ _oid_pair = ('extn_id', 'extn_value')
+ _oid_specs = {
+ 'nonce': OctetString,
+ 'acceptable_responses': AcceptableResponses,
+ 'preferred_signature_algorithms': PreferredSignatureAlgorithms,
+ }
+
+
+class TBSRequestExtensions(SequenceOf):
+ _child_spec = TBSRequestExtension
+
+
+class TBSRequest(Sequence):
+ _fields = [
+ ('version', Version, {'explicit': 0, 'default': 'v1'}),
+ ('requestor_name', GeneralName, {'explicit': 1, 'optional': True}),
+ ('request_list', Requests),
+ ('request_extensions', TBSRequestExtensions, {'explicit': 2, 'optional': True}),
+ ]
+
+
+class Certificates(SequenceOf):
+ _child_spec = Certificate
+
+
+class Signature(Sequence):
+ _fields = [
+ ('signature_algorithm', SignedDigestAlgorithm),
+ ('signature', OctetBitString),
+ ('certs', Certificates, {'explicit': 0, 'optional': True}),
+ ]
+
+
+class OCSPRequest(Sequence):
+ _fields = [
+ ('tbs_request', TBSRequest),
+ ('optional_signature', Signature, {'explicit': 0, 'optional': True}),
+ ]
+
+ _processed_extensions = False
+ _critical_extensions = None
+ _nonce_value = None
+ _acceptable_responses_value = None
+ _preferred_signature_algorithms_value = None
+
+ def _set_extensions(self):
+ """
+ Sets common named extensions to private attributes and creates a list
+ of critical extensions
+ """
+
+ self._critical_extensions = set()
+
+ for extension in self['tbs_request']['request_extensions']:
+ name = extension['extn_id'].native
+ attribute_name = '_%s_value' % name
+ if hasattr(self, attribute_name):
+ setattr(self, attribute_name, extension['extn_value'].parsed)
+ if extension['critical'].native:
+ self._critical_extensions.add(name)
+
+ self._processed_extensions = True
+
+ @property
+ def critical_extensions(self):
+ """
+ Returns a set of the names (or OID if not a known extension) of the
+ extensions marked as critical
+
+ :return:
+ A set of unicode strings
+ """
+
+ if not self._processed_extensions:
+ self._set_extensions()
+ return self._critical_extensions
+
+ @property
+ def nonce_value(self):
+ """
+ This extension is used to prevent replay attacks by including a unique,
+ random value with each request/response pair
+
+ :return:
+ None or an OctetString object
+ """
+
+ if self._processed_extensions is False:
+ self._set_extensions()
+ return self._nonce_value
+
+ @property
+ def acceptable_responses_value(self):
+ """
+ This extension is used to allow the client and server to communicate
+ with alternative response formats other than just basic_ocsp_response,
+ although no other formats are defined in the standard.
+
+ :return:
+ None or an AcceptableResponses object
+ """
+
+ if self._processed_extensions is False:
+ self._set_extensions()
+ return self._acceptable_responses_value
+
+ @property
+ def preferred_signature_algorithms_value(self):
+ """
+ This extension is used by the client to define what signature algorithms
+ are preferred, including both the hash algorithm and the public key
+ algorithm, with a level of detail down to even the public key algorithm
+ parameters, such as curve name.
+
+ :return:
+ None or a PreferredSignatureAlgorithms object
+ """
+
+ if self._processed_extensions is False:
+ self._set_extensions()
+ return self._preferred_signature_algorithms_value
+
+
+class OCSPResponseStatus(Enumerated):
+ _map = {
+ 0: 'successful',
+ 1: 'malformed_request',
+ 2: 'internal_error',
+ 3: 'try_later',
+ 5: 'sign_required',
+ 6: 'unauthorized',
+ }
+
+
+class ResponderId(Choice):
+ _alternatives = [
+ ('by_name', Name, {'explicit': 1}),
+ ('by_key', OctetString, {'explicit': 2}),
+ ]
+
+
+# Custom class to return a meaningful .native attribute from CertStatus()
+class StatusGood(Null):
+ def set(self, value):
+ """
+ Sets the value of the object
+
+ :param value:
+ None or 'good'
+ """
+
+ if value is not None and value != 'good' and not isinstance(value, Null):
+ raise ValueError(unwrap(
+ '''
+ value must be one of None, "good", not %s
+ ''',
+ repr(value)
+ ))
+
+ self.contents = b''
+
+ @property
+ def native(self):
+ return 'good'
+
+
+# Custom class to return a meaningful .native attribute from CertStatus()
+class StatusUnknown(Null):
+ def set(self, value):
+ """
+ Sets the value of the object
+
+ :param value:
+ None or 'unknown'
+ """
+
+ if value is not None and value != 'unknown' and not isinstance(value, Null):
+ raise ValueError(unwrap(
+ '''
+ value must be one of None, "unknown", not %s
+ ''',
+ repr(value)
+ ))
+
+ self.contents = b''
+
+ @property
+ def native(self):
+ return 'unknown'
+
+
+class RevokedInfo(Sequence):
+ _fields = [
+ ('revocation_time', GeneralizedTime),
+ ('revocation_reason', CRLReason, {'explicit': 0, 'optional': True}),
+ ]
+
+
+class CertStatus(Choice):
+ _alternatives = [
+ ('good', StatusGood, {'implicit': 0}),
+ ('revoked', RevokedInfo, {'implicit': 1}),
+ ('unknown', StatusUnknown, {'implicit': 2}),
+ ]
+
+
+class CrlId(Sequence):
+ _fields = [
+ ('crl_url', IA5String, {'explicit': 0, 'optional': True}),
+ ('crl_num', Integer, {'explicit': 1, 'optional': True}),
+ ('crl_time', GeneralizedTime, {'explicit': 2, 'optional': True}),
+ ]
+
+
+class SingleResponseExtensionId(ObjectIdentifier):
+ _map = {
+ '1.3.6.1.5.5.7.48.1.3': 'crl',
+ '1.3.6.1.5.5.7.48.1.6': 'archive_cutoff',
+ # These are CRLEntryExtension values from
+ # https://tools.ietf.org/html/rfc5280
+ '2.5.29.21': 'crl_reason',
+ '2.5.29.24': 'invalidity_date',
+ '2.5.29.29': 'certificate_issuer',
+ # https://tools.ietf.org/html/rfc6962.html#page-13
+ '1.3.6.1.4.1.11129.2.4.5': 'signed_certificate_timestamp_list',
+ }
+
+
+class SingleResponseExtension(Sequence):
+ _fields = [
+ ('extn_id', SingleResponseExtensionId),
+ ('critical', Boolean, {'default': False}),
+ ('extn_value', ParsableOctetString),
+ ]
+
+ _oid_pair = ('extn_id', 'extn_value')
+ _oid_specs = {
+ 'crl': CrlId,
+ 'archive_cutoff': GeneralizedTime,
+ 'crl_reason': CRLReason,
+ 'invalidity_date': GeneralizedTime,
+ 'certificate_issuer': GeneralNames,
+ 'signed_certificate_timestamp_list': OctetString,
+ }
+
+
+class SingleResponseExtensions(SequenceOf):
+ _child_spec = SingleResponseExtension
+
+
+class SingleResponse(Sequence):
+ _fields = [
+ ('cert_id', CertId),
+ ('cert_status', CertStatus),
+ ('this_update', GeneralizedTime),
+ ('next_update', GeneralizedTime, {'explicit': 0, 'optional': True}),
+ ('single_extensions', SingleResponseExtensions, {'explicit': 1, 'optional': True}),
+ ]
+
+ _processed_extensions = False
+ _critical_extensions = None
+ _crl_value = None
+ _archive_cutoff_value = None
+ _crl_reason_value = None
+ _invalidity_date_value = None
+ _certificate_issuer_value = None
+
+ def _set_extensions(self):
+ """
+ Sets common named extensions to private attributes and creates a list
+ of critical extensions
+ """
+
+ self._critical_extensions = set()
+
+ for extension in self['single_extensions']:
+ name = extension['extn_id'].native
+ attribute_name = '_%s_value' % name
+ if hasattr(self, attribute_name):
+ setattr(self, attribute_name, extension['extn_value'].parsed)
+ if extension['critical'].native:
+ self._critical_extensions.add(name)
+
+ self._processed_extensions = True
+
+ @property
+ def critical_extensions(self):
+ """
+ Returns a set of the names (or OID if not a known extension) of the
+ extensions marked as critical
+
+ :return:
+ A set of unicode strings
+ """
+
+ if not self._processed_extensions:
+ self._set_extensions()
+ return self._critical_extensions
+
+ @property
+ def crl_value(self):
+ """
+        This extension is used to locate the CRL that contains a certificate's
+        revocation entry.
+
+ :return:
+ None or a CrlId object
+ """
+
+ if self._processed_extensions is False:
+ self._set_extensions()
+ return self._crl_value
+
+ @property
+ def archive_cutoff_value(self):
+ """
+ This extension is used to indicate the date at which an archived
+ (historical) certificate status entry will no longer be available.
+
+ :return:
+ None or a GeneralizedTime object
+ """
+
+ if self._processed_extensions is False:
+ self._set_extensions()
+ return self._archive_cutoff_value
+
+ @property
+ def crl_reason_value(self):
+ """
+ This extension indicates the reason that a certificate was revoked.
+
+ :return:
+ None or a CRLReason object
+ """
+
+ if self._processed_extensions is False:
+ self._set_extensions()
+ return self._crl_reason_value
+
+ @property
+ def invalidity_date_value(self):
+ """
+ This extension indicates the suspected date/time the private key was
+ compromised or the certificate became invalid. This would usually be
+ before the revocation date, which is when the CA processed the
+ revocation.
+
+ :return:
+ None or a GeneralizedTime object
+ """
+
+ if self._processed_extensions is False:
+ self._set_extensions()
+ return self._invalidity_date_value
+
+ @property
+ def certificate_issuer_value(self):
+ """
+ This extension indicates the issuer of the certificate in question.
+
+ :return:
+ None or an x509.GeneralNames object
+ """
+
+ if self._processed_extensions is False:
+ self._set_extensions()
+ return self._certificate_issuer_value
+
+
+class Responses(SequenceOf):
+ _child_spec = SingleResponse
+
+
+class ResponseDataExtensionId(ObjectIdentifier):
+ _map = {
+ '1.3.6.1.5.5.7.48.1.2': 'nonce',
+ '1.3.6.1.5.5.7.48.1.9': 'extended_revoke',
+ }
+
+
+class ResponseDataExtension(Sequence):
+ _fields = [
+ ('extn_id', ResponseDataExtensionId),
+ ('critical', Boolean, {'default': False}),
+ ('extn_value', ParsableOctetString),
+ ]
+
+ _oid_pair = ('extn_id', 'extn_value')
+ _oid_specs = {
+ 'nonce': OctetString,
+ 'extended_revoke': Null,
+ }
+
+
+class ResponseDataExtensions(SequenceOf):
+ _child_spec = ResponseDataExtension
+
+
+class ResponseData(Sequence):
+ _fields = [
+ ('version', Version, {'explicit': 0, 'default': 'v1'}),
+ ('responder_id', ResponderId),
+ ('produced_at', GeneralizedTime),
+ ('responses', Responses),
+ ('response_extensions', ResponseDataExtensions, {'explicit': 1, 'optional': True}),
+ ]
+
+
+class BasicOCSPResponse(Sequence):
+ _fields = [
+ ('tbs_response_data', ResponseData),
+ ('signature_algorithm', SignedDigestAlgorithm),
+ ('signature', OctetBitString),
+ ('certs', Certificates, {'explicit': 0, 'optional': True}),
+ ]
+
+
+class ResponseBytes(Sequence):
+ _fields = [
+ ('response_type', ResponseType),
+ ('response', ParsableOctetString),
+ ]
+
+ _oid_pair = ('response_type', 'response')
+ _oid_specs = {
+ 'basic_ocsp_response': BasicOCSPResponse,
+ }
+
+
+class OCSPResponse(Sequence):
+ _fields = [
+ ('response_status', OCSPResponseStatus),
+ ('response_bytes', ResponseBytes, {'explicit': 0, 'optional': True}),
+ ]
+
+ _processed_extensions = False
+ _critical_extensions = None
+ _nonce_value = None
+ _extended_revoke_value = None
+
+ def _set_extensions(self):
+ """
+ Sets common named extensions to private attributes and creates a list
+ of critical extensions
+ """
+
+ self._critical_extensions = set()
+
+ for extension in self['response_bytes']['response'].parsed['tbs_response_data']['response_extensions']:
+ name = extension['extn_id'].native
+ attribute_name = '_%s_value' % name
+ if hasattr(self, attribute_name):
+ setattr(self, attribute_name, extension['extn_value'].parsed)
+ if extension['critical'].native:
+ self._critical_extensions.add(name)
+
+ self._processed_extensions = True
+
+ @property
+ def critical_extensions(self):
+ """
+ Returns a set of the names (or OID if not a known extension) of the
+ extensions marked as critical
+
+ :return:
+ A set of unicode strings
+ """
+
+ if not self._processed_extensions:
+ self._set_extensions()
+ return self._critical_extensions
+
+ @property
+ def nonce_value(self):
+ """
+ This extension is used to prevent replay attacks on the request/response
+ exchange
+
+ :return:
+ None or an OctetString object
+ """
+
+ if self._processed_extensions is False:
+ self._set_extensions()
+ return self._nonce_value
+
+ @property
+ def extended_revoke_value(self):
+ """
+ This extension is used to signal that the responder will return a
+ "revoked" status for non-issued certificates.
+
+ :return:
+ None or a Null object (if present)
+ """
+
+ if self._processed_extensions is False:
+ self._set_extensions()
+ return self._extended_revoke_value
+
+ @property
+ def basic_ocsp_response(self):
+ """
+ A shortcut into the BasicOCSPResponse sequence
+
+ :return:
+ None or an asn1crypto.ocsp.BasicOCSPResponse object
+ """
+
+ return self['response_bytes']['response'].parsed
+
+ @property
+ def response_data(self):
+ """
+        A shortcut into the parsed ResponseData sequence
+
+ :return:
+ None or an asn1crypto.ocsp.ResponseData object
+ """
+
+ return self['response_bytes']['response'].parsed['tbs_response_data']
diff --git a/jc/parsers/asn1crypto/parser.py b/jc/parsers/asn1crypto/parser.py
new file mode 100644
index 00000000..2f5a63e1
--- /dev/null
+++ b/jc/parsers/asn1crypto/parser.py
@@ -0,0 +1,292 @@
+# coding: utf-8
+
+"""
+Functions for parsing and dumping using the ASN.1 DER encoding. Exports the
+following items:
+
+ - emit()
+ - parse()
+ - peek()
+
+Other private functions are defined that help compose the functions listed above.
+"""
+
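+# Illustrative round trip (a sketch): a universal (class 0), primitive
+# (method 0) INTEGER (tag 2) with contents b'\x00' encodes to b'\x02\x01\x00'.
+#
+#     encoded = emit(0, 0, 2, b'\x00')    # b'\x02\x01\x00'
+#     class_, method, tag, header, content, trailer = parse(encoded, strict=True)
+#     peek(encoded)                       # 3, the number of bytes consumed
+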
+from __future__ import unicode_literals, division, absolute_import, print_function
+
+import sys
+
+from ._types import byte_cls, chr_cls, type_name
+from .util import int_from_bytes, int_to_bytes
+
+_PY2 = sys.version_info <= (3,)
+_INSUFFICIENT_DATA_MESSAGE = 'Insufficient data - %s bytes requested but only %s available'
+_MAX_DEPTH = 10
+
+
+def emit(class_, method, tag, contents):
+ """
+ Constructs a byte string of an ASN.1 DER-encoded value
+
+ This is typically not useful. Instead, use one of the standard classes from
+ asn1crypto.core, or construct a new class with specific fields, and call the
+ .dump() method.
+
+ :param class_:
+ An integer ASN.1 class value: 0 (universal), 1 (application),
+ 2 (context), 3 (private)
+
+ :param method:
+ An integer ASN.1 method value: 0 (primitive), 1 (constructed)
+
+ :param tag:
+ An integer ASN.1 tag value
+
+ :param contents:
+ A byte string of the encoded byte contents
+
+ :return:
+ A byte string of the ASN.1 DER value (header and contents)
+ """
+
+ if not isinstance(class_, int):
+ raise TypeError('class_ must be an integer, not %s' % type_name(class_))
+
+ if class_ < 0 or class_ > 3:
+ raise ValueError('class_ must be one of 0, 1, 2 or 3, not %s' % class_)
+
+ if not isinstance(method, int):
+ raise TypeError('method must be an integer, not %s' % type_name(method))
+
+ if method < 0 or method > 1:
+ raise ValueError('method must be 0 or 1, not %s' % method)
+
+ if not isinstance(tag, int):
+ raise TypeError('tag must be an integer, not %s' % type_name(tag))
+
+ if tag < 0:
+ raise ValueError('tag must be greater than zero, not %s' % tag)
+
+ if not isinstance(contents, byte_cls):
+ raise TypeError('contents must be a byte string, not %s' % type_name(contents))
+
+ return _dump_header(class_, method, tag, contents) + contents
+
+
+def parse(contents, strict=False):
+ """
+ Parses a byte string of ASN.1 BER/DER-encoded data.
+
+ This is typically not useful. Instead, use one of the standard classes from
+ asn1crypto.core, or construct a new class with specific fields, and call the
+ .load() class method.
+
+ :param contents:
+ A byte string of BER/DER-encoded data
+
+ :param strict:
+ A boolean indicating if trailing data should be forbidden - if so, a
+ ValueError will be raised when trailing data exists
+
+ :raises:
+ ValueError - when the contents do not contain an ASN.1 header or are truncated in some way
+ TypeError - when contents is not a byte string
+
+ :return:
+ A 6-element tuple:
+ - 0: integer class (0 to 3)
+ - 1: integer method
+ - 2: integer tag
+ - 3: byte string header
+ - 4: byte string content
+ - 5: byte string trailer
+ """
+
+ if not isinstance(contents, byte_cls):
+ raise TypeError('contents must be a byte string, not %s' % type_name(contents))
+
+ contents_len = len(contents)
+ info, consumed = _parse(contents, contents_len)
+ if strict and consumed != contents_len:
+ raise ValueError('Extra data - %d bytes of trailing data were provided' % (contents_len - consumed))
+ return info
+
+
+def peek(contents):
+ """
+ Parses a byte string of ASN.1 BER/DER-encoded data to find the length
+
+ This is typically used to look into an encoded value to see how long the
+ next chunk of ASN.1-encoded data is. Primarily it is useful when a
+ value is a concatenation of multiple values.
+
+ :param contents:
+ A byte string of BER/DER-encoded data
+
+ :raises:
+ ValueError - when the contents do not contain an ASN.1 header or are truncated in some way
+ TypeError - when contents is not a byte string
+
+ :return:
+ An integer with the number of bytes occupied by the ASN.1 value
+ """
+
+ if not isinstance(contents, byte_cls):
+ raise TypeError('contents must be a byte string, not %s' % type_name(contents))
+
+ info, consumed = _parse(contents, len(contents))
+ return consumed
+
+
+def _parse(encoded_data, data_len, pointer=0, lengths_only=False, depth=0):
+ """
+ Parses a byte string into component parts
+
+ :param encoded_data:
+ A byte string that contains BER-encoded data
+
+ :param data_len:
+ The integer length of the encoded data
+
+ :param pointer:
+ The index in the byte string to parse from
+
+ :param lengths_only:
+ A boolean to cause the call to return a 2-element tuple of the integer
+ number of bytes in the header and the integer number of bytes in the
+ contents. Internal use only.
+
+ :param depth:
+ The recursion depth when evaluating indefinite-length encoding.
+
+ :return:
+ A 2-element tuple:
+ - 0: A tuple of (class_, method, tag, header, content, trailer)
+ - 1: An integer indicating how many bytes were consumed
+ """
+
+ if depth > _MAX_DEPTH:
+ raise ValueError('Indefinite-length recursion limit exceeded')
+
+ start = pointer
+
+ if data_len < pointer + 1:
+ raise ValueError(_INSUFFICIENT_DATA_MESSAGE % (1, data_len - pointer))
+ first_octet = ord(encoded_data[pointer]) if _PY2 else encoded_data[pointer]
+
+ pointer += 1
+
+ tag = first_octet & 31
+ constructed = (first_octet >> 5) & 1
+    # High tag number form: the tag is encoded base-128, using the 8th bit of
+    # each octet as a continuation indicator
+ if tag == 31:
+ tag = 0
+ while True:
+ if data_len < pointer + 1:
+ raise ValueError(_INSUFFICIENT_DATA_MESSAGE % (1, data_len - pointer))
+ num = ord(encoded_data[pointer]) if _PY2 else encoded_data[pointer]
+ pointer += 1
+ if num == 0x80 and tag == 0:
+ raise ValueError('Non-minimal tag encoding')
+ tag *= 128
+ tag += num & 127
+ if num >> 7 == 0:
+ break
+ if tag < 31:
+ raise ValueError('Non-minimal tag encoding')
+
+ if data_len < pointer + 1:
+ raise ValueError(_INSUFFICIENT_DATA_MESSAGE % (1, data_len - pointer))
+ length_octet = ord(encoded_data[pointer]) if _PY2 else encoded_data[pointer]
+ pointer += 1
+ trailer = b''
+
+ if length_octet >> 7 == 0:
+ contents_end = pointer + (length_octet & 127)
+
+ else:
+ length_octets = length_octet & 127
+ if length_octets:
+ if data_len < pointer + length_octets:
+ raise ValueError(_INSUFFICIENT_DATA_MESSAGE % (length_octets, data_len - pointer))
+ pointer += length_octets
+ contents_end = pointer + int_from_bytes(encoded_data[pointer - length_octets:pointer], signed=False)
+
+ else:
+ # To properly parse indefinite length values, we need to scan forward
+ # parsing headers until we find a value with a length of zero. If we
+ # just scanned looking for \x00\x00, nested indefinite length values
+ # would not work.
+ if not constructed:
+ raise ValueError('Indefinite-length element must be constructed')
+ contents_end = pointer
+ while data_len < contents_end + 2 or encoded_data[contents_end:contents_end+2] != b'\x00\x00':
+ _, contents_end = _parse(encoded_data, data_len, contents_end, lengths_only=True, depth=depth+1)
+ contents_end += 2
+ trailer = b'\x00\x00'
+
+ if contents_end > data_len:
+ raise ValueError(_INSUFFICIENT_DATA_MESSAGE % (contents_end - pointer, data_len - pointer))
+
+ if lengths_only:
+ return (pointer, contents_end)
+
+ return (
+ (
+ first_octet >> 6,
+ constructed,
+ tag,
+ encoded_data[start:pointer],
+ encoded_data[pointer:contents_end-len(trailer)],
+ trailer
+ ),
+ contents_end
+ )
+
+
+def _dump_header(class_, method, tag, contents):
+ """
+ Constructs the header bytes for an ASN.1 object
+
+ :param class_:
+ An integer ASN.1 class value: 0 (universal), 1 (application),
+ 2 (context), 3 (private)
+
+ :param method:
+ An integer ASN.1 method value: 0 (primitive), 1 (constructed)
+
+ :param tag:
+ An integer ASN.1 tag value
+
+ :param contents:
+ A byte string of the encoded byte contents
+
+ :return:
+ A byte string of the ASN.1 DER header
+ """
+
+ header = b''
+
+ id_num = 0
+ id_num |= class_ << 6
+ id_num |= method << 5
+
+ if tag >= 31:
+ cont_bit = 0
+ while tag > 0:
+ header = chr_cls(cont_bit | (tag & 0x7f)) + header
+ if not cont_bit:
+ cont_bit = 0x80
+ tag = tag >> 7
+ header = chr_cls(id_num | 31) + header
+ else:
+ header += chr_cls(id_num | tag)
+
+ length = len(contents)
+ if length <= 127:
+ header += chr_cls(length)
+ else:
+ length_bytes = int_to_bytes(length)
+ header += chr_cls(0x80 | len(length_bytes))
+ header += length_bytes
+
+ return header
diff --git a/jc/parsers/asn1crypto/pdf.py b/jc/parsers/asn1crypto/pdf.py
new file mode 100644
index 00000000..b72c886c
--- /dev/null
+++ b/jc/parsers/asn1crypto/pdf.py
@@ -0,0 +1,84 @@
+# coding: utf-8
+
+"""
+ASN.1 type classes for PDF signature structures. Adds extra oid mapping and
+value parsing to asn1crypto.x509.Extension() and asn1crypto.cms.CMSAttribute().
+"""
+
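+# Note (a sketch of intended use): importing this module is enough to activate
+# the mappings registered at the bottom of the file; for example, an
+# x509.Extension() with an extn_id of 'adobe_timestamp' will then parse its
+# extn_value as AdobeTimestamp.
+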
+from __future__ import unicode_literals, division, absolute_import, print_function
+
+from .cms import CMSAttributeType, CMSAttribute
+from .core import (
+ Boolean,
+ Integer,
+ Null,
+ ObjectIdentifier,
+ OctetString,
+ Sequence,
+ SequenceOf,
+ SetOf,
+)
+from .crl import CertificateList
+from .ocsp import OCSPResponse
+from .x509 import (
+ Extension,
+ ExtensionId,
+ GeneralName,
+ KeyPurposeId,
+)
+
+
+class AdobeArchiveRevInfo(Sequence):
+ _fields = [
+ ('version', Integer)
+ ]
+
+
+class AdobeTimestamp(Sequence):
+ _fields = [
+ ('version', Integer),
+ ('location', GeneralName),
+ ('requires_auth', Boolean, {'optional': True, 'default': False}),
+ ]
+
+
+class OtherRevInfo(Sequence):
+ _fields = [
+ ('type', ObjectIdentifier),
+ ('value', OctetString),
+ ]
+
+
+class SequenceOfCertificateList(SequenceOf):
+ _child_spec = CertificateList
+
+
+class SequenceOfOCSPResponse(SequenceOf):
+ _child_spec = OCSPResponse
+
+
+class SequenceOfOtherRevInfo(SequenceOf):
+ _child_spec = OtherRevInfo
+
+
+class RevocationInfoArchival(Sequence):
+ _fields = [
+ ('crl', SequenceOfCertificateList, {'explicit': 0, 'optional': True}),
+ ('ocsp', SequenceOfOCSPResponse, {'explicit': 1, 'optional': True}),
+ ('other_rev_info', SequenceOfOtherRevInfo, {'explicit': 2, 'optional': True}),
+ ]
+
+
+class SetOfRevocationInfoArchival(SetOf):
+ _child_spec = RevocationInfoArchival
+
+
+ExtensionId._map['1.2.840.113583.1.1.9.2'] = 'adobe_archive_rev_info'
+ExtensionId._map['1.2.840.113583.1.1.9.1'] = 'adobe_timestamp'
+ExtensionId._map['1.2.840.113583.1.1.10'] = 'adobe_ppklite_credential'
+Extension._oid_specs['adobe_archive_rev_info'] = AdobeArchiveRevInfo
+Extension._oid_specs['adobe_timestamp'] = AdobeTimestamp
+Extension._oid_specs['adobe_ppklite_credential'] = Null
+KeyPurposeId._map['1.2.840.113583.1.1.5'] = 'pdf_signing'
+CMSAttributeType._map['1.2.840.113583.1.1.8'] = 'adobe_revocation_info_archival'
+CMSAttribute._oid_specs['adobe_revocation_info_archival'] = SetOfRevocationInfoArchival
diff --git a/jc/parsers/asn1crypto/pem.py b/jc/parsers/asn1crypto/pem.py
new file mode 100644
index 00000000..511ea4b5
--- /dev/null
+++ b/jc/parsers/asn1crypto/pem.py
@@ -0,0 +1,222 @@
+# coding: utf-8
+
+"""
+Encoding DER to PEM and decoding PEM to DER. Exports the following items:
+
+ - armor()
+ - detect()
+ - unarmor()
+
+"""
+
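+# Illustrative use (a sketch): `der_bytes` is assumed to hold a DER-encoded
+# certificate.
+#
+#     pem_bytes = armor('CERTIFICATE', der_bytes)
+#     object_type, headers, decoded = unarmor(pem_bytes)
+#     # object_type == 'CERTIFICATE' and decoded == der_bytes
+#
+#     # a PEM bundle with several blocks can be walked with multiple=True
+#     for object_type, headers, decoded in unarmor(pem_bytes, multiple=True):
+#         ...
+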
+from __future__ import unicode_literals, division, absolute_import, print_function
+
+import base64
+import re
+import sys
+
+from ._errors import unwrap
+from ._types import type_name as _type_name, str_cls, byte_cls
+
+if sys.version_info < (3,):
+ from cStringIO import StringIO as BytesIO
+else:
+ from io import BytesIO
+
+
+def detect(byte_string):
+ """
+ Detect if a byte string seems to contain a PEM-encoded block
+
+ :param byte_string:
+ A byte string to look through
+
+ :return:
+ A boolean, indicating if a PEM-encoded block is contained in the byte
+ string
+ """
+
+ if not isinstance(byte_string, byte_cls):
+ raise TypeError(unwrap(
+ '''
+ byte_string must be a byte string, not %s
+ ''',
+ _type_name(byte_string)
+ ))
+
+ return byte_string.find(b'-----BEGIN') != -1 or byte_string.find(b'---- BEGIN') != -1
+
+
+def armor(type_name, der_bytes, headers=None):
+ """
+ Armors a DER-encoded byte string in PEM
+
+ :param type_name:
+ A unicode string that will be capitalized and placed in the header
+ and footer of the block. E.g. "CERTIFICATE", "PRIVATE KEY", etc. This
+ will appear as "-----BEGIN CERTIFICATE-----" and
+ "-----END CERTIFICATE-----".
+
+ :param der_bytes:
+ A byte string to be armored
+
+ :param headers:
+ An OrderedDict of the header lines to write after the BEGIN line
+
+ :return:
+ A byte string of the PEM block
+ """
+
+ if not isinstance(der_bytes, byte_cls):
+ raise TypeError(unwrap(
+ '''
+ der_bytes must be a byte string, not %s
+ ''' % _type_name(der_bytes)
+ ))
+
+ if not isinstance(type_name, str_cls):
+ raise TypeError(unwrap(
+ '''
+ type_name must be a unicode string, not %s
+ ''',
+ _type_name(type_name)
+ ))
+
+ type_name = type_name.upper().encode('ascii')
+
+ output = BytesIO()
+ output.write(b'-----BEGIN ')
+ output.write(type_name)
+ output.write(b'-----\n')
+ if headers:
+ for key in headers:
+ output.write(key.encode('ascii'))
+ output.write(b': ')
+ output.write(headers[key].encode('ascii'))
+ output.write(b'\n')
+ output.write(b'\n')
+ b64_bytes = base64.b64encode(der_bytes)
+ b64_len = len(b64_bytes)
+ i = 0
+ while i < b64_len:
+ output.write(b64_bytes[i:i + 64])
+ output.write(b'\n')
+ i += 64
+ output.write(b'-----END ')
+ output.write(type_name)
+ output.write(b'-----\n')
+
+ return output.getvalue()
+
+
+def _unarmor(pem_bytes):
+ """
+ Convert a PEM-encoded byte string into one or more DER-encoded byte strings
+
+ :param pem_bytes:
+ A byte string of the PEM-encoded data
+
+ :raises:
+ ValueError - when the pem_bytes do not appear to be PEM-encoded bytes
+
+ :return:
+ A generator of 3-element tuples in the format: (object_type, headers,
+ der_bytes). The object_type is a unicode string of what is between
+ "-----BEGIN " and "-----". Examples include: "CERTIFICATE",
+ "PUBLIC KEY", "PRIVATE KEY". The headers is a dict containing any lines
+ in the form "Name: Value" that are right after the begin line.
+ """
+
+ if not isinstance(pem_bytes, byte_cls):
+ raise TypeError(unwrap(
+ '''
+ pem_bytes must be a byte string, not %s
+ ''',
+ _type_name(pem_bytes)
+ ))
+
+ # Valid states include: "trash", "headers", "body"
+ state = 'trash'
+ headers = {}
+ base64_data = b''
+ object_type = None
+
+ found_start = False
+ found_end = False
+
+ for line in pem_bytes.splitlines(False):
+ if line == b'':
+ continue
+
+ if state == "trash":
+            # Look for a starting line since some CA cert bundles show the cert
+            # info in a parsed format above each PEM block
+ type_name_match = re.match(b'^(?:---- |-----)BEGIN ([A-Z0-9 ]+)(?: ----|-----)', line)
+ if not type_name_match:
+ continue
+ object_type = type_name_match.group(1).decode('ascii')
+
+ found_start = True
+ state = 'headers'
+ continue
+
+ if state == 'headers':
+ if line.find(b':') == -1:
+ state = 'body'
+ else:
+ decoded_line = line.decode('ascii')
+ name, value = decoded_line.split(':', 1)
+ headers[name] = value.strip()
+ continue
+
+ if state == 'body':
+ if line[0:5] in (b'-----', b'---- '):
+ der_bytes = base64.b64decode(base64_data)
+
+ yield (object_type, headers, der_bytes)
+
+ state = 'trash'
+ headers = {}
+ base64_data = b''
+ object_type = None
+ found_end = True
+ continue
+
+ base64_data += line
+
+ if not found_start or not found_end:
+ raise ValueError(unwrap(
+ '''
+ pem_bytes does not appear to contain PEM-encoded data - no
+ BEGIN/END combination found
+ '''
+ ))
+
+
+def unarmor(pem_bytes, multiple=False):
+ """
+ Convert a PEM-encoded byte string into a DER-encoded byte string
+
+ :param pem_bytes:
+ A byte string of the PEM-encoded data
+
+ :param multiple:
+ If True, function will return a generator
+
+ :raises:
+ ValueError - when the pem_bytes do not appear to be PEM-encoded bytes
+
+ :return:
+ A 3-element tuple (object_name, headers, der_bytes). The object_name is
+ a unicode string of what is between "-----BEGIN " and "-----". Examples
+ include: "CERTIFICATE", "PUBLIC KEY", "PRIVATE KEY". The headers is a
+ dict containing any lines in the form "Name: Value" that are right
+ after the begin line.
+ """
+
+ generator = _unarmor(pem_bytes)
+
+ if not multiple:
+ return next(generator)
+
+ return generator
diff --git a/jc/parsers/asn1crypto/pkcs12.py b/jc/parsers/asn1crypto/pkcs12.py
new file mode 100644
index 00000000..7ebcefeb
--- /dev/null
+++ b/jc/parsers/asn1crypto/pkcs12.py
@@ -0,0 +1,193 @@
+# coding: utf-8
+
+"""
+ASN.1 type classes for PKCS#12 files. Exports the following items:
+
+ - CertBag()
+ - CrlBag()
+ - Pfx()
+ - SafeBag()
+ - SecretBag()
+
+Other type classes are defined that help compose the types listed above.
+"""
+
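+# Illustrative use (a sketch): `der_bytes` is assumed to hold a PFX file whose
+# authenticated safe entries are unencrypted 'data' ContentInfo structures.
+#
+#     pfx = Pfx.load(der_bytes)
+#     for content_info in pfx.authenticated_safe:
+#         if content_info['content_type'].native == 'data':
+#             for bag in SafeContents.load(content_info['content'].native):
+#                 print(bag['bag_id'].native)
+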
+from __future__ import unicode_literals, division, absolute_import, print_function
+
+from .algos import DigestInfo
+from .cms import ContentInfo, SignedData
+from .core import (
+ Any,
+ BMPString,
+ Integer,
+ ObjectIdentifier,
+ OctetString,
+ ParsableOctetString,
+ Sequence,
+ SequenceOf,
+ SetOf,
+)
+from .keys import PrivateKeyInfo, EncryptedPrivateKeyInfo
+from .x509 import Certificate, KeyPurposeId
+
+
+# The structures in this file are taken from https://tools.ietf.org/html/rfc7292
+
+class MacData(Sequence):
+ _fields = [
+ ('mac', DigestInfo),
+ ('mac_salt', OctetString),
+ ('iterations', Integer, {'default': 1}),
+ ]
+
+
+class Version(Integer):
+ _map = {
+ 3: 'v3'
+ }
+
+
+class AttributeType(ObjectIdentifier):
+ _map = {
+ # https://tools.ietf.org/html/rfc2985#page-18
+ '1.2.840.113549.1.9.20': 'friendly_name',
+ '1.2.840.113549.1.9.21': 'local_key_id',
+ # https://support.microsoft.com/en-us/kb/287547
+ '1.3.6.1.4.1.311.17.1': 'microsoft_local_machine_keyset',
+ # https://github.com/frohoff/jdk8u-dev-jdk/blob/master/src/share/classes/sun/security/pkcs12/PKCS12KeyStore.java
+ # this is a set of OIDs, representing key usage, the usual value is a SET of one element OID 2.5.29.37.0
+ '2.16.840.1.113894.746875.1.1': 'trusted_key_usage',
+ }
+
+
+class SetOfAny(SetOf):
+ _child_spec = Any
+
+
+class SetOfBMPString(SetOf):
+ _child_spec = BMPString
+
+
+class SetOfOctetString(SetOf):
+ _child_spec = OctetString
+
+
+class SetOfKeyPurposeId(SetOf):
+ _child_spec = KeyPurposeId
+
+
+class Attribute(Sequence):
+ _fields = [
+ ('type', AttributeType),
+ ('values', None),
+ ]
+
+ _oid_specs = {
+ 'friendly_name': SetOfBMPString,
+ 'local_key_id': SetOfOctetString,
+ 'microsoft_csp_name': SetOfBMPString,
+ 'trusted_key_usage': SetOfKeyPurposeId,
+ }
+
+ def _values_spec(self):
+ return self._oid_specs.get(self['type'].native, SetOfAny)
+
+ _spec_callbacks = {
+ 'values': _values_spec
+ }
+
+
+class Attributes(SetOf):
+ _child_spec = Attribute
+
+
+class Pfx(Sequence):
+ _fields = [
+ ('version', Version),
+ ('auth_safe', ContentInfo),
+ ('mac_data', MacData, {'optional': True})
+ ]
+
+ _authenticated_safe = None
+
+ @property
+ def authenticated_safe(self):
+ if self._authenticated_safe is None:
+ content = self['auth_safe']['content']
+ if isinstance(content, SignedData):
+ content = content['content_info']['content']
+ self._authenticated_safe = AuthenticatedSafe.load(content.native)
+ return self._authenticated_safe
+
+
+class AuthenticatedSafe(SequenceOf):
+ _child_spec = ContentInfo
+
+
+class BagId(ObjectIdentifier):
+ _map = {
+ '1.2.840.113549.1.12.10.1.1': 'key_bag',
+ '1.2.840.113549.1.12.10.1.2': 'pkcs8_shrouded_key_bag',
+ '1.2.840.113549.1.12.10.1.3': 'cert_bag',
+ '1.2.840.113549.1.12.10.1.4': 'crl_bag',
+ '1.2.840.113549.1.12.10.1.5': 'secret_bag',
+ '1.2.840.113549.1.12.10.1.6': 'safe_contents',
+ }
+
+
+class CertId(ObjectIdentifier):
+ _map = {
+ '1.2.840.113549.1.9.22.1': 'x509',
+ '1.2.840.113549.1.9.22.2': 'sdsi',
+ }
+
+
+class CertBag(Sequence):
+ _fields = [
+ ('cert_id', CertId),
+ ('cert_value', ParsableOctetString, {'explicit': 0}),
+ ]
+
+ _oid_pair = ('cert_id', 'cert_value')
+ _oid_specs = {
+ 'x509': Certificate,
+ }
+
+
+class CrlBag(Sequence):
+ _fields = [
+ ('crl_id', ObjectIdentifier),
+ ('crl_value', OctetString, {'explicit': 0}),
+ ]
+
+
+class SecretBag(Sequence):
+ _fields = [
+ ('secret_type_id', ObjectIdentifier),
+ ('secret_value', OctetString, {'explicit': 0}),
+ ]
+
+
+class SafeContents(SequenceOf):
+ pass
+
+
+class SafeBag(Sequence):
+ _fields = [
+ ('bag_id', BagId),
+ ('bag_value', Any, {'explicit': 0}),
+ ('bag_attributes', Attributes, {'optional': True}),
+ ]
+
+ _oid_pair = ('bag_id', 'bag_value')
+ _oid_specs = {
+ 'key_bag': PrivateKeyInfo,
+ 'pkcs8_shrouded_key_bag': EncryptedPrivateKeyInfo,
+ 'cert_bag': CertBag,
+ 'crl_bag': CrlBag,
+ 'secret_bag': SecretBag,
+ 'safe_contents': SafeContents
+ }
+
+
+SafeContents._child_spec = SafeBag
diff --git a/jc/parsers/asn1crypto/tsp.py b/jc/parsers/asn1crypto/tsp.py
new file mode 100644
index 00000000..f006da99
--- /dev/null
+++ b/jc/parsers/asn1crypto/tsp.py
@@ -0,0 +1,310 @@
+# coding: utf-8
+
+"""
+ASN.1 type classes for the time stamp protocol (TSP). Exports the following
+items:
+
+ - TimeStampReq()
+ - TimeStampResp()
+
+Also adds TimeStampedData() support to asn1crypto.cms.ContentInfo(),
+TimeStampedData() and TSTInfo() support to
+asn1crypto.cms.EncapsulatedContentInfo() and some oids and value parsers to
+asn1crypto.cms.CMSAttribute().
+
+Other type classes are defined that help compose the types listed above.
+"""
+
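+# Illustrative use (a sketch): `der_bytes` is assumed to hold a DER-encoded
+# TimeStampResp whose token is present and whose content parses as SignedData.
+#
+#     tsr = TimeStampResp.load(der_bytes)
+#     if tsr['status']['status'].native in ('granted', 'granted_with_mods'):
+#         signed_data = tsr['time_stamp_token']['content']
+#         tst_info = signed_data['encap_content_info']['content'].parsed
+#         print(tst_info['gen_time'].native)
+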
+from __future__ import unicode_literals, division, absolute_import, print_function
+
+from .algos import DigestAlgorithm
+from .cms import (
+ CMSAttribute,
+ CMSAttributeType,
+ ContentInfo,
+ ContentType,
+ EncapsulatedContentInfo,
+)
+from .core import (
+ Any,
+ BitString,
+ Boolean,
+ Choice,
+ GeneralizedTime,
+ IA5String,
+ Integer,
+ ObjectIdentifier,
+ OctetString,
+ Sequence,
+ SequenceOf,
+ SetOf,
+ UTF8String,
+)
+from .crl import CertificateList
+from .x509 import (
+ Attributes,
+ CertificatePolicies,
+ GeneralName,
+ GeneralNames,
+)
+
+
+# The structures in this file are based on https://tools.ietf.org/html/rfc3161,
+# https://tools.ietf.org/html/rfc4998, https://tools.ietf.org/html/rfc5544,
+# https://tools.ietf.org/html/rfc5035, https://tools.ietf.org/html/rfc2634
+
+class Version(Integer):
+ _map = {
+ 0: 'v0',
+ 1: 'v1',
+ 2: 'v2',
+ 3: 'v3',
+ 4: 'v4',
+ 5: 'v5',
+ }
+
+
+class MessageImprint(Sequence):
+ _fields = [
+ ('hash_algorithm', DigestAlgorithm),
+ ('hashed_message', OctetString),
+ ]
+
+
+class Accuracy(Sequence):
+ _fields = [
+ ('seconds', Integer, {'optional': True}),
+ ('millis', Integer, {'implicit': 0, 'optional': True}),
+ ('micros', Integer, {'implicit': 1, 'optional': True}),
+ ]
+
+
+class Extension(Sequence):
+ _fields = [
+ ('extn_id', ObjectIdentifier),
+ ('critical', Boolean, {'default': False}),
+ ('extn_value', OctetString),
+ ]
+
+
+class Extensions(SequenceOf):
+ _child_spec = Extension
+
+
+class TSTInfo(Sequence):
+ _fields = [
+ ('version', Version),
+ ('policy', ObjectIdentifier),
+ ('message_imprint', MessageImprint),
+ ('serial_number', Integer),
+ ('gen_time', GeneralizedTime),
+ ('accuracy', Accuracy, {'optional': True}),
+ ('ordering', Boolean, {'default': False}),
+ ('nonce', Integer, {'optional': True}),
+ ('tsa', GeneralName, {'explicit': 0, 'optional': True}),
+ ('extensions', Extensions, {'implicit': 1, 'optional': True}),
+ ]
+
+
+class TimeStampReq(Sequence):
+ _fields = [
+ ('version', Version),
+ ('message_imprint', MessageImprint),
+ ('req_policy', ObjectIdentifier, {'optional': True}),
+ ('nonce', Integer, {'optional': True}),
+ ('cert_req', Boolean, {'default': False}),
+ ('extensions', Extensions, {'implicit': 0, 'optional': True}),
+ ]
+
+
+class PKIStatus(Integer):
+ _map = {
+ 0: 'granted',
+ 1: 'granted_with_mods',
+ 2: 'rejection',
+ 3: 'waiting',
+ 4: 'revocation_warning',
+ 5: 'revocation_notification',
+ }
+
+
+class PKIFreeText(SequenceOf):
+ _child_spec = UTF8String
+
+
+class PKIFailureInfo(BitString):
+ _map = {
+ 0: 'bad_alg',
+ 2: 'bad_request',
+ 5: 'bad_data_format',
+ 14: 'time_not_available',
+ 15: 'unaccepted_policy',
+ 16: 'unaccepted_extensions',
+ 17: 'add_info_not_available',
+ 25: 'system_failure',
+ }
+
+
+class PKIStatusInfo(Sequence):
+ _fields = [
+ ('status', PKIStatus),
+ ('status_string', PKIFreeText, {'optional': True}),
+ ('fail_info', PKIFailureInfo, {'optional': True}),
+ ]
+
+
+class TimeStampResp(Sequence):
+ _fields = [
+ ('status', PKIStatusInfo),
+ ('time_stamp_token', ContentInfo),
+ ]
+
+
+class MetaData(Sequence):
+ _fields = [
+ ('hash_protected', Boolean),
+ ('file_name', UTF8String, {'optional': True}),
+ ('media_type', IA5String, {'optional': True}),
+ ('other_meta_data', Attributes, {'optional': True}),
+ ]
+
+
+class TimeStampAndCRL(Sequence):
+ _fields = [
+ ('time_stamp', EncapsulatedContentInfo),
+ ('crl', CertificateList, {'optional': True}),
+ ]
+
+
+class TimeStampTokenEvidence(SequenceOf):
+ _child_spec = TimeStampAndCRL
+
+
+class DigestAlgorithms(SequenceOf):
+ _child_spec = DigestAlgorithm
+
+
+class EncryptionInfo(Sequence):
+ _fields = [
+ ('encryption_info_type', ObjectIdentifier),
+ ('encryption_info_value', Any),
+ ]
+
+
+class PartialHashtree(SequenceOf):
+ _child_spec = OctetString
+
+
+class PartialHashtrees(SequenceOf):
+ _child_spec = PartialHashtree
+
+
+class ArchiveTimeStamp(Sequence):
+ _fields = [
+ ('digest_algorithm', DigestAlgorithm, {'implicit': 0, 'optional': True}),
+ ('attributes', Attributes, {'implicit': 1, 'optional': True}),
+ ('reduced_hashtree', PartialHashtrees, {'implicit': 2, 'optional': True}),
+ ('time_stamp', ContentInfo),
+ ]
+
+
+class ArchiveTimeStampSequence(SequenceOf):
+ _child_spec = ArchiveTimeStamp
+
+
+class EvidenceRecord(Sequence):
+ _fields = [
+ ('version', Version),
+ ('digest_algorithms', DigestAlgorithms),
+ ('crypto_infos', Attributes, {'implicit': 0, 'optional': True}),
+ ('encryption_info', EncryptionInfo, {'implicit': 1, 'optional': True}),
+ ('archive_time_stamp_sequence', ArchiveTimeStampSequence),
+ ]
+
+
+class OtherEvidence(Sequence):
+ _fields = [
+ ('oe_type', ObjectIdentifier),
+ ('oe_value', Any),
+ ]
+
+
+class Evidence(Choice):
+ _alternatives = [
+ ('tst_evidence', TimeStampTokenEvidence, {'implicit': 0}),
+ ('ers_evidence', EvidenceRecord, {'implicit': 1}),
+ ('other_evidence', OtherEvidence, {'implicit': 2}),
+ ]
+
+
+class TimeStampedData(Sequence):
+ _fields = [
+ ('version', Version),
+ ('data_uri', IA5String, {'optional': True}),
+ ('meta_data', MetaData, {'optional': True}),
+ ('content', OctetString, {'optional': True}),
+ ('temporal_evidence', Evidence),
+ ]
+
+
+class IssuerSerial(Sequence):
+ _fields = [
+ ('issuer', GeneralNames),
+ ('serial_number', Integer),
+ ]
+
+
+class ESSCertID(Sequence):
+ _fields = [
+ ('cert_hash', OctetString),
+ ('issuer_serial', IssuerSerial, {'optional': True}),
+ ]
+
+
+class ESSCertIDs(SequenceOf):
+ _child_spec = ESSCertID
+
+
+class SigningCertificate(Sequence):
+ _fields = [
+ ('certs', ESSCertIDs),
+ ('policies', CertificatePolicies, {'optional': True}),
+ ]
+
+
+class SetOfSigningCertificates(SetOf):
+ _child_spec = SigningCertificate
+
+
+class ESSCertIDv2(Sequence):
+ _fields = [
+ ('hash_algorithm', DigestAlgorithm, {'default': {'algorithm': 'sha256'}}),
+ ('cert_hash', OctetString),
+ ('issuer_serial', IssuerSerial, {'optional': True}),
+ ]
+
+
+class ESSCertIDv2s(SequenceOf):
+ _child_spec = ESSCertIDv2
+
+
+class SigningCertificateV2(Sequence):
+ _fields = [
+ ('certs', ESSCertIDv2s),
+ ('policies', CertificatePolicies, {'optional': True}),
+ ]
+
+
+class SetOfSigningCertificatesV2(SetOf):
+ _child_spec = SigningCertificateV2
+
+
+EncapsulatedContentInfo._oid_specs['tst_info'] = TSTInfo
+EncapsulatedContentInfo._oid_specs['timestamped_data'] = TimeStampedData
+ContentInfo._oid_specs['timestamped_data'] = TimeStampedData
+ContentType._map['1.2.840.113549.1.9.16.1.4'] = 'tst_info'
+ContentType._map['1.2.840.113549.1.9.16.1.31'] = 'timestamped_data'
+CMSAttributeType._map['1.2.840.113549.1.9.16.2.12'] = 'signing_certificate'
+CMSAttribute._oid_specs['signing_certificate'] = SetOfSigningCertificates
+CMSAttributeType._map['1.2.840.113549.1.9.16.2.47'] = 'signing_certificate_v2'
+CMSAttribute._oid_specs['signing_certificate_v2'] = SetOfSigningCertificatesV2
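+
+
+# Illustrative usage (a hedged sketch, not part of the upstream source): with
+# the OID registrations above in place, a DER-encoded RFC 3161 response can be
+# parsed directly; `der_bytes` below is a placeholder for the raw response.
+#
+#     resp = TimeStampResp.load(der_bytes)
+#     resp['status']['status'].native    # e.g. 'granted'
+#     token = resp['time_stamp_token']   # a cms.ContentInfo wrapping SignedData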
diff --git a/jc/parsers/asn1crypto/util.py b/jc/parsers/asn1crypto/util.py
new file mode 100644
index 00000000..7196897c
--- /dev/null
+++ b/jc/parsers/asn1crypto/util.py
@@ -0,0 +1,878 @@
+# coding: utf-8
+
+"""
+Miscellaneous data helpers, including functions for converting integers to and
+from bytes and UTC timezone. Exports the following items:
+
+ - OrderedDict()
+ - int_from_bytes()
+ - int_to_bytes()
+ - timezone.utc
+ - utc_with_dst
+ - create_timezone()
+ - inet_ntop()
+ - inet_pton()
+ - uri_to_iri()
+ - iri_to_uri()
+"""
+
+from __future__ import unicode_literals, division, absolute_import, print_function
+
+import math
+import sys
+from datetime import datetime, date, timedelta, tzinfo
+
+from ._errors import unwrap
+from ._iri import iri_to_uri, uri_to_iri # noqa
+from ._ordereddict import OrderedDict # noqa
+from ._types import type_name
+
+if sys.platform == 'win32':
+ from ._inet import inet_ntop, inet_pton
+else:
+ from socket import inet_ntop, inet_pton # noqa
+
+
+# Python 2
+if sys.version_info <= (3,):
+
+ def int_to_bytes(value, signed=False, width=None):
+ """
+ Converts an integer to a byte string
+
+ :param value:
+ The integer to convert
+
+ :param signed:
+ If the byte string should be encoded using two's complement
+
+ :param width:
+ If None, the minimal possible size (but at least 1),
+ otherwise an integer of the byte width for the return value
+
+ :return:
+ A byte string
+ """
+
+ if value == 0 and width == 0:
+ return b''
+
+ # Handle negatives in two's complement
+ is_neg = False
+ if signed and value < 0:
+ is_neg = True
+ bits = int(math.ceil(len('%x' % abs(value)) / 2.0) * 8)
+ value = (value + (1 << bits)) % (1 << bits)
+
+ hex_str = '%x' % value
+ if len(hex_str) & 1:
+ hex_str = '0' + hex_str
+
+ output = hex_str.decode('hex')
+
+ if signed and not is_neg and ord(output[0:1]) & 0x80:
+ output = b'\x00' + output
+
+ if width is not None:
+ if len(output) > width:
+ raise OverflowError('int too big to convert')
+ if is_neg:
+ pad_char = b'\xFF'
+ else:
+ pad_char = b'\x00'
+ output = (pad_char * (width - len(output))) + output
+ elif is_neg and ord(output[0:1]) & 0x80 == 0:
+ output = b'\xFF' + output
+
+ return output
+
+ def int_from_bytes(value, signed=False):
+ """
+ Converts a byte string to an integer
+
+ :param value:
+ The byte string to convert
+
+ :param signed:
+ If the byte string should be interpreted using two's complement
+
+ :return:
+ An integer
+ """
+
+ if value == b'':
+ return 0
+
+ num = long(value.encode("hex"), 16) # noqa
+
+ if not signed:
+ return num
+
+ # Check for sign bit and handle two's complement
+ if ord(value[0:1]) & 0x80:
+ bit_len = len(value) * 8
+ return num - (1 << bit_len)
+
+ return num
+
+ class timezone(tzinfo): # noqa
+ """
+ Implements datetime.timezone for py2.
+ Only full minute offsets are supported.
+ DST is not supported.
+ """
+
+ def __init__(self, offset, name=None):
+ """
+ :param offset:
+ A timedelta with this timezone's offset from UTC
+
+ :param name:
+ Name of the timezone; if None, generate one.
+ """
+
+ if not timedelta(hours=-24) < offset < timedelta(hours=24):
+ raise ValueError('Offset must be in [-23:59, 23:59]')
+
+ if offset.seconds % 60 or offset.microseconds:
+ raise ValueError('Offset must be full minutes')
+
+ self._offset = offset
+
+ if name is not None:
+ self._name = name
+ elif not offset:
+ self._name = 'UTC'
+ else:
+ self._name = 'UTC' + _format_offset(offset)
+
+ def __eq__(self, other):
+ """
+ Compare two timezones
+
+ :param other:
+ The other timezone to compare to
+
+ :return:
+ A boolean
+ """
+
+ if type(other) != timezone:
+ return False
+ return self._offset == other._offset
+
+ def __getinitargs__(self):
+ """
+ Called by tzinfo.__reduce__ to support pickle and copy.
+
+ :return:
+ offset and name, to be used for __init__
+ """
+
+ return self._offset, self._name
+
+ def tzname(self, dt):
+ """
+ :param dt:
+ A datetime object; ignored.
+
+ :return:
+ Name of this timezone
+ """
+
+ return self._name
+
+ def utcoffset(self, dt):
+ """
+ :param dt:
+ A datetime object; ignored.
+
+ :return:
+ A timedelta object with the offset from UTC
+ """
+
+ return self._offset
+
+ def dst(self, dt):
+ """
+ :param dt:
+ A datetime object; ignored.
+
+ :return:
+ Zero timedelta
+ """
+
+ return timedelta(0)
+
+ timezone.utc = timezone(timedelta(0))
+
+# Python 3
+else:
+
+ from datetime import timezone # noqa
+
+ def int_to_bytes(value, signed=False, width=None):
+ """
+ Converts an integer to a byte string
+
+ :param value:
+ The integer to convert
+
+ :param signed:
+ If the byte string should be encoded using two's complement
+
+ :param width:
+ If None, the minimal possible size (but at least 1),
+ otherwise an integer of the byte width for the return value
+
+ :return:
+ A byte string
+ """
+
+ if width is None:
+ if signed:
+ if value < 0:
+ bits_required = abs(value + 1).bit_length()
+ else:
+ bits_required = value.bit_length()
+ if bits_required % 8 == 0:
+ bits_required += 1
+ else:
+ bits_required = value.bit_length()
+ width = math.ceil(bits_required / 8) or 1
+ return value.to_bytes(width, byteorder='big', signed=signed)
+
+ def int_from_bytes(value, signed=False):
+ """
+ Converts a byte string to an integer
+
+ :param value:
+ The byte string to convert
+
+ :param signed:
+ If the byte string should be interpreted using two's complement
+
+ :return:
+ An integer
+ """
+
+ return int.from_bytes(value, 'big', signed=signed)
+
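+# Illustrative usage (a sketch, not part of the upstream source): the two
+# helpers are inverses for big-endian byte strings, e.g.
+#
+#     int_to_bytes(65537)                    # b'\x01\x00\x01'
+#     int_to_bytes(-1, signed=True)          # b'\xff'
+#     int_from_bytes(b'\x01\x00\x01')        # 65537
+#     int_from_bytes(b'\xff', signed=True)   # -1
+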
+
+def _format_offset(off):
+ """
+ Format a timedelta into "[+-]HH:MM" format or "" for None
+ """
+
+ if off is None:
+ return ''
+ mins = off.days * 24 * 60 + off.seconds // 60
+ sign = '-' if mins < 0 else '+'
+ return sign + '%02d:%02d' % divmod(abs(mins), 60)
+
+
+class _UtcWithDst(tzinfo):
+ """
+ Utc class where dst does not return None; required for astimezone
+ """
+
+ def tzname(self, dt):
+ return 'UTC'
+
+ def utcoffset(self, dt):
+ return timedelta(0)
+
+ def dst(self, dt):
+ return timedelta(0)
+
+
+utc_with_dst = _UtcWithDst()
+
+_timezone_cache = {}
+
+
+def create_timezone(offset):
+ """
+ Returns a new datetime.timezone object with the given offset.
+ Uses cached objects if possible.
+
+ :param offset:
+ A datetime.timedelta object; it must be in full minutes and between -23:59 and +23:59.
+
+ :return:
+ A datetime.timezone object
+ """
+
+ try:
+ tz = _timezone_cache[offset]
+ except KeyError:
+ tz = _timezone_cache[offset] = timezone(offset)
+ return tz
+
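+# Illustrative usage (a sketch, not part of the upstream source): timezone
+# objects are cached per offset, so repeated calls return the same instance:
+#
+#     tz = create_timezone(timedelta(hours=5, minutes=30))    # UTC+05:30
+#     create_timezone(timedelta(hours=5, minutes=30)) is tz   # True
+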
+
+class extended_date(object):
+ """
+ A datetime.datetime-like object that represents the year 0. This is just
+ to handle 0000-01-01 found in some certificates. Python's datetime does
+ not support year 0.
+
+ The proleptic Gregorian calendar repeats itself every 400 years. Therefore,
+ the simplest way to format is to substitute year 2000.
+ """
+
+ def __init__(self, year, month, day):
+ """
+ :param year:
+ The integer 0
+
+ :param month:
+ An integer from 1 to 12
+
+ :param day:
+ An integer from 1 to 31
+ """
+
+ if year != 0:
+ raise ValueError('year must be 0')
+
+ self._y2k = date(2000, month, day)
+
+ @property
+ def year(self):
+ """
+ :return:
+ The integer 0
+ """
+
+ return 0
+
+ @property
+ def month(self):
+ """
+ :return:
+ An integer from 1 to 12
+ """
+
+ return self._y2k.month
+
+ @property
+ def day(self):
+ """
+ :return:
+ An integer from 1 to 31
+ """
+
+ return self._y2k.day
+
+ def strftime(self, format):
+ """
+ Formats the date using strftime()
+
+ :param format:
+ A strftime() format string
+
+ :return:
+ A str, the formatted date as a unicode string
+ in Python 3 and a byte string in Python 2
+ """
+
+ # Format the date twice, once with year 2000, once with year 4000.
+ # The only differences in the result will be in the millennium. Find them and replace by zeros.
+ y2k = self._y2k.strftime(format)
+ y4k = self._y2k.replace(year=4000).strftime(format)
+ return ''.join('0' if (c2, c4) == ('2', '4') else c2 for c2, c4 in zip(y2k, y4k))
+
+ def isoformat(self):
+ """
+ Formats the date as %Y-%m-%d
+
+ :return:
+ The date formatted to %Y-%m-%d as a unicode string in Python 3
+ and a byte string in Python 2
+ """
+
+ return self.strftime('0000-%m-%d')
+
+ def replace(self, year=None, month=None, day=None):
+ """
+ Returns a new datetime.date or asn1crypto.util.extended_date
+ object with the specified components replaced
+
+ :return:
+ A datetime.date or asn1crypto.util.extended_date object
+ """
+
+ if year is None:
+ year = self.year
+ if month is None:
+ month = self.month
+ if day is None:
+ day = self.day
+
+ if year > 0:
+ cls = date
+ else:
+ cls = extended_date
+
+ return cls(
+ year,
+ month,
+ day
+ )
+
+ def __str__(self):
+ """
+ :return:
+ A str representing this extended_date, e.g. "0000-01-01"
+ """
+
+ return self.strftime('%Y-%m-%d')
+
+ def __eq__(self, other):
+ """
+ Compare two extended_date objects
+
+ :param other:
+ The other extended_date to compare to
+
+ :return:
+ A boolean
+ """
+
+ # datetime.date object wouldn't compare equal because it can't be year 0
+ if not isinstance(other, self.__class__):
+ return False
+ return self.__cmp__(other) == 0
+
+ def __ne__(self, other):
+ """
+ Compare two extended_date objects
+
+ :param other:
+ The other extended_date to compare to
+
+ :return:
+ A boolean
+ """
+
+ return not self.__eq__(other)
+
+ def _comparison_error(self, other):
+ raise TypeError(unwrap(
+ '''
+ An asn1crypto.util.extended_date object can only be compared to
+ an asn1crypto.util.extended_date or datetime.date object, not %s
+ ''',
+ type_name(other)
+ ))
+
+ def __cmp__(self, other):
+ """
+ Compare two extended_date or datetime.date objects
+
+ :param other:
+ The other extended_date object to compare to
+
+ :return:
+ An integer smaller than, equal to, or larger than 0
+ """
+
+ # self is year 0, other is >= year 1
+ if isinstance(other, date):
+ return -1
+
+ if not isinstance(other, self.__class__):
+ self._comparison_error(other)
+
+ if self._y2k < other._y2k:
+ return -1
+ if self._y2k > other._y2k:
+ return 1
+ return 0
+
+ def __lt__(self, other):
+ return self.__cmp__(other) < 0
+
+ def __le__(self, other):
+ return self.__cmp__(other) <= 0
+
+ def __gt__(self, other):
+ return self.__cmp__(other) > 0
+
+ def __ge__(self, other):
+ return self.__cmp__(other) >= 0
+
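+# Illustrative usage (a sketch, not part of the upstream source): extended_date
+# covers the year 0 that datetime.date cannot represent, e.g.
+#
+#     d = extended_date(0, 1, 1)
+#     d.isoformat()            # '0000-01-01'
+#     d < date(2000, 1, 1)     # True - year 0 sorts before any real date
+#     d.replace(year=2022)     # a plain datetime.date
+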
+
+class extended_datetime(object):
+ """
+ A datetime.datetime-like object that represents the year 0. This is just
+ to handle 0000-01-01 found in some certificates. Python's datetime does
+ not support year 0.
+
+ The proleptic Gregorian calendar repeats itself every 400 years. Therefore,
+ the simplest way to format is to substitute year 2000.
+ """
+
+ # There are 97 leap days during 400 years.
+ DAYS_IN_400_YEARS = 400 * 365 + 97
+ DAYS_IN_2000_YEARS = 5 * DAYS_IN_400_YEARS
+
+ def __init__(self, year, *args, **kwargs):
+ """
+ :param year:
+ The integer 0
+
+ :param args:
+ Other positional arguments; see datetime.datetime.
+
+ :param kwargs:
+ Other keyword arguments; see datetime.datetime.
+ """
+
+ if year != 0:
+ raise ValueError('year must be 0')
+
+ self._y2k = datetime(2000, *args, **kwargs)
+
+ @property
+ def year(self):
+ """
+ :return:
+ The integer 0
+ """
+
+ return 0
+
+ @property
+ def month(self):
+ """
+ :return:
+ An integer from 1 to 12
+ """
+
+ return self._y2k.month
+
+ @property
+ def day(self):
+ """
+ :return:
+ An integer from 1 to 31
+ """
+
+ return self._y2k.day
+
+ @property
+ def hour(self):
+ """
+ :return:
+ An integer from 0 to 23
+ """
+
+ return self._y2k.hour
+
+ @property
+ def minute(self):
+ """
+ :return:
+ An integer from 0 to 59
+ """
+
+ return self._y2k.minute
+
+ @property
+ def second(self):
+ """
+ :return:
+ An integer from 0 to 59
+ """
+
+ return self._y2k.second
+
+ @property
+ def microsecond(self):
+ """
+ :return:
+ An integer from 0 to 999999
+ """
+
+ return self._y2k.microsecond
+
+ @property
+ def tzinfo(self):
+ """
+ :return:
+ If object is timezone aware, a datetime.tzinfo object, else None.
+ """
+
+ return self._y2k.tzinfo
+
+ def utcoffset(self):
+ """
+ :return:
+ If object is timezone aware, a datetime.timedelta object, else None.
+ """
+
+ return self._y2k.utcoffset()
+
+ def time(self):
+ """
+ :return:
+ A datetime.time object
+ """
+
+ return self._y2k.time()
+
+ def date(self):
+ """
+ :return:
+ An asn1crypto.util.extended_date of the date
+ """
+
+ return extended_date(0, self.month, self.day)
+
+ def strftime(self, format):
+ """
+ Performs strftime(), always returning a str
+
+ :param format:
+ A strftime() format string
+
+ :return:
+ A str of the formatted datetime
+ """
+
+ # Format the datetime twice, once with year 2000, once with year 4000.
+ # The only differences in the result will be in the millennium. Find them and replace by zeros.
+ y2k = self._y2k.strftime(format)
+ y4k = self._y2k.replace(year=4000).strftime(format)
+ return ''.join('0' if (c2, c4) == ('2', '4') else c2 for c2, c4 in zip(y2k, y4k))
+
+ def isoformat(self, sep='T'):
+ """
+ Formats the date as "%Y-%m-%d %H:%M:%S" with the sep param between the
+ date and time portions
+
+ :param sep:
+ A single character of the separator to place between the date and
+ time
+
+ :return:
+ The formatted datetime as a unicode string in Python 3 and a byte
+ string in Python 2
+ """
+
+ s = '0000-%02d-%02d%c%02d:%02d:%02d' % (self.month, self.day, sep, self.hour, self.minute, self.second)
+ if self.microsecond:
+ s += '.%06d' % self.microsecond
+ return s + _format_offset(self.utcoffset())
+
+ def replace(self, year=None, *args, **kwargs):
+ """
+ Returns a new datetime.datetime or asn1crypto.util.extended_datetime
+ object with the specified components replaced
+
+ :param year:
+ The new year to substitute. None to keep it.
+
+ :param args:
+ Other positional arguments; see datetime.datetime.replace.
+
+ :param kwargs:
+ Other keyword arguments; see datetime.datetime.replace.
+
+ :return:
+ A datetime.datetime or asn1crypto.util.extended_datetime object
+ """
+
+ if year:
+ return self._y2k.replace(year, *args, **kwargs)
+
+ return extended_datetime.from_y2k(self._y2k.replace(2000, *args, **kwargs))
+
+ def astimezone(self, tz):
+ """
+ Convert this extended_datetime to another timezone.
+
+ :param tz:
+ A datetime.tzinfo object.
+
+ :return:
+ A new extended_datetime or datetime.datetime object
+ """
+
+ return extended_datetime.from_y2k(self._y2k.astimezone(tz))
+
+ def timestamp(self):
+ """
+ Return POSIX timestamp. Only supported in python >= 3.3
+
+ :return:
+ A float representing the seconds since 1970-01-01 UTC. This will be a negative value.
+ """
+
+ return self._y2k.timestamp() - self.DAYS_IN_2000_YEARS * 86400
+
+ def __str__(self):
+ """
+ :return:
+ A str representing this extended_datetime, e.g. "0000-01-01 00:00:00.000001-10:00"
+ """
+
+ return self.isoformat(sep=' ')
+
+ def __eq__(self, other):
+ """
+ Compare two extended_datetime objects
+
+ :param other:
+ The other extended_datetime to compare to
+
+ :return:
+ A boolean
+ """
+
+ # Only compare against other datetime or extended_datetime objects
+ if not isinstance(other, (self.__class__, datetime)):
+ return False
+
+ # Offset-naive and offset-aware datetimes are never the same
+ if (self.tzinfo is None) != (other.tzinfo is None):
+ return False
+
+ return self.__cmp__(other) == 0
+
+ def __ne__(self, other):
+ """
+ Compare two extended_datetime objects
+
+ :param other:
+ The other extended_datetime to compare to
+
+ :return:
+ A boolean
+ """
+
+ return not self.__eq__(other)
+
+ def _comparison_error(self, other):
+ """
+ Raises a TypeError about the other object not being suitable for
+ comparison
+
+ :param other:
+ The object being compared to
+ """
+
+ raise TypeError(unwrap(
+ '''
+ An asn1crypto.util.extended_datetime object can only be compared to
+ an asn1crypto.util.extended_datetime or datetime.datetime object,
+ not %s
+ ''',
+ type_name(other)
+ ))
+
+ def __cmp__(self, other):
+ """
+ Compare two extended_datetime or datetime.datetime objects
+
+ :param other:
+ The other extended_datetime or datetime.datetime object to compare to
+
+ :return:
+ An integer smaller than, equal to, or larger than 0
+ """
+
+ if not isinstance(other, (self.__class__, datetime)):
+ self._comparison_error(other)
+
+ if (self.tzinfo is None) != (other.tzinfo is None):
+ raise TypeError("can't compare offset-naive and offset-aware datetimes")
+
+ diff = self - other
+ zero = timedelta(0)
+ if diff < zero:
+ return -1
+ if diff > zero:
+ return 1
+ return 0
+
+ def __lt__(self, other):
+ return self.__cmp__(other) < 0
+
+ def __le__(self, other):
+ return self.__cmp__(other) <= 0
+
+ def __gt__(self, other):
+ return self.__cmp__(other) > 0
+
+ def __ge__(self, other):
+ return self.__cmp__(other) >= 0
+
+ def __add__(self, other):
+ """
+ Adds a timedelta
+
+ :param other:
+ A datetime.timedelta object to add.
+
+ :return:
+ A new extended_datetime or datetime.datetime object.
+ """
+
+ return extended_datetime.from_y2k(self._y2k + other)
+
+ def __sub__(self, other):
+ """
+ Subtracts a timedelta or another datetime.
+
+ :param other:
+ A datetime.timedelta or datetime.datetime or extended_datetime object to subtract.
+
+ :return:
+ If a timedelta is passed, a new extended_datetime or datetime.datetime object.
+ Else a datetime.timedelta object.
+ """
+
+ if isinstance(other, timedelta):
+ return extended_datetime.from_y2k(self._y2k - other)
+
+ if isinstance(other, extended_datetime):
+ return self._y2k - other._y2k
+
+ if isinstance(other, datetime):
+ return self._y2k - other - timedelta(days=self.DAYS_IN_2000_YEARS)
+
+ return NotImplemented
+
+ def __rsub__(self, other):
+ return -(self - other)
+
+ @classmethod
+ def from_y2k(cls, value):
+ """
+ Revert substitution of year 2000.
+
+ :param value:
+ A datetime.datetime object which is 2000 years in the future.
+ :return:
+ A new extended_datetime or datetime.datetime object.
+ """
+
+ year = value.year - 2000
+
+ if year > 0:
+ new_cls = datetime
+ else:
+ new_cls = cls
+
+ return new_cls(
+ year,
+ value.month,
+ value.day,
+ value.hour,
+ value.minute,
+ value.second,
+ value.microsecond,
+ value.tzinfo
+ )
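+
+
+# Illustrative usage (a sketch, not part of the upstream source):
+# extended_datetime behaves like a datetime for the year 0, e.g.
+#
+#     dt = extended_datetime(0, 1, 1, tzinfo=timezone.utc)
+#     dt.isoformat()             # '0000-01-01T00:00:00+00:00'
+#     dt.timestamp()             # negative: seconds before the 1970 epoch
+#     dt + timedelta(days=366)   # rolls into year 1 -> a real datetime.datetime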
diff --git a/jc/parsers/asn1crypto/version.py b/jc/parsers/asn1crypto/version.py
new file mode 100644
index 00000000..966b57a5
--- /dev/null
+++ b/jc/parsers/asn1crypto/version.py
@@ -0,0 +1,6 @@
+# coding: utf-8
+from __future__ import unicode_literals, division, absolute_import, print_function
+
+
+__version__ = '1.5.1'
+__version_info__ = (1, 5, 1)
diff --git a/jc/parsers/asn1crypto/x509.py b/jc/parsers/asn1crypto/x509.py
new file mode 100644
index 00000000..8cfb2c78
--- /dev/null
+++ b/jc/parsers/asn1crypto/x509.py
@@ -0,0 +1,3036 @@
+# coding: utf-8
+
+"""
+ASN.1 type classes for X.509 certificates. Exports the following items:
+
+ - Attributes()
+ - Certificate()
+ - Extensions()
+ - GeneralName()
+ - GeneralNames()
+ - Name()
+
+Other type classes are defined that help compose the types listed above.
+"""
+
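+# Illustrative usage (a sketch, not part of the upstream source): a DER-encoded
+# certificate can be loaded and inspected as, e.g. (`der_bytes` is a
+# placeholder for the certificate bytes):
+#
+#     cert = Certificate.load(der_bytes)
+#     cert.subject.native['common_name']
+#     cert['tbs_certificate']['validity']['not_after'].native
+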
+from __future__ import unicode_literals, division, absolute_import, print_function
+
+from contextlib import contextmanager
+from encodings import idna # noqa
+import hashlib
+import re
+import socket
+import stringprep
+import sys
+import unicodedata
+
+from ._errors import unwrap
+from ._iri import iri_to_uri, uri_to_iri
+from ._ordereddict import OrderedDict
+from ._types import type_name, str_cls, bytes_to_list
+from .algos import AlgorithmIdentifier, AnyAlgorithmIdentifier, DigestAlgorithm, SignedDigestAlgorithm
+from .core import (
+ Any,
+ BitString,
+ BMPString,
+ Boolean,
+ Choice,
+ Concat,
+ Enumerated,
+ GeneralizedTime,
+ GeneralString,
+ IA5String,
+ Integer,
+ Null,
+ NumericString,
+ ObjectIdentifier,
+ OctetBitString,
+ OctetString,
+ ParsableOctetString,
+ PrintableString,
+ Sequence,
+ SequenceOf,
+ Set,
+ SetOf,
+ TeletexString,
+ UniversalString,
+ UTCTime,
+ UTF8String,
+ VisibleString,
+ VOID,
+)
+from .keys import PublicKeyInfo
+from .util import int_to_bytes, int_from_bytes, inet_ntop, inet_pton
+
+
+# The structures in this file are taken from https://tools.ietf.org/html/rfc5280
+# and a few other supplementary sources, mostly due to extra supported
+# extension and name OIDs
+
+
+class DNSName(IA5String):
+
+ _encoding = 'idna'
+ _bad_tag = (12, 19)
+
+ def __ne__(self, other):
+ return not self == other
+
+ def __eq__(self, other):
+ """
+ Equality as defined by https://tools.ietf.org/html/rfc5280#section-7.2
+
+ :param other:
+ Another DNSName object
+
+ :return:
+ A boolean
+ """
+
+ if not isinstance(other, DNSName):
+ return False
+
+ return self.__unicode__().lower() == other.__unicode__().lower()
+
+ def set(self, value):
+ """
+ Sets the value of the DNS name
+
+ :param value:
+ A unicode string
+ """
+
+ if not isinstance(value, str_cls):
+ raise TypeError(unwrap(
+ '''
+ %s value must be a unicode string, not %s
+ ''',
+ type_name(self),
+ type_name(value)
+ ))
+
+ if value.startswith('.'):
+ encoded_value = b'.' + value[1:].encode(self._encoding)
+ else:
+ encoded_value = value.encode(self._encoding)
+
+ self._unicode = value
+ self.contents = encoded_value
+ self._header = None
+ if self._trailer != b'':
+ self._trailer = b''
+
+
+class URI(IA5String):
+
+ def set(self, value):
+ """
+ Sets the value of the string
+
+ :param value:
+ A unicode string
+ """
+
+ if not isinstance(value, str_cls):
+ raise TypeError(unwrap(
+ '''
+ %s value must be a unicode string, not %s
+ ''',
+ type_name(self),
+ type_name(value)
+ ))
+
+ self._unicode = value
+ self.contents = iri_to_uri(value)
+ self._header = None
+ if self._trailer != b'':
+ self._trailer = b''
+
+ def __ne__(self, other):
+ return not self == other
+
+ def __eq__(self, other):
+ """
+ Equality as defined by https://tools.ietf.org/html/rfc5280#section-7.4
+
+ :param other:
+ Another URI object
+
+ :return:
+ A boolean
+ """
+
+ if not isinstance(other, URI):
+ return False
+
+ return iri_to_uri(self.native, True) == iri_to_uri(other.native, True)
+
+ def __unicode__(self):
+ """
+ :return:
+ A unicode string
+ """
+
+ if self.contents is None:
+ return ''
+ if self._unicode is None:
+ self._unicode = uri_to_iri(self._merge_chunks())
+ return self._unicode
+
+
+class EmailAddress(IA5String):
+
+ _contents = None
+
+ # If the value has gone through the .set() method, thus normalizing it
+ _normalized = False
+
+ # In the wild we've seen this encoded as a UTF8String and PrintableString
+ _bad_tag = (12, 19)
+
+ @property
+ def contents(self):
+ """
+ :return:
+ A byte string of the DER-encoded contents of the sequence
+ """
+
+ return self._contents
+
+ @contents.setter
+ def contents(self, value):
+ """
+ :param value:
+ A byte string of the DER-encoded contents of the sequence
+ """
+
+ self._normalized = False
+ self._contents = value
+
+ def set(self, value):
+ """
+ Sets the value of the string
+
+ :param value:
+ A unicode string
+ """
+
+ if not isinstance(value, str_cls):
+ raise TypeError(unwrap(
+ '''
+ %s value must be a unicode string, not %s
+ ''',
+ type_name(self),
+ type_name(value)
+ ))
+
+ if value.find('@') != -1:
+ mailbox, hostname = value.rsplit('@', 1)
+ encoded_value = mailbox.encode('ascii') + b'@' + hostname.encode('idna')
+ else:
+ encoded_value = value.encode('ascii')
+
+ self._normalized = True
+ self._unicode = value
+ self.contents = encoded_value
+ self._header = None
+ if self._trailer != b'':
+ self._trailer = b''
+
+ def __unicode__(self):
+ """
+ :return:
+ A unicode string
+ """
+
+ # We've seen this in the wild as a PrintableString, and since ascii is a
+ # subset of cp1252, we use the latter for decoding to be more user friendly
+ if self._unicode is None:
+ contents = self._merge_chunks()
+ if contents.find(b'@') == -1:
+ self._unicode = contents.decode('cp1252')
+ else:
+ mailbox, hostname = contents.rsplit(b'@', 1)
+ self._unicode = mailbox.decode('cp1252') + '@' + hostname.decode('idna')
+ return self._unicode
+
+ def __ne__(self, other):
+ return not self == other
+
+ def __eq__(self, other):
+ """
+ Equality as defined by https://tools.ietf.org/html/rfc5280#section-7.5
+
+ :param other:
+ Another EmailAddress object
+
+ :return:
+ A boolean
+ """
+
+ if not isinstance(other, EmailAddress):
+ return False
+
+ if not self._normalized:
+ self.set(self.native)
+ if not other._normalized:
+ other.set(other.native)
+
+ if self._contents.find(b'@') == -1 or other._contents.find(b'@') == -1:
+ return self._contents == other._contents
+
+ other_mailbox, other_hostname = other._contents.rsplit(b'@', 1)
+ mailbox, hostname = self._contents.rsplit(b'@', 1)
+
+ if mailbox != other_mailbox:
+ return False
+
+ if hostname.lower() != other_hostname.lower():
+ return False
+
+ return True
+
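+# Illustrative usage (a sketch, not part of the upstream source): equality for
+# these name types follows RFC 5280 section 7 - the host part compares
+# case-insensitively while the mailbox part does not, e.g.
+#
+#     EmailAddress('User@Example.COM') == EmailAddress('User@example.com')   # True
+#     EmailAddress('User@example.com') == EmailAddress('user@example.com')   # False
+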
+
+class IPAddress(OctetString):
+ def parse(self, spec=None, spec_params=None):
+ """
+ This method is not applicable to IP addresses
+ """
+
+ raise ValueError(unwrap(
+ '''
+ IP address values can not be parsed
+ '''
+ ))
+
+ def set(self, value):
+ """
+ Sets the value of the object
+
+ :param value:
+ A unicode string containing an IPv4 address, IPv4 address with CIDR,
+ an IPv6 address or IPv6 address with CIDR
+ """
+
+ if not isinstance(value, str_cls):
+ raise TypeError(unwrap(
+ '''
+ %s value must be a unicode string, not %s
+ ''',
+ type_name(self),
+ type_name(value)
+ ))
+
+ original_value = value
+
+ has_cidr = value.find('/') != -1
+ cidr = 0
+ if has_cidr:
+ parts = value.split('/', 1)
+ value = parts[0]
+ cidr = int(parts[1])
+ if cidr < 0:
+ raise ValueError(unwrap(
+ '''
+ %s value contains a CIDR range less than 0
+ ''',
+ type_name(self)
+ ))
+
+ if value.find(':') != -1:
+ family = socket.AF_INET6
+ if cidr > 128:
+ raise ValueError(unwrap(
+ '''
+ %s value contains a CIDR range bigger than 128, the maximum
+ value for an IPv6 address
+ ''',
+ type_name(self)
+ ))
+ cidr_size = 128
+ else:
+ family = socket.AF_INET
+ if cidr > 32:
+ raise ValueError(unwrap(
+ '''
+ %s value contains a CIDR range bigger than 32, the maximum
+ value for an IPv4 address
+ ''',
+ type_name(self)
+ ))
+ cidr_size = 32
+
+ cidr_bytes = b''
+ if has_cidr:
+ cidr_mask = '1' * cidr
+ cidr_mask += '0' * (cidr_size - len(cidr_mask))
+ cidr_bytes = int_to_bytes(int(cidr_mask, 2))
+ cidr_bytes = (b'\x00' * ((cidr_size // 8) - len(cidr_bytes))) + cidr_bytes
+
+ self._native = original_value
+ self.contents = inet_pton(family, value) + cidr_bytes
+ self._bytes = self.contents
+ self._header = None
+ if self._trailer != b'':
+ self._trailer = b''
+
+ @property
+ def native(self):
+ """
+ The native Python datatype representation of this value
+
+ :return:
+ A unicode string or None
+ """
+
+ if self.contents is None:
+ return None
+
+ if self._native is None:
+ byte_string = self.__bytes__()
+ byte_len = len(byte_string)
+ value = None
+ cidr_int = None
+ if byte_len in set([32, 16]):
+ value = inet_ntop(socket.AF_INET6, byte_string[0:16])
+ if byte_len > 16:
+ cidr_int = int_from_bytes(byte_string[16:])
+ elif byte_len in set([8, 4]):
+ value = inet_ntop(socket.AF_INET, byte_string[0:4])
+ if byte_len > 4:
+ cidr_int = int_from_bytes(byte_string[4:])
+ if cidr_int is not None:
+ cidr_bits = '{0:b}'.format(cidr_int)
+ cidr = len(cidr_bits.rstrip('0'))
+ value = value + '/' + str_cls(cidr)
+ self._native = value
+ return self._native
+
+ def __ne__(self, other):
+ return not self == other
+
+ def __eq__(self, other):
+ """
+ :param other:
+ Another IPAddress object
+
+ :return:
+ A boolean
+ """
+
+ if not isinstance(other, IPAddress):
+ return False
+
+ return self.__bytes__() == other.__bytes__()
+
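+# Illustrative usage (a sketch, not part of the upstream source): IPAddress
+# accepts plain addresses as well as address/CIDR values (as used in name
+# constraints), e.g.
+#
+#     IPAddress('2001:db8::1').native    # '2001:db8::1'
+#     IPAddress('10.0.0.0/8').contents   # 8 bytes: 4 for the address + 4 netmask
+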
+
+class Attribute(Sequence):
+ _fields = [
+ ('type', ObjectIdentifier),
+ ('values', SetOf, {'spec': Any}),
+ ]
+
+
+class Attributes(SequenceOf):
+ _child_spec = Attribute
+
+
+class KeyUsage(BitString):
+ _map = {
+ 0: 'digital_signature',
+ 1: 'non_repudiation',
+ 2: 'key_encipherment',
+ 3: 'data_encipherment',
+ 4: 'key_agreement',
+ 5: 'key_cert_sign',
+ 6: 'crl_sign',
+ 7: 'encipher_only',
+ 8: 'decipher_only',
+ }
+
+
+class PrivateKeyUsagePeriod(Sequence):
+ _fields = [
+ ('not_before', GeneralizedTime, {'implicit': 0, 'optional': True}),
+ ('not_after', GeneralizedTime, {'implicit': 1, 'optional': True}),
+ ]
+
+
+class NotReallyTeletexString(TeletexString):
+ """
+ OpenSSL (and probably some other libraries) puts ISO-8859-1
+ into TeletexString instead of ITU T.61. We use Windows-1252 when
+ decoding since it is a superset of ISO-8859-1, and less likely to
+ cause encoding issues, but we stay strict with encoding to prevent
+ us from creating bad data.
+ """
+
+ _decoding_encoding = 'cp1252'
+
+ def __unicode__(self):
+ """
+ :return:
+ A unicode string
+ """
+
+ if self.contents is None:
+ return ''
+ if self._unicode is None:
+ self._unicode = self._merge_chunks().decode(self._decoding_encoding)
+ return self._unicode
+
+
+@contextmanager
+def strict_teletex():
+ try:
+ NotReallyTeletexString._decoding_encoding = 'teletex'
+ yield
+ finally:
+ NotReallyTeletexString._decoding_encoding = 'cp1252'
+
+
+class DirectoryString(Choice):
+ _alternatives = [
+ ('teletex_string', NotReallyTeletexString),
+ ('printable_string', PrintableString),
+ ('universal_string', UniversalString),
+ ('utf8_string', UTF8String),
+ ('bmp_string', BMPString),
+ # This is an invalid/bad alternative, but some broken certs use it
+ ('ia5_string', IA5String),
+ ]
+
+
+class NameType(ObjectIdentifier):
+ _map = {
+ '2.5.4.3': 'common_name',
+ '2.5.4.4': 'surname',
+ '2.5.4.5': 'serial_number',
+ '2.5.4.6': 'country_name',
+ '2.5.4.7': 'locality_name',
+ '2.5.4.8': 'state_or_province_name',
+ '2.5.4.9': 'street_address',
+ '2.5.4.10': 'organization_name',
+ '2.5.4.11': 'organizational_unit_name',
+ '2.5.4.12': 'title',
+ '2.5.4.15': 'business_category',
+ '2.5.4.17': 'postal_code',
+ '2.5.4.20': 'telephone_number',
+ '2.5.4.41': 'name',
+ '2.5.4.42': 'given_name',
+ '2.5.4.43': 'initials',
+ '2.5.4.44': 'generation_qualifier',
+ '2.5.4.45': 'unique_identifier',
+ '2.5.4.46': 'dn_qualifier',
+ '2.5.4.65': 'pseudonym',
+ '2.5.4.97': 'organization_identifier',
+ # https://www.trustedcomputinggroup.org/wp-content/uploads/Credential_Profile_EK_V2.0_R14_published.pdf
+ '2.23.133.2.1': 'tpm_manufacturer',
+ '2.23.133.2.2': 'tpm_model',
+ '2.23.133.2.3': 'tpm_version',
+ '2.23.133.2.4': 'platform_manufacturer',
+ '2.23.133.2.5': 'platform_model',
+ '2.23.133.2.6': 'platform_version',
+ # https://tools.ietf.org/html/rfc2985#page-26
+ '1.2.840.113549.1.9.1': 'email_address',
+ # Page 10 of https://cabforum.org/wp-content/uploads/EV-V1_5_5.pdf
+ '1.3.6.1.4.1.311.60.2.1.1': 'incorporation_locality',
+ '1.3.6.1.4.1.311.60.2.1.2': 'incorporation_state_or_province',
+ '1.3.6.1.4.1.311.60.2.1.3': 'incorporation_country',
+ # https://tools.ietf.org/html/rfc4519#section-2.39
+ '0.9.2342.19200300.100.1.1': 'user_id',
+ # https://tools.ietf.org/html/rfc2247#section-4
+ '0.9.2342.19200300.100.1.25': 'domain_component',
+ # http://www.alvestrand.no/objectid/0.2.262.1.10.7.20.html
+ '0.2.262.1.10.7.20': 'name_distinguisher',
+ }
+
+ # This order is largely based on observed order seen in EV certs from
+ # Symantec and DigiCert. Some of the uncommon name-related fields are
+ # just placed in what seems like a reasonable order.
+ preferred_order = [
+ 'incorporation_country',
+ 'incorporation_state_or_province',
+ 'incorporation_locality',
+ 'business_category',
+ 'serial_number',
+ 'country_name',
+ 'postal_code',
+ 'state_or_province_name',
+ 'locality_name',
+ 'street_address',
+ 'organization_name',
+ 'organizational_unit_name',
+ 'title',
+ 'common_name',
+ 'user_id',
+ 'initials',
+ 'generation_qualifier',
+ 'surname',
+ 'given_name',
+ 'name',
+ 'pseudonym',
+ 'dn_qualifier',
+ 'telephone_number',
+ 'email_address',
+ 'domain_component',
+ 'name_distinguisher',
+ 'organization_identifier',
+ 'tpm_manufacturer',
+ 'tpm_model',
+ 'tpm_version',
+ 'platform_manufacturer',
+ 'platform_model',
+ 'platform_version',
+ ]
+
+ @classmethod
+ def preferred_ordinal(cls, attr_name):
+ """
+ Returns an ordering value for a particular attribute key.
+
+ Unrecognized attributes and OIDs will be sorted lexically at the end.
+
+ :return:
+ An orderable value.
+
+ """
+
+ attr_name = cls.map(attr_name)
+ if attr_name in cls.preferred_order:
+ ordinal = cls.preferred_order.index(attr_name)
+ else:
+ ordinal = len(cls.preferred_order)
+
+ return (ordinal, attr_name)
+
+ @property
+ def human_friendly(self):
+ """
+ :return:
+ A human-friendly unicode string to display to users
+ """
+
+ return {
+ 'common_name': 'Common Name',
+ 'surname': 'Surname',
+ 'serial_number': 'Serial Number',
+ 'country_name': 'Country',
+ 'locality_name': 'Locality',
+ 'state_or_province_name': 'State/Province',
+ 'street_address': 'Street Address',
+ 'organization_name': 'Organization',
+ 'organizational_unit_name': 'Organizational Unit',
+ 'title': 'Title',
+ 'business_category': 'Business Category',
+ 'postal_code': 'Postal Code',
+ 'telephone_number': 'Telephone Number',
+ 'name': 'Name',
+ 'given_name': 'Given Name',
+ 'initials': 'Initials',
+ 'generation_qualifier': 'Generation Qualifier',
+ 'unique_identifier': 'Unique Identifier',
+ 'dn_qualifier': 'DN Qualifier',
+ 'pseudonym': 'Pseudonym',
+ 'email_address': 'Email Address',
+ 'incorporation_locality': 'Incorporation Locality',
+ 'incorporation_state_or_province': 'Incorporation State/Province',
+ 'incorporation_country': 'Incorporation Country',
+ 'domain_component': 'Domain Component',
+ 'name_distinguisher': 'Name Distinguisher',
+ 'organization_identifier': 'Organization Identifier',
+ 'tpm_manufacturer': 'TPM Manufacturer',
+ 'tpm_model': 'TPM Model',
+ 'tpm_version': 'TPM Version',
+ 'platform_manufacturer': 'Platform Manufacturer',
+ 'platform_model': 'Platform Model',
+ 'platform_version': 'Platform Version',
+ 'user_id': 'User ID',
+ }.get(self.native, self.native)
+
+
+class NameTypeAndValue(Sequence):
+ _fields = [
+ ('type', NameType),
+ ('value', Any),
+ ]
+
+ _oid_pair = ('type', 'value')
+ _oid_specs = {
+ 'common_name': DirectoryString,
+ 'surname': DirectoryString,
+ 'serial_number': DirectoryString,
+ 'country_name': DirectoryString,
+ 'locality_name': DirectoryString,
+ 'state_or_province_name': DirectoryString,
+ 'street_address': DirectoryString,
+ 'organization_name': DirectoryString,
+ 'organizational_unit_name': DirectoryString,
+ 'title': DirectoryString,
+ 'business_category': DirectoryString,
+ 'postal_code': DirectoryString,
+ 'telephone_number': PrintableString,
+ 'name': DirectoryString,
+ 'given_name': DirectoryString,
+ 'initials': DirectoryString,
+ 'generation_qualifier': DirectoryString,
+ 'unique_identifier': OctetBitString,
+ 'dn_qualifier': DirectoryString,
+ 'pseudonym': DirectoryString,
+ # https://tools.ietf.org/html/rfc2985#page-26
+ 'email_address': EmailAddress,
+ # Page 10 of https://cabforum.org/wp-content/uploads/EV-V1_5_5.pdf
+ 'incorporation_locality': DirectoryString,
+ 'incorporation_state_or_province': DirectoryString,
+ 'incorporation_country': DirectoryString,
+ 'domain_component': DNSName,
+ 'name_distinguisher': DirectoryString,
+ 'organization_identifier': DirectoryString,
+ 'tpm_manufacturer': UTF8String,
+ 'tpm_model': UTF8String,
+ 'tpm_version': UTF8String,
+ 'platform_manufacturer': UTF8String,
+ 'platform_model': UTF8String,
+ 'platform_version': UTF8String,
+ 'user_id': DirectoryString,
+ }
+
+ _prepped = None
+
+ @property
+ def prepped_value(self):
+ """
+ Returns the value after being processed by the internationalized string
+ preparation as specified by RFC 5280
+
+ :return:
+ A unicode string
+ """
+
+ if self._prepped is None:
+ self._prepped = self._ldap_string_prep(self['value'].native)
+ return self._prepped
+
+ def __ne__(self, other):
+ return not self == other
+
+ def __eq__(self, other):
+ """
+ Equality as defined by https://tools.ietf.org/html/rfc5280#section-7.1
+
+ :param other:
+ Another NameTypeAndValue object
+
+ :return:
+ A boolean
+ """
+
+ if not isinstance(other, NameTypeAndValue):
+ return False
+
+ if other['type'].native != self['type'].native:
+ return False
+
+ return other.prepped_value == self.prepped_value
+
+ def _ldap_string_prep(self, string):
+ """
+ Implements the internationalized string preparation algorithm from
+ RFC 4518. https://tools.ietf.org/html/rfc4518#section-2
+
+ :param string:
+ A unicode string to prepare
+
+ :return:
+ A prepared unicode string, ready for comparison
+ """
+
+ # Map step
+ string = re.sub('[\u00ad\u1806\u034f\u180b-\u180d\ufe0f-\uff00\ufffc]+', '', string)
+ string = re.sub('[\u0009\u000a\u000b\u000c\u000d\u0085]', ' ', string)
+ if sys.maxunicode == 0xffff:
+ # Some installs of Python 2.7 don't support 8-digit unicode escape
+ # ranges, so we have to break them into pieces
+ # Original was: \U0001D173-\U0001D17A and \U000E0020-\U000E007F
+ string = re.sub('\ud834[\udd73-\udd7a]|\udb40[\udc20-\udc7f]|\U000e0001', '', string)
+ else:
+ string = re.sub('[\U0001D173-\U0001D17A\U000E0020-\U000E007F\U000e0001]', '', string)
+ string = re.sub(
+ '[\u0000-\u0008\u000e-\u001f\u007f-\u0084\u0086-\u009f\u06dd\u070f\u180e\u200c-\u200f'
+ '\u202a-\u202e\u2060-\u2063\u206a-\u206f\ufeff\ufff9-\ufffb]+',
+ '',
+ string
+ )
+ string = string.replace('\u200b', '')
+ string = re.sub('[\u00a0\u1680\u2000-\u200a\u2028-\u2029\u202f\u205f\u3000]', ' ', string)
+
+ string = ''.join(map(stringprep.map_table_b2, string))
+
+ # Normalize step
+ string = unicodedata.normalize('NFKC', string)
+
+ # Prohibit step
+ for char in string:
+ if stringprep.in_table_a1(char):
+ raise ValueError(unwrap(
+ '''
+ X.509 Name objects may not contain unassigned code points
+ '''
+ ))
+
+ if stringprep.in_table_c8(char):
+ raise ValueError(unwrap(
+ '''
+ X.509 Name objects may not contain change display or
+ deprecated characters
+ '''
+ ))
+
+ if stringprep.in_table_c3(char):
+ raise ValueError(unwrap(
+ '''
+ X.509 Name objects may not contain private use characters
+ '''
+ ))
+
+ if stringprep.in_table_c4(char):
+ raise ValueError(unwrap(
+ '''
+ X.509 Name objects may not contain non-character code points
+ '''
+ ))
+
+ if stringprep.in_table_c5(char):
+ raise ValueError(unwrap(
+ '''
+ X.509 Name objects may not contain surrogate code points
+ '''
+ ))
+
+ if char == '\ufffd':
+ raise ValueError(unwrap(
+ '''
+ X.509 Name objects may not contain the replacement character
+ '''
+ ))
+
+ # Check bidirectional step - here we ensure that we are not mixing
+ # left-to-right and right-to-left text in the string
+ has_r_and_al_cat = False
+ has_l_cat = False
+ for char in string:
+ if stringprep.in_table_d1(char):
+ has_r_and_al_cat = True
+ elif stringprep.in_table_d2(char):
+ has_l_cat = True
+
+ if has_r_and_al_cat:
+ first_is_r_and_al = stringprep.in_table_d1(string[0])
+ last_is_r_and_al = stringprep.in_table_d1(string[-1])
+
+ if has_l_cat or not first_is_r_and_al or not last_is_r_and_al:
+ raise ValueError(unwrap(
+ '''
+ X.509 Name object contains a malformed bidirectional
+ sequence
+ '''
+ ))
+
+ # Insignificant space handling step
+ string = ' ' + re.sub(' +', ' ', string).strip() + ' '
+
+ return string
+
+
+class RelativeDistinguishedName(SetOf):
+ _child_spec = NameTypeAndValue
+
+ @property
+ def hashable(self):
+ """
+ :return:
+ A unicode string that can be used as a dict key or in a set
+ """
+
+ output = []
+ values = self._get_values(self)
+ for key in sorted(values.keys()):
+ output.append('%s: %s' % (key, values[key]))
+ # Unit separator is used here since the normalization process for
+ # values removes any such character, and the keys are all dotted integers
+ # or under_score_words
+ return '\x1F'.join(output)
+
+ def __ne__(self, other):
+ return not self == other
+
+ def __eq__(self, other):
+ """
+ Equality as defined by https://tools.ietf.org/html/rfc5280#section-7.1
+
+ :param other:
+ Another RelativeDistinguishedName object
+
+ :return:
+ A boolean
+ """
+
+ if not isinstance(other, RelativeDistinguishedName):
+ return False
+
+ if len(self) != len(other):
+ return False
+
+ self_types = self._get_types(self)
+ other_types = self._get_types(other)
+
+ if self_types != other_types:
+ return False
+
+ self_values = self._get_values(self)
+ other_values = self._get_values(other)
+
+ for type_name_ in self_types:
+ if self_values[type_name_] != other_values[type_name_]:
+ return False
+
+ return True
+
+ def _get_types(self, rdn):
+ """
+ Returns a set of types contained in an RDN
+
+ :param rdn:
+ A RelativeDistinguishedName object
+
+ :return:
+ A set object with unicode strings of NameTypeAndValue type field
+ values
+ """
+
+ return set([ntv['type'].native for ntv in rdn])
+
+ def _get_values(self, rdn):
+ """
+ Returns a dict of prepped values contained in an RDN
+
+ :param rdn:
+ A RelativeDistinguishedName object
+
+ :return:
+ A dict object with unicode strings of NameTypeAndValue value field
+ values that have been prepped for comparison
+ """
+
+ output = {}
+ [output.update([(ntv['type'].native, ntv.prepped_value)]) for ntv in rdn]
+ return output
+
+
+class RDNSequence(SequenceOf):
+ _child_spec = RelativeDistinguishedName
+
+ @property
+ def hashable(self):
+ """
+ :return:
+ A unicode string that can be used as a dict key or in a set
+ """
+
+ # Record separator is used here since the normalization process for
+ # values removes any such character, and the keys are all dotted integers
+ # or under_score_words
+ return '\x1E'.join(rdn.hashable for rdn in self)
+
+ def __ne__(self, other):
+ return not self == other
+
+ def __eq__(self, other):
+ """
+ Equality as defined by https://tools.ietf.org/html/rfc5280#section-7.1
+
+ :param other:
+ Another RDNSequence object
+
+ :return:
+ A boolean
+ """
+
+ if not isinstance(other, RDNSequence):
+ return False
+
+ if len(self) != len(other):
+ return False
+
+ for index, self_rdn in enumerate(self):
+ if other[index] != self_rdn:
+ return False
+
+ return True
+
+
+class Name(Choice):
+ _alternatives = [
+ ('', RDNSequence),
+ ]
+
+ _human_friendly = None
+ _sha1 = None
+ _sha256 = None
+
+ @classmethod
+ def build(cls, name_dict, use_printable=False):
+ """
+ Creates a Name object from a dict of unicode string keys and values.
+ The keys should be from NameType._map, or a dotted-integer OID unicode
+ string.
+
+ :param name_dict:
+ A dict of name information, e.g. {"common_name": "Will Bond",
+ "country_name": "US", "organization_name": "Codex Non Sufficit LC"}
+
+ :param use_printable:
+ A bool - if PrintableString should be used for encoding instead of
+ UTF8String. This is for backwards compatibility with old software.
+
+ :return:
+ An x509.Name object
+ """
+
+ rdns = []
+ if not use_printable:
+ encoding_name = 'utf8_string'
+ encoding_class = UTF8String
+ else:
+ encoding_name = 'printable_string'
+ encoding_class = PrintableString
+
+ # Sort the attributes according to NameType.preferred_order
+ name_dict = OrderedDict(
+ sorted(
+ name_dict.items(),
+ key=lambda item: NameType.preferred_ordinal(item[0])
+ )
+ )
+
+ for attribute_name, attribute_value in name_dict.items():
+ attribute_name = NameType.map(attribute_name)
+ if attribute_name == 'email_address':
+ value = EmailAddress(attribute_value)
+ elif attribute_name == 'domain_component':
+ value = DNSName(attribute_value)
+ elif attribute_name in set(['dn_qualifier', 'country_name', 'serial_number']):
+ value = DirectoryString(
+ name='printable_string',
+ value=PrintableString(attribute_value)
+ )
+ else:
+ value = DirectoryString(
+ name=encoding_name,
+ value=encoding_class(attribute_value)
+ )
+
+ rdns.append(RelativeDistinguishedName([
+ NameTypeAndValue({
+ 'type': attribute_name,
+ 'value': value
+ })
+ ]))
+
+ return cls(name='', value=RDNSequence(rdns))
+
+ @property
+ def hashable(self):
+ """
+ :return:
+ A unicode string that can be used as a dict key or in a set
+ """
+
+ return self.chosen.hashable
+
+ def __len__(self):
+ return len(self.chosen)
+
+ def __ne__(self, other):
+ return not self == other
+
+ def __eq__(self, other):
+ """
+ Equality as defined by https://tools.ietf.org/html/rfc5280#section-7.1
+
+ :param other:
+ Another Name object
+
+ :return:
+ A boolean
+ """
+
+ if not isinstance(other, Name):
+ return False
+ return self.chosen == other.chosen
+
+ @property
+ def native(self):
+ if self._native is None:
+ self._native = OrderedDict()
+ for rdn in self.chosen.native:
+ for type_val in rdn:
+ field_name = type_val['type']
+ if field_name in self._native:
+ existing = self._native[field_name]
+ if not isinstance(existing, list):
+ existing = self._native[field_name] = [existing]
+ existing.append(type_val['value'])
+ else:
+ self._native[field_name] = type_val['value']
+ return self._native
+
+ @property
+ def human_friendly(self):
+ """
+ :return:
+ A human-friendly unicode string containing the parts of the name
+ """
+
+ if self._human_friendly is None:
+ data = OrderedDict()
+ last_field = None
+ for rdn in self.chosen:
+ for type_val in rdn:
+ field_name = type_val['type'].human_friendly
+ last_field = field_name
+ if field_name in data:
+ data[field_name] = [data[field_name]]
+ data[field_name].append(type_val['value'])
+ else:
+ data[field_name] = type_val['value']
+ to_join = []
+ keys = data.keys()
+ if last_field == 'Country':
+ keys = reversed(list(keys))
+ for key in keys:
+ value = data[key]
+ native_value = self._recursive_humanize(value)
+ to_join.append('%s: %s' % (key, native_value))
+
+ has_comma = False
+ for element in to_join:
+ if element.find(',') != -1:
+ has_comma = True
+ break
+
+ separator = ', ' if not has_comma else '; '
+ self._human_friendly = separator.join(to_join[::-1])
+
+ return self._human_friendly
+
+ def _recursive_humanize(self, value):
+ """
+ Recursively serializes data compiled from the RDNSequence
+
+ :param value:
+ An Asn1Value object, or a list of Asn1Value objects
+
+ :return:
+ A unicode string
+ """
+
+ if isinstance(value, list):
+ return ', '.join(
+ reversed([self._recursive_humanize(sub_value) for sub_value in value])
+ )
+ return value.native
+
+ @property
+ def sha1(self):
+ """
+ :return:
+ The SHA1 hash of the DER-encoded bytes of this name
+ """
+
+ if self._sha1 is None:
+ self._sha1 = hashlib.sha1(self.dump()).digest()
+ return self._sha1
+
+ @property
+ def sha256(self):
+ """
+ :return:
+ The SHA-256 hash of the DER-encoded bytes of this name
+ """
+
+ if self._sha256 is None:
+ self._sha256 = hashlib.sha256(self.dump()).digest()
+ return self._sha256
+
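+# Illustrative usage (a sketch, not part of the upstream source): Name.build()
+# assembles a distinguished name from a plain dict, ordering the RDNs per
+# NameType.preferred_order, e.g.
+#
+#     name = Name.build({'common_name': 'example.com', 'country_name': 'US'})
+#     name.human_friendly          # 'Common Name: example.com, Country: US'
+#     name.native['country_name']  # 'US'
+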
+
+class AnotherName(Sequence):
+ _fields = [
+ ('type_id', ObjectIdentifier),
+ ('value', Any, {'explicit': 0}),
+ ]
+
+
+class CountryName(Choice):
+ class_ = 1
+ tag = 1
+
+ _alternatives = [
+ ('x121_dcc_code', NumericString),
+ ('iso_3166_alpha2_code', PrintableString),
+ ]
+
+
+class AdministrationDomainName(Choice):
+ class_ = 1
+ tag = 2
+
+ _alternatives = [
+ ('numeric', NumericString),
+ ('printable', PrintableString),
+ ]
+
+
+class PrivateDomainName(Choice):
+ _alternatives = [
+ ('numeric', NumericString),
+ ('printable', PrintableString),
+ ]
+
+
+class PersonalName(Set):
+ _fields = [
+ ('surname', PrintableString, {'implicit': 0}),
+ ('given_name', PrintableString, {'implicit': 1, 'optional': True}),
+ ('initials', PrintableString, {'implicit': 2, 'optional': True}),
+ ('generation_qualifier', PrintableString, {'implicit': 3, 'optional': True}),
+ ]
+
+
+class TeletexPersonalName(Set):
+ _fields = [
+ ('surname', TeletexString, {'implicit': 0}),
+ ('given_name', TeletexString, {'implicit': 1, 'optional': True}),
+ ('initials', TeletexString, {'implicit': 2, 'optional': True}),
+ ('generation_qualifier', TeletexString, {'implicit': 3, 'optional': True}),
+ ]
+
+
+class OrganizationalUnitNames(SequenceOf):
+ _child_spec = PrintableString
+
+
+class TeletexOrganizationalUnitNames(SequenceOf):
+ _child_spec = TeletexString
+
+
+class BuiltInStandardAttributes(Sequence):
+ _fields = [
+ ('country_name', CountryName, {'optional': True}),
+ ('administration_domain_name', AdministrationDomainName, {'optional': True}),
+ ('network_address', NumericString, {'implicit': 0, 'optional': True}),
+ ('terminal_identifier', PrintableString, {'implicit': 1, 'optional': True}),
+ ('private_domain_name', PrivateDomainName, {'explicit': 2, 'optional': True}),
+ ('organization_name', PrintableString, {'implicit': 3, 'optional': True}),
+ ('numeric_user_identifier', NumericString, {'implicit': 4, 'optional': True}),
+ ('personal_name', PersonalName, {'implicit': 5, 'optional': True}),
+ ('organizational_unit_names', OrganizationalUnitNames, {'implicit': 6, 'optional': True}),
+ ]
+
+
+class BuiltInDomainDefinedAttribute(Sequence):
+ _fields = [
+ ('type', PrintableString),
+ ('value', PrintableString),
+ ]
+
+
+class BuiltInDomainDefinedAttributes(SequenceOf):
+ _child_spec = BuiltInDomainDefinedAttribute
+
+
+class TeletexDomainDefinedAttribute(Sequence):
+ _fields = [
+ ('type', TeletexString),
+ ('value', TeletexString),
+ ]
+
+
+class TeletexDomainDefinedAttributes(SequenceOf):
+ _child_spec = TeletexDomainDefinedAttribute
+
+
+class PhysicalDeliveryCountryName(Choice):
+ _alternatives = [
+ ('x121_dcc_code', NumericString),
+ ('iso_3166_alpha2_code', PrintableString),
+ ]
+
+
+class PostalCode(Choice):
+ _alternatives = [
+ ('numeric_code', NumericString),
+ ('printable_code', PrintableString),
+ ]
+
+
+class PDSParameter(Set):
+ _fields = [
+ ('printable_string', PrintableString, {'optional': True}),
+ ('teletex_string', TeletexString, {'optional': True}),
+ ]
+
+
+class PrintableAddress(SequenceOf):
+ _child_spec = PrintableString
+
+
+class UnformattedPostalAddress(Set):
+ _fields = [
+ ('printable_address', PrintableAddress, {'optional': True}),
+ ('teletex_string', TeletexString, {'optional': True}),
+ ]
+
+
+class E1634Address(Sequence):
+ _fields = [
+ ('number', NumericString, {'implicit': 0}),
+ ('sub_address', NumericString, {'implicit': 1, 'optional': True}),
+ ]
+
+
+class NAddresses(SetOf):
+ _child_spec = OctetString
+
+
+class PresentationAddress(Sequence):
+ _fields = [
+ ('p_selector', OctetString, {'explicit': 0, 'optional': True}),
+ ('s_selector', OctetString, {'explicit': 1, 'optional': True}),
+ ('t_selector', OctetString, {'explicit': 2, 'optional': True}),
+ ('n_addresses', NAddresses, {'explicit': 3}),
+ ]
+
+
+class ExtendedNetworkAddress(Choice):
+ _alternatives = [
+ ('e163_4_address', E1634Address),
+ ('psap_address', PresentationAddress, {'implicit': 0})
+ ]
+
+
+class TerminalType(Integer):
+ _map = {
+ 3: 'telex',
+ 4: 'teletex',
+ 5: 'g3_facsimile',
+ 6: 'g4_facsimile',
+ 7: 'ia5_terminal',
+ 8: 'videotex',
+ }
+
+
+class ExtensionAttributeType(Integer):
+ _map = {
+ 1: 'common_name',
+ 2: 'teletex_common_name',
+ 3: 'teletex_organization_name',
+ 4: 'teletex_personal_name',
+ 5: 'teletex_organization_unit_names',
+ 6: 'teletex_domain_defined_attributes',
+ 7: 'pds_name',
+ 8: 'physical_delivery_country_name',
+ 9: 'postal_code',
+ 10: 'physical_delivery_office_name',
+ 11: 'physical_delivery_office_number',
+ 12: 'extension_of_address_components',
+ 13: 'physical_delivery_personal_name',
+ 14: 'physical_delivery_organization_name',
+ 15: 'extension_physical_delivery_address_components',
+ 16: 'unformatted_postal_address',
+ 17: 'street_address',
+ 18: 'post_office_box_address',
+ 19: 'poste_restante_address',
+ 20: 'unique_postal_name',
+ 21: 'local_postal_attributes',
+ 22: 'extended_network_address',
+ 23: 'terminal_type',
+ }
+
+
+class ExtensionAttribute(Sequence):
+ _fields = [
+ ('extension_attribute_type', ExtensionAttributeType, {'implicit': 0}),
+ ('extension_attribute_value', Any, {'explicit': 1}),
+ ]
+
+ _oid_pair = ('extension_attribute_type', 'extension_attribute_value')
+ _oid_specs = {
+ 'common_name': PrintableString,
+ 'teletex_common_name': TeletexString,
+ 'teletex_organization_name': TeletexString,
+ 'teletex_personal_name': TeletexPersonalName,
+ 'teletex_organization_unit_names': TeletexOrganizationalUnitNames,
+ 'teletex_domain_defined_attributes': TeletexDomainDefinedAttributes,
+ 'pds_name': PrintableString,
+ 'physical_delivery_country_name': PhysicalDeliveryCountryName,
+ 'postal_code': PostalCode,
+ 'physical_delivery_office_name': PDSParameter,
+ 'physical_delivery_office_number': PDSParameter,
+ 'extension_of_address_components': PDSParameter,
+ 'physical_delivery_personal_name': PDSParameter,
+ 'physical_delivery_organization_name': PDSParameter,
+ 'extension_physical_delivery_address_components': PDSParameter,
+ 'unformatted_postal_address': UnformattedPostalAddress,
+ 'street_address': PDSParameter,
+ 'post_office_box_address': PDSParameter,
+ 'poste_restante_address': PDSParameter,
+ 'unique_postal_name': PDSParameter,
+ 'local_postal_attributes': PDSParameter,
+ 'extended_network_address': ExtendedNetworkAddress,
+ 'terminal_type': TerminalType,
+ }
+
+
+class ExtensionAttributes(SequenceOf):
+ _child_spec = ExtensionAttribute
+
+
+class ORAddress(Sequence):
+ _fields = [
+ ('built_in_standard_attributes', BuiltInStandardAttributes),
+ ('built_in_domain_defined_attributes', BuiltInDomainDefinedAttributes, {'optional': True}),
+ ('extension_attributes', ExtensionAttributes, {'optional': True}),
+ ]
+
+
+class EDIPartyName(Sequence):
+ _fields = [
+ ('name_assigner', DirectoryString, {'implicit': 0, 'optional': True}),
+ ('party_name', DirectoryString, {'implicit': 1}),
+ ]
+
+
+class GeneralName(Choice):
+ _alternatives = [
+ ('other_name', AnotherName, {'implicit': 0}),
+ ('rfc822_name', EmailAddress, {'implicit': 1}),
+ ('dns_name', DNSName, {'implicit': 2}),
+ ('x400_address', ORAddress, {'implicit': 3}),
+ ('directory_name', Name, {'explicit': 4}),
+ ('edi_party_name', EDIPartyName, {'implicit': 5}),
+ ('uniform_resource_identifier', URI, {'implicit': 6}),
+ ('ip_address', IPAddress, {'implicit': 7}),
+ ('registered_id', ObjectIdentifier, {'implicit': 8}),
+ ]
+
+ def __ne__(self, other):
+ return not self == other
+
+ def __eq__(self, other):
+ """
+ Does not support other_name, x400_address or edi_party_name
+
+ :param other:
+ The other GeneralName to compare to
+
+ :return:
+ A boolean
+ """
+
+ if self.name in ('other_name', 'x400_address', 'edi_party_name'):
+ raise ValueError(unwrap(
+ '''
+ Comparison is not supported for GeneralName objects of
+ choice %s
+ ''',
+ self.name
+ ))
+
+ if other.name in ('other_name', 'x400_address', 'edi_party_name'):
+ raise ValueError(unwrap(
+ '''
+ Comparison is not supported for GeneralName objects of choice
+ %s''',
+ other.name
+ ))
+
+ if self.name != other.name:
+ return False
+
+ return self.chosen == other.chosen
+
+
+class GeneralNames(SequenceOf):
+ _child_spec = GeneralName
+
+
+class Time(Choice):
+ _alternatives = [
+ ('utc_time', UTCTime),
+ ('general_time', GeneralizedTime),
+ ]
+
+
+class Validity(Sequence):
+ _fields = [
+ ('not_before', Time),
+ ('not_after', Time),
+ ]
+
+
+class BasicConstraints(Sequence):
+ _fields = [
+ ('ca', Boolean, {'default': False}),
+ ('path_len_constraint', Integer, {'optional': True}),
+ ]
+
+
+class AuthorityKeyIdentifier(Sequence):
+ _fields = [
+ ('key_identifier', OctetString, {'implicit': 0, 'optional': True}),
+ ('authority_cert_issuer', GeneralNames, {'implicit': 1, 'optional': True}),
+ ('authority_cert_serial_number', Integer, {'implicit': 2, 'optional': True}),
+ ]
+
+
+class DistributionPointName(Choice):
+ _alternatives = [
+ ('full_name', GeneralNames, {'implicit': 0}),
+ ('name_relative_to_crl_issuer', RelativeDistinguishedName, {'implicit': 1}),
+ ]
+
+
+class ReasonFlags(BitString):
+ _map = {
+ 0: 'unused',
+ 1: 'key_compromise',
+ 2: 'ca_compromise',
+ 3: 'affiliation_changed',
+ 4: 'superseded',
+ 5: 'cessation_of_operation',
+ 6: 'certificate_hold',
+ 7: 'privilege_withdrawn',
+ 8: 'aa_compromise',
+ }
+
+
+class GeneralSubtree(Sequence):
+ _fields = [
+ ('base', GeneralName),
+ ('minimum', Integer, {'implicit': 0, 'default': 0}),
+ ('maximum', Integer, {'implicit': 1, 'optional': True}),
+ ]
+
+
+class GeneralSubtrees(SequenceOf):
+ _child_spec = GeneralSubtree
+
+
+class NameConstraints(Sequence):
+ _fields = [
+ ('permitted_subtrees', GeneralSubtrees, {'implicit': 0, 'optional': True}),
+ ('excluded_subtrees', GeneralSubtrees, {'implicit': 1, 'optional': True}),
+ ]
+
+
+class DistributionPoint(Sequence):
+ _fields = [
+ ('distribution_point', DistributionPointName, {'explicit': 0, 'optional': True}),
+ ('reasons', ReasonFlags, {'implicit': 1, 'optional': True}),
+ ('crl_issuer', GeneralNames, {'implicit': 2, 'optional': True}),
+ ]
+
+ _url = False
+
+ @property
+ def url(self):
+ """
+ :return:
+ None or a unicode string of the distribution point's URL
+ """
+
+ if self._url is False:
+ self._url = None
+ name = self['distribution_point']
+ if name.name != 'full_name':
+ raise ValueError(unwrap(
+ '''
+ CRL distribution points that are relative to the issuer are
+ not supported
+ '''
+ ))
+
+ for general_name in name.chosen:
+ if general_name.name == 'uniform_resource_identifier':
+ url = general_name.native
+ if url.lower().startswith(('http://', 'https://', 'ldap://', 'ldaps://')):
+ self._url = url
+ break
+
+ return self._url
+
+
+class CRLDistributionPoints(SequenceOf):
+ _child_spec = DistributionPoint
+
+
+class DisplayText(Choice):
+ _alternatives = [
+ ('ia5_string', IA5String),
+ ('visible_string', VisibleString),
+ ('bmp_string', BMPString),
+ ('utf8_string', UTF8String),
+ ]
+
+
+class NoticeNumbers(SequenceOf):
+ _child_spec = Integer
+
+
+class NoticeReference(Sequence):
+ _fields = [
+ ('organization', DisplayText),
+ ('notice_numbers', NoticeNumbers),
+ ]
+
+
+class UserNotice(Sequence):
+ _fields = [
+ ('notice_ref', NoticeReference, {'optional': True}),
+ ('explicit_text', DisplayText, {'optional': True}),
+ ]
+
+
+class PolicyQualifierId(ObjectIdentifier):
+ _map = {
+ '1.3.6.1.5.5.7.2.1': 'certification_practice_statement',
+ '1.3.6.1.5.5.7.2.2': 'user_notice',
+ }
+
+
+class PolicyQualifierInfo(Sequence):
+ _fields = [
+ ('policy_qualifier_id', PolicyQualifierId),
+ ('qualifier', Any),
+ ]
+
+ _oid_pair = ('policy_qualifier_id', 'qualifier')
+ _oid_specs = {
+ 'certification_practice_statement': IA5String,
+ 'user_notice': UserNotice,
+ }
+
+
+class PolicyQualifierInfos(SequenceOf):
+ _child_spec = PolicyQualifierInfo
+
+
+class PolicyIdentifier(ObjectIdentifier):
+ _map = {
+ '2.5.29.32.0': 'any_policy',
+ }
+
+
+class PolicyInformation(Sequence):
+ _fields = [
+ ('policy_identifier', PolicyIdentifier),
+ ('policy_qualifiers', PolicyQualifierInfos, {'optional': True})
+ ]
+
+
+class CertificatePolicies(SequenceOf):
+ _child_spec = PolicyInformation
+
+
+class PolicyMapping(Sequence):
+ _fields = [
+ ('issuer_domain_policy', PolicyIdentifier),
+ ('subject_domain_policy', PolicyIdentifier),
+ ]
+
+
+class PolicyMappings(SequenceOf):
+ _child_spec = PolicyMapping
+
+
+class PolicyConstraints(Sequence):
+ _fields = [
+ ('require_explicit_policy', Integer, {'implicit': 0, 'optional': True}),
+ ('inhibit_policy_mapping', Integer, {'implicit': 1, 'optional': True}),
+ ]
+
+
+class KeyPurposeId(ObjectIdentifier):
+ _map = {
+ # https://tools.ietf.org/html/rfc5280#page-45
+ '2.5.29.37.0': 'any_extended_key_usage',
+ '1.3.6.1.5.5.7.3.1': 'server_auth',
+ '1.3.6.1.5.5.7.3.2': 'client_auth',
+ '1.3.6.1.5.5.7.3.3': 'code_signing',
+ '1.3.6.1.5.5.7.3.4': 'email_protection',
+ '1.3.6.1.5.5.7.3.5': 'ipsec_end_system',
+ '1.3.6.1.5.5.7.3.6': 'ipsec_tunnel',
+ '1.3.6.1.5.5.7.3.7': 'ipsec_user',
+ '1.3.6.1.5.5.7.3.8': 'time_stamping',
+ '1.3.6.1.5.5.7.3.9': 'ocsp_signing',
+ # http://tools.ietf.org/html/rfc3029.html#page-9
+ '1.3.6.1.5.5.7.3.10': 'dvcs',
+ # http://tools.ietf.org/html/rfc6268.html#page-16
+ '1.3.6.1.5.5.7.3.13': 'eap_over_ppp',
+ '1.3.6.1.5.5.7.3.14': 'eap_over_lan',
+ # https://tools.ietf.org/html/rfc5055#page-76
+ '1.3.6.1.5.5.7.3.15': 'scvp_server',
+ '1.3.6.1.5.5.7.3.16': 'scvp_client',
+ # https://tools.ietf.org/html/rfc4945#page-31
+ '1.3.6.1.5.5.7.3.17': 'ipsec_ike',
+ # https://tools.ietf.org/html/rfc5415#page-38
+ '1.3.6.1.5.5.7.3.18': 'capwap_ac',
+ '1.3.6.1.5.5.7.3.19': 'capwap_wtp',
+ # https://tools.ietf.org/html/rfc5924#page-8
+ '1.3.6.1.5.5.7.3.20': 'sip_domain',
+ # https://tools.ietf.org/html/rfc6187#page-7
+ '1.3.6.1.5.5.7.3.21': 'secure_shell_client',
+ '1.3.6.1.5.5.7.3.22': 'secure_shell_server',
+ # https://tools.ietf.org/html/rfc6494#page-7
+ '1.3.6.1.5.5.7.3.23': 'send_router',
+ '1.3.6.1.5.5.7.3.24': 'send_proxied_router',
+ '1.3.6.1.5.5.7.3.25': 'send_owner',
+ '1.3.6.1.5.5.7.3.26': 'send_proxied_owner',
+ # https://tools.ietf.org/html/rfc6402#page-10
+ '1.3.6.1.5.5.7.3.27': 'cmc_ca',
+ '1.3.6.1.5.5.7.3.28': 'cmc_ra',
+ '1.3.6.1.5.5.7.3.29': 'cmc_archive',
+ # https://tools.ietf.org/html/draft-ietf-sidr-bgpsec-pki-profiles-15#page-6
+ '1.3.6.1.5.5.7.3.30': 'bgpspec_router',
+ # https://www.ietf.org/proceedings/44/I-D/draft-ietf-ipsec-pki-req-01.txt
+ '1.3.6.1.5.5.8.2.2': 'ike_intermediate',
+ # https://msdn.microsoft.com/en-us/library/windows/desktop/aa378132(v=vs.85).aspx
+ # and https://support.microsoft.com/en-us/kb/287547
+ '1.3.6.1.4.1.311.10.3.1': 'microsoft_trust_list_signing',
+ '1.3.6.1.4.1.311.10.3.2': 'microsoft_time_stamp_signing',
+ '1.3.6.1.4.1.311.10.3.3': 'microsoft_server_gated',
+ '1.3.6.1.4.1.311.10.3.3.1': 'microsoft_serialized',
+ '1.3.6.1.4.1.311.10.3.4': 'microsoft_efs',
+ '1.3.6.1.4.1.311.10.3.4.1': 'microsoft_efs_recovery',
+ '1.3.6.1.4.1.311.10.3.5': 'microsoft_whql',
+ '1.3.6.1.4.1.311.10.3.6': 'microsoft_nt5',
+ '1.3.6.1.4.1.311.10.3.7': 'microsoft_oem_whql',
+ '1.3.6.1.4.1.311.10.3.8': 'microsoft_embedded_nt',
+ '1.3.6.1.4.1.311.10.3.9': 'microsoft_root_list_signer',
+ '1.3.6.1.4.1.311.10.3.10': 'microsoft_qualified_subordination',
+ '1.3.6.1.4.1.311.10.3.11': 'microsoft_key_recovery',
+ '1.3.6.1.4.1.311.10.3.12': 'microsoft_document_signing',
+ '1.3.6.1.4.1.311.10.3.13': 'microsoft_lifetime_signing',
+ '1.3.6.1.4.1.311.10.3.14': 'microsoft_mobile_device_software',
+ # https://support.microsoft.com/en-us/help/287547/object-ids-associated-with-microsoft-cryptography
+ '1.3.6.1.4.1.311.20.2.2': 'microsoft_smart_card_logon',
+ # https://opensource.apple.com/source
+ # - /Security/Security-57031.40.6/Security/libsecurity_keychain/lib/SecPolicy.cpp
+ # - /libsecurity_cssm/libsecurity_cssm-36064/lib/oidsalg.c
+ '1.2.840.113635.100.1.2': 'apple_x509_basic',
+ '1.2.840.113635.100.1.3': 'apple_ssl',
+ '1.2.840.113635.100.1.4': 'apple_local_cert_gen',
+ '1.2.840.113635.100.1.5': 'apple_csr_gen',
+ '1.2.840.113635.100.1.6': 'apple_revocation_crl',
+ '1.2.840.113635.100.1.7': 'apple_revocation_ocsp',
+ '1.2.840.113635.100.1.8': 'apple_smime',
+ '1.2.840.113635.100.1.9': 'apple_eap',
+ '1.2.840.113635.100.1.10': 'apple_software_update_signing',
+ '1.2.840.113635.100.1.11': 'apple_ipsec',
+ '1.2.840.113635.100.1.12': 'apple_ichat',
+ '1.2.840.113635.100.1.13': 'apple_resource_signing',
+ '1.2.840.113635.100.1.14': 'apple_pkinit_client',
+ '1.2.840.113635.100.1.15': 'apple_pkinit_server',
+ '1.2.840.113635.100.1.16': 'apple_code_signing',
+ '1.2.840.113635.100.1.17': 'apple_package_signing',
+ '1.2.840.113635.100.1.18': 'apple_id_validation',
+ '1.2.840.113635.100.1.20': 'apple_time_stamping',
+ '1.2.840.113635.100.1.21': 'apple_revocation',
+ '1.2.840.113635.100.1.22': 'apple_passbook_signing',
+ '1.2.840.113635.100.1.23': 'apple_mobile_store',
+ '1.2.840.113635.100.1.24': 'apple_escrow_service',
+ '1.2.840.113635.100.1.25': 'apple_profile_signer',
+ '1.2.840.113635.100.1.26': 'apple_qa_profile_signer',
+ '1.2.840.113635.100.1.27': 'apple_test_mobile_store',
+ '1.2.840.113635.100.1.28': 'apple_otapki_signer',
+ '1.2.840.113635.100.1.29': 'apple_test_otapki_signer',
+ '1.2.840.113625.100.1.30': 'apple_id_validation_record_signing_policy',
+ '1.2.840.113625.100.1.31': 'apple_smp_encryption',
+ '1.2.840.113625.100.1.32': 'apple_test_smp_encryption',
+ '1.2.840.113635.100.1.33': 'apple_server_authentication',
+ '1.2.840.113635.100.1.34': 'apple_pcs_escrow_service',
+ # http://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.201-2.pdf
+ '2.16.840.1.101.3.6.8': 'piv_card_authentication',
+ '2.16.840.1.101.3.6.7': 'piv_content_signing',
+ # https://tools.ietf.org/html/rfc4556.html
+ '1.3.6.1.5.2.3.4': 'pkinit_kpclientauth',
+ '1.3.6.1.5.2.3.5': 'pkinit_kpkdc',
+ # https://www.adobe.com/devnet-docs/acrobatetk/tools/DigSig/changes.html
+ '1.2.840.113583.1.1.5': 'adobe_authentic_documents_trust',
+ # https://www.idmanagement.gov/wp-content/uploads/sites/1171/uploads/fpki-pivi-cert-profiles.pdf
+ '2.16.840.1.101.3.8.7': 'fpki_pivi_content_signing'
+ }
+
+
+class ExtKeyUsageSyntax(SequenceOf):
+ _child_spec = KeyPurposeId
+
+
+class AccessMethod(ObjectIdentifier):
+ _map = {
+ '1.3.6.1.5.5.7.48.1': 'ocsp',
+ '1.3.6.1.5.5.7.48.2': 'ca_issuers',
+ '1.3.6.1.5.5.7.48.3': 'time_stamping',
+ '1.3.6.1.5.5.7.48.5': 'ca_repository',
+ }
+
+
+class AccessDescription(Sequence):
+ _fields = [
+ ('access_method', AccessMethod),
+ ('access_location', GeneralName),
+ ]
+
+
+class AuthorityInfoAccessSyntax(SequenceOf):
+ _child_spec = AccessDescription
+
+
+class SubjectInfoAccessSyntax(SequenceOf):
+ _child_spec = AccessDescription
+
+
+# https://tools.ietf.org/html/rfc7633
+class Features(SequenceOf):
+ _child_spec = Integer
+
+
+class EntrustVersionInfo(Sequence):
+ _fields = [
+ ('entrust_vers', GeneralString),
+ ('entrust_info_flags', BitString)
+ ]
+
+
+class NetscapeCertificateType(BitString):
+ _map = {
+ 0: 'ssl_client',
+ 1: 'ssl_server',
+ 2: 'email',
+ 3: 'object_signing',
+ 4: 'reserved',
+ 5: 'ssl_ca',
+ 6: 'email_ca',
+ 7: 'object_signing_ca',
+ }
+
+
+class Version(Integer):
+ _map = {
+ 0: 'v1',
+ 1: 'v2',
+ 2: 'v3',
+ }
+
+
+class TPMSpecification(Sequence):
+ _fields = [
+ ('family', UTF8String),
+ ('level', Integer),
+ ('revision', Integer),
+ ]
+
+
+class SetOfTPMSpecification(SetOf):
+ _child_spec = TPMSpecification
+
+
+class TCGSpecificationVersion(Sequence):
+ _fields = [
+ ('major_version', Integer),
+ ('minor_version', Integer),
+ ('revision', Integer),
+ ]
+
+
+class TCGPlatformSpecification(Sequence):
+ _fields = [
+ ('version', TCGSpecificationVersion),
+ ('platform_class', OctetString),
+ ]
+
+
+class SetOfTCGPlatformSpecification(SetOf):
+ _child_spec = TCGPlatformSpecification
+
+
+class EKGenerationType(Enumerated):
+ _map = {
+ 0: 'internal',
+ 1: 'injected',
+ 2: 'internal_revocable',
+ 3: 'injected_revocable',
+ }
+
+
+class EKGenerationLocation(Enumerated):
+ _map = {
+ 0: 'tpm_manufacturer',
+ 1: 'platform_manufacturer',
+ 2: 'ek_cert_signer',
+ }
+
+
+class EKCertificateGenerationLocation(Enumerated):
+ _map = {
+ 0: 'tpm_manufacturer',
+ 1: 'platform_manufacturer',
+ 2: 'ek_cert_signer',
+ }
+
+
+class EvaluationAssuranceLevel(Enumerated):
+ _map = {
+ 1: 'level1',
+ 2: 'level2',
+ 3: 'level3',
+ 4: 'level4',
+ 5: 'level5',
+ 6: 'level6',
+ 7: 'level7',
+ }
+
+
+class EvaluationStatus(Enumerated):
+ _map = {
+ 0: 'designed_to_meet',
+ 1: 'evaluation_in_progress',
+ 2: 'evaluation_completed',
+ }
+
+
+class StrengthOfFunction(Enumerated):
+ _map = {
+ 0: 'basic',
+ 1: 'medium',
+ 2: 'high',
+ }
+
+
+class URIReference(Sequence):
+ _fields = [
+ ('uniform_resource_identifier', IA5String),
+ ('hash_algorithm', DigestAlgorithm, {'optional': True}),
+ ('hash_value', BitString, {'optional': True}),
+ ]
+
+
+class CommonCriteriaMeasures(Sequence):
+ _fields = [
+ ('version', IA5String),
+ ('assurance_level', EvaluationAssuranceLevel),
+ ('evaluation_status', EvaluationStatus),
+ ('plus', Boolean, {'default': False}),
+ ('strengh_of_function', StrengthOfFunction, {'implicit': 0, 'optional': True}),
+ ('profile_oid', ObjectIdentifier, {'implicit': 1, 'optional': True}),
+ ('profile_url', URIReference, {'implicit': 2, 'optional': True}),
+ ('target_oid', ObjectIdentifier, {'implicit': 3, 'optional': True}),
+ ('target_uri', URIReference, {'implicit': 4, 'optional': True}),
+ ]
+
+
+class SecurityLevel(Enumerated):
+ _map = {
+ 1: 'level1',
+ 2: 'level2',
+ 3: 'level3',
+ 4: 'level4',
+ }
+
+
+class FIPSLevel(Sequence):
+ _fields = [
+ ('version', IA5String),
+ ('level', SecurityLevel),
+ ('plus', Boolean, {'default': False}),
+ ]
+
+
+class TPMSecurityAssertions(Sequence):
+ _fields = [
+ ('version', Version, {'default': 'v1'}),
+ ('field_upgradable', Boolean, {'default': False}),
+ ('ek_generation_type', EKGenerationType, {'implicit': 0, 'optional': True}),
+ ('ek_generation_location', EKGenerationLocation, {'implicit': 1, 'optional': True}),
+ ('ek_certificate_generation_location', EKCertificateGenerationLocation, {'implicit': 2, 'optional': True}),
+ ('cc_info', CommonCriteriaMeasures, {'implicit': 3, 'optional': True}),
+ ('fips_level', FIPSLevel, {'implicit': 4, 'optional': True}),
+ ('iso_9000_certified', Boolean, {'implicit': 5, 'default': False}),
+ ('iso_9000_uri', IA5String, {'optional': True}),
+ ]
+
+
+class SetOfTPMSecurityAssertions(SetOf):
+ _child_spec = TPMSecurityAssertions
+
+
+class SubjectDirectoryAttributeId(ObjectIdentifier):
+ _map = {
+ # https://tools.ietf.org/html/rfc2256#page-11
+ '2.5.4.52': 'supported_algorithms',
+ # https://www.trustedcomputinggroup.org/wp-content/uploads/Credential_Profile_EK_V2.0_R14_published.pdf
+ '2.23.133.2.16': 'tpm_specification',
+ '2.23.133.2.17': 'tcg_platform_specification',
+ '2.23.133.2.18': 'tpm_security_assertions',
+ # https://tools.ietf.org/html/rfc3739#page-18
+ '1.3.6.1.5.5.7.9.1': 'pda_date_of_birth',
+ '1.3.6.1.5.5.7.9.2': 'pda_place_of_birth',
+ '1.3.6.1.5.5.7.9.3': 'pda_gender',
+ '1.3.6.1.5.5.7.9.4': 'pda_country_of_citizenship',
+ '1.3.6.1.5.5.7.9.5': 'pda_country_of_residence',
+ # https://holtstrom.com/michael/tools/asn1decoder.php
+ '1.2.840.113533.7.68.29': 'entrust_user_role',
+ }
+
+
+class SetOfGeneralizedTime(SetOf):
+ _child_spec = GeneralizedTime
+
+
+class SetOfDirectoryString(SetOf):
+ _child_spec = DirectoryString
+
+
+class SetOfPrintableString(SetOf):
+ _child_spec = PrintableString
+
+
+class SupportedAlgorithm(Sequence):
+ _fields = [
+ ('algorithm_identifier', AnyAlgorithmIdentifier),
+ ('intended_usage', KeyUsage, {'explicit': 0, 'optional': True}),
+ ('intended_certificate_policies', CertificatePolicies, {'explicit': 1, 'optional': True}),
+ ]
+
+
+class SetOfSupportedAlgorithm(SetOf):
+ _child_spec = SupportedAlgorithm
+
+
+class SubjectDirectoryAttribute(Sequence):
+ _fields = [
+ ('type', SubjectDirectoryAttributeId),
+ ('values', Any),
+ ]
+
+ _oid_pair = ('type', 'values')
+ _oid_specs = {
+ 'supported_algorithms': SetOfSupportedAlgorithm,
+ 'tpm_specification': SetOfTPMSpecification,
+ 'tcg_platform_specification': SetOfTCGPlatformSpecification,
+ 'tpm_security_assertions': SetOfTPMSecurityAssertions,
+ 'pda_date_of_birth': SetOfGeneralizedTime,
+ 'pda_place_of_birth': SetOfDirectoryString,
+ 'pda_gender': SetOfPrintableString,
+ 'pda_country_of_citizenship': SetOfPrintableString,
+ 'pda_country_of_residence': SetOfPrintableString,
+ }
+
+ def _values_spec(self):
+ type_ = self['type'].native
+ if type_ in self._oid_specs:
+ return self._oid_specs[type_]
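+        # Fall back to a generic SetOf for attribute types that have no
+        # registered spec so values under unknown OIDs can still be parsed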
+ return SetOf
+
+ _spec_callbacks = {
+ 'values': _values_spec
+ }
+
+
+class SubjectDirectoryAttributes(SequenceOf):
+ _child_spec = SubjectDirectoryAttribute
+
+
+class ExtensionId(ObjectIdentifier):
+ _map = {
+ '2.5.29.9': 'subject_directory_attributes',
+ '2.5.29.14': 'key_identifier',
+ '2.5.29.15': 'key_usage',
+ '2.5.29.16': 'private_key_usage_period',
+ '2.5.29.17': 'subject_alt_name',
+ '2.5.29.18': 'issuer_alt_name',
+ '2.5.29.19': 'basic_constraints',
+ '2.5.29.30': 'name_constraints',
+ '2.5.29.31': 'crl_distribution_points',
+ '2.5.29.32': 'certificate_policies',
+ '2.5.29.33': 'policy_mappings',
+ '2.5.29.35': 'authority_key_identifier',
+ '2.5.29.36': 'policy_constraints',
+ '2.5.29.37': 'extended_key_usage',
+ '2.5.29.46': 'freshest_crl',
+ '2.5.29.54': 'inhibit_any_policy',
+ '1.3.6.1.5.5.7.1.1': 'authority_information_access',
+ '1.3.6.1.5.5.7.1.11': 'subject_information_access',
+ # https://tools.ietf.org/html/rfc7633
+ '1.3.6.1.5.5.7.1.24': 'tls_feature',
+ '1.3.6.1.5.5.7.48.1.5': 'ocsp_no_check',
+ '1.2.840.113533.7.65.0': 'entrust_version_extension',
+ '2.16.840.1.113730.1.1': 'netscape_certificate_type',
+ # https://tools.ietf.org/html/rfc6962.html#page-14
+ '1.3.6.1.4.1.11129.2.4.2': 'signed_certificate_timestamp_list',
+ # https://docs.microsoft.com/en-us/openspecs/windows_protocols/ms-wcce/3aec3e50-511a-42f9-a5d5-240af503e470
+ '1.3.6.1.4.1.311.20.2': 'microsoft_enroll_certtype',
+ }
+
+
+class Extension(Sequence):
+ _fields = [
+ ('extn_id', ExtensionId),
+ ('critical', Boolean, {'default': False}),
+ ('extn_value', ParsableOctetString),
+ ]
+
+ _oid_pair = ('extn_id', 'extn_value')
+ _oid_specs = {
+ 'subject_directory_attributes': SubjectDirectoryAttributes,
+ 'key_identifier': OctetString,
+ 'key_usage': KeyUsage,
+ 'private_key_usage_period': PrivateKeyUsagePeriod,
+ 'subject_alt_name': GeneralNames,
+ 'issuer_alt_name': GeneralNames,
+ 'basic_constraints': BasicConstraints,
+ 'name_constraints': NameConstraints,
+ 'crl_distribution_points': CRLDistributionPoints,
+ 'certificate_policies': CertificatePolicies,
+ 'policy_mappings': PolicyMappings,
+ 'authority_key_identifier': AuthorityKeyIdentifier,
+ 'policy_constraints': PolicyConstraints,
+ 'extended_key_usage': ExtKeyUsageSyntax,
+ 'freshest_crl': CRLDistributionPoints,
+ 'inhibit_any_policy': Integer,
+ 'authority_information_access': AuthorityInfoAccessSyntax,
+ 'subject_information_access': SubjectInfoAccessSyntax,
+ 'tls_feature': Features,
+ 'ocsp_no_check': Null,
+ 'entrust_version_extension': EntrustVersionInfo,
+ 'netscape_certificate_type': NetscapeCertificateType,
+ 'signed_certificate_timestamp_list': OctetString,
+        # Not UTF8String as Microsoft's docs claim, see:
+ # https://www.alvestrand.no/objectid/1.3.6.1.4.1.311.20.2.html
+ 'microsoft_enroll_certtype': BMPString,
+ }
+
+
+class Extensions(SequenceOf):
+ _child_spec = Extension
+
+
+class TbsCertificate(Sequence):
+ _fields = [
+ ('version', Version, {'explicit': 0, 'default': 'v1'}),
+ ('serial_number', Integer),
+ ('signature', SignedDigestAlgorithm),
+ ('issuer', Name),
+ ('validity', Validity),
+ ('subject', Name),
+ ('subject_public_key_info', PublicKeyInfo),
+ ('issuer_unique_id', OctetBitString, {'implicit': 1, 'optional': True}),
+ ('subject_unique_id', OctetBitString, {'implicit': 2, 'optional': True}),
+ ('extensions', Extensions, {'explicit': 3, 'optional': True}),
+ ]
+
+
+class Certificate(Sequence):
+ _fields = [
+ ('tbs_certificate', TbsCertificate),
+ ('signature_algorithm', SignedDigestAlgorithm),
+ ('signature_value', OctetBitString),
+ ]
+
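+    # Caches for lazily-parsed extension values and derived properties
+    # exposed by the methods below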
+ _processed_extensions = False
+ _critical_extensions = None
+ _subject_directory_attributes_value = None
+ _key_identifier_value = None
+ _key_usage_value = None
+ _subject_alt_name_value = None
+ _issuer_alt_name_value = None
+ _basic_constraints_value = None
+ _name_constraints_value = None
+ _crl_distribution_points_value = None
+ _certificate_policies_value = None
+ _policy_mappings_value = None
+ _authority_key_identifier_value = None
+ _policy_constraints_value = None
+ _freshest_crl_value = None
+ _inhibit_any_policy_value = None
+ _extended_key_usage_value = None
+ _authority_information_access_value = None
+ _subject_information_access_value = None
+ _private_key_usage_period_value = None
+ _tls_feature_value = None
+ _ocsp_no_check_value = None
+ _issuer_serial = None
+ _authority_issuer_serial = False
+ _crl_distribution_points = None
+ _delta_crl_distribution_points = None
+ _valid_domains = None
+ _valid_ips = None
+ _self_issued = None
+ _self_signed = None
+ _sha1 = None
+ _sha256 = None
+
+ def _set_extensions(self):
+ """
+ Sets common named extensions to private attributes and creates a list
+ of critical extensions
+ """
+
+ self._critical_extensions = set()
+
+ for extension in self['tbs_certificate']['extensions']:
+ name = extension['extn_id'].native
+ attribute_name = '_%s_value' % name
+ if hasattr(self, attribute_name):
+ setattr(self, attribute_name, extension['extn_value'].parsed)
+ if extension['critical'].native:
+ self._critical_extensions.add(name)
+
+ self._processed_extensions = True
+
+ @property
+ def critical_extensions(self):
+ """
+ Returns a set of the names (or OID if not a known extension) of the
+ extensions marked as critical
+
+ :return:
+ A set of unicode strings
+ """
+
+ if not self._processed_extensions:
+ self._set_extensions()
+ return self._critical_extensions
+
+ @property
+ def private_key_usage_period_value(self):
+ """
+ This extension is used to constrain the period over which the subject
+ private key may be used
+
+ :return:
+ None or a PrivateKeyUsagePeriod object
+ """
+
+ if not self._processed_extensions:
+ self._set_extensions()
+ return self._private_key_usage_period_value
+
+ @property
+ def subject_directory_attributes_value(self):
+ """
+ This extension is used to contain additional identification attributes
+ about the subject.
+
+ :return:
+ None or a SubjectDirectoryAttributes object
+ """
+
+ if not self._processed_extensions:
+ self._set_extensions()
+ return self._subject_directory_attributes_value
+
+ @property
+ def key_identifier_value(self):
+ """
+ This extension is used to help in creating certificate validation paths.
+ It contains an identifier that should generally, but is not guaranteed
+ to, be unique.
+
+ :return:
+ None or an OctetString object
+ """
+
+ if not self._processed_extensions:
+ self._set_extensions()
+ return self._key_identifier_value
+
+ @property
+ def key_usage_value(self):
+ """
+ This extension is used to define the purpose of the public key
+ contained within the certificate.
+
+ :return:
+ None or a KeyUsage
+ """
+
+ if not self._processed_extensions:
+ self._set_extensions()
+ return self._key_usage_value
+
+ @property
+ def subject_alt_name_value(self):
+ """
+        This extension allows for additional names to be associated with the
+ subject of the certificate. While it may contain a whole host of
+ possible names, it is usually used to allow certificates to be used
+ with multiple different domain names.
+
+ :return:
+ None or a GeneralNames object
+ """
+
+ if not self._processed_extensions:
+ self._set_extensions()
+ return self._subject_alt_name_value
+
+ @property
+ def issuer_alt_name_value(self):
+ """
+ This extension allows associating one or more alternative names with
+ the issuer of the certificate.
+
+ :return:
+ None or an x509.GeneralNames object
+ """
+
+ if not self._processed_extensions:
+ self._set_extensions()
+ return self._issuer_alt_name_value
+
+ @property
+ def basic_constraints_value(self):
+ """
+ This extension is used to determine if the subject of the certificate
+        is a CA and, if so, how many intermediate CA certificates may follow
+        it before an end-entity certificate is found.
+
+ :return:
+ None or a BasicConstraints object
+ """
+
+ if not self._processed_extensions:
+ self._set_extensions()
+ return self._basic_constraints_value
+
+ @property
+ def name_constraints_value(self):
+ """
+ This extension is used in CA certificates, and is used to limit the
+ possible names of certificates issued.
+
+ :return:
+ None or a NameConstraints object
+ """
+
+ if not self._processed_extensions:
+ self._set_extensions()
+ return self._name_constraints_value
+
+ @property
+ def crl_distribution_points_value(self):
+ """
+ This extension is used to help in locating the CRL for this certificate.
+
+ :return:
+            None or a CRLDistributionPoints object
+ """
+
+ if not self._processed_extensions:
+ self._set_extensions()
+ return self._crl_distribution_points_value
+
+ @property
+ def certificate_policies_value(self):
+ """
+ This extension defines policies in CA certificates under which
+ certificates may be issued. In end-entity certificates, the inclusion
+ of a policy indicates the issuance of the certificate follows the
+ policy.
+
+ :return:
+ None or a CertificatePolicies object
+ """
+
+ if not self._processed_extensions:
+ self._set_extensions()
+ return self._certificate_policies_value
+
+ @property
+ def policy_mappings_value(self):
+ """
+ This extension allows mapping policy OIDs to other OIDs. This is used
+ to allow different policies to be treated as equivalent in the process
+ of validation.
+
+ :return:
+ None or a PolicyMappings object
+ """
+
+ if not self._processed_extensions:
+ self._set_extensions()
+ return self._policy_mappings_value
+
+ @property
+ def authority_key_identifier_value(self):
+ """
+ This extension helps in identifying the public key with which to
+ validate the authenticity of the certificate.
+
+ :return:
+ None or an AuthorityKeyIdentifier object
+ """
+
+ if not self._processed_extensions:
+ self._set_extensions()
+ return self._authority_key_identifier_value
+
+ @property
+ def policy_constraints_value(self):
+ """
+ This extension is used to control if policy mapping is allowed and
+ when policies are required.
+
+ :return:
+ None or a PolicyConstraints object
+ """
+
+ if not self._processed_extensions:
+ self._set_extensions()
+ return self._policy_constraints_value
+
+ @property
+ def freshest_crl_value(self):
+ """
+ This extension is used to help locate any available delta CRLs
+
+ :return:
+            None or a CRLDistributionPoints object
+ """
+
+ if not self._processed_extensions:
+ self._set_extensions()
+ return self._freshest_crl_value
+
+ @property
+ def inhibit_any_policy_value(self):
+ """
+ This extension is used to prevent mapping of the any policy to
+ specific requirements
+
+ :return:
+            None or an Integer object
+ """
+
+ if not self._processed_extensions:
+ self._set_extensions()
+ return self._inhibit_any_policy_value
+
+ @property
+ def extended_key_usage_value(self):
+ """
+ This extension is used to define additional purposes for the public key
+ beyond what is contained in the basic constraints.
+
+ :return:
+ None or an ExtKeyUsageSyntax object
+ """
+
+ if not self._processed_extensions:
+ self._set_extensions()
+ return self._extended_key_usage_value
+
+ @property
+ def authority_information_access_value(self):
+ """
+ This extension is used to locate the CA certificate used to sign this
+ certificate, or the OCSP responder for this certificate.
+
+ :return:
+ None or an AuthorityInfoAccessSyntax object
+ """
+
+ if not self._processed_extensions:
+ self._set_extensions()
+ return self._authority_information_access_value
+
+ @property
+ def subject_information_access_value(self):
+ """
+ This extension is used to access information about the subject of this
+ certificate.
+
+ :return:
+ None or a SubjectInfoAccessSyntax object
+ """
+
+ if not self._processed_extensions:
+ self._set_extensions()
+ return self._subject_information_access_value
+
+ @property
+ def tls_feature_value(self):
+ """
+ This extension is used to list the TLS features a server must respond
+ with if a client initiates a request supporting them.
+
+ :return:
+ None or a Features object
+ """
+
+ if not self._processed_extensions:
+ self._set_extensions()
+ return self._tls_feature_value
+
+ @property
+ def ocsp_no_check_value(self):
+ """
+ This extension is used on certificates of OCSP responders, indicating
+ that revocation information for the certificate should never need to
+ be verified, thus preventing possible loops in path validation.
+
+ :return:
+ None or a Null object (if present)
+ """
+
+ if not self._processed_extensions:
+ self._set_extensions()
+ return self._ocsp_no_check_value
+
+ @property
+ def signature(self):
+ """
+ :return:
+ A byte string of the signature
+ """
+
+ return self['signature_value'].native
+
+ @property
+ def signature_algo(self):
+ """
+ :return:
+ A unicode string of "rsassa_pkcs1v15", "rsassa_pss", "dsa", "ecdsa"
+ """
+
+ return self['signature_algorithm'].signature_algo
+
+ @property
+ def hash_algo(self):
+ """
+ :return:
+ A unicode string of "md2", "md5", "sha1", "sha224", "sha256",
+ "sha384", "sha512", "sha512_224", "sha512_256"
+ """
+
+ return self['signature_algorithm'].hash_algo
+
+ @property
+ def public_key(self):
+ """
+ :return:
+ The PublicKeyInfo object for this certificate
+ """
+
+ return self['tbs_certificate']['subject_public_key_info']
+
+ @property
+ def subject(self):
+ """
+ :return:
+ The Name object for the subject of this certificate
+ """
+
+ return self['tbs_certificate']['subject']
+
+ @property
+ def issuer(self):
+ """
+ :return:
+ The Name object for the issuer of this certificate
+ """
+
+ return self['tbs_certificate']['issuer']
+
+ @property
+ def serial_number(self):
+ """
+ :return:
+ An integer of the certificate's serial number
+ """
+
+ return self['tbs_certificate']['serial_number'].native
+
+ @property
+ def key_identifier(self):
+ """
+ :return:
+ None or a byte string of the certificate's key identifier from the
+ key identifier extension
+ """
+
+ if not self.key_identifier_value:
+ return None
+
+ return self.key_identifier_value.native
+
+ @property
+ def issuer_serial(self):
+ """
+ :return:
+ A byte string of the SHA-256 hash of the issuer concatenated with
+ the ascii character ":", concatenated with the serial number as
+ an ascii string
+ """
+
+ if self._issuer_serial is None:
+ self._issuer_serial = self.issuer.sha256 + b':' + str_cls(self.serial_number).encode('ascii')
+ return self._issuer_serial
+
+ @property
+ def not_valid_after(self):
+ """
+ :return:
+            A datetime of the latest time when the certificate is still valid
+ """
+ return self['tbs_certificate']['validity']['not_after'].native
+
+ @property
+ def not_valid_before(self):
+ """
+ :return:
+ A datetime of the earliest time when the certificate is valid
+ """
+ return self['tbs_certificate']['validity']['not_before'].native
+
+ @property
+ def authority_key_identifier(self):
+ """
+ :return:
+ None or a byte string of the key_identifier from the authority key
+ identifier extension
+ """
+
+ if not self.authority_key_identifier_value:
+ return None
+
+ return self.authority_key_identifier_value['key_identifier'].native
+
+ @property
+ def authority_issuer_serial(self):
+ """
+ :return:
+            None or a byte string of the SHA-256 hash of the issuer from the
+ authority key identifier extension concatenated with the ascii
+ character ":", concatenated with the serial number from the
+ authority key identifier extension as an ascii string
+ """
+
+ if self._authority_issuer_serial is False:
+ akiv = self.authority_key_identifier_value
+ if akiv and akiv['authority_cert_issuer'].native:
+ issuer = self.authority_key_identifier_value['authority_cert_issuer'][0].chosen
+ # We untag the element since it is tagged via being a choice from GeneralName
+ issuer = issuer.untag()
+ authority_serial = self.authority_key_identifier_value['authority_cert_serial_number'].native
+ self._authority_issuer_serial = issuer.sha256 + b':' + str_cls(authority_serial).encode('ascii')
+ else:
+ self._authority_issuer_serial = None
+ return self._authority_issuer_serial
+
+ @property
+ def crl_distribution_points(self):
+ """
+ Returns complete CRL URLs - does not include delta CRLs
+
+ :return:
+ A list of zero or more DistributionPoint objects
+ """
+
+ if self._crl_distribution_points is None:
+ self._crl_distribution_points = self._get_http_crl_distribution_points(self.crl_distribution_points_value)
+ return self._crl_distribution_points
+
+ @property
+ def delta_crl_distribution_points(self):
+ """
+ Returns delta CRL URLs - does not include complete CRLs
+
+ :return:
+ A list of zero or more DistributionPoint objects
+ """
+
+ if self._delta_crl_distribution_points is None:
+ self._delta_crl_distribution_points = self._get_http_crl_distribution_points(self.freshest_crl_value)
+ return self._delta_crl_distribution_points
+
+ def _get_http_crl_distribution_points(self, crl_distribution_points):
+ """
+ Fetches the DistributionPoint object for non-relative, HTTP CRLs
+ referenced by the certificate
+
+ :param crl_distribution_points:
+ A CRLDistributionPoints object to grab the DistributionPoints from
+
+ :return:
+ A list of zero or more DistributionPoint objects
+ """
+
+ output = []
+
+ if crl_distribution_points is None:
+ return []
+
+ for distribution_point in crl_distribution_points:
+ distribution_point_name = distribution_point['distribution_point']
+ if distribution_point_name is VOID:
+ continue
+ # RFC 5280 indicates conforming CA should not use the relative form
+ if distribution_point_name.name == 'name_relative_to_crl_issuer':
+ continue
+ # This library is currently only concerned with HTTP-based CRLs
+ for general_name in distribution_point_name.chosen:
+ if general_name.name == 'uniform_resource_identifier':
+ output.append(distribution_point)
+
+ return output
+
+ @property
+ def ocsp_urls(self):
+ """
+ :return:
+ A list of zero or more unicode strings of the OCSP URLs for this
+ cert
+ """
+
+ if not self.authority_information_access_value:
+ return []
+
+ output = []
+ for entry in self.authority_information_access_value:
+ if entry['access_method'].native == 'ocsp':
+ location = entry['access_location']
+ if location.name != 'uniform_resource_identifier':
+ continue
+ url = location.native
+ if url.lower().startswith(('http://', 'https://', 'ldap://', 'ldaps://')):
+ output.append(url)
+ return output
+
+ @property
+ def valid_domains(self):
+ """
+ :return:
+ A list of unicode strings of valid domain names for the certificate.
+ Wildcard certificates will have a domain in the form: *.example.com
+ """
+
+ if self._valid_domains is None:
+ self._valid_domains = []
+
+ # For the subject alt name extension, we can look at the name of
+ # the choice selected since it distinguishes between domain names,
+ # email addresses, IPs, etc
+ if self.subject_alt_name_value:
+ for general_name in self.subject_alt_name_value:
+ if general_name.name == 'dns_name' and general_name.native not in self._valid_domains:
+ self._valid_domains.append(general_name.native)
+
+ # If there was no subject alt name extension, and the common name
+ # in the subject looks like a domain, that is considered the valid
+ # list. This is done because according to
+ # https://tools.ietf.org/html/rfc6125#section-6.4.4, the common
+ # name should not be used if the subject alt name is present.
+ else:
+ pattern = re.compile('^(\\*\\.)?(?:[a-zA-Z0-9](?:[a-zA-Z0-9\\-]*[a-zA-Z0-9])?\\.)+[a-zA-Z]{2,}$')
+ for rdn in self.subject.chosen:
+ for name_type_value in rdn:
+ if name_type_value['type'].native == 'common_name':
+ value = name_type_value['value'].native
+ if pattern.match(value):
+ self._valid_domains.append(value)
+
+ return self._valid_domains
+
+ @property
+ def valid_ips(self):
+ """
+ :return:
+ A list of unicode strings of valid IP addresses for the certificate
+ """
+
+ if self._valid_ips is None:
+ self._valid_ips = []
+
+ if self.subject_alt_name_value:
+ for general_name in self.subject_alt_name_value:
+ if general_name.name == 'ip_address':
+ self._valid_ips.append(general_name.native)
+
+ return self._valid_ips
+
+ @property
+ def ca(self):
+ """
+        :return:
+ A boolean - if the certificate is marked as a CA
+ """
+
+ return self.basic_constraints_value and self.basic_constraints_value['ca'].native
+
+ @property
+ def max_path_length(self):
+ """
+        :return:
+ None or an integer of the maximum path length
+ """
+
+ if not self.ca:
+ return None
+ return self.basic_constraints_value['path_len_constraint'].native
+
+ @property
+ def self_issued(self):
+ """
+ :return:
+ A boolean - if the certificate is self-issued, as defined by RFC
+ 5280
+ """
+
+ if self._self_issued is None:
+ self._self_issued = self.subject == self.issuer
+ return self._self_issued
+
+ @property
+ def self_signed(self):
+ """
+ :return:
+ A unicode string of "no" or "maybe". The "maybe" result will
+ be returned if the certificate issuer and subject are the same.
+ If a key identifier and authority key identifier are present,
+ they will need to match otherwise "no" will be returned.
+
+            To verify whether a certificate is truly self-signed, the signature
+ will need to be verified. See the certvalidator package for
+ one possible solution.
+ """
+
+ if self._self_signed is None:
+ self._self_signed = 'no'
+ if self.self_issued:
+ if self.key_identifier:
+ if not self.authority_key_identifier:
+ self._self_signed = 'maybe'
+ elif self.authority_key_identifier == self.key_identifier:
+ self._self_signed = 'maybe'
+ else:
+ self._self_signed = 'maybe'
+ return self._self_signed
+
+ @property
+ def sha1(self):
+ """
+ :return:
+ The SHA-1 hash of the DER-encoded bytes of this complete certificate
+ """
+
+ if self._sha1 is None:
+ self._sha1 = hashlib.sha1(self.dump()).digest()
+ return self._sha1
+
+ @property
+ def sha1_fingerprint(self):
+ """
+ :return:
+ A unicode string of the SHA-1 hash, formatted using hex encoding
+ with a space between each pair of characters, all uppercase
+ """
+
+ return ' '.join('%02X' % c for c in bytes_to_list(self.sha1))
+
+ @property
+ def sha256(self):
+ """
+ :return:
+ The SHA-256 hash of the DER-encoded bytes of this complete
+ certificate
+ """
+
+ if self._sha256 is None:
+ self._sha256 = hashlib.sha256(self.dump()).digest()
+ return self._sha256
+
+ @property
+ def sha256_fingerprint(self):
+ """
+ :return:
+ A unicode string of the SHA-256 hash, formatted using hex encoding
+ with a space between each pair of characters, all uppercase
+ """
+
+ return ' '.join('%02X' % c for c in bytes_to_list(self.sha256))
+
+ def is_valid_domain_ip(self, domain_ip):
+ """
+ Check if a domain name or IP address is valid according to the
+ certificate
+
+ :param domain_ip:
+ A unicode string of a domain name or IP address
+
+ :return:
+ A boolean - if the domain or IP is valid for the certificate
+ """
+
+ if not isinstance(domain_ip, str_cls):
+ raise TypeError(unwrap(
+ '''
+ domain_ip must be a unicode string, not %s
+ ''',
+ type_name(domain_ip)
+ ))
+
+ encoded_domain_ip = domain_ip.encode('idna').decode('ascii').lower()
+
+ is_ipv6 = encoded_domain_ip.find(':') != -1
+ is_ipv4 = not is_ipv6 and re.match('^\\d+\\.\\d+\\.\\d+\\.\\d+$', encoded_domain_ip)
+ is_domain = not is_ipv6 and not is_ipv4
+
+ # Handle domain name checks
+ if is_domain:
+ if not self.valid_domains:
+ return False
+
+ domain_labels = encoded_domain_ip.split('.')
+
+ for valid_domain in self.valid_domains:
+ encoded_valid_domain = valid_domain.encode('idna').decode('ascii').lower()
+ valid_domain_labels = encoded_valid_domain.split('.')
+
+ # The domain must be equal in label length to match
+ if len(valid_domain_labels) != len(domain_labels):
+ continue
+
+ if valid_domain_labels == domain_labels:
+ return True
+
+ is_wildcard = self._is_wildcard_domain(encoded_valid_domain)
+ if is_wildcard and self._is_wildcard_match(domain_labels, valid_domain_labels):
+ return True
+
+ return False
+
+ # Handle IP address checks
+ if not self.valid_ips:
+ return False
+
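+        # Normalize both addresses to packed binary form so textual variants
+        # of the same IP (e.g. IPv6 zero compression) compare equal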
+ family = socket.AF_INET if is_ipv4 else socket.AF_INET6
+ normalized_ip = inet_pton(family, encoded_domain_ip)
+
+ for valid_ip in self.valid_ips:
+ valid_family = socket.AF_INET if valid_ip.find('.') != -1 else socket.AF_INET6
+ normalized_valid_ip = inet_pton(valid_family, valid_ip)
+
+ if normalized_valid_ip == normalized_ip:
+ return True
+
+ return False
+
+ def _is_wildcard_domain(self, domain):
+ """
+ Checks if a domain is a valid wildcard according to
+ https://tools.ietf.org/html/rfc6125#section-6.4.3
+
+ :param domain:
+ A unicode string of the domain name, where any U-labels from an IDN
+ have been converted to A-labels
+
+ :return:
+ A boolean - if the domain is a valid wildcard domain
+ """
+
+ # The * character must be present for a wildcard match, and if there is
+        # more than one, it is an invalid wildcard specification
+ if domain.count('*') != 1:
+ return False
+
+ labels = domain.lower().split('.')
+
+ if not labels:
+ return False
+
+ # Wildcards may only appear in the left-most label
+ if labels[0].find('*') == -1:
+ return False
+
+ # Wildcards may not be embedded in an A-label from an IDN
+ if labels[0][0:4] == 'xn--':
+ return False
+
+ return True
+
+ def _is_wildcard_match(self, domain_labels, valid_domain_labels):
+ """
+ Determines if the labels in a domain are a match for labels from a
+ wildcard valid domain name
+
+ :param domain_labels:
+ A list of unicode strings, with A-label form for IDNs, of the labels
+ in the domain name to check
+
+ :param valid_domain_labels:
+ A list of unicode strings, with A-label form for IDNs, of the labels
+ in a wildcard domain pattern
+
+ :return:
+ A boolean - if the domain matches the valid domain
+ """
+
+ first_domain_label = domain_labels[0]
+ other_domain_labels = domain_labels[1:]
+
+ wildcard_label = valid_domain_labels[0]
+ other_valid_domain_labels = valid_domain_labels[1:]
+
+ # The wildcard is only allowed in the first label, so if
+        # the subsequent labels are not equal, there is no match
+ if other_domain_labels != other_valid_domain_labels:
+ return False
+
+ if wildcard_label == '*':
+ return True
+
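+        # A partial wildcard label such as 'w*' becomes the regex '^w.*$' and
+        # is matched against the first label of the domain being checked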
+ wildcard_regex = re.compile('^' + wildcard_label.replace('*', '.*') + '$')
+ if wildcard_regex.match(first_domain_label):
+ return True
+
+ return False
+
+
+# The structures are taken from the OpenSSL source file x_x509a.c, and specify
+# extra information that is added to X.509 certificates to store trust
+# information about the certificate.
+
+class KeyPurposeIdentifiers(SequenceOf):
+ _child_spec = KeyPurposeId
+
+
+class SequenceOfAlgorithmIdentifiers(SequenceOf):
+ _child_spec = AlgorithmIdentifier
+
+
+class CertificateAux(Sequence):
+ _fields = [
+ ('trust', KeyPurposeIdentifiers, {'optional': True}),
+ ('reject', KeyPurposeIdentifiers, {'implicit': 0, 'optional': True}),
+ ('alias', UTF8String, {'optional': True}),
+ ('keyid', OctetString, {'optional': True}),
+ ('other', SequenceOfAlgorithmIdentifiers, {'implicit': 1, 'optional': True}),
+ ]
+
+
+class TrustedCertificate(Concat):
+ _child_specs = [Certificate, CertificateAux]
diff --git a/jc/parsers/gpg.py b/jc/parsers/gpg.py
new file mode 100644
index 00000000..684a540f
--- /dev/null
+++ b/jc/parsers/gpg.py
@@ -0,0 +1,337 @@
+"""jc - JSON Convert `gpg --with-colons` command output parser
+
+Usage (cli):
+
+ $ gpg --with-colons --show-keys file.gpg | jc --gpg
+
+ or
+
+ $ jc gpg --with-colons --show-keys file.gpg
+
+Usage (module):
+
+ import jc
+ result = jc.parse('gpg', gpg_command_output)
+
+Schema:
+
+Field definitions from https://git.gnupg.org/cgi-bin/gitweb.cgi?p=gnupg.git;a=blob_plain;f=doc/DETAILS
+
+> Note: Number values are not converted to integers because many field
+> specifications are overloaded and future augmentations are implied in the
+> documentation.
+
+ [
+ {
+ "type": string,
+ "validity": string,
+ "key_length": string,
+ "pub_key_alg": string,
+ "key_id": string,
+ "creation_date": string,
+ "expiration_date": string,
+ "certsn_uidhash_trustinfo": string,
+ "owner_trust": string,
+ "user_id": string,
+ "signature_class": string,
+ "key_capabilities": string,
+ "cert_fingerprint_other": string,
+ "flag": string,
+ "token_sn": string,
+ "hash_alg": string,
+ "curve_name": string,
+ "compliance_flags": string,
+ "last_update_date": string,
+ "origin": string,
+ "comment": string,
+ "index": string, # [0]
+ "bits": string, # [0]
+ "value": string, # [0]
+ "version": string, # [1], [4]
+ "signature_count": string, # [1]
+ "encryption_count": string, # [1]
+ "policy": string, # [1]
+ "signature_first_seen": string, # [1]
+ "signature_most_recent_seen": string, # [1]
+ "encryption_first_done": string, # [1]
+ "encryption_most_recent_done": string, # [1]
+ "staleness_reason": string, # [2]
+ "trust_model": string, # [2]
+ "trust_db_created": string, # [2]
+ "trust_db_expires": string, # [2]
+ "marginally_trusted_users": string, # [2]
+ "completely_trusted_users": string, # [2]
+ "cert_chain_max_depth": string, # [2]
+ "subpacket_number": string, # [3]
+ "hex_flags": string, # [3]
+ "subpacket_length": string, # [3]
+ "subpacket_data": string, # [3]
+ "pubkey": string, # [4]
+ "cipher": string, # [4]
+ "digest": string, # [4]
+ "compress": string, # [4]
+ "group": string, # [4]
+ "members": string, # [4]
+ "curve_names": string, # [4]
+ }
+ ]
+
+ All blank values are converted to null/None.
+
+ [0] for 'pkd' type
+ [1] for 'tfs' type
+ [2] for 'tru' type
+ [3] for 'skp' type
+ [4] for 'cfg' type
+
+Examples:
+
+ $ gpg --with-colons --show-keys file.gpg | jc --gpg -p
+ [
+ {
+ "type": "pub",
+ "validity": "f",
+ "key_length": "1024",
+ "pub_key_alg": "17",
+ "key_id": "6C7EE1B8621CC013",
+ "creation_date": "899817715",
+ "expiration_date": "1055898235",
+ "certsn_uidhash_trustinfo": null,
+ "owner_trust": "m",
+ "user_id": null,
+ "signature_class": null,
+ "key_capabilities": "scESC",
+ "cert_fingerprint_other": null,
+ "flag": null,
+ "token_sn": null,
+ "hash_alg": null,
+ "curve_name": null,
+ "compliance_flags": null,
+ "last_update_date": null,
+ "origin": null,
+ "comment": null
+ },
+ ...
+ ]
+"""
+from typing import List, Dict, Optional
+import jc.utils
+
+
+class info():
+ """Provides parser metadata (version, author, etc.)"""
+ version = '1.0'
+ description = '`gpg --with-colons` command parser'
+ author = 'Kelly Brazil'
+ author_email = 'kellyjonbrazil@gmail.com'
+ compatible = ['linux']
+ magic_commands = ['gpg --with-colons']
+
+
+__version__ = info.version
+
+
+def _process(proc_data: List[Dict]) -> List[Dict]:
+ """
+ Final processing to conform to the schema.
+
+ Parameters:
+
+ proc_data: (List of Dictionaries) raw structured data to process
+
+ Returns:
+
+ List of Dictionaries. Structured to conform to the schema.
+ """
+ return proc_data
+
+
+def _list_get(my_list: List, index: int, default_val=None) -> Optional[str]:
+ """get a list value or return None/default value if out of range."""
+ if index <= len(my_list) - 1:
+ return my_list[index] or None
+
+ return default_val
+
+
+def parse(
+ data: str,
+ raw: bool = False,
+ quiet: bool = False
+) -> List[Dict]:
+ """
+ Main text parsing function
+
+ Parameters:
+
+ data: (string) text data to parse
+ raw: (boolean) unprocessed output if True
+ quiet: (boolean) suppress warning messages if True
+
+ Returns:
+
+ List of Dictionaries. Raw or processed structured data.
+ """
+ jc.utils.compatibility(__name__, info.compatible, quiet)
+ jc.utils.input_type_check(data)
+
+ raw_output: List = []
+
+ if jc.utils.has_data(data):
+
+ for line in filter(None, data.splitlines()):
+ values = line.split(':')
+ temp_obj = {
+ 'type': _list_get(values, 0),
+ 'validity': _list_get(values, 1),
+ 'key_length': _list_get(values, 2),
+ 'pub_key_alg': _list_get(values, 3),
+ 'key_id': _list_get(values, 4),
+ 'creation_date': _list_get(values, 5),
+ 'expiration_date': _list_get(values, 6),
+ 'certsn_uidhash_trustinfo': _list_get(values, 7),
+ 'owner_trust': _list_get(values, 8),
+ 'user_id': _list_get(values, 9),
+ 'signature_class': _list_get(values, 10),
+ 'key_capabilities': _list_get(values, 11),
+ 'cert_fingerprint_other': _list_get(values, 12),
+ 'flag': _list_get(values, 13),
+ 'token_sn': _list_get(values, 14),
+ 'hash_alg': _list_get(values, 15),
+ 'curve_name': _list_get(values, 16),
+ 'compliance_flags': _list_get(values, 17),
+ 'last_update_date': _list_get(values, 18),
+ 'origin': _list_get(values, 19),
+ 'comment': _list_get(values, 20)
+ }
+
+ # field mappings change for special types: pkd, tfs, tru, skp, cfg
+
+ if temp_obj['type'] == 'pkd':
+ # pkd:0:1024:B665B1435F4C2 .... FF26ABB:
+ # ! ! !-- the value
+ # ! !------ for information number of bits in the value
+ # !--------- index (eg. DSA goes from 0 to 3: p,q,g,y)
+ line_obj = {
+ 'type': temp_obj['type'],
+ 'index': temp_obj['validity'],
+ 'bits': temp_obj['key_length'],
+ 'value': temp_obj['pub_key_alg']
+ }
+
+ elif temp_obj['type'] == 'tfs':
+ # - Field 2 :: tfs record version (must be 1)
+ # - Field 3 :: validity - A number with validity code.
+ # - Field 4 :: signcount - The number of signatures seen.
+ # - Field 5 :: encrcount - The number of encryptions done.
+ # - Field 6 :: policy - A string with the policy
+                # - Field 7 :: signature-first-seen - a timestamp or 0 if not known.
+ # - Field 8 :: signature-most-recent-seen - a timestamp or 0 if not known.
+ # - Field 9 :: encryption-first-done - a timestamp or 0 if not known.
+ # - Field 10 :: encryption-most-recent-done - a timestamp or 0 if not known.
+ line_obj = {
+ 'type': temp_obj['type'],
+ 'version': temp_obj['validity'],
+ 'validity': temp_obj['key_length'],
+ 'signature_count': temp_obj['pub_key_alg'],
+ 'encryption_count': temp_obj['key_id'],
+ 'policy': temp_obj['creation_date'],
+ 'signature_first_seen': temp_obj['expiration_date'],
+ 'signature_most_recent_seen': temp_obj['certsn_uidhash_trustinfo'],
+ 'encryption_first_done': temp_obj['owner_trust'],
+ 'encryption_most_recent_done': temp_obj['user_id']
+ }
+
+ elif temp_obj['type'] == 'tru':
+ # tru:o:0:1166697654:1:3:1:5
+ # - Field 2 :: Reason for staleness of trust.
+ # - Field 3 :: Trust model
+ # - Field 4 :: Date trustdb was created in seconds since Epoch.
+ # - Field 5 :: Date trustdb will expire in seconds since Epoch.
+ # - Field 6 :: Number of marginally trusted users to introduce a new key signer.
+ # - Field 7 :: Number of completely trusted users to introduce a new key signer.
+ # - Field 8 :: Maximum depth of a certification chain.
+ line_obj = {
+ 'type': temp_obj['type'],
+ 'staleness_reason': temp_obj['validity'],
+ 'trust_model': temp_obj['key_length'],
+ 'trust_db_created': temp_obj['pub_key_alg'],
+ 'trust_db_expires': temp_obj['key_id'],
+ 'marginally_trusted_users': temp_obj['creation_date'],
+ 'completely_trusted_users': temp_obj['expiration_date'],
+ 'cert_chain_max_depth': temp_obj['certsn_uidhash_trustinfo']
+ }
+
+ elif temp_obj['type'] == 'skp':
+ # - Field 2 :: Subpacket number as per RFC-4880 and later.
+ # - Field 3 :: Flags in hex.
+ # - Field 4 :: Length of the subpacket.
+ # - Field 5 :: The subpacket data.
+ line_obj = {
+ 'type': temp_obj['type'],
+ 'subpacket_number': temp_obj['validity'],
+ 'hex_flags': temp_obj['key_length'],
+ 'subpacket_length': temp_obj['pub_key_alg'],
+ 'subpacket_data': temp_obj['key_id']
+ }
+
+ elif temp_obj['type'] == 'cfg':
+
+ # there are several 'cfg' formats
+
+ if temp_obj['validity'] == 'version':
+ # cfg:version:1.3.5
+ line_obj = {
+ 'type': temp_obj['type'],
+ 'version': temp_obj['key_length']
+ }
+
+ elif temp_obj['validity'] == 'pubkey':
+ # cfg:pubkey:1;2;3;16;17
+ line_obj = {
+ 'type': temp_obj['type'],
+ 'pubkey': temp_obj['key_length']
+ }
+
+ elif temp_obj['validity'] == 'cipher':
+ # cfg:cipher:2;3;4;7;8;9;10
+ line_obj = {
+ 'type': temp_obj['type'],
+ 'cipher': temp_obj['key_length']
+ }
+
+ elif temp_obj['validity'] == 'digest':
+ # cfg:digest:1;2;3;8;9;10
+ line_obj = {
+ 'type': temp_obj['type'],
+ 'digest': temp_obj['key_length']
+ }
+
+ elif temp_obj['validity'] == 'compress':
+ # cfg:compress:0;1;2;3
+ line_obj = {
+ 'type': temp_obj['type'],
+ 'compress': temp_obj['key_length']
+ }
+
+ elif temp_obj['validity'] == 'group':
+ # cfg:group:mynames:patti;joe;0x12345678;paige
+ line_obj = {
+ 'type': temp_obj['type'],
+ 'group': temp_obj['key_length'],
+ 'members': temp_obj['pub_key_alg']
+ }
+
+ elif temp_obj['validity'] == 'curve':
+ # cfg:curve:ed25519;nistp256;nistp384;nistp521
+ line_obj = {
+ 'type': temp_obj['type'],
+ 'curve_names': temp_obj['key_length']
+ }
+
+ else:
+ line_obj = temp_obj
+
+ raw_output.append(line_obj)
+
+ return raw_output if raw else _process(raw_output)
diff --git a/jc/parsers/iptables.py b/jc/parsers/iptables.py
index 8ec2ca9e..302bb3ff 100644
--- a/jc/parsers/iptables.py
+++ b/jc/parsers/iptables.py
@@ -163,7 +163,7 @@ import jc.utils
class info():
"""Provides parser metadata (version, author, etc.)"""
- version = '1.7'
+ version = '1.8'
description = '`iptables` command parser'
author = 'Kelly Brazil'
author_email = 'kellyjonbrazil@gmail.com'
@@ -264,7 +264,6 @@ def parse(data, raw=False, quiet=False):
continue
elif line.startswith('target') or line.find('pkts') == 1 or line.startswith('num'):
- headers = []
headers = [h for h in ' '.join(line.lower().strip().split()).split() if h]
headers.append("options")
diff --git a/jc/parsers/x509_cert.py b/jc/parsers/x509_cert.py
new file mode 100644
index 00000000..e1812546
--- /dev/null
+++ b/jc/parsers/x509_cert.py
@@ -0,0 +1,349 @@
+"""jc - JSON Convert X.509 Certificate format file parser
+
+This parser will convert DER and PEM encoded X.509 certificate files.
+
+Usage (cli):
+
+ $ cat certificate.pem | jc --x509-cert
+
+Usage (module):
+
+ import jc
+ result = jc.parse('x509_cert', x509_cert_file_output)
+
+Schema:
+
+ [
+ {
+ "tbs_certificate": {
+ "version": string,
+ "serial_number": string, # [0]
+ "signature": {
+ "algorithm": string,
+ "parameters": string/null,
+ },
+ "issuer": {
+ "country_name": string,
+ "state_or_province_name" string,
+ "locality_name": string,
+ "organization_name": array/string,
+ "organizational_unit_name": array/string,
+ "common_name": string,
+ "email_address": string
+ },
+ "validity": {
+ "not_before": integer, # [1]
+ "not_after": integer, # [1]
+ "not_before_iso": string,
+ "not_after_iso": string
+ },
+ "subject": {
+ "country_name": string,
+ "state_or_province_name": string,
+ "locality_name": string,
+ "organization_name": array/string,
+ "organizational_unit_name": array/string,
+ "common_name": string,
+ "email_address": string
+ },
+ "subject_public_key_info": {
+ "algorithm": {
+ "algorithm": string,
+ "parameters": string/null,
+ },
+ "public_key": {
+ "modulus": string, # [0]
+ "public_exponent": integer
+ }
+ },
+ "issuer_unique_id": string/null,
+ "subject_unique_id": string/null,
+ "extensions": [
+ {
+ "extn_id": string,
+ "critical": boolean,
+ "extn_value": array/object/string/integer # [2]
+ }
+ ]
+ },
+ "signature_algorithm": {
+ "algorithm": string,
+ "parameters": string/null
+ },
+ "signature_value": string # [0]
+ }
+ ]
+
+ [0] in colon-delimited hex notation
+ [1] time-zone-aware (UTC) epoch timestamp
+ [2] See below for well-known Extension schemas:
+
+ Basic Constraints:
+ {
+ "extn_id": "basic_constraints",
+ "critical": boolean,
+ "extn_value": {
+ "ca": boolean,
+ "path_len_constraint": string/null
+ }
+ }
+
+ Key Usage:
+ {
+ "extn_id": "key_usage",
+ "critical": boolean,
+ "extn_value": [
+ string
+ ]
+ }
+
+ Key Identifier:
+ {
+ "extn_id": "key_identifier",
+ "critical": boolean,
+ "extn_value": string # [0]
+ }
+
+ Authority Key Identifier:
+ {
+ "extn_id": "authority_key_identifier",
+ "critical": boolean,
+ "extn_value": {
+ "key_identifier": string, # [0]
+ "authority_cert_issuer": string/null,
+ "authority_cert_serial_number": string/null
+ }
+ }
+
+Examples:
+
+ $ cat entrust-ec1.pem | jc --x509-cert -p
+ [
+ {
+ "tbs_certificate": {
+ "version": "v3",
+ "serial_number": "a6:8b:79:29:00:00:00:00:50:d0:91:f9",
+ "signature": {
+ "algorithm": "sha384_ecdsa",
+ "parameters": null
+ },
+ "issuer": {
+ "country_name": "US",
+ "organization_name": "Entrust, Inc.",
+ "organizational_unit_name": [
+ "See www.entrust.net/legal-terms",
+ "(c) 2012 Entrust, Inc. - for authorized use only"
+ ],
+ "common_name": "Entrust Root Certification Authority - EC1"
+ },
+ "validity": {
+ "not_before": 1355844336,
+ "not_after": 2144764536,
+ "not_before_iso": "2012-12-18T15:25:36+00:00",
+ "not_after_iso": "2037-12-18T15:55:36+00:00"
+ },
+ "subject": {
+ "country_name": "US",
+ "organization_name": "Entrust, Inc.",
+ "organizational_unit_name": [
+ "See www.entrust.net/legal-terms",
+ "(c) 2012 Entrust, Inc. - for authorized use only"
+ ],
+ "common_name": "Entrust Root Certification Authority - EC1"
+ },
+ "subject_public_key_info": {
+ "algorithm": {
+ "algorithm": "ec",
+ "parameters": "secp384r1"
+ },
+ "public_key": "04:84:13:c9:d0:ba:6d:41:7b:e2:6c:d0:eb:55:..."
+ },
+ "issuer_unique_id": null,
+ "subject_unique_id": null,
+ "extensions": [
+ {
+ "extn_id": "key_usage",
+ "critical": true,
+ "extn_value": [
+ "crl_sign",
+ "key_cert_sign"
+ ]
+ },
+ {
+ "extn_id": "basic_constraints",
+ "critical": true,
+ "extn_value": {
+ "ca": true,
+ "path_len_constraint": null
+ }
+ },
+ {
+ "extn_id": "key_identifier",
+ "critical": false,
+ "extn_value": "b7:63:e7:1a:dd:8d:e9:08:a6:55:83:a4:e0:6a:..."
+ }
+ ]
+ },
+ "signature_algorithm": {
+ "algorithm": "sha384_ecdsa",
+ "parameters": null
+ },
+ "signature_value": "30:64:02:30:61:79:d8:e5:42:47:df:1c:ae:53:..."
+ }
+ ]
+"""
+import binascii
+from collections import OrderedDict
+from datetime import datetime
+from typing import List, Dict, Union
+import jc.utils
+from jc.parsers.asn1crypto import pem, x509
+
+
+class info():
+ """Provides parser metadata (version, author, etc.)"""
+ version = '1.0'
+ description = 'X.509 PEM and DER certificate file parser'
+ author = 'Kelly Brazil'
+ author_email = 'kellyjonbrazil@gmail.com'
+ details = 'Using the asn1crypto library at https://github.com/wbond/asn1crypto/releases/tag/1.5.1'
+ compatible = ['linux', 'darwin', 'cygwin', 'win32', 'aix', 'freebsd']
+
+
+__version__ = info.version
+
+
+def _process(proc_data: List[Dict]) -> List[Dict]:
+ """
+ Final processing to conform to the schema.
+
+ Parameters:
+
+ proc_data: (List of Dictionaries) raw structured data to process
+
+ Returns:
+
+ List of Dictionaries. Structured to conform to the schema.
+ """
+ return proc_data
+
+
+def _i2b(integer: int) -> bytes:
+ """Convert long integers into a bytes object (big endian)"""
+ return integer.to_bytes((integer.bit_length() + 7) // 8, byteorder='big')
+
+
+def _b2a(byte_string: bytes) -> str:
+ """Convert a byte string to a colon-delimited hex ascii string"""
+ # need try/except since the separator argument was only introduced in Python 3.8.
+ # provides compatibility for Python 3.6 and 3.7.
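+    # For illustration (comment added, not in the original source): either
+    # code path converts b'\xb7\x63\xe7' into 'b7:63:e7'.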
+ try:
+ return binascii.hexlify(byte_string, ':').decode('utf-8')
+ except TypeError:
+ hex_string = binascii.hexlify(byte_string).decode('utf-8')
+ colon_separated = ':'.join(hex_string[i:i+2] for i in range(0, len(hex_string), 2))
+ return colon_separated
+
+
+def _fix_objects(obj):
+ """
+ Recursively traverse the nested dictionary or list and convert objects
+ into JSON serializable types.
+ """
+ if isinstance(obj, set):
+ obj = sorted(list(obj))
+
+ if isinstance(obj, OrderedDict):
+ obj = dict(obj)
+
+ if isinstance(obj, dict):
+ for k, v in obj.copy().items():
+ if k == 'serial_number':
+ obj.update({k: _b2a(_i2b(v))})
+ continue
+
+ if k == 'modulus':
+ obj.update({k: _b2a(_i2b(v))})
+ continue
+
+ if isinstance(v, datetime):
+ iso = v.isoformat()
+ v = int(round(v.timestamp()))
+ obj.update({k: v, f'{k}_iso': iso})
+ continue
+
+ if isinstance(v, bytes):
+ v = _b2a(v)
+ obj.update({k: v})
+ continue
+
+ if isinstance(v, set):
+ v = sorted(list(v))
+ obj.update({k: v})
+
+ if isinstance(v, OrderedDict):
+ v = dict(v)
+ obj.update({k: v})
+
+ if isinstance(v, dict):
+ obj.update({k: _fix_objects(v)})
+ continue
+
+ if isinstance(v, list):
+ newlist = []
+ for i in v:
+ newlist.append(_fix_objects(i))
+ obj.update({k: newlist})
+ continue
+
+ if isinstance(obj, list):
+ new_list = []
+ for i in obj:
+ new_list.append(_fix_objects(i))
+ obj = new_list
+
+ return obj
+
+
+def parse(
+ data: Union[str, bytes],
+ raw: bool = False,
+ quiet: bool = False
+) -> List[Dict]:
+ """
+ Main text parsing function
+
+ Parameters:
+
+ data: (string or bytes) text data to parse
+ raw: (boolean) unprocessed output if True
+ quiet: (boolean) suppress warning messages if True
+
+ Returns:
+
+ List of Dictionaries. Raw or processed structured data.
+ """
+ jc.utils.compatibility(__name__, info.compatible, quiet)
+
+ raw_output: List = []
+
+ if jc.utils.has_data(data):
+ # convert to bytes, if not already, for PEM detection since that's
+ # what pem.detect() needs. (cli.py will auto-convert to UTF-8 if it can)
+ try:
+ der_bytes = bytes(data, 'utf-8') # type: ignore
+ except TypeError:
+ der_bytes = data # type: ignore
+
+ certs = []
+ if pem.detect(der_bytes):
+ for type_name, headers, der_bytes in pem.unarmor(der_bytes, multiple=True):
+ if type_name == 'CERTIFICATE':
+ certs.append(x509.Certificate.load(der_bytes))
+
+ else:
+ certs.append(x509.Certificate.load(der_bytes))
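+        # Note (added for clarity, not in the original source): pem.unarmor()
+        # with multiple=True yields every PEM block in the input, so a file
+        # containing several certificates produces one output object per
+        # certificate; non-certificate PEM blocks are skipped.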
+
+ raw_output = [_fix_objects(cert.native) for cert in certs]
+
+ return raw_output if raw else _process(raw_output)
diff --git a/jc/utils.py b/jc/utils.py
index 0504b367..2bde89ee 100644
--- a/jc/utils.py
+++ b/jc/utils.py
@@ -140,21 +140,27 @@ def compatibility(mod_name: str, compatible: List, quiet: bool = False) -> None:
])
-def has_data(data: str) -> bool:
+def has_data(data: Union[str, bytes]) -> bool:
"""
- Checks if the input contains data. If there are any non-whitespace
- characters then return `True`, else return `False`.
+ Checks if the string input contains data. If there are any
+ non-whitespace characters then return `True`, else return `False`.
+
+ For bytes, returns True if there is any data.
Parameters:
- data: (string) input to check whether it contains data
+ data: (string, bytes) input to check whether it contains data
Returns:
Boolean True if input string (data) contains non-whitespace
- characters, otherwise False
+ characters, otherwise False. For bytes data, returns
+ True if there is any data, otherwise False.
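+
+        Example (illustrative, not part of the original docstring):
+            has_data('   ')   # False (whitespace-only string)
+            has_data(b'   ')  # True  (any non-empty bytes)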
"""
- return bool(data and not data.isspace())
+ if isinstance(data, str):
+ return bool(data and not data.isspace())
+
+ return bool(data)
def convert_to_int(value: Union[str, float]) -> Optional[int]:
diff --git a/man/jc.1 b/man/jc.1
index b61e76ea..501f3796 100644
--- a/man/jc.1
+++ b/man/jc.1
@@ -1,4 +1,4 @@
-.TH jc 1 2022-06-15 1.20.1 "JSON Convert"
+.TH jc 1 2022-07-05 1.20.2 "JSON Convert"
.SH NAME
\fBjc\fP \- JSON Convert JSONifies the output of many CLI tools and file-types
.SH SYNOPSIS
@@ -152,6 +152,11 @@ CSV file streaming parser
\fB--git-log-s\fP
`git log` command streaming parser
+.TP
+.B
+\fB--gpg\fP
+`gpg --with-colons` command parser
+
.TP
.B
\fB--group\fP
@@ -512,6 +517,11 @@ Key/Value file parser
\fB--who\fP
`who` command parser
+.TP
+.B
+\fB--x509-cert\fP
+X.509 PEM and DER certificate file parser
+
.TP
.B
\fB--xml\fP
diff --git a/setup.py b/setup.py
index 1bcb83a0..beada190 100755
--- a/setup.py
+++ b/setup.py
@@ -5,7 +5,7 @@ with open('README.md', 'r') as f:
setuptools.setup(
name='jc',
- version='1.20.1',
+ version='1.20.2',
author='Kelly Brazil',
author_email='kellyjonbrazil@gmail.com',
description='Converts the output of popular command-line tools and file-types to JSON.',
diff --git a/tests/fixtures/generic/gpg.json b/tests/fixtures/generic/gpg.json
new file mode 100644
index 00000000..cfba869f
--- /dev/null
+++ b/tests/fixtures/generic/gpg.json
@@ -0,0 +1 @@
+[{"type":"pub","validity":"f","key_length":"1024","pub_key_alg":"17","key_id":"6C7EE1B8621CC013","creation_date":"899817715","expiration_date":"1055898235","certsn_uidhash_trustinfo":null,"owner_trust":"m","user_id":null,"signature_class":null,"key_capabilities":"scESC","cert_fingerprint_other":null,"flag":null,"token_sn":null,"hash_alg":null,"curve_name":null,"compliance_flags":null,"last_update_date":null,"origin":null,"comment":null},{"type":"fpr","validity":null,"key_length":null,"pub_key_alg":null,"key_id":null,"creation_date":null,"expiration_date":null,"certsn_uidhash_trustinfo":null,"owner_trust":null,"user_id":"ECAF7590EB3443B5C7CF3ACB6C7EE1B8621CC013","signature_class":null,"key_capabilities":null,"cert_fingerprint_other":null,"flag":null,"token_sn":null,"hash_alg":null,"curve_name":null,"compliance_flags":null,"last_update_date":null,"origin":null,"comment":null},{"type":"uid","validity":"f","key_length":null,"pub_key_alg":null,"key_id":null,"creation_date":null,"expiration_date":null,"certsn_uidhash_trustinfo":null,"owner_trust":null,"user_id":"Werner Koch ","signature_class":null,"key_capabilities":null,"cert_fingerprint_other":null,"flag":null,"token_sn":null,"hash_alg":null,"curve_name":null,"compliance_flags":null,"last_update_date":null,"origin":null,"comment":null},{"type":"uid","validity":"f","key_length":null,"pub_key_alg":null,"key_id":null,"creation_date":null,"expiration_date":null,"certsn_uidhash_trustinfo":null,"owner_trust":null,"user_id":"Werner Koch ","signature_class":null,"key_capabilities":null,"cert_fingerprint_other":null,"flag":null,"token_sn":null,"hash_alg":null,"curve_name":null,"compliance_flags":null,"last_update_date":null,"origin":null,"comment":null},{"type":"sub","validity":"f","key_length":"1536","pub_key_alg":"16","key_id":"06AD222CADF6A6E1","creation_date":"919537416","expiration_date":"1036177416","certsn_uidhash_trustinfo":null,"owner_trust":null,"user_id":null,"signature_class":null,"key_capabilities":"e","cert_fingerprint_other":null,"flag":null,"token_sn":null,"hash_alg":null,"curve_name":null,"compliance_flags":null,"last_update_date":null,"origin":null,"comment":null},{"type":"fpr","validity":null,"key_length":null,"pub_key_alg":null,"key_id":null,"creation_date":null,"expiration_date":null,"certsn_uidhash_trustinfo":null,"owner_trust":null,"user_id":"CF8BCC4B18DE08FCD8A1615906AD222CADF6A6E1","signature_class":null,"key_capabilities":null,"cert_fingerprint_other":null,"flag":null,"token_sn":null,"hash_alg":null,"curve_name":null,"compliance_flags":null,"last_update_date":null,"origin":null,"comment":null},{"type":"sub","validity":"r","key_length":"1536","pub_key_alg":"20","key_id":"5CE086B5B5A18FF4","creation_date":"899817788","expiration_date":"1025961788","certsn_uidhash_trustinfo":null,"owner_trust":null,"user_id":null,"signature_class":null,"key_capabilities":"esc","cert_fingerprint_other":null,"flag":null,"token_sn":null,"hash_alg":null,"curve_name":null,"compliance_flags":null,"last_update_date":null,"origin":null,"comment":null},{"type":"fpr","validity":null,"key_length":null,"pub_key_alg":null,"key_id":null,"creation_date":null,"expiration_date":null,"certsn_uidhash_trustinfo":null,"owner_trust":null,"user_id":"AB059359A3B81F410FCFF97F5CE086B5B5A18FF4","signature_class":null,"key_capabilities":null,"cert_fingerprint_other":null,"flag":null,"token_sn":null,"hash_alg":null,"curve_name":null,"compliance_flags":null,"last_update_date":null,"origin":null,"comment":null},{"type":"pkd","index":"0","bits":"1024","value":"B665B1435F4C2FF2
6ABB"},{"type":"tfs","version":"f1","validity":"f2","signature_count":"f3","encryption_count":"f4","policy":"f5","signature_first_seen":"f6","signature_most_recent_seen":"f7","encryption_first_done":"f8","encryption_most_recent_done":"f9"},{"type":"tru","staleness_reason":"o","trust_model":"0","trust_db_created":"1166697654","trust_db_expires":"1","marginally_trusted_users":"3","completely_trusted_users":"1","cert_chain_max_depth":"5"},{"type":"skp","subpacket_number":"f1","hex_flags":"f2","subpacket_length":"f3","subpacket_data":"f4"},{"type":"cfg","version":"1.3.5"},{"type":"cfg","pubkey":"1;2;3;16;17"},{"type":"cfg","cipher":"2;3;4;7;8;9;10"},{"type":"cfg","digest":"1;2;3;8;9;10"},{"type":"cfg","compress":"0;1;2;3"},{"type":"cfg","group":"mynames","members":"patti;joe;0x12345678;paige"},{"type":"cfg","curve_names":"ed25519;nistp256;nistp384;nistp521"}]
diff --git a/tests/fixtures/generic/gpg.out b/tests/fixtures/generic/gpg.out
new file mode 100644
index 00000000..edd41114
--- /dev/null
+++ b/tests/fixtures/generic/gpg.out
@@ -0,0 +1,21 @@
+pub:f:1024:17:6C7EE1B8621CC013:899817715:1055898235::m:::scESC:
+fpr:::::::::ECAF7590EB3443B5C7CF3ACB6C7EE1B8621CC013:
+uid:f::::::::Werner Koch :
+uid:f::::::::Werner Koch :
+sub:f:1536:16:06AD222CADF6A6E1:919537416:1036177416:::::e:
+fpr:::::::::CF8BCC4B18DE08FCD8A1615906AD222CADF6A6E1:
+sub:r:1536:20:5CE086B5B5A18FF4:899817788:1025961788:::::esc:
+fpr:::::::::AB059359A3B81F410FCFF97F5CE086B5B5A18FF4:
+
+pkd:0:1024:B665B1435F4C2FF26ABB:
+tfs:f1:f2:f3:f4:f5:f6:f7:f8:f9:
+tru:o:0:1166697654:1:3:1:5
+skp:f1:f2:f3:f4:
+cfg:version:1.3.5
+cfg:pubkey:1;2;3;16;17
+cfg:cipher:2;3;4;7;8;9;10
+cfg:digest:1;2;3;8;9;10
+cfg:compress:0;1;2;3
+cfg:group:mynames:patti;joe;0x12345678;paige
+cfg:curve:ed25519;nistp256;nistp384;nistp521
+
diff --git a/tests/fixtures/generic/x509-ca-cert.der b/tests/fixtures/generic/x509-ca-cert.der
new file mode 100644
index 00000000..fddd1267
Binary files /dev/null and b/tests/fixtures/generic/x509-ca-cert.der differ
diff --git a/tests/fixtures/generic/x509-ca-cert.json b/tests/fixtures/generic/x509-ca-cert.json
new file mode 100644
index 00000000..44bc52c8
--- /dev/null
+++ b/tests/fixtures/generic/x509-ca-cert.json
@@ -0,0 +1 @@
+[{"tbs_certificate":{"version":"v3","serial_number":"60:01:97:b7:46:a7:ea:b4:b4:9a:d6:4b:2f:f7:90:fb","signature":{"algorithm":"sha256_rsa","parameters":null},"issuer":{"country_name":"US","organization_name":"thawte, Inc.","organizational_unit_name":["Certification Services Division","(c) 2008 thawte, Inc. - For authorized use only"],"common_name":"thawte Primary Root CA - G3"},"validity":{"not_before":1207094400,"not_after":2143324799,"not_before_iso":"2008-04-02T00:00:00+00:00","not_after_iso":"2037-12-01T23:59:59+00:00"},"subject":{"country_name":"US","organization_name":"thawte, Inc.","organizational_unit_name":["Certification Services Division","(c) 2008 thawte, Inc. - For authorized use only"],"common_name":"thawte Primary Root CA - G3"},"subject_public_key_info":{"algorithm":{"algorithm":"rsa","parameters":null},"public_key":{"modulus":"b2:bf:27:2c:fb:db:d8:5b:dd:78:7b:1b:9e:77:66:81:cb:3e:bc:7c:ae:f3:a6:27:9a:34:a3:68:31:71:38:33:62:e4:f3:71:66:79:b1:a9:65:a3:a5:8b:d5:8f:60:2d:3f:42:cc:aa:6b:32:c0:23:cb:2c:41:dd:e4:df:fc:61:9c:e2:73:b2:22:95:11:43:18:5f:c4:b6:1f:57:6c:0a:05:58:22:c8:36:4c:3a:7c:a5:d1:cf:86:af:88:a7:44:02:13:74:71:73:0a:42:59:02:f8:1b:14:6b:42:df:6f:5f:ba:6b:82:a2:9d:5b:e7:4a:bd:1e:01:72:db:4b:74:e8:3b:7f:7f:7d:1f:04:b4:26:9b:e0:b4:5a:ac:47:3d:55:b8:d7:b0:26:52:28:01:31:40:66:d8:d9:24:bd:f6:2a:d8:ec:21:49:5c:9b:f6:7a:e9:7f:55:35:7e:96:6b:8d:93:93:27:cb:92:bb:ea:ac:40:c0:9f:c2:f8:80:cf:5d:f4:5a:dc:ce:74:86:a6:3e:6c:0b:53:ca:bd:92:ce:19:06:72:e6:0c:5c:38:69:c7:04:d6:bc:6c:ce:5b:f6:f7:68:9c:dc:25:15:48:88:a1:e9:a9:f8:98:9c:e0:f3:d5:31:28:61:11:6c:67:96:8d:39:99:cb:c2:45:24:39","public_exponent":65537}},"issuer_unique_id":null,"subject_unique_id":null,"extensions":[{"extn_id":"basic_constraints","critical":true,"extn_value":{"ca":true,"path_len_constraint":null}},{"extn_id":"key_usage","critical":true,"extn_value":["crl_sign","key_cert_sign"]},{"extn_id":"key_identifier","critical":false,"extn_value":"ad:6c:aa:94:60:9c:ed:e4:ff:fa:3e:0a:74:2b:63:03:f7:b6:59:bf"}]},"signature_algorithm":{"algorithm":"sha256_rsa","parameters":null},"signature_value":"1a:40:d8:95:65:ac:09:92:89:c6:39:f4:10:e5:a9:0e:66:53:5d:78:de:fa:24:91:bb:e7:44:51:df:c6:16:34:0a:ef:6a:44:51:ea:2b:07:8a:03:7a:c3:eb:3f:0a:2c:52:16:a0:2b:43:b9:25:90:3f:70:a9:33:25:6d:45:1a:28:3b:27:cf:aa:c3:29:42:1b:df:3b:4c:c0:33:34:5b:41:88:bf:6b:2b:65:af:28:ef:b2:f5:c3:aa:66:ce:7b:56:ee:b7:c8:cb:67:c1:c9:9c:1a:18:b8:c4:c3:49:03:f1:60:0e:50:cd:46:c5:f3:77:79:f7:b6:15:e0:38:db:c7:2f:28:a0:0c:3f:77:26:74:d9:25:12:da:31:da:1a:1e:dc:29:41:91:22:3c:69:a7:bb:02:f2:b6:5c:27:03:89:f4:06:ea:9b:e4:72:82:e3:a1:09:c1:e9:00:19:d3:3e:d4:70:6b:ba:71:a6:aa:58:ae:f4:bb:e9:6c:b6:ef:87:cc:9b:bb:ff:39:e6:56:61:d3:0a:a7:c4:5c:4c:60:7b:05:77:26:7a:bf:d8:07:52:2c:62:f7:70:63:d9:39:bc:6f:1c:c2:79:dc:76:29:af:ce:c5:2c:64:04:5e:88:36:6e:31:d4:40:1a:62:34:36:3f:35:01:ae:ac:63:a0"}]
diff --git a/tests/fixtures/generic/x509-cert-and-key.json b/tests/fixtures/generic/x509-cert-and-key.json
new file mode 100644
index 00000000..5adcba84
--- /dev/null
+++ b/tests/fixtures/generic/x509-cert-and-key.json
@@ -0,0 +1 @@
+[{"tbs_certificate":{"version":"v3","serial_number":"f7:f9:4e:5f:30:7d:ba:c6","signature":{"algorithm":"sha256_rsa","parameters":null},"issuer":{"country_name":"US","state_or_province_name":"California","locality_name":"San Francisco","organization_name":"BadSSL","common_name":"BadSSL Client Root Certificate Authority"},"validity":{"not_before":1652822124,"not_after":1715894124,"not_before_iso":"2022-05-17T21:15:24+00:00","not_after_iso":"2024-05-16T21:15:24+00:00"},"subject":{"country_name":"US","state_or_province_name":"California","locality_name":"San Francisco","organization_name":"BadSSL","common_name":"BadSSL Client Certificate"},"subject_public_key_info":{"algorithm":{"algorithm":"rsa","parameters":null},"public_key":{"modulus":"c7:37:5f:11:eb:1e:4e:cf:eb:ba:48:e5:cb:a3:12:2c:73:3e:46:1d:1e:9c:0d:c0:8b:83:23:da:c7:65:df:5c:77:49:b3:e8:7a:7d:3c:ba:d5:61:8c:f9:a5:c4:85:1d:92:23:06:e3:e7:df:7b:b3:7e:26:d0:cb:1b:be:42:6b:16:69:f4:2c:72:b5:7e:e4:cb:0a:28:44:12:6c:46:74:21:99:03:dc:6b:c3:11:58:02:41:23:3f:b0:fc:bf:b7:00:59:13:22:a5:81:7f:24:fe:d5:53:bc:4d:52:8f:90:4a:46:74:b0:e8:bd:93:a6:cd:90:00:4a:2f:7f:b2:3f:a3:ea:03:3b:01:a0:a2:0d:e6:53:7f:61:12:eb:a6:9b:03:9a:4e:a7:ad:10:e8:e1:1d:c2:0f:ef:09:42:5f:6a:b8:4a:0e:98:bd:b6:3d:cf:ea:a4:e8:cb:d6:38:0e:20:54:84:e7:2d:e0:c1:bc:c3:95:f0:98:a0:02:f9:57:e6:f2:d6:fb:b4:c8:94:a1:4d:32:bc:a2:8e:70:be:98:5c:15:f1:07:69:0f:70:e6:31:60:da:1b:5d:ab:df:54:11:1d:c1:2a:e3:43:b8:bf:b3:7a:3a:86:41:90:96:6f:45:ec:93:c4:b9:58:1b:97:f2:5d:c1:ae:b8:39:82:2a:8d","public_exponent":65537}},"issuer_unique_id":null,"subject_unique_id":null,"extensions":[{"extn_id":"basic_constraints","critical":false,"extn_value":{"ca":false,"path_len_constraint":null}},{"extn_id":"netscape_certificate_type","critical":false,"extn_value":["ssl_client"]},{"extn_id":"key_usage","critical":false,"extn_value":["digital_signature","key_encipherment","non_repudiation"]}]},"signature_algorithm":{"algorithm":"sha256_rsa","parameters":null},"signature_value":"52:34:ca:43:bc:95:21:c5:fa:1d:bd:0c:3b:94:3f:d6:c0:96:ae:3e:7b:61:86:a6:da:94:80:cd:4a:13:2c:e7:11:7d:13:af:0b:c6:63:a9:54:b8:4d:f9:c9:3f:1e:0d:74:ee:db:c9:bf:04:7b:48:6e:18:93:cf:2c:3c:e9:bf:35:48:e0:03:34:1e:11:6c:30:f2:5a:4a:49:f5:d5:54:2d:69:79:c9:a3:bc:a5:73:ea:43:0a:ac:bc:79:09:12:14:40:43:16:95:c5:65:f1:67:f0:6c:b1:33:60:f5:a1:23:68:e6:47:94:52:ef:44:85:85:92:9b:54:ba:61:aa:45:c1:0f:0d:38:6a:4a:f0:47:9d:cf:b3:7e:1c:e1:45:7e:b3:53:54:78:ed:96:7d:89:66:86:49:f6:cd:de:3a:df:69:88:a3:41:1f:7d:60:63:c1:6c:b3:f8:a0:f0:1b:5f:94:d9:a2:19:ee:15:68:06:4f:50:1c:f4:83:f1:9f:13:64:db:47:a0:cc:5b:19:f6:8b:f6:b2:bf:b9:39:16:d9:e6:19:0f:ce:c2:10:15:ea:58:06:58:0c:04:7a:5a:2b:ae:a1:f3:3f:6e:2f:9c:56:0c:7c:85:c2:7f:d0:17:fb:ab:c4:1d:42:fb:fc:4b:96:ff:3c:30:d2:d6:9d:ae:09:25:2c:b6:cc:43:51:df:4b:3e:78:f2:d8:bc:34:b9:81:6d:f2:3a:38:12:4d:64:25:32:e8:a8:8b:e5:5b:24:3a:9e:a5:67:29:3c:34:57:34:c0:b2:b2:6e:80:b5:96:0e:69:7f:fb:e0:f0:36:98:2d:93:fd:1c:2f:28:30:c9:31:9b:3a:3f:48:bb:fd:e8:83:40:59:05:64:74:35:d7:5e:17:b1:6f:5a:ab:63:24:8f:d0:51:58:c8:2c:ab:a8:84:aa:44:b2:13:09:51:26:3b:6e:35:7d:85:41:45:24:54:a9:92:7f:8f:d6:e9:20:03:06:45:64:d6:58:f3:d1:7e:01:7e:16:0b:45:e1:b9:a1:e3:2c:43:ff:1c:9a:aa:e4:c7:82:cb:80:86:d7:3f:17:2c:96:31:93:1b:d4:41:64:24:c0:36:6e:14:b9:ed:eb:da:6d:48:52:1f:31:c1:11:c0:69:71:e0:04:97:11:4f:a4:c6:fc:3a:69:93:b9:02:0a:e0:d2:6b:9e:88:0e:69:1a:e0:fd:17:37:80:01:f4:d0:27:c3:01:f4:64:c5:fc:44:ca:d7:e9:75:55:be:61:fd:5d:7c:ee:47:1d:5b:f6:15:d8:5e:00:dd:23:b3:fa:95:f4:61:79:04:6a:b6:82:97:6c:ab:be:78:c1:8d"}]
diff --git a/tests/fixtures/generic/x509-cert-and-key.pem b/tests/fixtures/generic/x509-cert-and-key.pem
new file mode 100644
index 00000000..0bd98f37
--- /dev/null
+++ b/tests/fixtures/generic/x509-cert-and-key.pem
@@ -0,0 +1,64 @@
+Bag Attributes
+ localKeyID: 68 B5 22 00 77 DE 8B BE AE D8 E1 C2 54 0F EC 6C 16 B4 18 A8
+subject=/C=US/ST=California/L=San Francisco/O=BadSSL/CN=BadSSL Client Certificate
+issuer=/C=US/ST=California/L=San Francisco/O=BadSSL/CN=BadSSL Client Root Certificate Authority
+-----BEGIN CERTIFICATE-----
+MIIEnTCCAoWgAwIBAgIJAPf5Tl8wfbrGMA0GCSqGSIb3DQEBCwUAMH4xCzAJBgNV
+BAYTAlVTMRMwEQYDVQQIDApDYWxpZm9ybmlhMRYwFAYDVQQHDA1TYW4gRnJhbmNp
+c2NvMQ8wDQYDVQQKDAZCYWRTU0wxMTAvBgNVBAMMKEJhZFNTTCBDbGllbnQgUm9v
+dCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkwHhcNMjIwNTE3MjExNTI0WhcNMjQwNTE2
+MjExNTI0WjBvMQswCQYDVQQGEwJVUzETMBEGA1UECAwKQ2FsaWZvcm5pYTEWMBQG
+A1UEBwwNU2FuIEZyYW5jaXNjbzEPMA0GA1UECgwGQmFkU1NMMSIwIAYDVQQDDBlC
+YWRTU0wgQ2xpZW50IENlcnRpZmljYXRlMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A
+MIIBCgKCAQEAxzdfEeseTs/rukjly6MSLHM+Rh0enA3Ai4Mj2sdl31x3SbPoen08
+utVhjPmlxIUdkiMG4+ffe7N+JtDLG75CaxZp9CxytX7kywooRBJsRnQhmQPca8MR
+WAJBIz+w/L+3AFkTIqWBfyT+1VO8TVKPkEpGdLDovZOmzZAASi9/sj+j6gM7AaCi
+DeZTf2ES66abA5pOp60Q6OEdwg/vCUJfarhKDpi9tj3P6qToy9Y4DiBUhOct4MG8
+w5XwmKAC+Vfm8tb7tMiUoU0yvKKOcL6YXBXxB2kPcOYxYNobXavfVBEdwSrjQ7i/
+s3o6hkGQlm9F7JPEuVgbl/Jdwa64OYIqjQIDAQABoy0wKzAJBgNVHRMEAjAAMBEG
+CWCGSAGG+EIBAQQEAwIHgDALBgNVHQ8EBAMCBeAwDQYJKoZIhvcNAQELBQADggIB
+AFI0ykO8lSHF+h29DDuUP9bAlq4+e2GGptqUgM1KEyznEX0TrwvGY6lUuE35yT8e
+DXTu28m/BHtIbhiTzyw86b81SOADNB4RbDDyWkpJ9dVULWl5yaO8pXPqQwqsvHkJ
+EhRAQxaVxWXxZ/BssTNg9aEjaOZHlFLvRIWFkptUumGqRcEPDThqSvBHnc+zfhzh
+RX6zU1R47ZZ9iWaGSfbN3jrfaYijQR99YGPBbLP4oPAbX5TZohnuFWgGT1Ac9IPx
+nxNk20egzFsZ9ov2sr+5ORbZ5hkPzsIQFepYBlgMBHpaK66h8z9uL5xWDHyFwn/Q
+F/urxB1C+/xLlv88MNLWna4JJSy2zENR30s+ePLYvDS5gW3yOjgSTWQlMuioi+Vb
+JDqepWcpPDRXNMCysm6AtZYOaX/74PA2mC2T/RwvKDDJMZs6P0i7/eiDQFkFZHQ1
+114XsW9aq2Mkj9BRWMgsq6iEqkSyEwlRJjtuNX2FQUUkVKmSf4/W6SADBkVk1ljz
+0X4BfhYLReG5oeMsQ/8cmqrkx4LLgIbXPxcsljGTG9RBZCTANm4Uue3r2m1IUh8x
+wRHAaXHgBJcRT6TG/Dppk7kCCuDSa56IDmka4P0XN4AB9NAnwwH0ZMX8RMrX6XVV
+vmH9XXzuRx1b9hXYXgDdI7P6lfRheQRqtoKXbKu+eMGN
+-----END CERTIFICATE-----
+Bag Attributes
+ localKeyID: 68 B5 22 00 77 DE 8B BE AE D8 E1 C2 54 0F EC 6C 16 B4 18 A8
+Key Attributes:
+-----BEGIN ENCRYPTED PRIVATE KEY-----
+MIIFDjBABgkqhkiG9w0BBQ0wMzAbBgkqhkiG9w0BBQwwDgQIRZnUXGoJtbUCAggA
+MBQGCCqGSIb3DQMHBAjgV2LmquuUdwSCBMjaZtNVgqmwUsevUBY4Rx3+Y+Z4lfHT
+7BJqv53p0hOMZdaLRAhK4hHfTLrtQRYK+rEroTF1gElfTUnmjdGBs0b2sLaNrRAW
+3W5nv//r6HahZ4RmsBOHS2GBj4V3BEa2c2VdjyZ+HuZ/nnqMQyNzBooLXctkyp0/
+10UO4a8IU0iPPRI+Zu4d/A1UAGDW2RRbNiYBDbUNdplZr0T1y5vC2felHHptlp8i
+cslBR4ASRllYG/kjC6fyI+Jz5yGaFKd3Jlr9wc7cMml8rMHTQrhYScHK2D1L3Bgp
+yK3sa9EVu7lBlBAEs8vjl5RE5rO6G487wZpmCX21j3lh6IOyjEEJXPfiFOZxt6Zn
+437Aumr84MvuTyPP8mI65B/grdsvth8R8k8dns0n9SKWmW84jtkwRa3yHx3g/9ef
+lz5uqHoO1J3+em/rFrj+eJXTrXE4Sqe2Ohb96bs+6ho2Hz8+B+zoQfeGV2/nPduC
+Lty/VVnr5YoRPfbmin4rmbmVrURLLRnQ4RujbVKp4msmEQIYa/B6sMzY7gSKFFpT
+jJm2nLy0eU0FEWbXPTDn0qi5vcHo7lIahBLX6TRTcV4vkhh9miPQc64TwB+tIjhf
+B0/k3gjBblmIbBIb+6N9LZRqO5D4CIN7Su6w3e0lmdVIR+NvU1NMynpVsJvkM44j
+YVfQQFckHe0BdmAdsZrcjyTe35P/lNwssZ58XiM3P3W7zrhL2infXJ5sZQ9QOdqS
+hIoSSBigGF7CAl2UqZ4G/Yf2MvbswtH9RjRIwZGDA09PrKwfa5lK2y9RnyEeC0Qn
+OPis+XmzIA+brOwpDOIdVN6AmZWtyxQnlp+Ad26wm5ACAM5hH1uAJ3Pro9+sDK0J
+dDp3LH6eehjt1NJfpmj9yDxgXchsouo2JeE/+Nc46uDc+vEe23bmi9xMqvhlidIT
+XtGWjyIWs69iEa78qg+HonHM+ahq7ugSj3zwkKXDc3+67ia5/J6RWh4CiH5+AIt+
+5hpgEYZLtAR13JaMm/fH+2loV2vgX55for3CoVbo5pYm2s5Kk+tJ8MFVHw7Khr6i
+l+3EEuxfmtT/CqXhusMJ5Yh1sNpCIw14wBGI5Bey9Dw5Su57vE1cZndO94O6XI7t
+kK4zxtYy47UU5wOGf5VrUJhEr/YY0hjB5zDRlLoEkXGQ9NmcW3X956b2NeqnX4Tz
+xP5hEvOYFKKyn3XPXtjvY+X8cGMmvhOSQ7/C8uz0G0+IpiYI99H8Yxl+26OiPxdI
+u4XLEJg3/L82OKs0ekUkNYowGJnlxDQ40wWZUBQonn1imqY3OKn4gXr5OxDvCqX2
+fXF2eYkOTmOQGpfjBPcdfDwcRU7eCUS/csW1Ri/XyGI+xdX9SG+2grXLqA1o/BFX
+aTxSx5Yw6tZ7knhj53KtxFX8AFSPLQ5CJxsC5YyNAuj7ALsKZtJlZA34kXh23+bS
+yAZ/ABg/Us6+F7TXnGPItETChwgCwEK637vQmxeQ4A/Y63HWUEAWkdUiinWjKDXS
+NjVDK67Qj3KoOz4Qr4lNZuFkbIE7ipWhQpRudM2NBlYOLlSp2eg2AE97BPi9zM3v
+TWSxEAt03CPm1fZtUvsxnlT5LFfZt/EiZpWRaHAyNIAu7kHDThBcYb2ms82aSRVq
+DUc=
+-----END ENCRYPTED PRIVATE KEY-----
diff --git a/tests/fixtures/generic/x509-letsencrypt.json b/tests/fixtures/generic/x509-letsencrypt.json
new file mode 100644
index 00000000..b2d3265b
--- /dev/null
+++ b/tests/fixtures/generic/x509-letsencrypt.json
@@ -0,0 +1 @@
+[{"tbs_certificate":{"version":"v3","serial_number":"04:c1:47:a5:16:71:a8:ad:84:6f:e5:cf:ec:ca:42:cc:c2:ad","signature":{"algorithm":"sha256_rsa","parameters":null},"issuer":{"country_name":"US","organization_name":"Let's Encrypt","common_name":"R3"},"validity":{"not_before":1655260836,"not_after":1663036835,"not_before_iso":"2022-06-15T02:40:36+00:00","not_after_iso":"2022-09-13T02:40:35+00:00"},"subject":{"common_name":"tls.automattic.com"},"subject_public_key_info":{"algorithm":{"algorithm":"rsa","parameters":null},"public_key":{"modulus":"c2:4b:45:fe:e7:f1:ec:68:7c:dd:e5:9b:b6:2d:2b:31:dd:5c:9f:d4:4f:07:19:d7:d6:c5:6a:6b:44:38:ec:e7:c5:88:ff:88:f2:75:46:ef:10:e5:28:9d:2d:cf:3f:60:d1:65:b9:69:44:9b:59:99:fb:8b:00:cd:71:88:87:0a:30:2a:17:bf:5d:97:e1:c0:56:98:ad:87:c5:00:9e:c6:bd:25:78:de:9e:d7:ee:53:5a:9f:16:23:51:5e:f3:a8:09:42:70:d1:2d:6f:11:6e:94:7e:db:1d:45:fc:0a:0d:f9:e5:a2:87:33:f4:71:d2:39:e5:22:22:9b:86:31:97:b5:3d:d1:35:68:a2:8d:75:2e:4c:ae:14:2b:51:cd:90:cf:d6:43:d4:49:80:3f:42:ab:1f:21:37:05:1e:ea:08:0d:e0:4d:e0:b6:cc:48:bb:f4:7e:8e:e9:0d:3a:02:85:89:ae:d0:f4:9a:f7:85:6b:0d:58:c9:1f:a6:db:ac:0c:d5:3d:62:b8:45:a8:77:31:3f:51:c6:84:dc:fe:1c:d8:b5:a3:93:2e:78:9d:e4:fe:72:7a:81:e9:6f:26:fe:4c:61:3a:55:6d:bd:f8:4a:38:68:5e:97:e3:36:c3:d6:bc:31:2b:c7:c8:ad:ee:64:56:3f:0f:ea:4b:f9:a5:b1:6b","public_exponent":65537}},"issuer_unique_id":null,"subject_unique_id":null,"extensions":[{"extn_id":"key_usage","critical":true,"extn_value":["digital_signature","key_encipherment"]},{"extn_id":"extended_key_usage","critical":false,"extn_value":["server_auth","client_auth"]},{"extn_id":"basic_constraints","critical":true,"extn_value":{"ca":false,"path_len_constraint":null}},{"extn_id":"key_identifier","critical":false,"extn_value":"63:cf:50:5f:d2:3e:a0:75:61:86:d9:60:1b:ec:d9:d8:dd:c7:30:5a"},{"extn_id":"authority_key_identifier","critical":false,"extn_value":{"key_identifier":"14:2e:b3:17:b7:58:56:cb:ae:50:09:40:e6:1f:af:9d:8b:14:c2:c6","authority_cert_issuer":null,"authority_cert_serial_number":null}},{"extn_id":"authority_information_access","critical":false,"extn_value":[{"access_method":"ocsp","access_location":"http://r3.o.lencr.org"},{"access_method":"ca_issuers","access_location":"http://r3.i.lencr.org/"}]},{"extn_id":"subject_alt_name","critical":false,"extn_value":["baffl.ca","blog.kellybrazil.com","bro-pa.org","competence.game.blog","dirtyroulette366.game.blog","giftsbypearl.com","giuman.me","globaltransactorsltd.com","grilltimerestaurants.com","gureametsetakolorategia.com","happyluckyenjoy.blog","healthbasedbeauty.fitness.blog","healthisknowledge.com","imake3ddesigns.com","javascript.game.blog","journeyingbacktowellness.health.blog","jquery.game.blog","kahlertregionalcancer.org","karmadesignstudios.graphics","noticia.science.blog","reyvingamer.game.blog","sailingresferber.co.uk","stardust.game.blog","sweetlove.fashion.blog","tls.automattic.com","wanderlustwatts.com","www.angelguardians.legal","www.baffl.ca","www.blog.kellybrazil.com","www.bro-pa.org","www.competence.game.blog","www.dirtyroulette366.game.blog","www.giftsbypearl.com","www.giuman.me","www.globaltransactorsltd.com","www.grilltimerestaurants.com","www.gureametsetakolorategia.com","www.happyluckyenjoy.blog","www.healthbasedbeauty.fitness.blog","www.healthisknowledge.com","www.imake3ddesigns.com","www.javascript.game.blog","www.journeyingbacktowellness.health.blog","www.jquery.game.blog","www.kahlertregionalcancer.org","www.karmadesignstudios.graphics","www.reyvingamer.game.blog","www.ruplayingboard.game.blog","www.stardust.g
ame.blog","www.sweetlove.fashion.blog"]},{"extn_id":"certificate_policies","critical":false,"extn_value":[{"policy_identifier":"2.23.140.1.2.1","policy_qualifiers":null},{"policy_identifier":"1.3.6.1.4.1.44947.1.1.1","policy_qualifiers":[{"policy_qualifier_id":"certification_practice_statement","qualifier":"http://cps.letsencrypt.org"}]}]},{"extn_id":"signed_certificate_timestamp_list","critical":false,"extn_value":"00:f0:00:76:00:46:a5:55:eb:75:fa:91:20:30:b5:a2:89:69:f4:f3:7d:11:2c:41:74:be:fd:49:b8:85:ab:f2:fc:70:fe:6d:47:00:00:01:81:65:72:e3:44:00:00:04:03:00:47:30:45:02:21:00:de:61:59:6e:de:1c:75:7c:28:d7:3e:78:80:d3:85:5f:8a:ff:93:85:8d:e3:4a:e4:f7:2f:99:1d:36:b4:c4:62:02:20:6a:cd:23:6b:f4:27:41:a4:1d:9c:95:cd:36:be:2d:24:9b:87:aa:3c:14:15:70:5f:f5:e9:fa:d1:14:dc:df:da:00:76:00:6f:53:76:ac:31:f0:31:19:d8:99:00:a4:51:15:ff:77:15:1c:11:d9:02:c1:00:29:06:8d:b2:08:9a:37:d9:13:00:00:01:81:65:72:e4:18:00:00:04:03:00:47:30:45:02:21:00:f5:40:50:bc:99:c2:e9:a2:74:7a:83:f9:ec:6b:e5:c5:89:62:5a:37:b0:0c:51:e4:1c:11:f7:8b:bb:a4:97:d9:02:20:07:35:4e:5d:41:3b:ef:83:9f:18:e0:58:60:06:63:51:22:2d:8c:82:ae:b2:5e:a7:c2:5c:f8:2d:4b:50:14:86"}]},"signature_algorithm":{"algorithm":"sha256_rsa","parameters":null},"signature_value":"a8:ad:cb:4f:b7:ec:b9:d1:c0:50:8e:dd:e1:14:93:cb:be:e5:6e:45:07:2a:eb:92:f0:be:3d:bd:0e:e6:45:c7:8d:79:3e:09:d7:40:48:d6:8a:22:43:10:39:14:11:e8:f9:b8:a5:52:32:90:0c:92:94:74:57:bf:6e:3d:b0:3f:ce:a1:06:5f:9b:e2:0a:da:5a:ff:83:4f:28:2b:ac:cb:67:03:0b:7b:86:aa:d7:60:c2:4b:6f:fd:66:bd:8d:61:d1:48:24:29:5c:43:49:4e:79:2a:8e:3a:46:3b:ce:9b:f6:e5:9f:dc:ed:c8:ad:d4:a6:ee:e3:33:af:4c:34:41:27:de:b8:d5:63:df:45:8f:a7:11:78:71:28:a7:d8:29:5f:0b:8b:e2:07:44:c9:54:be:e0:a3:77:82:2d:07:5c:f0:4e:0a:11:06:6b:dc:90:f7:df:fb:60:28:96:f0:81:dc:4d:24:a8:53:0f:e3:d0:f0:22:fa:5e:a7:eb:a9:e4:5f:d4:cc:70:b7:c5:b9:7c:4b:e9:3a:aa:1b:a8:c8:2a:b2:87:79:d5:6d:63:b4:2e:7e:d7:24:9a:fc:0d:8f:ac:04:bb:98:ce:05:ae:6f:07:0b:49:cd:d6:ad:f9:37:7d:ff:1b:fc:e6:3a:25:9b:ea:d3:b8:bb:a7:83:44:84:6d"}]
diff --git a/tests/fixtures/generic/x509-letsencrypt.pem b/tests/fixtures/generic/x509-letsencrypt.pem
new file mode 100644
index 00000000..19b427b7
--- /dev/null
+++ b/tests/fixtures/generic/x509-letsencrypt.pem
@@ -0,0 +1,56 @@
+-----BEGIN CERTIFICATE-----
+MIIJ7zCCCNegAwIBAgISBMFHpRZxqK2Eb+XP7MpCzMKtMA0GCSqGSIb3DQEBCwUA
+MDIxCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1MZXQncyBFbmNyeXB0MQswCQYDVQQD
+EwJSMzAeFw0yMjA2MTUwMjQwMzZaFw0yMjA5MTMwMjQwMzVaMB0xGzAZBgNVBAMT
+EnRscy5hdXRvbWF0dGljLmNvbTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC
+ggEBAMJLRf7n8exofN3lm7YtKzHdXJ/UTwcZ19bFamtEOOznxYj/iPJ1Ru8Q5Sid
+Lc8/YNFluWlEm1mZ+4sAzXGIhwowKhe/XZfhwFaYrYfFAJ7GvSV43p7X7lNanxYj
+UV7zqAlCcNEtbxFulH7bHUX8Cg355aKHM/Rx0jnlIiKbhjGXtT3RNWiijXUuTK4U
+K1HNkM/WQ9RJgD9Cqx8hNwUe6ggN4E3gtsxIu/R+jukNOgKFia7Q9Jr3hWsNWMkf
+ptusDNU9YrhFqHcxP1HGhNz+HNi1o5MueJ3k/nJ6gelvJv5MYTpVbb34SjhoXpfj
+NsPWvDErx8it7mRWPw/qS/mlsWsCAwEAAaOCBxIwggcOMA4GA1UdDwEB/wQEAwIF
+oDAdBgNVHSUEFjAUBggrBgEFBQcDAQYIKwYBBQUHAwIwDAYDVR0TAQH/BAIwADAd
+BgNVHQ4EFgQUY89QX9I+oHVhhtlgG+zZ2N3HMFowHwYDVR0jBBgwFoAUFC6zF7dY
+VsuuUAlA5h+vnYsUwsYwVQYIKwYBBQUHAQEESTBHMCEGCCsGAQUFBzABhhVodHRw
+Oi8vcjMuby5sZW5jci5vcmcwIgYIKwYBBQUHMAKGFmh0dHA6Ly9yMy5pLmxlbmNy
+Lm9yZy8wggTgBgNVHREEggTXMIIE04IIYmFmZmwuY2GCFGJsb2cua2VsbHlicmF6
+aWwuY29tggpicm8tcGEub3JnghRjb21wZXRlbmNlLmdhbWUuYmxvZ4IaZGlydHly
+b3VsZXR0ZTM2Ni5nYW1lLmJsb2eCEGdpZnRzYnlwZWFybC5jb22CCWdpdW1hbi5t
+ZYIYZ2xvYmFsdHJhbnNhY3RvcnNsdGQuY29tghhncmlsbHRpbWVyZXN0YXVyYW50
+cy5jb22CG2d1cmVhbWV0c2V0YWtvbG9yYXRlZ2lhLmNvbYIUaGFwcHlsdWNreWVu
+am95LmJsb2eCHmhlYWx0aGJhc2VkYmVhdXR5LmZpdG5lc3MuYmxvZ4IVaGVhbHRo
+aXNrbm93bGVkZ2UuY29tghJpbWFrZTNkZGVzaWducy5jb22CFGphdmFzY3JpcHQu
+Z2FtZS5ibG9ngiRqb3VybmV5aW5nYmFja3Rvd2VsbG5lc3MuaGVhbHRoLmJsb2eC
+EGpxdWVyeS5nYW1lLmJsb2eCGWthaGxlcnRyZWdpb25hbGNhbmNlci5vcmeCG2th
+cm1hZGVzaWduc3R1ZGlvcy5ncmFwaGljc4IUbm90aWNpYS5zY2llbmNlLmJsb2eC
+FXJleXZpbmdhbWVyLmdhbWUuYmxvZ4IWc2FpbGluZ3Jlc2ZlcmJlci5jby51a4IS
+c3RhcmR1c3QuZ2FtZS5ibG9nghZzd2VldGxvdmUuZmFzaGlvbi5ibG9nghJ0bHMu
+YXV0b21hdHRpYy5jb22CE3dhbmRlcmx1c3R3YXR0cy5jb22CGHd3dy5hbmdlbGd1
+YXJkaWFucy5sZWdhbIIMd3d3LmJhZmZsLmNhghh3d3cuYmxvZy5rZWxseWJyYXpp
+bC5jb22CDnd3dy5icm8tcGEub3Jnghh3d3cuY29tcGV0ZW5jZS5nYW1lLmJsb2eC
+Hnd3dy5kaXJ0eXJvdWxldHRlMzY2LmdhbWUuYmxvZ4IUd3d3LmdpZnRzYnlwZWFy
+bC5jb22CDXd3dy5naXVtYW4ubWWCHHd3dy5nbG9iYWx0cmFuc2FjdG9yc2x0ZC5j
+b22CHHd3dy5ncmlsbHRpbWVyZXN0YXVyYW50cy5jb22CH3d3dy5ndXJlYW1ldHNl
+dGFrb2xvcmF0ZWdpYS5jb22CGHd3dy5oYXBweWx1Y2t5ZW5qb3kuYmxvZ4Iid3d3
+LmhlYWx0aGJhc2VkYmVhdXR5LmZpdG5lc3MuYmxvZ4IZd3d3LmhlYWx0aGlza25v
+d2xlZGdlLmNvbYIWd3d3LmltYWtlM2RkZXNpZ25zLmNvbYIYd3d3LmphdmFzY3Jp
+cHQuZ2FtZS5ibG9ngih3d3cuam91cm5leWluZ2JhY2t0b3dlbGxuZXNzLmhlYWx0
+aC5ibG9nghR3d3cuanF1ZXJ5LmdhbWUuYmxvZ4Idd3d3LmthaGxlcnRyZWdpb25h
+bGNhbmNlci5vcmeCH3d3dy5rYXJtYWRlc2lnbnN0dWRpb3MuZ3JhcGhpY3OCGXd3
+dy5yZXl2aW5nYW1lci5nYW1lLmJsb2eCHHd3dy5ydXBsYXlpbmdib2FyZC5nYW1l
+LmJsb2eCFnd3dy5zdGFyZHVzdC5nYW1lLmJsb2eCGnd3dy5zd2VldGxvdmUuZmFz
+aGlvbi5ibG9nMEwGA1UdIARFMEMwCAYGZ4EMAQIBMDcGCysGAQQBgt8TAQEBMCgw
+JgYIKwYBBQUHAgEWGmh0dHA6Ly9jcHMubGV0c2VuY3J5cHQub3JnMIIBBAYKKwYB
+BAHWeQIEAgSB9QSB8gDwAHYARqVV63X6kSAwtaKJafTzfREsQXS+/Um4havy/HD+
+bUcAAAGBZXLjRAAABAMARzBFAiEA3mFZbt4cdXwo1z54gNOFX4r/k4WN40rk9y+Z
+HTa0xGICIGrNI2v0J0GkHZyVzTa+LSSbh6o8FBVwX/Xp+tEU3N/aAHYAb1N2rDHw
+MRnYmQCkURX/dxUcEdkCwQApBo2yCJo32RMAAAGBZXLkGAAABAMARzBFAiEA9UBQ
+vJnC6aJ0eoP57GvlxYliWjewDFHkHBH3i7ukl9kCIAc1Tl1BO++DnxjgWGAGY1Ei
+LYyCrrJep8Jc+C1LUBSGMA0GCSqGSIb3DQEBCwUAA4IBAQCorctPt+y50cBQjt3h
+FJPLvuVuRQcq65Lwvj29DuZFx415PgnXQEjWiiJDEDkUEej5uKVSMpAMkpR0V79u
+PbA/zqEGX5viCtpa/4NPKCusy2cDC3uGqtdgwktv/Wa9jWHRSCQpXENJTnkqjjpG
+O86b9uWf3O3IrdSm7uMzr0w0QSfeuNVj30WPpxF4cSin2ClfC4viB0TJVL7go3eC
+LQdc8E4KEQZr3JD33/tgKJbwgdxNJKhTD+PQ8CL6XqfrqeRf1Mxwt8W5fEvpOqob
+qMgqsod51W1jtC5+1ySa/A2PrAS7mM4Frm8HC0nN1q35N33/G/zmOiWb6tO4u6eD
+RIRt
+-----END CERTIFICATE-----
diff --git a/tests/fixtures/generic/x509-multi-cert.json b/tests/fixtures/generic/x509-multi-cert.json
new file mode 100644
index 00000000..c78efc70
--- /dev/null
+++ b/tests/fixtures/generic/x509-multi-cert.json
@@ -0,0 +1 @@
+[{"tbs_certificate":{"version":"v3","serial_number":"01","signature":{"algorithm":"sha1_rsa","parameters":null},"issuer":{"country_name":"FR","state_or_province_name":"Alsace","locality_name":"Strasbourg","organization_name":"www.freelan.org","organizational_unit_name":"freelan","common_name":"Freelan Sample Certificate Authority","email_address":"contact@freelan.org"},"validity":{"not_before":1335522678,"not_after":1650882678,"not_before_iso":"2012-04-27T10:31:18+00:00","not_after_iso":"2022-04-25T10:31:18+00:00"},"subject":{"country_name":"FR","state_or_province_name":"Alsace","organization_name":"www.freelan.org","organizational_unit_name":"freelan","common_name":"alice","email_address":"contact@freelan.org"},"subject_public_key_info":{"algorithm":{"algorithm":"rsa","parameters":null},"public_key":{"modulus":"dd:6d:bd:f8:80:fa:d7:de:1b:1f:a7:a3:2e:b2:02:e2:16:f6:52:0a:3c:bf:a6:42:f8:ca:dc:93:67:4d:60:c3:4f:8d:c3:8a:00:1b:f1:c4:4b:41:6a:69:d2:69:e5:3f:21:8e:c5:0b:f8:22:37:ad:b6:2c:4b:55:ff:7a:03:72:bb:9a:d3:ec:96:b9:56:9f:cb:19:99:c9:32:94:6f:8f:c6:52:06:9f:45:03:df:fd:e8:97:f6:ea:d6:ba:bb:48:2b:b5:e0:34:61:4d:52:36:0f:ab:87:52:25:03:cf:87:00:87:13:f2:ca:03:29:16:9d:90:57:46:b5:f4:0e:ae:17:c8:0a:4d:92:ed:08:a6:32:23:11:71:fe:f2:2c:44:d7:6c:07:f3:0b:7b:0c:4b:dd:3b:b4:f7:37:70:9f:51:b6:88:4e:5d:6a:05:7f:8d:9b:66:7a:ab:80:20:fe:ee:6b:97:c3:49:7d:78:3b:d5:99:97:03:75:ce:8f:bc:c5:be:9c:9a:a5:12:19:70:f9:a4:bd:96:27:ed:23:02:a7:c7:57:c9:71:cf:76:94:a2:21:62:f6:b8:1d:ca:88:ee:09:ad:46:2f:b7:61:b3:2c:15:13:86:9f:a5:35:26:5a:67:f4:37:c8:e6:80:01:49:0e:c7:ed:61:d3:cd:bc:e4:f8:be:3f:c9:4e:f8:7d:97:89:ce:12:bc:ca:b5:c6:d2:e0:d9:b3:68:3c:2e:4a:9d:b4:5f:b8:53:ee:50:3d:bf:dd:d4:a2:8a:b6:a0:27:ab:98:0c:b3:b2:58:90:e2:bc:a1:ad:ff:bd:8e:55:31:0f:00:bf:68:e9:3d:a9:19:9a:f0:6d:0b:a2:14:6a:c6:4c:c6:4e:bd:63:12:a5:0b:4d:97:eb:42:09:79:53:e2:65:aa:24:34:70:b8:c1:ab:23:80:e7:9c:6c:ed:dc:82:aa:37:04:b8:43:2a:3d:2a:a8:cc:20:fc:27:5d:90:26:58:f9:b7:14:e2:9e:e2:c1:70:73:97:e9:6b:02:8e:d3:52:59:7b:00:ec:61:30:f1:56:3f:9c:c1:7c:05:c5:b1:36:c8:18:85:cf:61:40:1f:07:e8:a7:06:87:df:9a:77:0b:a9:64:72:03:f6:93:fc:e0:02:59:c1:96:ec:c0:09:42:3e:30:a2:7f:1b:48:2f:fe:e0:21:8f:53:87:25:0d:cb:ea:49:f5:4a:9b:d0:e3:5f:ee:78:18:e5:ba:71:31:a9:04:98:0f:b1:ad:67:52:a0:f2:e3:9c:ab:6a:fe:58:84:84:dd:07:3d:32:94:05:16:45:15:96:59:a0:58:6c:18:0e:e3:77:66:c7:b3:f7:99","public_exponent":65537}},"issuer_unique_id":null,"subject_unique_id":null,"extensions":[{"extn_id":"basic_constraints","critical":false,"extn_value":{"ca":false,"path_len_constraint":null}},{"extn_id":"2.16.840.1.113730.1.13","critical":false,"extn_value":"16:1d:4f:70:65:6e:53:53:4c:20:47:65:6e:65:72:61:74:65:64:20:43:65:72:74:69:66:69:63:61:74:65"},{"extn_id":"key_identifier","critical":false,"extn_value":"59:5f:c9:13:ba:1b:cc:b9:a8:41:4a:8a:49:79:6a:36:f6:7d:3e:d7"},{"extn_id":"authority_key_identifier","critical":false,"extn_value":{"key_identifier":"23:6c:2d:3d:3e:29:5d:78:b8:6c:3e:aa:e2:bb:2e:1e:6c:87:f2:53","authority_cert_issuer":null,"authority_cert_serial_number":null}}]},"signature_algorithm":{"algorithm":"sha1_rsa","parameters":null},"signature_value":"13:e7:02:45:3e:a7:ab:bd:b8:da:e7:ef:74:88:ac:62:d5:dd:10:56:d5:46:07:ec:fa:6a:80:0c:b9:62:be:aa:08:b4:be:0b:eb:9a:ef:68:b7:69:6f:4d:20:92:9d:18:63:7a:23:f4:48:87:6a:14:c3:91:98:1b:4e:08:59:3f:91:80:e9:f4:cf:fd:d5:bf:af:4b:e4:bd:78:09:71:ac:d0:81:e5:53:9f:3e:ac:44:3e:9f:f0:bf:5a:c1:70:4e:06:04:ef:dc:e8:77:05:a2:7d:c5:fa:80:58:0a:c5:10:6d:90:ca:49:26:71:84:39:b7:9a:3e:e9:6f:ae:c5:35:b6:5b:24:8c:c9:ef:41:c3:b1:17:b6:3
b:4e:28:89:3c:7e:87:a8:3a:a5:6d:dc:39:03:20:20:0b:c5:80:a3:79:13:1e:f6:ec:ae:36:df:40:74:34:87:46:93:3b:a3:e0:a4:8c:2f:43:4c:b2:54:80:71:76:78:d4:ea:12:28:d8:f2:e3:80:55:11:9b:f4:65:dc:53:0e:b4:4c:e0:4c:09:b4:dc:a0:80:5c:e6:b5:3b:95:d3:69:e4:52:3d:5b:61:86:02:e5:fd:0b:00:3a:fa:b3:45:cc:c9:a3:64:f2:dc:25:59:89:58:0d:9e:6e:28:3a:55:45:50:5f:88:67:2a:d2:e2:48:cc:8b:de:9a:1b:93:ae:87:e1:f2:90:50:40:d9:0f:44:31:53:46:ad:62:4e:8d:48:86:19:77:fc:59:75:91:79:35:59:1d:e3:4e:33:5b:e2:31:d7:ee:52:28:5f:0a:70:a7:be:bb:1c:03:ca:1a:18:d0:f5:c1:5b:9c:73:04:b6:4a:e8:46:52:58:76:d4:6a:e6:67:1c:0e:dc:13:d0:61:72:a0:92:cb:05:97:47:1c:c1:c9:cf:41:7d:1f:b1:4d:93:6b:53:41:03:21:2b:93:15:63:08:3e:2c:86:9e:7b:9f:3a:09:05:6a:7d:bb:1c:a7:b7:af:96:08:cb:5b:df:07:fb:9c:f2:95:11:c0:82:81:f6:1b:bf:5a:1e:58:cd:28:ca:7d:04:eb:aa:e9:29:c4:82:51:2c:89:61:95:b6:ed:a5:86:7c:7c:48:1d:ec:54:96:47:79:ea:fc:7f:f5:10:43:0a:9b:00:ef:8a:77:2e:f4:36:66:d2:6a:a6:95:b6:9f:23:3b:12:e2:89:d5:a4:c1:2c:91:4e:cb:94:e8:3f:22:0e:21:f9:b8:4a:81:5c:4c:63:ae:3d:05:b2:5c:5c:54:a7:55:8f:98:25:55:c4:a6:90:bc:19:29:b1:14:d4:e2:b0:95:e4:ff:89:71:61:be:8a:16:85"},{"tbs_certificate":{"version":"v3","serial_number":"02","signature":{"algorithm":"sha1_rsa","parameters":null},"issuer":{"country_name":"FR","state_or_province_name":"Alsace","locality_name":"Strasbourg","organization_name":"www.freelan.org","organizational_unit_name":"freelan","common_name":"Freelan Sample Certificate Authority","email_address":"contact@freelan.org"},"validity":{"not_before":1335524080,"not_after":1650884080,"not_before_iso":"2012-04-27T10:54:40+00:00","not_after_iso":"2022-04-25T10:54:40+00:00"},"subject":{"country_name":"FR","state_or_province_name":"Alsace","organization_name":"www.freelan.org","organizational_unit_name":"freelan","common_name":"bob","email_address":"contact@freelan.org"},"subject_public_key_info":{"algorithm":{"algorithm":"rsa","parameters":null},"public_key":{"modulus":"c2:3f:43:14:4a:d4:dd:43:5a:b9:43:5e:2d:bb:89:a1:17:18:f7:ae:47:4b:7a:f4:d4:dc:a3:e1:b7:85:3a:10:20:eb:bc:51:18:d8:8b:25:c6:04:95:4f:80:e9:05:5c:00:f4:7c:23:7b:d1:ad:81:58:f1:9d:43:c3:37:ee:7f:61:03:b5:ff:29:bb:10:1a:fb:a8:77:97:9b:de:4c:7d:3f:ca:ff:53:8c:37:30:b6:88:f2:0e:be:7c:dc:92:76:c9:5f:22:96:19:0b:91:ea:9c:18:96:9f:43:d1:9d:22:9e:d9:c3:12:9f:80:05:85:1f:70:bb:87:5d:63:c1:5a:51:3d:7e:69:3d:76:6d:b0:56:ea:db:3f:ae:f0:cd:0c:19:48:b1:f2:d5:2e:e7:fa:12:dd:15:bc:8c:dc:09:c2:26:9c:dc:22:52:8e:c8:1c:c1:cd:01:bd:1a:24:c5:be:4f:18:08:f3:de:59:1c:8f:63:a6:63:1d:4f:5a:92:68:7a:49:94:26:54:d1:83:be:16:e4:5e:8f:73:2f:81:3a:3a:30:80:fd:57:a9:7f:1b:7b:e5:0f:6c:01:68:f7:1f:45:49:fe:06:3c:08:57:64:27:a5:0b:55:18:b7:30:be:08:45:70:8b:cd:43:ea:fc:80:1e:03:5c:c3:52:8d:a9:55:53:55:f4:61:2e:8b:50:64:6a:30:a7:6f:bd:b8:80:12:ee:66:98:d8:78:5f:a0:f5:65:6a:6d:f5:09:cc:62:4d:55:56:80:21:75:48:73:4d:b9:e3:f9:1d:96:c9:2c:5d:79:4d:3c:c5:7a:9e:84:ff:9d:c7:94:87:0a:3e:69:81:d2:7f:c0:5f:67:9c:06:8c:33:5c:a3:9f:52:e7:04:c7:d3:81:ef:b2:77:1e:d0:57:1f:1f:90:a5:69:c0:0d:43:c5:f6:a6:7e:f7:ea:45:7c:60:b6:68:1f:64:59:dc:60:33:c2:13:8c:b7:06:c2:2a:cd:cc:2b:02:de:a2:e9:70:0c:db:79:fe:ce:eb:5e:c0:06:eb:76:43:09:e0:2a:c7:ee:1e:6a:af:60:49:73:3c:a8:53:8c:e1:39:2c:e7:9e:fe:fd:44:20:f0:85:9a:1f:eb:c7:40:c8:5b:90:43:e6:a1:6a:00:50:4b:73:73:72:c5:39:77:13:1e:3c:95:be:a9:37:6a:d1:4e:34:3d:34:ec:87:f8:1e:6c:e7:dc:8b:7f:8e:d1:3c:78:c2:e2:09:93:d7:c0:68:ae:70:81:b9:f0:d0:f7:26:a4:e2:c0:12:1d:2f:01:63:eb:53:05:cb:aa:db:66:b0:fb:16:9b:e7:e7:be:c3:66:da:5c:c9","public_exponent":65537}},"issuer_unique_id":null,"subject_unique_id":null,"e
xtensions":[{"extn_id":"basic_constraints","critical":false,"extn_value":{"ca":false,"path_len_constraint":null}},{"extn_id":"2.16.840.1.113730.1.13","critical":false,"extn_value":"16:1d:4f:70:65:6e:53:53:4c:20:47:65:6e:65:72:61:74:65:64:20:43:65:72:74:69:66:69:63:61:74:65"},{"extn_id":"key_identifier","critical":false,"extn_value":"9c:d2:71:50:35:f7:10:43:dd:e8:ce:75:29:a3:53:5d:11:a7:a8:3b"},{"extn_id":"authority_key_identifier","critical":false,"extn_value":{"key_identifier":"23:6c:2d:3d:3e:29:5d:78:b8:6c:3e:aa:e2:bb:2e:1e:6c:87:f2:53","authority_cert_issuer":null,"authority_cert_serial_number":null}}]},"signature_algorithm":{"algorithm":"sha1_rsa","parameters":null},"signature_value":"c3:b0:a4:82:f5:64:e5:4e:a0:e5:74:5e:c4:3d:d0:9c:f7:4e:f7:8d:af:8b:2e:80:59:63:b5:6e:2f:10:5b:66:d6:29:2a:ca:e2:01:20:68:e1:2b:ff:d6:e1:e1:f2:a6:e0:cc:f5:8f:9f:5c:72:b8:fa:81:76:7d:5c:ee:60:29:e5:d7:de:8f:4a:9c:55:3e:e5:27:1c:76:bc:35:e7:16:80:6f:32:77:fd:57:ae:51:87:fb:be:c2:a1:cc:76:9a:61:01:c9:ff:86:00:ff:d1:96:cd:ff:2c:0f:48:9e:ae:83:d8:df:d4:78:1d:4c:37:87:f5:58:5d:26:c6:ca:16:cd:fa:16:1d:6f:42:ae:57:4a:99:45:52:80:5c:1c:76:42:a8:f8:f3:15:9c:1b:3e:36:01:e0:09:5e:d8:19:b1:ed:a0:ef:3b:c7:09:a7:aa:5f:b6:2d:c1:20:84:9b:2c:87:1a:2b:35:de:9e:9c:0c:d9:0c:5e:cf:51:38:d6:d6:80:ae:91:15:b5:c6:22:df:7e:17:9f:c3:eb:bf:fd:d5:3b:4b:ea:66:00:72:a0:b5:b7:65:a8:5a:d9:a8:f1:67:c1:41:d8:79:dd:cc:2f:78:7a:9e:5e:0a:9d:77:0e:59:52:49:d2:10:94:1c:eb:f4:3c:04:0e:3c:1c:1a:75:a6:e8:23:d5:f0:73:14:90:b1:71:5a:32:57:8d:34:d7:6a:61:dc:73:1a:da:1d:1f:56:a7:2e:ef:0d:a4:f5:fb:94:0b:f4:cf:1d:d2:10:0f:07:cd:ba:9d:78:87:e8:04:63:6a:e5:7a:6b:20:bd:bd:29:c2:39:5b:fc:86:84:77:0b:e3:f8:2c:37:ac:af:1b:ed:4f:b9:d6:08:a3:ac:2f:31:07:4a:f8:8e:cf:11:dd:92:1c:c9:aa:c7:a5:b7:62:a4:77:6e:58:20:78:17:cb:5e:ef:6d:41:eb:b6:c2:1f:7f:a1:de:fa:bb:71:92:20:de:b1:5e:34:84:6c:ed:6c:e1:43:86:13:f0:3f:d7:2d:c5:ba:c0:de:37:8d:48:bc:df:c7:4f:b3:a6:a5:e5:c2:db:f1:ef:db:0c:25:69:e6:58:8d:ba:72:bd:5e:3f:cf:81:36:b6:ab:ee:a8:67:8f:ee:bb:fe:6f:c9:1f:8a:1f:ef:e9:c9:7a:52:40:ad:a0:3f:23:45:7a:63:95:98:3d:12:b8:e2:f3:0b:88:10:38:04:68:b0:f1:a7:8b:d0:61:d7:0f:2f:cf:17:51:21:eb:76:69:2d:19:e8:01:c5:33:fd:61:cd:46:64:87:89:43:e9:31:d0:be:88:a0:a2:82:0c:7f:9f:66:41:3a:9a:5a:6a"},{"tbs_certificate":{"version":"v3","serial_number":"03","signature":{"algorithm":"sha1_rsa","parameters":null},"issuer":{"country_name":"FR","state_or_province_name":"Alsace","locality_name":"Strasbourg","organization_name":"www.freelan.org","organizational_unit_name":"freelan","common_name":"Freelan Sample Certificate 
Authority","email_address":"contact@freelan.org"},"validity":{"not_before":1335524093,"not_after":1650884093,"not_before_iso":"2012-04-27T10:54:53+00:00","not_after_iso":"2022-04-25T10:54:53+00:00"},"subject":{"country_name":"FR","state_or_province_name":"Alsace","organization_name":"www.freelan.org","organizational_unit_name":"freelan","common_name":"carol","email_address":"contact@freelan.org"},"subject_public_key_info":{"algorithm":{"algorithm":"rsa","parameters":null},"public_key":{"modulus":"d7:c0:a7:c6:e9:48:c4:53:40:b3:76:d9:2f:37:28:3d:a3:c4:42:d0:76:cd:08:9b:50:e3:1c:51:e5:14:72:fa:2b:a0:b1:06:23:f3:c1:ad:92:7c:79:fe:15:54:d1:e5:67:62:da:ed:81:aa:7e:e2:b1:50:a9:fb:d8:29:09:da:84:4d:3c:f4:6e:13:ab:0b:d5:ee:80:63:32:7d:57:af:83:3c:1c:27:ed:ec:67:d6:fd:1c:13:2d:40:bf:d1:da:bf:7a:b6:67:7e:b0:75:3b:6d:61:9d:cc:6c:1a:97:f1:56:de:9f:80:d3:16:60:bb:8a:6f:46:9b:be:34:75:c3:4c:d2:f1:c8:f3:3e:98:28:30:e4:cb:2d:25:61:62:48:be:2e:dc:ed:90:93:ae:74:b7:fa:49:43:65:20:ac:8e:fe:52:6c:00:8e:51:3e:b6:9a:c6:4f:44:1c:7b:84:17:bd:5c:f6:36:e9:4c:91:89:6f:4e:ad:ac:10:41:c5:c5:65:8a:20:c8:f7:27:a3:ea:ac:5b:74:09:99:27:88:60:c7:44:69:18:0c:32:1a:77:f2:47:53:46:e3:12:c5:69:95:45:15:9a:14:60:76:20:a7:b5:8c:51:bf:5a:57:19:5a:c7:a8:bc:0b:c4:30:ca:0b:e6:d0:f8:c4:a8:84:d9:24:a2:92:f6:84:f2:13:ea:a4:93:97:fe:ed:77:d8:2f:75:7a:2c:39:88:3c:44:56:0a:ef:12:57:d5:9e:8f:35:8e:7f:84:e7:1a:d1:19:8d:23:db:b5:ce:c5:7f:e1:88:6d:04:d6:01:de:f0:72:3e:51:95:1d:4f:30:b6:32:0a:0f:84:b5:00:34:e4:bf:80:71:10:62:14:c1:32:5a:a9:a6:de:c2:58:e8:52:eb:66:5a:b8:5e:c2:06:7c:a6:6a:33:f2:1e:8a:41:07:53:bb:6b:41:92:59:85:79:04:a9:df:56:4c:e0:62:1e:98:87:95:07:b1:10:49:34:9c:90:4c:0b:83:25:27:9f:01:27:fb:d0:c4:6e:50:cc:f5:02:47:2c:45:9a:31:e5:ce:7d:86:8f:db:fd:83:ea:a6:00:49:71:14:44:a1:8e:9d:ba:a4:a4:cf:9d:15:20:2d:67:76:42:81:63:a2:76:4e:4b:22:b5:de:3d:d8:f8:e0:43:7f:a3:10:f0:73:fb:6e:e1:6a:37:99:dc:87:a3:05:4c:29:f5:63:14:9b:eb:a3:3a:9b:2b:b4:51:f5:05:03:de:41:e5:cb:1a:8e:76:eb:47:93:53:90:71:c5:8f:86:5f:9e:0b:4d:33:9c:3c:88:8a:90:9f:90:a6:35:90:81:f1","public_exponent":65537}},"issuer_unique_id":null,"subject_unique_id":null,"extensions":[{"extn_id":"basic_constraints","critical":false,"extn_value":{"ca":false,"path_len_constraint":null}},{"extn_id":"2.16.840.1.113730.1.13","critical":false,"extn_value":"16:1d:4f:70:65:6e:53:53:4c:20:47:65:6e:65:72:61:74:65:64:20:43:65:72:74:69:66:69:63:61:74:65"},{"extn_id":"key_identifier","critical":false,"extn_value":"b5:5d:0d:4f:55:f6:75:1a:23:b3:f5:8c:bc:6b:5a:b6:96:6c:ae:e0"},{"extn_id":"authority_key_identifier","critical":false,"extn_value":{"key_identifier":"23:6c:2d:3d:3e:29:5d:78:b8:6c:3e:aa:e2:bb:2e:1e:6c:87:f2:53","authority_cert_issuer":null,"authority_cert_serial_number":null}}]},"signature_algorithm":{"algorithm":"sha1_rsa","parameters":null},"signature_value":"bf:3f:e7:16:a2:ba:b1:cf:d6:79:f3:84:ed:a5:10:3e:60:42:0e:d5:1a:c6:e9:b1:39:86:5a:2e:dd:ae:b6:b7:16:33:33:17:3e:83:f7:a1:f7:b4:1b:09:74:8f:9b:0d:8e:4c:c7:a1:d6:66:6c:02:3a:b5:f2:72:aa:c9:e4:b3:c6:9d:6e:c0:48:dc:39:21:30:18:a0:6f:cb:09:be:de:0f:63:83:04:32:73:a7:bc:42:34:b7:a1:dc:21:21:08:86:65:bc:2e:c5:78:ae:fb:fe:ab:fb:8b:85:bf:61:e0:e2:aa:52:5f:1e:0d:19:22:13:94:7a:b4:bd:5c:30:8d:43:22:b4:e9:13:62:7e:3e:f5:e2:7a:2a:3b:da:1f:57:4a:5d:b8:6c:4c:f5:6e:34:b9:bd:b4:1f:dc:88:d0:28:20:a2:0c:31:e8:7f:3a:23:b8:60:48:c8:4e:e1:02:62:ae:00:fb:d0:a5:76:cb:ea:f3:d7:75:0d:9e:56:48:c1:2e:44:c7:0c:9f:03:b3:ac:96:c5:a2:a0:06:9e:2b:c3:eb:b5:04:15:33:79:4a:9e:28:94:1d:28:50:98:e3:eb:b5:74:69:7f:69:bc:61:72:d1:8a:cc:fb:89:be:51:34:81:11:7b:fa:8a:cf:e7:
bf:81:91:34:1a:11:63:92:41:eb:62:7d:7a:2a:5a:2b:a3:85:36:5b:39:08:40:6b:0d:bc:b7:ed:36:42:60:45:ee:0c:27:f1:41:38:9e:db:99:8f:0f:ff:1b:ea:02:98:9f:19:21:33:ca:a2:47:89:cb:1d:a9:4c:94:b6:3d:b2:e2:bf:1d:f7:12:8d:01:ff:77:d6:72:65:70:ca:80:8e:a2:2d:78:0c:b2:9d:84:3a:50:f9:e8:8e:85:03:58:eb:0a:d3:5b:d3:55:d0:bd:7d:de:c8:5b:80:ea:0e:53:d6:35:86:60:10:ed:bd:06:f4:59:15:64:75:4c:bd:2f:fb:8a:fa:c1:d0:c2:d9:68:09:2b:9a:91:c4:00:b1:65:7d:6d:a8:c2:42:d1:d7:f1:71:ae:db:96:33:e7:a9:29:27:f3:89:8d:c8:ac:87:14:fa:a5:cf:ec:b6:1b:a6:03:93:d7:ef:7f:49:b0:d5:22:fe:9e:5a:1b:e1:ff:e9:e3:71:fa:e9:09:3f:b4:1a:33:ae:3a:60:27:d2:e6:2f:12:f4:32:54:be:29:be:fc:14:a5:2a:2d:99:88:e0:9d:d0:c6:07:e1:76:fb:96:60:0e:4c:d9:93:bd:26:29:2a:8f:49:d9:f6:7d:7a:bc:34:31:84:81:4f:28:e1:e8:5e:cf:45:b1:c1:8a:2b:e0:52:72:5f:19"}]
diff --git a/tests/fixtures/generic/x509-multi-cert.pem b/tests/fixtures/generic/x509-multi-cert.pem
new file mode 100644
index 00000000..d39024db
--- /dev/null
+++ b/tests/fixtures/generic/x509-multi-cert.pem
@@ -0,0 +1,285 @@
+-----BEGIN CERTIFICATE-----
+MIIGJzCCBA+gAwIBAgIBATANBgkqhkiG9w0BAQUFADCBsjELMAkGA1UEBhMCRlIx
+DzANBgNVBAgMBkFsc2FjZTETMBEGA1UEBwwKU3RyYXNib3VyZzEYMBYGA1UECgwP
+d3d3LmZyZWVsYW4ub3JnMRAwDgYDVQQLDAdmcmVlbGFuMS0wKwYDVQQDDCRGcmVl
+bGFuIFNhbXBsZSBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkxIjAgBgkqhkiG9w0BCQEW
+E2NvbnRhY3RAZnJlZWxhbi5vcmcwHhcNMTIwNDI3MTAzMTE4WhcNMjIwNDI1MTAz
+MTE4WjB+MQswCQYDVQQGEwJGUjEPMA0GA1UECAwGQWxzYWNlMRgwFgYDVQQKDA93
+d3cuZnJlZWxhbi5vcmcxEDAOBgNVBAsMB2ZyZWVsYW4xDjAMBgNVBAMMBWFsaWNl
+MSIwIAYJKoZIhvcNAQkBFhNjb250YWN0QGZyZWVsYW4ub3JnMIICIjANBgkqhkiG
+9w0BAQEFAAOCAg8AMIICCgKCAgEA3W29+ID6194bH6ejLrIC4hb2Ugo8v6ZC+Mrc
+k2dNYMNPjcOKABvxxEtBamnSaeU/IY7FC/giN622LEtV/3oDcrua0+yWuVafyxmZ
+yTKUb4/GUgafRQPf/eiX9urWurtIK7XgNGFNUjYPq4dSJQPPhwCHE/LKAykWnZBX
+RrX0Dq4XyApNku0IpjIjEXH+8ixE12wH8wt7DEvdO7T3N3CfUbaITl1qBX+Nm2Z6
+q4Ag/u5rl8NJfXg71ZmXA3XOj7zFvpyapRIZcPmkvZYn7SMCp8dXyXHPdpSiIWL2
+uB3KiO4JrUYvt2GzLBUThp+lNSZaZ/Q3yOaAAUkOx+1h08285Pi+P8lO+H2Xic4S
+vMq1xtLg2bNoPC5KnbRfuFPuUD2/3dSiiragJ6uYDLOyWJDivKGt/72OVTEPAL9o
+6T2pGZrwbQuiFGrGTMZOvWMSpQtNl+tCCXlT4mWqJDRwuMGrI4DnnGzt3IKqNwS4
+Qyo9KqjMIPwnXZAmWPm3FOKe4sFwc5fpawKO01JZewDsYTDxVj+cwXwFxbE2yBiF
+z2FAHwfopwaH35p3C6lkcgP2k/zgAlnBluzACUI+MKJ/G0gv/uAhj1OHJQ3L6kn1
+SpvQ41/ueBjlunExqQSYD7GtZ1Kg8uOcq2r+WISE3Qc9MpQFFkUVllmgWGwYDuN3
+Zsez95kCAwEAAaN7MHkwCQYDVR0TBAIwADAsBglghkgBhvhCAQ0EHxYdT3BlblNT
+TCBHZW5lcmF0ZWQgQ2VydGlmaWNhdGUwHQYDVR0OBBYEFFlfyRO6G8y5qEFKikl5
+ajb2fT7XMB8GA1UdIwQYMBaAFCNsLT0+KV14uGw+quK7Lh5sh/JTMA0GCSqGSIb3
+DQEBBQUAA4ICAQAT5wJFPqervbja5+90iKxi1d0QVtVGB+z6aoAMuWK+qgi0vgvr
+mu9ot2lvTSCSnRhjeiP0SIdqFMORmBtOCFk/kYDp9M/91b+vS+S9eAlxrNCB5VOf
+PqxEPp/wv1rBcE4GBO/c6HcFon3F+oBYCsUQbZDKSSZxhDm3mj7pb67FNbZbJIzJ
+70HDsRe2O04oiTx+h6g6pW3cOQMgIAvFgKN5Ex727K4230B0NIdGkzuj4KSML0NM
+slSAcXZ41OoSKNjy44BVEZv0ZdxTDrRM4EwJtNyggFzmtTuV02nkUj1bYYYC5f0L
+ADr6s0XMyaNk8twlWYlYDZ5uKDpVRVBfiGcq0uJIzIvemhuTrofh8pBQQNkPRDFT
+Rq1iTo1Ihhl3/Fl1kXk1WR3jTjNb4jHX7lIoXwpwp767HAPKGhjQ9cFbnHMEtkro
+RlJYdtRq5mccDtwT0GFyoJLLBZdHHMHJz0F9H7FNk2tTQQMhK5MVYwg+LIaee586
+CQVqfbscp7evlgjLW98H+5zylRHAgoH2G79aHljNKMp9BOuq6SnEglEsiWGVtu2l
+hnx8SB3sVJZHeer8f/UQQwqbAO+Kdy70NmbSaqaVtp8jOxLiidWkwSyRTsuU6D8i
+DiH5uEqBXExjrj0FslxcVKdVj5glVcSmkLwZKbEU1OKwleT/iXFhvooWhQ==
+-----END CERTIFICATE-----
+Certificate:
+ Data:
+ Version: 3 (0x2)
+ Serial Number: 2 (0x2)
+ Signature Algorithm: sha1WithRSAEncryption
+ Issuer: C=FR, ST=Alsace, L=Strasbourg, O=www.freelan.org, OU=freelan, CN=Freelan Sample Certificate Authority/emailAddress=contact@freelan.org
+ Validity
+ Not Before: Apr 27 10:54:40 2012 GMT
+ Not After : Apr 25 10:54:40 2022 GMT
+ Subject: C=FR, ST=Alsace, O=www.freelan.org, OU=freelan, CN=bob/emailAddress=contact@freelan.org
+ Subject Public Key Info:
+ Public Key Algorithm: rsaEncryption
+ Public-Key: (4096 bit)
+ Modulus:
+ 00:c2:3f:43:14:4a:d4:dd:43:5a:b9:43:5e:2d:bb:
+ 89:a1:17:18:f7:ae:47:4b:7a:f4:d4:dc:a3:e1:b7:
+ 85:3a:10:20:eb:bc:51:18:d8:8b:25:c6:04:95:4f:
+ 80:e9:05:5c:00:f4:7c:23:7b:d1:ad:81:58:f1:9d:
+ 43:c3:37:ee:7f:61:03:b5:ff:29:bb:10:1a:fb:a8:
+ 77:97:9b:de:4c:7d:3f:ca:ff:53:8c:37:30:b6:88:
+ f2:0e:be:7c:dc:92:76:c9:5f:22:96:19:0b:91:ea:
+ 9c:18:96:9f:43:d1:9d:22:9e:d9:c3:12:9f:80:05:
+ 85:1f:70:bb:87:5d:63:c1:5a:51:3d:7e:69:3d:76:
+ 6d:b0:56:ea:db:3f:ae:f0:cd:0c:19:48:b1:f2:d5:
+ 2e:e7:fa:12:dd:15:bc:8c:dc:09:c2:26:9c:dc:22:
+ 52:8e:c8:1c:c1:cd:01:bd:1a:24:c5:be:4f:18:08:
+ f3:de:59:1c:8f:63:a6:63:1d:4f:5a:92:68:7a:49:
+ 94:26:54:d1:83:be:16:e4:5e:8f:73:2f:81:3a:3a:
+ 30:80:fd:57:a9:7f:1b:7b:e5:0f:6c:01:68:f7:1f:
+ 45:49:fe:06:3c:08:57:64:27:a5:0b:55:18:b7:30:
+ be:08:45:70:8b:cd:43:ea:fc:80:1e:03:5c:c3:52:
+ 8d:a9:55:53:55:f4:61:2e:8b:50:64:6a:30:a7:6f:
+ bd:b8:80:12:ee:66:98:d8:78:5f:a0:f5:65:6a:6d:
+ f5:09:cc:62:4d:55:56:80:21:75:48:73:4d:b9:e3:
+ f9:1d:96:c9:2c:5d:79:4d:3c:c5:7a:9e:84:ff:9d:
+ c7:94:87:0a:3e:69:81:d2:7f:c0:5f:67:9c:06:8c:
+ 33:5c:a3:9f:52:e7:04:c7:d3:81:ef:b2:77:1e:d0:
+ 57:1f:1f:90:a5:69:c0:0d:43:c5:f6:a6:7e:f7:ea:
+ 45:7c:60:b6:68:1f:64:59:dc:60:33:c2:13:8c:b7:
+ 06:c2:2a:cd:cc:2b:02:de:a2:e9:70:0c:db:79:fe:
+ ce:eb:5e:c0:06:eb:76:43:09:e0:2a:c7:ee:1e:6a:
+ af:60:49:73:3c:a8:53:8c:e1:39:2c:e7:9e:fe:fd:
+ 44:20:f0:85:9a:1f:eb:c7:40:c8:5b:90:43:e6:a1:
+ 6a:00:50:4b:73:73:72:c5:39:77:13:1e:3c:95:be:
+ a9:37:6a:d1:4e:34:3d:34:ec:87:f8:1e:6c:e7:dc:
+ 8b:7f:8e:d1:3c:78:c2:e2:09:93:d7:c0:68:ae:70:
+ 81:b9:f0:d0:f7:26:a4:e2:c0:12:1d:2f:01:63:eb:
+ 53:05:cb:aa:db:66:b0:fb:16:9b:e7:e7:be:c3:66:
+ da:5c:c9
+ Exponent: 65537 (0x10001)
+ X509v3 extensions:
+ X509v3 Basic Constraints:
+ CA:FALSE
+ Netscape Comment:
+ OpenSSL Generated Certificate
+ X509v3 Subject Key Identifier:
+ 9C:D2:71:50:35:F7:10:43:DD:E8:CE:75:29:A3:53:5D:11:A7:A8:3B
+ X509v3 Authority Key Identifier:
+ keyid:23:6C:2D:3D:3E:29:5D:78:B8:6C:3E:AA:E2:BB:2E:1E:6C:87:F2:53
+
+ Signature Algorithm: sha1WithRSAEncryption
+ c3:b0:a4:82:f5:64:e5:4e:a0:e5:74:5e:c4:3d:d0:9c:f7:4e:
+ f7:8d:af:8b:2e:80:59:63:b5:6e:2f:10:5b:66:d6:29:2a:ca:
+ e2:01:20:68:e1:2b:ff:d6:e1:e1:f2:a6:e0:cc:f5:8f:9f:5c:
+ 72:b8:fa:81:76:7d:5c:ee:60:29:e5:d7:de:8f:4a:9c:55:3e:
+ e5:27:1c:76:bc:35:e7:16:80:6f:32:77:fd:57:ae:51:87:fb:
+ be:c2:a1:cc:76:9a:61:01:c9:ff:86:00:ff:d1:96:cd:ff:2c:
+ 0f:48:9e:ae:83:d8:df:d4:78:1d:4c:37:87:f5:58:5d:26:c6:
+ ca:16:cd:fa:16:1d:6f:42:ae:57:4a:99:45:52:80:5c:1c:76:
+ 42:a8:f8:f3:15:9c:1b:3e:36:01:e0:09:5e:d8:19:b1:ed:a0:
+ ef:3b:c7:09:a7:aa:5f:b6:2d:c1:20:84:9b:2c:87:1a:2b:35:
+ de:9e:9c:0c:d9:0c:5e:cf:51:38:d6:d6:80:ae:91:15:b5:c6:
+ 22:df:7e:17:9f:c3:eb:bf:fd:d5:3b:4b:ea:66:00:72:a0:b5:
+ b7:65:a8:5a:d9:a8:f1:67:c1:41:d8:79:dd:cc:2f:78:7a:9e:
+ 5e:0a:9d:77:0e:59:52:49:d2:10:94:1c:eb:f4:3c:04:0e:3c:
+ 1c:1a:75:a6:e8:23:d5:f0:73:14:90:b1:71:5a:32:57:8d:34:
+ d7:6a:61:dc:73:1a:da:1d:1f:56:a7:2e:ef:0d:a4:f5:fb:94:
+ 0b:f4:cf:1d:d2:10:0f:07:cd:ba:9d:78:87:e8:04:63:6a:e5:
+ 7a:6b:20:bd:bd:29:c2:39:5b:fc:86:84:77:0b:e3:f8:2c:37:
+ ac:af:1b:ed:4f:b9:d6:08:a3:ac:2f:31:07:4a:f8:8e:cf:11:
+ dd:92:1c:c9:aa:c7:a5:b7:62:a4:77:6e:58:20:78:17:cb:5e:
+ ef:6d:41:eb:b6:c2:1f:7f:a1:de:fa:bb:71:92:20:de:b1:5e:
+ 34:84:6c:ed:6c:e1:43:86:13:f0:3f:d7:2d:c5:ba:c0:de:37:
+ 8d:48:bc:df:c7:4f:b3:a6:a5:e5:c2:db:f1:ef:db:0c:25:69:
+ e6:58:8d:ba:72:bd:5e:3f:cf:81:36:b6:ab:ee:a8:67:8f:ee:
+ bb:fe:6f:c9:1f:8a:1f:ef:e9:c9:7a:52:40:ad:a0:3f:23:45:
+ 7a:63:95:98:3d:12:b8:e2:f3:0b:88:10:38:04:68:b0:f1:a7:
+ 8b:d0:61:d7:0f:2f:cf:17:51:21:eb:76:69:2d:19:e8:01:c5:
+ 33:fd:61:cd:46:64:87:89:43:e9:31:d0:be:88:a0:a2:82:0c:
+ 7f:9f:66:41:3a:9a:5a:6a
+-----BEGIN CERTIFICATE-----
+MIIGJTCCBA2gAwIBAgIBAjANBgkqhkiG9w0BAQUFADCBsjELMAkGA1UEBhMCRlIx
+DzANBgNVBAgMBkFsc2FjZTETMBEGA1UEBwwKU3RyYXNib3VyZzEYMBYGA1UECgwP
+d3d3LmZyZWVsYW4ub3JnMRAwDgYDVQQLDAdmcmVlbGFuMS0wKwYDVQQDDCRGcmVl
+bGFuIFNhbXBsZSBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkxIjAgBgkqhkiG9w0BCQEW
+E2NvbnRhY3RAZnJlZWxhbi5vcmcwHhcNMTIwNDI3MTA1NDQwWhcNMjIwNDI1MTA1
+NDQwWjB8MQswCQYDVQQGEwJGUjEPMA0GA1UECAwGQWxzYWNlMRgwFgYDVQQKDA93
+d3cuZnJlZWxhbi5vcmcxEDAOBgNVBAsMB2ZyZWVsYW4xDDAKBgNVBAMMA2JvYjEi
+MCAGCSqGSIb3DQEJARYTY29udGFjdEBmcmVlbGFuLm9yZzCCAiIwDQYJKoZIhvcN
+AQEBBQADggIPADCCAgoCggIBAMI/QxRK1N1DWrlDXi27iaEXGPeuR0t69NTco+G3
+hToQIOu8URjYiyXGBJVPgOkFXAD0fCN70a2BWPGdQ8M37n9hA7X/KbsQGvuod5eb
+3kx9P8r/U4w3MLaI8g6+fNySdslfIpYZC5HqnBiWn0PRnSKe2cMSn4AFhR9wu4dd
+Y8FaUT1+aT12bbBW6ts/rvDNDBlIsfLVLuf6Et0VvIzcCcImnNwiUo7IHMHNAb0a
+JMW+TxgI895ZHI9jpmMdT1qSaHpJlCZU0YO+FuRej3MvgTo6MID9V6l/G3vlD2wB
+aPcfRUn+BjwIV2QnpQtVGLcwvghFcIvNQ+r8gB4DXMNSjalVU1X0YS6LUGRqMKdv
+vbiAEu5mmNh4X6D1ZWpt9QnMYk1VVoAhdUhzTbnj+R2WySxdeU08xXqehP+dx5SH
+Cj5pgdJ/wF9nnAaMM1yjn1LnBMfTge+ydx7QVx8fkKVpwA1DxfamfvfqRXxgtmgf
+ZFncYDPCE4y3BsIqzcwrAt6i6XAM23n+zutewAbrdkMJ4CrH7h5qr2BJczyoU4zh
+OSznnv79RCDwhZof68dAyFuQQ+ahagBQS3NzcsU5dxMePJW+qTdq0U40PTTsh/ge
+bOfci3+O0Tx4wuIJk9fAaK5wgbnw0PcmpOLAEh0vAWPrUwXLqttmsPsWm+fnvsNm
+2lzJAgMBAAGjezB5MAkGA1UdEwQCMAAwLAYJYIZIAYb4QgENBB8WHU9wZW5TU0wg
+R2VuZXJhdGVkIENlcnRpZmljYXRlMB0GA1UdDgQWBBSc0nFQNfcQQ93oznUpo1Nd
+EaeoOzAfBgNVHSMEGDAWgBQjbC09PildeLhsPqriuy4ebIfyUzANBgkqhkiG9w0B
+AQUFAAOCAgEAw7CkgvVk5U6g5XRexD3QnPdO942viy6AWWO1bi8QW2bWKSrK4gEg
+aOEr/9bh4fKm4Mz1j59ccrj6gXZ9XO5gKeXX3o9KnFU+5Sccdrw15xaAbzJ3/Veu
+UYf7vsKhzHaaYQHJ/4YA/9GWzf8sD0ieroPY39R4HUw3h/VYXSbGyhbN+hYdb0Ku
+V0qZRVKAXBx2Qqj48xWcGz42AeAJXtgZse2g7zvHCaeqX7YtwSCEmyyHGis13p6c
+DNkMXs9RONbWgK6RFbXGIt9+F5/D67/91TtL6mYAcqC1t2WoWtmo8WfBQdh53cwv
+eHqeXgqddw5ZUknSEJQc6/Q8BA48HBp1pugj1fBzFJCxcVoyV40012ph3HMa2h0f
+Vqcu7w2k9fuUC/TPHdIQDwfNup14h+gEY2rlemsgvb0pwjlb/IaEdwvj+Cw3rK8b
+7U+51gijrC8xB0r4js8R3ZIcyarHpbdipHduWCB4F8te721B67bCH3+h3vq7cZIg
+3rFeNIRs7WzhQ4YT8D/XLcW6wN43jUi838dPs6al5cLb8e/bDCVp5liNunK9Xj/P
+gTa2q+6oZ4/uu/5vyR+KH+/pyXpSQK2gPyNFemOVmD0SuOLzC4gQOARosPGni9Bh
+1w8vzxdRIet2aS0Z6AHFM/1hzUZkh4lD6THQvoigooIMf59mQTqaWmo=
+-----END CERTIFICATE-----
+Certificate:
+ Data:
+ Version: 3 (0x2)
+ Serial Number: 3 (0x3)
+ Signature Algorithm: sha1WithRSAEncryption
+ Issuer: C=FR, ST=Alsace, L=Strasbourg, O=www.freelan.org, OU=freelan, CN=Freelan Sample Certificate Authority/emailAddress=contact@freelan.org
+ Validity
+ Not Before: Apr 27 10:54:53 2012 GMT
+ Not After : Apr 25 10:54:53 2022 GMT
+ Subject: C=FR, ST=Alsace, O=www.freelan.org, OU=freelan, CN=carol/emailAddress=contact@freelan.org
+ Subject Public Key Info:
+ Public Key Algorithm: rsaEncryption
+ Public-Key: (4096 bit)
+ Modulus:
+ 00:d7:c0:a7:c6:e9:48:c4:53:40:b3:76:d9:2f:37:
+ 28:3d:a3:c4:42:d0:76:cd:08:9b:50:e3:1c:51:e5:
+ 14:72:fa:2b:a0:b1:06:23:f3:c1:ad:92:7c:79:fe:
+ 15:54:d1:e5:67:62:da:ed:81:aa:7e:e2:b1:50:a9:
+ fb:d8:29:09:da:84:4d:3c:f4:6e:13:ab:0b:d5:ee:
+ 80:63:32:7d:57:af:83:3c:1c:27:ed:ec:67:d6:fd:
+ 1c:13:2d:40:bf:d1:da:bf:7a:b6:67:7e:b0:75:3b:
+ 6d:61:9d:cc:6c:1a:97:f1:56:de:9f:80:d3:16:60:
+ bb:8a:6f:46:9b:be:34:75:c3:4c:d2:f1:c8:f3:3e:
+ 98:28:30:e4:cb:2d:25:61:62:48:be:2e:dc:ed:90:
+ 93:ae:74:b7:fa:49:43:65:20:ac:8e:fe:52:6c:00:
+ 8e:51:3e:b6:9a:c6:4f:44:1c:7b:84:17:bd:5c:f6:
+ 36:e9:4c:91:89:6f:4e:ad:ac:10:41:c5:c5:65:8a:
+ 20:c8:f7:27:a3:ea:ac:5b:74:09:99:27:88:60:c7:
+ 44:69:18:0c:32:1a:77:f2:47:53:46:e3:12:c5:69:
+ 95:45:15:9a:14:60:76:20:a7:b5:8c:51:bf:5a:57:
+ 19:5a:c7:a8:bc:0b:c4:30:ca:0b:e6:d0:f8:c4:a8:
+ 84:d9:24:a2:92:f6:84:f2:13:ea:a4:93:97:fe:ed:
+ 77:d8:2f:75:7a:2c:39:88:3c:44:56:0a:ef:12:57:
+ d5:9e:8f:35:8e:7f:84:e7:1a:d1:19:8d:23:db:b5:
+ ce:c5:7f:e1:88:6d:04:d6:01:de:f0:72:3e:51:95:
+ 1d:4f:30:b6:32:0a:0f:84:b5:00:34:e4:bf:80:71:
+ 10:62:14:c1:32:5a:a9:a6:de:c2:58:e8:52:eb:66:
+ 5a:b8:5e:c2:06:7c:a6:6a:33:f2:1e:8a:41:07:53:
+ bb:6b:41:92:59:85:79:04:a9:df:56:4c:e0:62:1e:
+ 98:87:95:07:b1:10:49:34:9c:90:4c:0b:83:25:27:
+ 9f:01:27:fb:d0:c4:6e:50:cc:f5:02:47:2c:45:9a:
+ 31:e5:ce:7d:86:8f:db:fd:83:ea:a6:00:49:71:14:
+ 44:a1:8e:9d:ba:a4:a4:cf:9d:15:20:2d:67:76:42:
+ 81:63:a2:76:4e:4b:22:b5:de:3d:d8:f8:e0:43:7f:
+ a3:10:f0:73:fb:6e:e1:6a:37:99:dc:87:a3:05:4c:
+ 29:f5:63:14:9b:eb:a3:3a:9b:2b:b4:51:f5:05:03:
+ de:41:e5:cb:1a:8e:76:eb:47:93:53:90:71:c5:8f:
+ 86:5f:9e:0b:4d:33:9c:3c:88:8a:90:9f:90:a6:35:
+ 90:81:f1
+ Exponent: 65537 (0x10001)
+ X509v3 extensions:
+ X509v3 Basic Constraints:
+ CA:FALSE
+ Netscape Comment:
+ OpenSSL Generated Certificate
+ X509v3 Subject Key Identifier:
+ B5:5D:0D:4F:55:F6:75:1A:23:B3:F5:8C:BC:6B:5A:B6:96:6C:AE:E0
+ X509v3 Authority Key Identifier:
+ keyid:23:6C:2D:3D:3E:29:5D:78:B8:6C:3E:AA:E2:BB:2E:1E:6C:87:F2:53
+
+ Signature Algorithm: sha1WithRSAEncryption
+ bf:3f:e7:16:a2:ba:b1:cf:d6:79:f3:84:ed:a5:10:3e:60:42:
+ 0e:d5:1a:c6:e9:b1:39:86:5a:2e:dd:ae:b6:b7:16:33:33:17:
+ 3e:83:f7:a1:f7:b4:1b:09:74:8f:9b:0d:8e:4c:c7:a1:d6:66:
+ 6c:02:3a:b5:f2:72:aa:c9:e4:b3:c6:9d:6e:c0:48:dc:39:21:
+ 30:18:a0:6f:cb:09:be:de:0f:63:83:04:32:73:a7:bc:42:34:
+ b7:a1:dc:21:21:08:86:65:bc:2e:c5:78:ae:fb:fe:ab:fb:8b:
+ 85:bf:61:e0:e2:aa:52:5f:1e:0d:19:22:13:94:7a:b4:bd:5c:
+ 30:8d:43:22:b4:e9:13:62:7e:3e:f5:e2:7a:2a:3b:da:1f:57:
+ 4a:5d:b8:6c:4c:f5:6e:34:b9:bd:b4:1f:dc:88:d0:28:20:a2:
+ 0c:31:e8:7f:3a:23:b8:60:48:c8:4e:e1:02:62:ae:00:fb:d0:
+ a5:76:cb:ea:f3:d7:75:0d:9e:56:48:c1:2e:44:c7:0c:9f:03:
+ b3:ac:96:c5:a2:a0:06:9e:2b:c3:eb:b5:04:15:33:79:4a:9e:
+ 28:94:1d:28:50:98:e3:eb:b5:74:69:7f:69:bc:61:72:d1:8a:
+ cc:fb:89:be:51:34:81:11:7b:fa:8a:cf:e7:bf:81:91:34:1a:
+ 11:63:92:41:eb:62:7d:7a:2a:5a:2b:a3:85:36:5b:39:08:40:
+ 6b:0d:bc:b7:ed:36:42:60:45:ee:0c:27:f1:41:38:9e:db:99:
+ 8f:0f:ff:1b:ea:02:98:9f:19:21:33:ca:a2:47:89:cb:1d:a9:
+ 4c:94:b6:3d:b2:e2:bf:1d:f7:12:8d:01:ff:77:d6:72:65:70:
+ ca:80:8e:a2:2d:78:0c:b2:9d:84:3a:50:f9:e8:8e:85:03:58:
+ eb:0a:d3:5b:d3:55:d0:bd:7d:de:c8:5b:80:ea:0e:53:d6:35:
+ 86:60:10:ed:bd:06:f4:59:15:64:75:4c:bd:2f:fb:8a:fa:c1:
+ d0:c2:d9:68:09:2b:9a:91:c4:00:b1:65:7d:6d:a8:c2:42:d1:
+ d7:f1:71:ae:db:96:33:e7:a9:29:27:f3:89:8d:c8:ac:87:14:
+ fa:a5:cf:ec:b6:1b:a6:03:93:d7:ef:7f:49:b0:d5:22:fe:9e:
+ 5a:1b:e1:ff:e9:e3:71:fa:e9:09:3f:b4:1a:33:ae:3a:60:27:
+ d2:e6:2f:12:f4:32:54:be:29:be:fc:14:a5:2a:2d:99:88:e0:
+ 9d:d0:c6:07:e1:76:fb:96:60:0e:4c:d9:93:bd:26:29:2a:8f:
+ 49:d9:f6:7d:7a:bc:34:31:84:81:4f:28:e1:e8:5e:cf:45:b1:
+ c1:8a:2b:e0:52:72:5f:19
+-----BEGIN CERTIFICATE-----
+MIIGJzCCBA+gAwIBAgIBAzANBgkqhkiG9w0BAQUFADCBsjELMAkGA1UEBhMCRlIx
+DzANBgNVBAgMBkFsc2FjZTETMBEGA1UEBwwKU3RyYXNib3VyZzEYMBYGA1UECgwP
+d3d3LmZyZWVsYW4ub3JnMRAwDgYDVQQLDAdmcmVlbGFuMS0wKwYDVQQDDCRGcmVl
+bGFuIFNhbXBsZSBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkxIjAgBgkqhkiG9w0BCQEW
+E2NvbnRhY3RAZnJlZWxhbi5vcmcwHhcNMTIwNDI3MTA1NDUzWhcNMjIwNDI1MTA1
+NDUzWjB+MQswCQYDVQQGEwJGUjEPMA0GA1UECAwGQWxzYWNlMRgwFgYDVQQKDA93
+d3cuZnJlZWxhbi5vcmcxEDAOBgNVBAsMB2ZyZWVsYW4xDjAMBgNVBAMMBWNhcm9s
+MSIwIAYJKoZIhvcNAQkBFhNjb250YWN0QGZyZWVsYW4ub3JnMIICIjANBgkqhkiG
+9w0BAQEFAAOCAg8AMIICCgKCAgEA18CnxulIxFNAs3bZLzcoPaPEQtB2zQibUOMc
+UeUUcvoroLEGI/PBrZJ8ef4VVNHlZ2La7YGqfuKxUKn72CkJ2oRNPPRuE6sL1e6A
+YzJ9V6+DPBwn7exn1v0cEy1Av9Hav3q2Z36wdTttYZ3MbBqX8Vben4DTFmC7im9G
+m740dcNM0vHI8z6YKDDkyy0lYWJIvi7c7ZCTrnS3+klDZSCsjv5SbACOUT62msZP
+RBx7hBe9XPY26UyRiW9OrawQQcXFZYogyPcno+qsW3QJmSeIYMdEaRgMMhp38kdT
+RuMSxWmVRRWaFGB2IKe1jFG/WlcZWseovAvEMMoL5tD4xKiE2SSikvaE8hPqpJOX
+/u132C91eiw5iDxEVgrvElfVno81jn+E5xrRGY0j27XOxX/hiG0E1gHe8HI+UZUd
+TzC2MgoPhLUANOS/gHEQYhTBMlqppt7CWOhS62ZauF7CBnymajPyHopBB1O7a0GS
+WYV5BKnfVkzgYh6Yh5UHsRBJNJyQTAuDJSefASf70MRuUMz1AkcsRZox5c59ho/b
+/YPqpgBJcRREoY6duqSkz50VIC1ndkKBY6J2Tksitd492PjgQ3+jEPBz+27hajeZ
+3IejBUwp9WMUm+ujOpsrtFH1BQPeQeXLGo5260eTU5BxxY+GX54LTTOcPIiKkJ+Q
+pjWQgfECAwEAAaN7MHkwCQYDVR0TBAIwADAsBglghkgBhvhCAQ0EHxYdT3BlblNT
+TCBHZW5lcmF0ZWQgQ2VydGlmaWNhdGUwHQYDVR0OBBYEFLVdDU9V9nUaI7P1jLxr
+WraWbK7gMB8GA1UdIwQYMBaAFCNsLT0+KV14uGw+quK7Lh5sh/JTMA0GCSqGSIb3
+DQEBBQUAA4ICAQC/P+cWorqxz9Z584TtpRA+YEIO1RrG6bE5hlou3a62txYzMxc+
+g/eh97QbCXSPmw2OTMeh1mZsAjq18nKqyeSzxp1uwEjcOSEwGKBvywm+3g9jgwQy
+c6e8QjS3odwhIQiGZbwuxXiu+/6r+4uFv2Hg4qpSXx4NGSITlHq0vVwwjUMitOkT
+Yn4+9eJ6KjvaH1dKXbhsTPVuNLm9tB/ciNAoIKIMMeh/OiO4YEjITuECYq4A+9Cl
+dsvq89d1DZ5WSMEuRMcMnwOzrJbFoqAGnivD67UEFTN5Sp4olB0oUJjj67V0aX9p
+vGFy0YrM+4m+UTSBEXv6is/nv4GRNBoRY5JB62J9eipaK6OFNls5CEBrDby37TZC
+YEXuDCfxQTie25mPD/8b6gKYnxkhM8qiR4nLHalMlLY9suK/HfcSjQH/d9ZyZXDK
+gI6iLXgMsp2EOlD56I6FA1jrCtNb01XQvX3eyFuA6g5T1jWGYBDtvQb0WRVkdUy9
+L/uK+sHQwtloCSuakcQAsWV9bajCQtHX8XGu25Yz56kpJ/OJjcishxT6pc/sthum
+A5PX739JsNUi/p5aG+H/6eNx+ukJP7QaM646YCfS5i8S9DJUvim+/BSlKi2ZiOCd
+0MYH4Xb7lmAOTNmTvSYpKo9J2fZ9erw0MYSBTyjh6F7PRbHBiivgUnJfGQ==
+-----END CERTIFICATE-----
diff --git a/tests/test_gpg.py b/tests/test_gpg.py
new file mode 100644
index 00000000..50e9e6be
--- /dev/null
+++ b/tests/test_gpg.py
@@ -0,0 +1,35 @@
+import os
+import unittest
+import json
+import jc.parsers.gpg
+
+THIS_DIR = os.path.dirname(os.path.abspath(__file__))
+
+
+class MyTests(unittest.TestCase):
+
+ def setUp(self):
+ # input
+ with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/generic/gpg.out'), 'r', encoding='utf-8') as f:
+ self.gpg = f.read()
+
+ # output
+ with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/generic/gpg.json'), 'r', encoding='utf-8') as f:
+ self.gpg_json = json.loads(f.read())
+
+
+ def test_gpg_nodata(self):
+ """
+ Test 'gpg' with no data
+ """
+ self.assertEqual(jc.parsers.gpg.parse('', quiet=True), [])
+
+ def test_gpg(self):
+ """
+ Test 'gpg --with-colons --list-keys --with-fingerprint --with-fingerprint wk@gnupg.org'
+ """
+ self.assertEqual(jc.parsers.gpg.parse(self.gpg, quiet=True), self.gpg_json)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/tests/test_x509_cert.py b/tests/test_x509_cert.py
new file mode 100644
index 00000000..a9ffdc4d
--- /dev/null
+++ b/tests/test_x509_cert.py
@@ -0,0 +1,71 @@
+import os
+import unittest
+import json
+import jc.parsers.x509_cert
+
+THIS_DIR = os.path.dirname(os.path.abspath(__file__))
+
+
+class MyTests(unittest.TestCase):
+
+ def setUp(self):
+ # input
+ with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/generic/x509-ca-cert.der'), 'rb') as f:
+ self.x509_ca_cert = f.read()
+
+ with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/generic/x509-cert-and-key.pem'), 'r', encoding='utf-8') as f:
+ self.x509_cert_and_key_pem = f.read()
+
+ with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/generic/x509-letsencrypt.pem'), 'r', encoding='utf-8') as f:
+ self.x509_letsencrypt = f.read()
+
+ with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/generic/x509-multi-cert.pem'), 'r', encoding='utf-8') as f:
+ self.x509_multi_cert = f.read()
+
+ # output
+ with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/generic/x509-ca-cert.json'), 'r', encoding='utf-8') as f:
+ self.x509_ca_cert_json = json.loads(f.read())
+
+ with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/generic/x509-cert-and-key.json'), 'r', encoding='utf-8') as f:
+ self.x509_cert_and_key_pem_json = json.loads(f.read())
+
+ with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/generic/x509-letsencrypt.json'), 'r', encoding='utf-8') as f:
+ self.x509_letsencrypt_json = json.loads(f.read())
+
+ with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/generic/x509-multi-cert.json'), 'r', encoding='utf-8') as f:
+ self.x509_multi_cert_json = json.loads(f.read())
+
+
+ def test_x509_cert_nodata(self):
+ """
+ Test 'x509_cert' with no data
+ """
+ self.assertEqual(jc.parsers.x509_cert.parse('', quiet=True), [])
+
+ def test_x509_ca_cert(self):
+ """
+ Test 'cat x509-ca-cert.der' (CA cert in DER format)
+ """
+ self.assertEqual(jc.parsers.x509_cert.parse(self.x509_ca_cert, quiet=True), self.x509_ca_cert_json)
+
+ def test_x509_cert_and_key(self):
+ """
+ Test 'cat x509-cert-and-key.pem' (combo cert and key file in PEM format)
+ """
+ self.assertEqual(jc.parsers.x509_cert.parse(self.x509_cert_and_key_pem, quiet=True), self.x509_cert_and_key_pem_json)
+
+ def test_x509_letsencrypt(self):
+ """
+ Test 'cat x509-letsencrypt.pem' (letsencrypt cert in PEM format)
+ """
+ self.assertEqual(jc.parsers.x509_cert.parse(self.x509_letsencrypt, quiet=True), self.x509_letsencrypt_json)
+
+ def test_x509_multi_cert(self):
+ """
+ Test 'cat x509-multi-cert.pem' (PEM file with multiple certificates)
+ """
+        self.assertEqual(jc.parsers.x509_cert.parse(self.x509_multi_cert, quiet=True), self.x509_multi_cert_json)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/updatedocs.sh b/updatedocs.sh
index 6bcedf79..5480e8f5 100755
--- a/updatedocs.sh
+++ b/updatedocs.sh
@@ -19,3 +19,7 @@
wait
echo
echo "All documentation updated"
+
+echo
+echo "Building shell completion scripts"
+./build-completions.py && echo "++++ shell completion build successful" || echo "---- shell completion build failed"