Stream-parse CRLs for caching file locations.
AT-AT needs to maintain a key-value CRL cache where each key is the DER byte-string of the issuer and the value is a dictionary of the CRL file path and expiration. This way when it checks a client certificate, it can load the correct CRL by comparing the issuers. This is preferable to loading all of the CRLs in-memory. However, it still requires that AT-AT load and parse each CRL when the application boots. Because of the size of the CRLs and their parsed, in-memory size, this leads to the application spiking to use nearly 900MB of memory (resting usage is around 50MB). This change introduces a small function to ad-hoc parse the CRL and obtain the information in the CRL we need: the issuer and the expiration. It does this by reading the CRL byte-by-byte until it reaches the ASN1 sequence that corresponds to the issuer, and then looks ahead to find the nextUpdate field (i.e., the expiration date). The CRLCache class uses this function to build its cache and JSON-serializes the cache to disk. If another AT-AT application process finds the serialized version, it will load that copy instead of rebuilding it. It also entails a change to the function signature for the init method of CRLCache: now it expects the CRL directory as its second argument, instead of a list of locations. The Python script invoked by `script/sync-crls` will rebuild the location cache each time it's run. This means that when the Kubernetes CronJob for CRLs runs, it will refresh the cache each time. When a new application container boots, it will get the refreshed cache. This also adds a nightly CircleCI job to sync the CRLs and test that the ad-hoc parsing function returns the same result as a proper parsing using the Python cryptography library. This provides extra insurance that the function is returning correct results on real data.
This commit is contained in:
@@ -1,6 +1,5 @@
|
||||
import os
|
||||
import re
|
||||
import pathlib
|
||||
from configparser import ConfigParser
|
||||
from datetime import datetime
|
||||
from flask import Flask, request, g, session
|
||||
@@ -247,11 +246,10 @@ def make_crl_validator(app):
|
||||
if app.config.get("DISABLE_CRL_CHECK"):
|
||||
app.crl_cache = NoOpCRLCache(logger=app.logger)
|
||||
else:
|
||||
crl_locations = []
|
||||
for filename in pathlib.Path(app.config["CRL_STORAGE_CONTAINER"]).glob("*.crl"):
|
||||
crl_locations.append(filename.absolute())
|
||||
app.crl_cache = CRLCache(
|
||||
app.config["CA_CHAIN"], crl_locations, logger=app.logger
|
||||
app.config["CA_CHAIN"],
|
||||
app.config["CRL_STORAGE_CONTAINER"],
|
||||
logger=app.logger,
|
||||
)
|
||||
|
||||
|
||||
|
@@ -1,11 +1,13 @@
|
||||
import sys
|
||||
import os
|
||||
import re
|
||||
import hashlib
|
||||
import logging
|
||||
from flask import current_app as app
|
||||
from datetime import datetime
|
||||
|
||||
from OpenSSL import crypto, SSL
|
||||
from datetime import datetime
|
||||
from flask import current_app as app
|
||||
|
||||
from .util import load_crl_locations_cache, serialize_crl_locations_cache
|
||||
|
||||
# error codes from OpenSSL: https://github.com/openssl/openssl/blob/2c75f03b39de2fa7d006bc0f0d7c58235a54d9bb/include/openssl/x509_vfy.h#L111
|
||||
CRL_EXPIRED_ERROR_CODE = 12
|
||||
@@ -70,12 +72,12 @@ class CRLCache(CRLInterface):
|
||||
def __init__(
|
||||
self,
|
||||
root_location,
|
||||
crl_locations=[],
|
||||
crl_dir,
|
||||
store_class=crypto.X509Store,
|
||||
logger=None,
|
||||
crl_update_func=None,
|
||||
):
|
||||
self._crl_locations = crl_locations
|
||||
self._crl_dir = crl_dir
|
||||
self.logger = logger
|
||||
self.store_class = store_class
|
||||
self.certificate_authorities = {}
|
||||
@@ -96,16 +98,10 @@ class CRLCache(CRLInterface):
|
||||
return [match.group(0) for match in self._PEM_RE.finditer(root_str)]
|
||||
|
||||
def _build_crl_cache(self):
|
||||
self.crl_cache = {}
|
||||
for crl_location in self._crl_locations:
|
||||
crl = self._load_crl(crl_location)
|
||||
if crl:
|
||||
issuer_der = crl.get_issuer().der()
|
||||
expires = crl.to_cryptography().next_update
|
||||
self.crl_cache[issuer_der] = {
|
||||
"location": crl_location,
|
||||
"expires": expires,
|
||||
}
|
||||
try:
|
||||
self.crl_cache = load_crl_locations_cache(self._crl_dir)
|
||||
except FileNotFoundError:
|
||||
self.crl_cache = serialize_crl_locations_cache(self._crl_dir)
|
||||
|
||||
def _load_crl(self, crl_location):
|
||||
with open(crl_location, "rb") as crl_file:
|
||||
|
@@ -1,5 +1,7 @@
|
||||
import re
|
||||
from datetime import datetime
|
||||
import json
|
||||
import os
|
||||
import re
|
||||
|
||||
import pendulum
|
||||
import requests
|
||||
@@ -9,6 +11,10 @@ class CRLNotFoundError(Exception):
|
||||
pass
|
||||
|
||||
|
||||
class CRLParseError(Exception):
    """Raised when a CRL file cannot be ad-hoc scanned for its issuer and
    nextUpdate fields (i.e., the scan reaches EOF before finding both)."""

    pass
|
||||
|
||||
|
||||
# 15 minutes, in seconds. NOTE(review): presumably a freshness buffer applied
# to CRL file modified times when deciding whether to re-download; the usage
# site is not visible in this chunk — confirm against the rest of the module.
MODIFIED_TIME_BUFFER = 15 * 60
|
||||
|
||||
|
||||
@@ -70,6 +76,113 @@ CRL_LIST = [
|
||||
]
|
||||
|
||||
|
||||
def scan_for_issuer_and_next_update(crl):
    """Scan a DER-encoded CRL file byte-by-byte for its issuer and nextUpdate.

    Per RFC 5280 (https://tools.ietf.org/html/rfc5280#section-5.1), the issuer
    is the fourth ASN1 SEQUENCE element in a DER-encoded CRL. This brute-force
    scan counts SEQUENCE tag bytes (0x30) until it reaches the fourth, then
    collects that byte and everything after it as the issuer, stopping at the
    first 0x17 byte -- the UTCTime tag that begins the thisUpdate field. It
    then scans ahead for the next 0x17 occurrence (the nextUpdate UTCTime),
    reads the 13 bytes that follow, and parses them (minus the leading length
    byte) into a datetime object.

    :param crl:
        The path to a CRL file on-disk.

    :return:
        A two-element tuple. The first element is the raw DER bytes of the
        issuer, the second is the parsed Python datetime object for nextUpdate.

    :raises CRLParseError:
        If the file ends before both fields have been located.
    """
    with open(crl, "rb") as crl_file:
        issuer_bytes = b""
        seq_count = 0
        in_issuer = True
        current = crl_file.read(1)
        while current:
            if in_issuer:
                # 0x30 (ASCII "0") is the DER SEQUENCE tag byte.
                if seq_count < 4 and current == b"0":
                    seq_count += 1
                if seq_count == 4:
                    if current == b"\x17":
                        # thisUpdate's UTCTime tag terminates the issuer bytes.
                        in_issuer = False
                    else:
                        issuer_bytes += current
            elif current == b"\x17":
                # nextUpdate: one length byte, then a 12-char YYMMDDHHMMSS body.
                raw_time = crl_file.read(13)
                next_update = datetime.strptime(
                    raw_time[1:].decode(), "%y%m%d%H%M%S"
                )
                return (issuer_bytes, next_update)
            current = crl_file.read(1)

    raise CRLParseError("CRL could not be scanned.")
|
||||
|
||||
|
||||
def build_crl_locations_cache(crl_locations, logger=None):
    """Build a dict mapping each CRL's issuer DER bytes to its file info.

    Each value is a dict with the CRL's on-disk "location" and its "expires"
    datetime (the CRL's nextUpdate field). Files that cannot be scanned are
    skipped, with a warning when a logger is provided.
    """
    cache = {}
    for path in crl_locations:
        try:
            der, expires = scan_for_issuer_and_next_update(path)
        except CRLParseError:
            if logger:
                logger.warning(
                    "CRL could not be scanned for caching: {}".format(path)
                )
        else:
            cache[der] = {"location": path, "expires": expires}
    return cache
|
||||
|
||||
|
||||
# Filename of the JSON-serialized CRL location cache, written into the CRL
# directory so other application processes can load it instead of rebuilding.
JSON_CACHE = "crl_locations.json"
|
||||
|
||||
|
||||
def _serialize_cache_items(cache):
|
||||
return {
|
||||
der.hex(): {
|
||||
k: v.timestamp() if hasattr(v, "timestamp") else v
|
||||
for (k, v) in data.items()
|
||||
}
|
||||
for (der, data) in cache.items()
|
||||
}
|
||||
|
||||
|
||||
def _deserialize_cache_items(cache):
|
||||
return {
|
||||
bytes.fromhex(der): {
|
||||
k: datetime.fromtimestamp(v) if isinstance(v, float) else v
|
||||
for (k, v) in data.items()
|
||||
}
|
||||
for (der, data) in cache.items()
|
||||
}
|
||||
|
||||
|
||||
def serialize_crl_locations_cache(crl_dir, logger=None):
    """Build the issuer -> CRL location cache for ``crl_dir`` and persist it.

    Scans every file in the directory for its issuer and nextUpdate, writes
    the resulting cache to ``JSON_CACHE`` inside the same directory (so other
    processes can load it instead of rebuilding), and returns the in-memory
    cache.

    :param crl_dir: Directory containing the downloaded CRL files.
    :param logger: Optional logger for warnings about unscannable files.
    :return: The freshly built location cache dict.
    """
    crl_locations = [
        os.path.join(crl_dir, crl_path)
        for crl_path in os.listdir(crl_dir)
        # Skip our own serialized cache from a previous run: it is not a CRL
        # and would otherwise be pointlessly scanned (and warned about) on
        # every rebuild.
        if crl_path != JSON_CACHE
    ]
    location_cache = build_crl_locations_cache(crl_locations, logger=logger)
    json_location = os.path.join(crl_dir, JSON_CACHE)
    with open(json_location, "w") as json_file:
        json.dump(_serialize_cache_items(location_cache), json_file)

    return location_cache
|
||||
|
||||
|
||||
def load_crl_locations_cache(crl_dir):
    """Load the serialized issuer -> CRL location cache from ``crl_dir``.

    :param crl_dir: Directory containing the ``JSON_CACHE`` file.
    :return: The deserialized cache dict.
    :raises FileNotFoundError: When the cache file has not been written yet.
    """
    cache_path = "{}/{}".format(crl_dir, JSON_CACHE)
    with open(cache_path, "r") as json_file:
        raw_cache = json.load(json_file)
        return _deserialize_cache_items(raw_cache)
|
||||
|
||||
|
||||
def crl_local_path(out_dir, crl_location):
|
||||
name = re.split("/", crl_location)[-1]
|
||||
crl = os.path.join(out_dir, name)
|
||||
@@ -150,7 +263,10 @@ if __name__ == "__main__":
|
||||
logger = logging.getLogger()
|
||||
logger.info("Updating CRLs")
|
||||
try:
|
||||
refresh_crls(sys.argv[1], sys.argv[2], logger)
|
||||
tmp_location = sys.argv[1]
|
||||
final_location = sys.argv[2]
|
||||
refresh_crls(tmp_location, final_location, logger)
|
||||
serialize_crl_locations_cache(tmp_location, logger=logger)
|
||||
except Exception as err:
|
||||
logger.exception("Fatal error encountered, stopping")
|
||||
sys.exit(1)
|
||||
|
Reference in New Issue
Block a user