Initial Checkin
This commit is contained in:
+10
@@ -0,0 +1,10 @@
|
||||
# Python-generated files
|
||||
__pycache__/
|
||||
*.py[oc]
|
||||
build/
|
||||
dist/
|
||||
wheels/
|
||||
*.egg-info
|
||||
|
||||
# Virtual environments
|
||||
.venv
|
||||
@@ -0,0 +1 @@
|
||||
3.12
|
||||
Binary file not shown.
@@ -0,0 +1,52 @@
|
||||
#!/usr/bin/env python3
# /// script
# requires-python = ">=3.10"
# dependencies = ["requests", "cryptography"]
# ///
"""Quick CRL parse debug test.

Downloads a fixed list of CRL URLs and attempts to parse each one as
DER and then as PEM, printing size/headers/hex-prefix diagnostics and
full tracebacks on parse failure. Throwaway debugging tool — no exit
code, no arguments.
"""
import requests, traceback
from cryptography import x509

# Hard-coded endpoints under test (includes a percent-encoded name with '+').
urls = [
    "http://pki.imy.se/IMY-RootCA01/CRL/IMY-RootCA01.crl",
    "http://pki.imy.se/IMY-IssuingCA02/CRL/IMY-IssuingCA02.crl",
    "http://pki.imy.se/CDP/IMYRootCa01.crl",
    "http://pki.imy.se/CDP/IMY%20Sub%20CA+.crl",
]

session = requests.Session()
# TLS verification off and identity encoding: we want the raw bytes the
# server stores, without gzip/deflate transformation of binary CRLs.
session.verify = False
session.headers.update({
    'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64)',
    'Accept-Encoding': 'identity',
})
requests.packages.urllib3.disable_warnings()

for url in urls:
    print(f"\n{'='*70}")
    print(f"URL: {url}")
    try:
        resp = session.get(url, timeout=30)
        data = resp.content
        print(f"Size: {len(data)} bytes | Status: {resp.status_code}")
        print(f"Content-Type: {resp.headers.get('content-type')}")
        # First bytes reveal the format: 0x30 0x82 → DER SEQUENCE.
        print(f"First 20 bytes hex: {data[:20].hex(' ')}")
    except Exception as e:
        print(f"Download failed: {e}")
        continue

    print(f"\nTrying DER parse...")
    try:
        crl = x509.load_der_x509_crl(data)
        print(f"  ✔ SUCCESS! Issuer: {crl.issuer}")
    except Exception as e:
        print(f"  ✘ FAILED:")
        traceback.print_exc()

    print(f"\nTrying PEM parse...")
    try:
        crl = x509.load_pem_x509_crl(data)
        print(f"  ✔ SUCCESS! Issuer: {crl.issuer}")
    except Exception as e:
        print(f"  ✘ FAILED:")
        traceback.print_exc()
|
||||
@@ -0,0 +1,6 @@
|
||||
def main():
    """Entry point: print the project greeting."""
    greeting = "Hello from pki-analyzer!"
    print(greeting)


# Run only when executed directly, not on import.
if __name__ == "__main__":
    main()
|
||||
+628
@@ -0,0 +1,628 @@
|
||||
#!/usr/bin/env python3
|
||||
# /// script
|
||||
# requires-python = ">=3.10"
|
||||
# dependencies = [
|
||||
# "requests",
|
||||
# "beautifulsoup4",
|
||||
# "cryptography",
|
||||
# ]
|
||||
# ///
|
||||
"""
|
||||
PKI Health Checker
|
||||
==================
|
||||
Discovers CA certificates and CRLs from IIS directory-browsing sites,
|
||||
builds the CA hierarchy, and validates CDP/AIA for each CA certificate.
|
||||
|
||||
Usage:
|
||||
uv run pki_health.py <url> [<url2> ...]
|
||||
|
||||
Examples:
|
||||
uv run pki_health.py http://pki.matas.dk/aia http://pki.matas.dk/cdp
|
||||
uv run pki_health.py http://pki.imy.se/
|
||||
"""
|
||||
|
||||
import sys
|
||||
from datetime import datetime, timezone
|
||||
from urllib.parse import urljoin, unquote, urlparse
|
||||
|
||||
import requests
|
||||
from bs4 import BeautifulSoup
|
||||
from cryptography import x509
|
||||
from cryptography.x509.oid import ExtensionOID, AuthorityInformationAccessOID
|
||||
from cryptography.hazmat.primitives import hashes
|
||||
from cryptography.hazmat.primitives.asymmetric import padding, rsa, ec
|
||||
from cryptography.exceptions import InvalidSignature
|
||||
|
||||
|
||||
CERT_EXTENSIONS = ('.crt', '.cer', '.pem')
|
||||
CRL_EXTENSIONS = ('.crl',)
|
||||
TIMEOUT = 30
|
||||
MAX_DEPTH = 5
|
||||
|
||||
|
||||
# ─── Terminal ────────────────────────────────────────────────────────────────
|
||||
|
||||
class C:
    """ANSI escape sequences used for colored terminal output."""
    H = '\033[95m'   # header (magenta)
    B = '\033[94m'   # blue
    CN = '\033[96m'  # cyan
    G = '\033[92m'   # green
    Y = '\033[93m'   # yellow
    R = '\033[91m'   # red
    BD = '\033[1m'   # bold
    DM = '\033[2m'   # dim
    RS = '\033[0m'   # reset
|
||||
|
||||
|
||||
def header(t):
    """Print *t* as a bold 78-wide banner section title."""
    bar = f"{C.BD}{C.H}{'='*78}{C.RS}"
    print(f"\n{bar}")
    print(f"{C.BD}{C.H} {t}{C.RS}")
    print(bar)
|
||||
|
||||
|
||||
# ─── Helpers ─────────────────────────────────────────────────────────────────
|
||||
|
||||
def sha1(cert):
    """Colon-separated hex SHA-1 fingerprint of *cert*."""
    digest = cert.fingerprint(hashes.SHA1())
    return digest.hex(':')
|
||||
|
||||
def get_cn(name):
    """Best-effort display name for an X.509 Name.

    Prefers the last CN attribute (most specific); otherwise falls back
    to the first attribute rendered as ``oid=value``, and finally to
    "(unknown)" for an empty name.
    """
    attrs = list(name)
    for attr in attrs[::-1]:
        if attr.oid == x509.oid.NameOID.COMMON_NAME:
            return attr.value
    if attrs:
        first = attrs[0]
        return f"{first.oid._name}={first.value}"
    return "(unknown)"
|
||||
|
||||
def ski_hex(obj):
    """Hex Subject Key Identifier of *obj*, or None when absent."""
    try:
        ext = obj.extensions.get_extension_for_oid(
            ExtensionOID.SUBJECT_KEY_IDENTIFIER)
    except x509.ExtensionNotFound:
        return None
    return ext.value.digest.hex()
|
||||
|
||||
def aki_hex(obj):
    """Hex Authority Key Identifier of *obj*.

    Returns None when the extension is absent or carries no key id.
    """
    try:
        ext = obj.extensions.get_extension_for_oid(
            ExtensionOID.AUTHORITY_KEY_IDENTIFIER)
    except x509.ExtensionNotFound:
        return None
    key_id = ext.value.key_identifier
    return key_id.hex() if key_id else None
|
||||
|
||||
def cdp_urls(cert):
    """URI distribution-point names from the cert's CDP extension.

    Returns [] when the extension is missing.
    """
    try:
        ext = cert.extensions.get_extension_for_oid(
            ExtensionOID.CRL_DISTRIBUTION_POINTS)
    except x509.ExtensionNotFound:
        return []
    return [
        name.value
        for dp in ext.value
        if dp.full_name
        for name in dp.full_name
        if isinstance(name, x509.UniformResourceIdentifier)
    ]
|
||||
|
||||
def aia_urls(cert):
    """Split the AIA extension into ([CA-Issuers URIs], [OCSP URIs]).

    Returns ([], []) when the extension is missing.
    """
    try:
        ext = cert.extensions.get_extension_for_oid(
            ExtensionOID.AUTHORITY_INFORMATION_ACCESS)
    except x509.ExtensionNotFound:
        return [], []
    ca_issuers, ocsp = [], []
    for desc in ext.value:
        location = desc.access_location
        if not isinstance(location, x509.UniformResourceIdentifier):
            continue
        if desc.access_method == AuthorityInformationAccessOID.CA_ISSUERS:
            ca_issuers.append(location.value)
        elif desc.access_method == AuthorityInformationAccessOID.OCSP:
            ocsp.append(location.value)
    return ca_issuers, ocsp
|
||||
|
||||
def is_ca(cert):
    """True when Basic Constraints marks *cert* as a CA; False if absent."""
    try:
        bc = cert.extensions.get_extension_for_oid(
            ExtensionOID.BASIC_CONSTRAINTS)
    except x509.ExtensionNotFound:
        return False
    return bc.value.ca
|
||||
|
||||
def is_self_signed(cert):
    """True when subject and issuer DNs are identical (root-style cert)."""
    return cert.subject == cert.issuer
|
||||
|
||||
def short_id(h):
    """Abbreviate a hex identifier for display.

    Falsy input renders as '(none)'; strings longer than 16 chars are
    truncated with an ellipsis; anything else passes through unchanged.
    """
    if not h:
        return '(none)'
    if len(h) > 16:
        return h[:16] + '...'
    return h
|
||||
|
||||
|
||||
# ─── Network ────────────────────────────────────────────────────────────────
|
||||
|
||||
def make_session():
    """Build the shared requests session used for all downloads.

    TLS verification is disabled (these are the very PKI endpoints being
    audited) and 'identity' encoding avoids content-coding transforms of
    binary cert/CRL bodies; urllib3's insecure-request warnings are
    silenced accordingly.
    """
    session = requests.Session()
    session.verify = False
    user_agent = ('Mozilla/5.0 (Windows NT 10.0; Win64; x64) '
                  'AppleWebKit/537.36 Chrome/125.0.0.0 Safari/537.36')
    session.headers['User-Agent'] = user_agent
    session.headers['Accept'] = '*/*'
    session.headers['Accept-Encoding'] = 'identity'
    requests.packages.urllib3.disable_warnings()
    return session
|
||||
|
||||
def dl(url, session):
    """GET *url* and return the raw body bytes; raises on HTTP errors."""
    response = session.get(url, timeout=TIMEOUT)
    response.raise_for_status()
    return response.content
|
||||
|
||||
def scrape(base_url, session, origin=None, depth=0, visited=None):
    """Recursively scrape a directory-listing page for cert/CRL links.

    Returns (cert_urls, crl_urls). Stays on the original host, skips
    parent-directory and self links, and recurses into subdirectories
    up to MAX_DEPTH. *visited* is shared across the recursion to avoid
    rescanning the same directory.
    """
    if visited is None:
        visited = set()
    if not base_url.endswith('/'):
        base_url += '/'
    if base_url in visited or depth > MAX_DEPTH:
        return [], []
    visited.add(base_url)
    if origin is None:
        # First call: lock the scan to the starting host.
        origin = urlparse(base_url).netloc
    try:
        resp = session.get(base_url, timeout=TIMEOUT)
        resp.raise_for_status()
    except Exception:
        # Unreachable directory: treat as empty rather than aborting the scan.
        return [], []
    soup = BeautifulSoup(resp.text, 'html.parser')
    certs, crls, dirs = [], [], []
    for a in soup.find_all('a', href=True):
        href = a['href']
        full = urljoin(base_url, href)
        # Percent-decoded, lowercased URL for extension matching only.
        low = unquote(full).lower()
        if urlparse(full).netloc != origin: continue
        if href in ('../', '..'): continue
        if full.rstrip('/') == base_url.rstrip('/'): continue
        if any(low.endswith(e) for e in CERT_EXTENSIONS): certs.append(full)
        elif any(low.endswith(e) for e in CRL_EXTENSIONS): crls.append(full)
        elif full.endswith('/') and full not in visited: dirs.append(full)
    for d in dirs:
        sc, sl = scrape(d, session, origin, depth+1, visited)
        certs.extend(sc); crls.extend(sl)
    return certs, crls
|
||||
|
||||
def load_cert(data):
    """Parse *data* as a DER or PEM certificate; None if neither works."""
    try:
        return x509.load_der_x509_certificate(data)
    except Exception:
        pass
    try:
        return x509.load_pem_x509_certificate(data)
    except Exception:
        return None
|
||||
|
||||
def load_crl(data):
    """Parse *data* as a DER or PEM CRL; None if neither works."""
    try:
        return x509.load_der_x509_crl(data)
    except Exception:
        pass
    try:
        return x509.load_pem_x509_crl(data)
    except Exception:
        return None
|
||||
|
||||
|
||||
# ─── CRL Signature ──────────────────────────────────────────────────────────
|
||||
|
||||
def verify_crl_sig(crl, issuer_cert):
    """Verify *crl*'s signature against *issuer_cert*'s public key.

    Returns a (status, message) pair: (True, ...) when the signature
    verifies, (False, ...) when it is cryptographically invalid, and
    (None, ...) when verification could not be attempted (unsupported
    key type or an unexpected error).
    """
    try:
        pub = issuer_cert.public_key()
        if isinstance(pub, rsa.RSAPublicKey):
            # RSA CRL signatures cover the TBS portion with PKCS#1 v1.5.
            pub.verify(crl.signature, crl.tbs_certlist_bytes,
                       padding.PKCS1v15(), crl.signature_hash_algorithm)
        elif isinstance(pub, ec.EllipticCurvePublicKey):
            from cryptography.hazmat.primitives.asymmetric import ec as ecm
            pub.verify(crl.signature, crl.tbs_certlist_bytes,
                       ecm.ECDSA(crl.signature_hash_algorithm))
        else:
            # e.g. DSA/Ed25519 issuers are not handled here.
            return None, "Unsupported key type"
        return True, "Signature OK"
    except InvalidSignature:
        return False, "Signature INVALID"
    except Exception as e:
        return None, f"Verify error: {e}"
|
||||
|
||||
|
||||
# ─── Tree Builder ────────────────────────────────────────────────────────────
|
||||
|
||||
class CANode:
    """One CA in the reconstructed hierarchy.

    A node represents a CA *key*: the newest certificate for that key is
    the primary, and older renewals sharing the key are kept as alternates.
    """

    def __init__(self, cert, url):
        self.cert = cert                     # primary (newest) certificate
        self.url = url                       # URL the cert was downloaded from
        self.tp = sha1(cert)                 # SHA-1 thumbprint (colon-hex)
        self.name = get_cn(cert.subject)     # display name (CN fallback logic)
        self.ski = ski_hex(cert)             # Subject Key Identifier or None
        self.aki = aki_hex(cert)             # Authority Key Identifier or None
        self.root = is_self_signed(cert)     # self-signed → treated as a root
        self.children = []                   # CANodes whose AKI matches our SKI
        self.alternates = []  # (cert, url, thumbprint) — renewed same-key certs
|
||||
|
||||
|
||||
def build_tree(certs_dict):
    """Build the CA hierarchy from loaded certificates.

    *certs_dict* maps SHA-1 thumbprint -> (cert, source_url). Certs are
    grouped by SKI (renewed CA certs share a key), the newest per key
    becomes the primary CANode, and children are linked to parents by
    matching child AKI to parent SKI. Returns the list of root nodes:
    self-signed certs plus any node whose parent is not on the site.

    NOTE(review): certificates without an SKI never enter *by_ski* and
    are silently dropped from the tree.
    """
    by_ski = {}
    for tp, (cert, url) in certs_dict.items():
        ski = ski_hex(cert)
        if ski:
            by_ski.setdefault(ski, []).append((tp, cert, url))

    nodes = {}
    ski_primary = {}  # SKI -> thumbprint of the newest cert for that key

    for ski, group in by_ski.items():
        # Newest expiry first: group[0] becomes the primary node, the
        # rest are recorded as same-key alternates (CA renewals).
        group.sort(key=lambda x: x[1].not_valid_after_utc, reverse=True)
        tp, cert, url = group[0]
        node = CANode(cert, url)
        for atp, acert, aurl in group[1:]:
            node.alternates.append((acert, aurl, sha1(acert)))
        nodes[tp] = node
        ski_primary[ski] = tp

    roots = []
    for tp, node in nodes.items():
        if node.root:
            roots.append(node)
        elif node.aki and node.aki in ski_primary:
            ptk = ski_primary[node.aki]
            if ptk in nodes and ptk != tp:
                nodes[ptk].children.append(node)
            else:
                # AKI resolves to the node itself — treat as a root.
                roots.append(node)
        else:
            # No AKI, or the parent cert was not found on the site.
            roots.append(node)

    return roots
|
||||
|
||||
|
||||
# ─── CDP Check ───────────────────────────────────────────────────────────────
|
||||
|
||||
def check_cdp(url, issuer_ski, issuer_cert, session, p):
    """Download and validate one CRL distribution point URL.

    Checks next-update expiry, CRL-AKI vs issuer-SKI match, signature
    (when *issuer_cert* is known), delta-CRL indicator, and revoked
    count, printing one summary line prefixed with *p*. Returns True
    when any problem was found, False otherwise. Non-HTTP (e.g. LDAP)
    URLs are skipped and never count as an issue.
    """
    if not url.lower().startswith('http'):
        print(f"{p} {C.DM}LDAP — not checked{C.RS}")
        return False

    try:
        data = dl(url, session)
    except Exception as e:
        print(f"{p} {C.R}✘ Unreachable{C.RS}")
        return True

    crl = load_crl(data)
    if crl is None:
        print(f"{p} {C.R}✘ Failed to parse{C.RS}")
        return True

    parts = []          # summary fragments joined with ' | ' at the end
    has_issue = False
    now = datetime.now(timezone.utc)

    # Expiry
    if crl.next_update_utc:
        if crl.next_update_utc < now:
            days = (now - crl.next_update_utc).days
            parts.append(f"{C.R}EXPIRED {days}d ago{C.RS}")
            has_issue = True
        else:
            days = (crl.next_update_utc - now).days
            # Less than a week of validity left is shown as a warning color.
            color = C.G if days > 7 else C.Y
            parts.append(f"{color}Valid ({days}d){C.RS}")

    # AKI match
    crl_aki = aki_hex(crl)
    if crl_aki and issuer_ski:
        if crl_aki == issuer_ski:
            parts.append(f"{C.G}AKI match{C.RS}")
        else:
            parts.append(f"{C.R}AKI MISMATCH{C.RS}")
            has_issue = True

    # Signature
    if issuer_cert is not None:
        valid, msg = verify_crl_sig(crl, issuer_cert)
        if valid is True:
            parts.append(f"{C.G}Sig OK{C.RS}")
        elif valid is False:
            parts.append(f"{C.R}Sig INVALID{C.RS}")
            has_issue = True
        else:
            # None: verification could not be attempted — show the reason.
            parts.append(f"{C.Y}{msg}{C.RS}")

    # Delta
    try:
        crl.extensions.get_extension_for_oid(ExtensionOID.DELTA_CRL_INDICATOR)
        parts.append("Delta")
    except x509.ExtensionNotFound:
        pass

    # Revoked count
    parts.append(f"{sum(1 for _ in crl)} revoked")

    print(f"{p} {' | '.join(parts)}")

    # Show details on mismatch
    if crl_aki and issuer_ski and crl_aki != issuer_ski:
        print(f"{p} {C.R}CRL AKI: {crl_aki}{C.RS}")
        print(f"{p} {C.R}Issuer SKI: {issuer_ski}{C.RS}")

    return has_issue
|
||||
|
||||
|
||||
# ─── AIA Check ───────────────────────────────────────────────────────────────
|
||||
|
||||
def check_aia(url, expected_tp, session, p):
    """Download one AIA CA-Issuers URL and compare it to the known parent.

    *expected_tp* is the SHA-1 thumbprint of the node's parent cert (or
    None when the parent is unknown, in which case the download is only
    reported). Returns True when the URL is unreachable, unparseable, or
    serves a different certificate than expected. Non-HTTP URLs are
    skipped and never count as an issue.
    """
    if not url.lower().startswith('http'):
        print(f"{p} {C.DM}LDAP — not checked{C.RS}")
        return False

    try:
        data = dl(url, session)
    except Exception:
        print(f"{p} {C.R}✘ Unreachable{C.RS}")
        return True

    cert = load_cert(data)
    if cert is None:
        print(f"{p} {C.R}✘ Failed to parse{C.RS}")
        return True

    got_tp = sha1(cert)
    if expected_tp:
        if got_tp == expected_tp:
            print(f"{p} {C.G}✔ Matches issuer{C.RS} ({got_tp[:23]}...)")
            return False
        else:
            print(f"{p} {C.R}✘ MISMATCH{C.RS}")
            print(f"{p} Got: {get_cn(cert.subject)} ({got_tp[:23]}...)")
            print(f"{p} Expected: {expected_tp[:23]}...")
            return True
    else:
        # No parent to compare against — informational only.
        print(f"{p} Downloaded: {get_cn(cert.subject)} ({got_tp[:23]}...)")
        return False
|
||||
|
||||
|
||||
# ─── Display a CA Node ───────────────────────────────────────────────────────
|
||||
|
||||
def show_cert_block(node, parent, session, p, issues):
    """Show one CA certificate block with CDP/AIA checks.

    Prints thumbprint, SKI/AKI (with parent match), validity window,
    same-key alternates, then runs check_cdp/check_aia for each URL.
    *p* is the indentation prefix; *parent* is None for roots. Any
    problem found is appended to *issues* as a human-readable string.
    """
    cert = node.cert
    now = datetime.now(timezone.utc)
    nb = cert.not_valid_before_utc
    na = cert.not_valid_after_utc
    days = (na - now).days
    expired = na < now

    # Validity color: red when expired, yellow under 90 days, else green.
    vc = C.R if expired else (C.Y if days < 90 else C.G)
    vl = "EXPIRED" if expired else f"{days} days"

    print(f"{p} Thumbprint: {node.tp}")
    print(f"{p} SKI: {short_id(node.ski)}")
    if node.aki:
        match = ""
        if parent and parent.ski:
            if node.aki == parent.ski:
                match = f" {C.G}✔ matches parent{C.RS}"
            else:
                match = f" {C.R}✘ MISMATCH with parent SKI!{C.RS}"
                issues.append(f"AKI mismatch: {node.name}")
        print(f"{p} AKI: {short_id(node.aki)}{match}")
    print(f"{p} Valid: {nb.strftime('%Y-%m-%d')} → {na.strftime('%Y-%m-%d')} {vc}[{vl}]{C.RS}")
    if expired:
        issues.append(f"EXPIRED: {node.name}")

    # Alternates
    if node.alternates:
        print(f"{p} {C.Y}Also (renewed, same key):{C.RS}")
        for ac, au, atp in node.alternates:
            ana = ac.not_valid_after_utc
            ad = (ana - now).days
            ae = ana < now
            ac2 = C.R if ae else (C.Y if ad < 90 else C.G)
            al = "EXPIRED" if ae else f"{ad} days"
            print(f"{p} {atp} {ac2}[{al}]{C.RS}")

    # Issuer info for checks
    issuer_cert = parent.cert if parent else None
    issuer_ski = parent.ski if parent else None
    issuer_tp = parent.tp if parent else None

    # CDP
    cdps = cdp_urls(cert)
    if cdps:
        for url in cdps:
            print(f"{p} CDP: {url}")
            has_issue = check_cdp(url, issuer_ski, issuer_cert, session, p)
            if has_issue:
                issues.append(f"CDP issue: {node.name} → {url}")
    elif node.root:
        # Roots legitimately omit CDP — nothing above them issues a CRL.
        print(f"{p} CDP: (none — root)")
    else:
        print(f"{p} {C.Y}CDP: (none — missing!){C.RS}")
        issues.append(f"No CDP: {node.name}")

    # AIA
    ca_issuers, ocsp = aia_urls(cert)
    if ca_issuers:
        for url in ca_issuers:
            print(f"{p} AIA: {url}")
            has_issue = check_aia(url, issuer_tp, session, p)
            if has_issue:
                issues.append(f"AIA issue: {node.name} → {url}")
    elif node.root:
        print(f"{p} AIA: (none — root)")
    else:
        print(f"{p} {C.Y}AIA: (none — missing!){C.RS}")
        issues.append(f"No AIA: {node.name}")

    # OCSP URLs are listed but not probed.
    if ocsp:
        for url in ocsp:
            print(f"{p} OCSP: {url}")
|
||||
|
||||
|
||||
# ─── Display Chain ───────────────────────────────────────────────────────────
|
||||
|
||||
def display_chain(root, chain_num, session, issues):
    """Display a full chain starting from root.

    Walks exactly three tiers (root → children → grandchildren); any
    deeper CA level would not be rendered. Problems found by the per-cert
    checks are appended to *issues*.
    """
    print(f"\n{C.BD}{C.CN}Chain {chain_num}{C.RS}")
    print(f"{C.BD}{C.CN}{'─'*78}{C.RS}")

    # Root
    print(f"\n{C.BD}{C.B}{root.name}{C.RS}")
    show_cert_block(root, None, session, "", issues)

    # Children
    for child in root.children:
        print(f"\n {C.BD}{C.B}→ {child.name}{C.RS}")
        show_cert_block(child, root, session, " ", issues)

        # Grandchildren (3-tier)
        for gc in child.children:
            print(f"\n {C.BD}{C.B}→ → {gc.name}{C.RS}")
            show_cert_block(gc, child, session, " ", issues)
|
||||
|
||||
|
||||
# ─── Orphan Detection ───────────────────────────────────────────────────────
|
||||
|
||||
def collect_referenced(roots):
    """Collect every CDP/AIA CA-Issuers URL referenced in the tree.

    URLs are normalized (percent-decoded, lowercased) so they can be
    compared against scraped site URLs. Returns a set.
    """
    urls = set()
    stack = list(roots)
    while stack:
        node = stack.pop()
        ca_refs, _ = aia_urls(node.cert)
        for ref in cdp_urls(node.cert):
            urls.add(unquote(ref).lower())
        for ref in ca_refs:
            urls.add(unquote(ref).lower())
        stack.extend(node.children)
    return urls
|
||||
|
||||
def collect_tree_sources(roots):
    """Every URL a cert in the hierarchy (including alternates) came from."""
    sources = set()
    pending = list(roots)
    while pending:
        node = pending.pop()
        sources.add(node.url)
        sources.update(alt_url for _, alt_url, _ in node.alternates)
        pending.extend(node.children)
    return sources
|
||||
|
||||
|
||||
# ─── Main ────────────────────────────────────────────────────────────────────
|
||||
|
||||
def main():
    """CLI entry point: scan the given base URLs and report PKI health.

    Phases: discover cert/CRL files via directory scraping, load CA
    certificates (following AIA for missing parents), build the CA
    hierarchy, run per-cert CDP/AIA checks, list orphaned files, and
    print a summary. Exits 1 on bad usage or when no hierarchy could
    be built.
    """
    if len(sys.argv) < 2:
        print(f"Usage: {sys.argv[0]} <url> [<url2> ...]")
        print(f"Example: {sys.argv[0]} http://pki.imy.se/")
        sys.exit(1)

    session = make_session()
    all_cert_urls, all_crl_urls = [], []

    # ── Discover ─────────────────────────────────────────────────────────
    for base_url in sys.argv[1:]:
        header(f"Scanning: {base_url}")
        cu, cl = scrape(base_url, session)
        for u in cu:
            print(f" 📜 {unquote(u.split('/')[-1])}")
        for u in cl:
            print(f" 📋 {unquote(u.split('/')[-1])}")
        all_cert_urls.extend(cu)
        all_crl_urls.extend(cl)

    # dict.fromkeys: dedupe while preserving discovery order.
    all_cert_urls = list(dict.fromkeys(all_cert_urls))
    all_crl_urls = list(dict.fromkeys(all_crl_urls))
    print(f"\n Found {len(all_cert_urls)} cert(s), {len(all_crl_urls)} CRL(s) on site")

    # ── Load CA certs ────────────────────────────────────────────────────
    header("Loading CA Certificates")
    certs = {}  # SHA-1 thumbprint -> (cert, source_url); non-CA certs skipped

    for url in all_cert_urls:
        try:
            data = dl(url, session)
            cert = load_cert(data)
            if cert is not None and is_ca(cert):
                tp = sha1(cert)
                if tp not in certs:
                    certs[tp] = (cert, url)
                    print(f" {C.G}✔{C.RS} {get_cn(cert.subject)}")
        except Exception as e:
            print(f" {C.R}✘{C.RS} {url} — {e}")

    # Follow AIA to get parent certs we might not have
    for tp, (cert, url) in list(certs.items()):
        ca_issuers, _ = aia_urls(cert)
        for aia_url in ca_issuers:
            if not aia_url.lower().startswith('http'):
                continue
            try:
                data = dl(aia_url, session)
                c = load_cert(data)
                if c is not None and is_ca(c):
                    t = sha1(c)
                    if t not in certs:
                        certs[t] = (c, aia_url)
                        print(f" {C.G}✔{C.RS} {get_cn(c.subject)} (via AIA)")
            except Exception:
                # Best-effort enrichment — a dead AIA link is reported later.
                pass

    print(f"\n {len(certs)} CA certificate(s) loaded")

    # ── Build and display ────────────────────────────────────────────────
    roots = build_tree(certs)

    if not roots:
        print(f"\n {C.R}✘ Could not build CA hierarchy{C.RS}")
        sys.exit(1)

    header("PKI Health Check")

    all_issues = []  # accumulated problem strings from all chain checks
    for i, root in enumerate(roots, 1):
        display_chain(root, i, session, all_issues)

    # ── Orphans ──────────────────────────────────────────────────────────
    header("Orphaned Files")

    referenced = collect_referenced(roots)
    tree_srcs = collect_tree_sources(roots)

    # CRLs are matched by normalized URL; certs by exact source URL.
    orphan_crls = [u for u in all_crl_urls
                   if unquote(u).lower() not in referenced]
    orphan_certs = [u for u in all_cert_urls
                    if u not in tree_srcs]

    if orphan_crls:
        print(f"\n {C.Y}⚠{C.RS} {len(orphan_crls)} CRL(s) on site not referenced by any cert CDP:")
        for url in orphan_crls:
            name = unquote(url.split('/')[-1])
            try:
                data = dl(url, session)
                crl = load_crl(data)
                if crl is not None:
                    now = datetime.now(timezone.utc)
                    nu = crl.next_update_utc
                    if nu and nu < now:
                        exp = f"{C.R}EXPIRED{C.RS}"
                    elif nu:
                        exp = f"{C.G}{(nu-now).days}d{C.RS}"
                    else:
                        exp = "?"
                    print(f" 📋 {name} (issuer: {get_cn(crl.issuer)}, {exp})")
                else:
                    print(f" 📋 {name} (could not parse)")
            except Exception:
                print(f" 📋 {name} (could not fetch)")
    else:
        print(f"\n {C.G}✔{C.RS} No orphaned CRLs")

    if orphan_certs:
        print(f"\n {C.Y}⚠{C.RS} {len(orphan_certs)} cert(s) on site not in hierarchy:")
        for url in orphan_certs:
            print(f" 📜 {unquote(url.split('/')[-1])}")
    else:
        print(f" {C.G}✔{C.RS} No orphaned certificates")

    # ── Summary ──────────────────────────────────────────────────────────
    header("Summary")

    print(f" Chains: {len(roots)} | CA certs: {len(certs)} | "
          f"Site CRLs: {len(all_crl_urls)}")

    if not all_issues:
        print(f"\n {C.G}{C.BD}✔ All checks passed{C.RS}")
    else:
        print(f"\n {C.R}{C.BD}✘ {len(all_issues)} issue(s):{C.RS}")
        for issue in all_issues:
            print(f" {C.R}• {issue}{C.RS}")
|
||||
|
||||
|
||||
# Script entry point (no-op on import).
if __name__ == '__main__':
    main()
|
||||
@@ -0,0 +1,770 @@
|
||||
#!/usr/bin/env python3
|
||||
# /// script
|
||||
# requires-python = ">=3.10"
|
||||
# dependencies = [
|
||||
# "requests",
|
||||
# "beautifulsoup4",
|
||||
# "cryptography",
|
||||
# ]
|
||||
# ///
|
||||
"""
|
||||
PKI Site Analyzer v8
|
||||
====================
|
||||
Scrapes IIS PKI sites recursively, builds certificate chains,
|
||||
fetches and validates CRLs referenced from each certificate's CDP,
|
||||
and flags orphaned CRLs that may need cleanup.
|
||||
|
||||
Usage:
|
||||
uv run pki_analyzer.py <url> [<url2> ...]
|
||||
|
||||
Examples:
|
||||
uv run pki_analyzer.py http://pki.matas.dk/aia http://pki.matas.dk/cdp
|
||||
uv run pki_analyzer.py http://pki.imy.se/
|
||||
"""
|
||||
|
||||
import sys
|
||||
from datetime import datetime, timezone
|
||||
from urllib.parse import urljoin, unquote, urlparse
|
||||
from collections import defaultdict
|
||||
|
||||
import requests
|
||||
from bs4 import BeautifulSoup
|
||||
from cryptography import x509
|
||||
from cryptography.x509.oid import ExtensionOID, AuthorityInformationAccessOID
|
||||
from cryptography.hazmat.primitives import hashes
|
||||
from cryptography.hazmat.primitives.asymmetric import padding, rsa, ec
|
||||
from cryptography.exceptions import InvalidSignature
|
||||
|
||||
|
||||
CERT_EXTENSIONS = ('.crt', '.cer', '.pem')
|
||||
CRL_EXTENSIONS = ('.crl',)
|
||||
TIMEOUT = 30
|
||||
MAX_DEPTH = 5
|
||||
DEBUG_SCRAPE = False
|
||||
|
||||
|
||||
class C:
    """ANSI escape sequences for terminal colors and text styles."""
    HEADER = '\033[95m'  # magenta
    BLUE = '\033[94m'
    CYAN = '\033[96m'
    GREEN = '\033[92m'
    YELLOW = '\033[93m'
    RED = '\033[91m'
    BOLD = '\033[1m'
    DIM = '\033[2m'
    RESET = '\033[0m'    # clear all attributes
|
||||
|
||||
|
||||
def print_header(text):
    """Print *text* inside a bold 80-wide '=' banner."""
    bar = f"{C.BOLD}{C.HEADER}{'='*80}{C.RESET}"
    print(f"\n{bar}")
    print(f"{C.BOLD}{C.HEADER} {text}{C.RESET}")
    print(bar)
|
||||
|
||||
def print_section(text):
    """Print *text* as a cyan section divider padded with '─' to width."""
    pad = '─' * (74 - len(text))
    print(f"\n{C.BOLD}{C.CYAN}── {text} {pad}{C.RESET}")
|
||||
|
||||
def print_ok(text):
    """Status line with a green check mark."""
    mark = f"{C.GREEN}✔{C.RESET}"
    print(f" {mark} {text}")
|
||||
|
||||
def print_warn(text):
    """Status line with a yellow warning sign."""
    mark = f"{C.YELLOW}⚠{C.RESET}"
    print(f" {mark} {text}")
|
||||
|
||||
def print_err(text):
    """Status line with a red cross."""
    mark = f"{C.RED}✘{C.RESET}"
    print(f" {mark} {text}")
|
||||
|
||||
def print_info(label, value):
    """Aligned info line: bold label dot-padded to 40 chars, then value."""
    padded = f"{label:.<40}"
    print(f" {C.BOLD}{padded}{C.RESET} {value}")
|
||||
|
||||
def print_debug(text):
    """Dim debug line, emitted only when DEBUG_SCRAPE is enabled."""
    if not DEBUG_SCRAPE:
        return
    print(f" {C.DIM}DBG: {text}{C.RESET}")
|
||||
|
||||
def sha1_fingerprint(cert):
    """Colon-separated hex SHA-1 fingerprint of *cert*."""
    digest = cert.fingerprint(hashes.SHA1())
    return digest.hex(':')
|
||||
|
||||
def subject_str(name):
    """Render an X.509 Name as 'oid=value, ...'; '(empty)' when blank."""
    rendered = [f"{attr.oid._name}={attr.value}" for attr in name]
    return ', '.join(rendered) if rendered else '(empty)'
|
||||
|
||||
|
||||
# ─── Scraping ────────────────────────────────────────────────────────────────
|
||||
|
||||
def scrape_recursive(base_url, session, origin_host=None, depth=0, visited=None):
    """Recursively scrape a directory-listing page for cert/CRL links.

    Returns (cert_urls, crl_urls), printing each discovery as it goes.
    Stays on the starting host, skips parent/self links, and recurses
    into subdirectories up to MAX_DEPTH; *visited* is shared across the
    recursion to avoid rescanning a directory.
    """
    if visited is None:
        visited = set()
    if not base_url.endswith('/'):
        base_url += '/'
    if base_url in visited or depth > MAX_DEPTH:
        return [], []
    visited.add(base_url)
    if origin_host is None:
        # First call: lock the scan to the starting host.
        origin_host = urlparse(base_url).netloc

    indent = " " * depth
    print(f"{indent} 🔍 Scanning: {base_url}")

    try:
        resp = session.get(base_url, timeout=TIMEOUT)
        resp.raise_for_status()
    except Exception as e:
        # Unreachable directory: warn and treat as empty.
        print_warn(f"{indent} Could not fetch {base_url}: {e}")
        return [], []

    soup = BeautifulSoup(resp.text, 'html.parser')
    cert_urls, crl_urls, subdirs = [], [], []

    for a_tag in soup.find_all('a', href=True):
        href = a_tag['href']
        full_url = urljoin(base_url, href)
        # Percent-decoded, lowercased URL for extension matching only.
        decoded_url = unquote(full_url).lower()
        decoded_href = unquote(href)

        print_debug(f" href='{href}' → '{full_url}'")

        if urlparse(full_url).netloc != origin_host:
            continue
        if href in ('../', '..'):
            continue
        if full_url.rstrip('/') == base_url.rstrip('/'):
            continue

        if any(decoded_url.endswith(ext) for ext in CERT_EXTENSIONS):
            cert_urls.append(full_url)
            print(f"{indent} 📜 {decoded_href}")
        elif any(decoded_url.endswith(ext) for ext in CRL_EXTENSIONS):
            crl_urls.append(full_url)
            print(f"{indent} 📋 {decoded_href}")
        elif full_url.endswith('/') and full_url not in visited:
            subdirs.append(full_url)
            print(f"{indent} 📁 {decoded_href}")

    for subdir in subdirs:
        sc, sl = scrape_recursive(subdir, session, origin_host, depth + 1, visited)
        cert_urls.extend(sc)
        crl_urls.extend(sl)

    return cert_urls, crl_urls
|
||||
|
||||
|
||||
def download_file(url, session):
    """Fetch *url* and return the body bytes; raises for HTTP errors."""
    response = session.get(url, timeout=TIMEOUT)
    response.raise_for_status()
    return response.content
|
||||
|
||||
|
||||
# ─── Parsing ─────────────────────────────────────────────────────────────────
|
||||
|
||||
def load_certificate(data, url=""):
    """Parse *data* as a DER or PEM certificate; None if neither works.

    *url* is accepted for call-site symmetry but is not used.
    """
    try:
        return x509.load_der_x509_certificate(data)
    except Exception:
        pass
    try:
        return x509.load_pem_x509_certificate(data)
    except Exception:
        return None
|
||||
|
||||
|
||||
def load_crl(data, url=""):
    """Parse *data* as a DER or PEM CRL; None if neither works.

    *url* is accepted for call-site symmetry but is not used.
    """
    try:
        return x509.load_der_x509_crl(data)
    except Exception:
        pass
    try:
        return x509.load_pem_x509_crl(data)
    except Exception:
        return None
|
||||
|
||||
|
||||
# ─── Extensions ──────────────────────────────────────────────────────────────
|
||||
|
||||
def get_extension_value(obj, oid):
    """Value of extension *oid* on *obj*, or None when absent."""
    try:
        ext = obj.extensions.get_extension_for_oid(oid)
    except x509.ExtensionNotFound:
        return None
    return ext.value
|
||||
|
||||
def get_cdp_urls(cert):
    """URI distribution-point names from the cert's CDP extension ([] if none)."""
    cdp = get_extension_value(cert, ExtensionOID.CRL_DISTRIBUTION_POINTS)
    if not cdp:
        return []
    return [
        name.value
        for dp in cdp
        if dp.full_name
        for name in dp.full_name
        if isinstance(name, x509.UniformResourceIdentifier)
    ]
|
||||
|
||||
def get_aia_info(cert):
    """Split the AIA extension into ([CA-Issuers URIs], [OCSP URIs])."""
    aia = get_extension_value(cert, ExtensionOID.AUTHORITY_INFORMATION_ACCESS)
    ocsp, ca = [], []
    if aia:
        for desc in aia:
            location = desc.access_location
            if not isinstance(location, x509.UniformResourceIdentifier):
                continue
            if desc.access_method == AuthorityInformationAccessOID.OCSP:
                ocsp.append(location.value)
            elif desc.access_method == AuthorityInformationAccessOID.CA_ISSUERS:
                ca.append(location.value)
    return ca, ocsp
|
||||
|
||||
def get_subject_key_id(cert):
    """Hex Subject Key Identifier of *cert*, or None when absent."""
    ski = get_extension_value(cert, ExtensionOID.SUBJECT_KEY_IDENTIFIER)
    if ski is None:
        return None
    return ski.digest.hex()
|
||||
|
||||
def get_authority_key_id(obj):
    """Hex Authority Key Identifier, or None when absent or keyless."""
    aki = get_extension_value(obj, ExtensionOID.AUTHORITY_KEY_IDENTIFIER)
    if aki is None or not aki.key_identifier:
        return None
    return aki.key_identifier.hex()
|
||||
|
||||
def is_self_signed(cert):
    """True when subject and issuer DNs are identical (root-style cert)."""
    return cert.subject == cert.issuer
|
||||
|
||||
def is_ca(cert):
    """True when Basic Constraints marks *cert* as a CA; False if absent."""
    bc = get_extension_value(cert, ExtensionOID.BASIC_CONSTRAINTS)
    if bc is None:
        return False
    return bc.ca
|
||||
|
||||
def get_key_usage(cert):
    """Comma-joined names of asserted key-usage bits, or None if absent."""
    ku = get_extension_value(cert, ExtensionOID.KEY_USAGE)
    if not ku:
        return None
    checked = ('digital_signature', 'key_encipherment', 'key_cert_sign',
               'crl_sign', 'content_commitment', 'data_encipherment',
               'key_agreement')
    usages = []
    for attr in checked:
        try:
            asserted = getattr(ku, attr)
        except Exception:
            # Some bits are only readable in certain states; skip quietly.
            continue
        if asserted:
            usages.append(attr)
    return ', '.join(usages)
|
||||
|
||||
def get_eku(cert):
    """Comma-joined dotted OIDs from Extended Key Usage, or None if absent."""
    eku = get_extension_value(cert, ExtensionOID.EXTENDED_KEY_USAGE)
    if not eku:
        return None
    return ', '.join(usage.dotted_string for usage in eku)
|
||||
|
||||
def get_san(cert):
    """Comma-joined Subject Alternative Name entries, or None if absent."""
    san = get_extension_value(cert, ExtensionOID.SUBJECT_ALTERNATIVE_NAME)
    if not san:
        return None
    return ', '.join(str(entry.value) for entry in san)
|
||||
|
||||
|
||||
# ─── Chain Building ──────────────────────────────────────────────────────────
|
||||
|
||||
def build_chains(certs_dict):
    """Build root-to-leaf CA chains from loaded certificates.

    *certs_dict* maps fingerprint -> (cert, url). Certificates sharing an
    SKI are collapsed to the one with the latest expiry; parentage is
    resolved by matching child AKI to parent SKI. Returns a list of
    chains (lists of fingerprints, root first); any certificate that
    appears in no walked chain is appended as a single-element chain.
    """
    # SKI -> fingerprint of the latest-expiring cert holding that key.
    ski_index = {}
    for fp, (cert, url) in certs_dict.items():
        ski = get_subject_key_id(cert)
        if ski:
            if ski in ski_index:
                existing, _ = certs_dict[ski_index[ski]]
                if cert.not_valid_after_utc > existing.not_valid_after_utc:
                    ski_index[ski] = fp
            else:
                ski_index[ski] = fp

    # child fingerprint -> parent fingerprint (AKI → SKI lookup).
    parent_of = {}
    for fp, (cert, url) in certs_dict.items():
        if is_self_signed(cert):
            continue
        aki = get_authority_key_id(cert)
        if aki and aki in ski_index and ski_index[aki] != fp:
            parent_of[fp] = ski_index[aki]

    roots = [fp for fp, (c, _) in certs_dict.items() if is_self_signed(c)]
    children_of = defaultdict(list)
    for child, par in parent_of.items():
        children_of[par].append(child)

    chains = []
    # DFS that reuses one mutable path list; each leaf snapshots a copy.
    def walk(fp, chain):
        chain.append(fp)
        kids = children_of.get(fp, [])
        if not kids:
            chains.append(list(chain))
        else:
            for k in kids:
                walk(k, chain)
        chain.pop()

    for r in roots:
        walk(r, [])

    # Certs not reachable from any root become standalone chains.
    seen = set()
    for c in chains:
        seen.update(c)
    for fp in certs_dict:
        if fp not in seen:
            chains.append([fp])

    return chains
|
||||
|
||||
|
||||
# ─── CRL Operations ─────────────────────────────────────────────────────────
|
||||
|
||||
def verify_crl_signature(crl, issuer_cert):
    """Verify a CRL's signature against the issuer's public key.

    Returns a (status, message) pair: (True, ...) when verified,
    (False, ...) on a signature mismatch, and (None, ...) when
    verification could not be attempted.
    """
    try:
        key = issuer_cert.public_key()
        if isinstance(key, rsa.RSAPublicKey):
            # NOTE(review): assumes PKCS#1 v1.5 padding; RSA-PSS-signed
            # CRLs would land in the generic error branch — confirm.
            key.verify(crl.signature, crl.tbs_certlist_bytes,
                       padding.PKCS1v15(), crl.signature_hash_algorithm)
        elif isinstance(key, ec.EllipticCurvePublicKey):
            key.verify(crl.signature, crl.tbs_certlist_bytes,
                       ec.ECDSA(crl.signature_hash_algorithm))
        else:
            return None, "Unsupported key type"
    except InvalidSignature:
        return False, "Signature INVALID"
    except Exception as exc:
        return None, f"Verification error: {exc}"
    return True, "Signature valid"
|
||||
|
||||
|
||||
def fetch_and_validate_crl(cdp_url, issuer_cert, session):
    """Download the CRL at *cdp_url*, parse and validate it, return a result dict."""
    outcome = {
        'url': cdp_url, 'reachable': False, 'parseable': False,
        'issuer': None, 'last_update': None, 'next_update': None,
        'expired': None, 'is_delta': False, 'sig_valid': None,
        'sig_msg': None, 'revoked_count': 0, 'error': None, 'crl': None,
    }

    # Skip non-HTTP
    if not cdp_url.lower().startswith('http'):
        outcome['error'] = f"Non-HTTP CDP (skipped): {cdp_url}"
        return outcome

    # Download
    try:
        payload = download_file(cdp_url, session)
    except Exception as exc:
        outcome['error'] = f"Download failed: {exc}"
        return outcome
    outcome['reachable'] = True

    # Parse
    crl = load_crl(payload, cdp_url)
    if crl is None:
        outcome['error'] = f"Parse failed ({len(payload)} bytes)"
        return outcome

    outcome['parseable'] = True
    outcome['crl'] = crl
    outcome['issuer'] = subject_str(crl.issuer)
    outcome['last_update'] = crl.last_update_utc
    outcome['next_update'] = crl.next_update_utc
    outcome['revoked_count'] = len(list(crl))

    if crl.next_update_utc:
        outcome['expired'] = crl.next_update_utc < datetime.now(timezone.utc)

    # Delta check
    try:
        crl.extensions.get_extension_for_oid(ExtensionOID.DELTA_CRL_INDICATOR)
    except x509.ExtensionNotFound:
        pass
    else:
        outcome['is_delta'] = True

    # Signature verification
    if issuer_cert is None:
        outcome['sig_msg'] = "No issuer cert available"
    else:
        outcome['sig_valid'], outcome['sig_msg'] = verify_crl_signature(
            crl, issuer_cert)

    return outcome
|
||||
|
||||
|
||||
def is_cert_revoked(cert, crl):
    """Check if a certificate's serial is on the CRL."""
    return any(entry.serial_number == cert.serial_number for entry in crl)
|
||||
|
||||
|
||||
# ─── Display ─────────────────────────────────────────────────────────────────
|
||||
|
||||
def display_cert(cert, url, indent=0):
    """Pretty-print one certificate's key fields, indented *indent* levels."""
    p = " " * indent
    now = datetime.now(timezone.utc)
    nb = cert.not_valid_before_utc
    na = cert.not_valid_after_utc
    exp = na < now
    dl = (na - now).days

    print(f"{p}{C.BOLD}Subject:{C.RESET} {subject_str(cert.subject)}")
    print(f"{p}{C.BOLD}Issuer:{C.RESET} {subject_str(cert.issuer)}")
    print(f"{p}{C.BOLD}Serial:{C.RESET} {format(cert.serial_number, 'x')}")
    print(f"{p}{C.BOLD}Not Before:{C.RESET} {nb}")
    print(f"{p}{C.BOLD}Not After:{C.RESET} {na}", end="")
    # Color-code remaining validity: red expired, yellow < 90 days, green otherwise.
    if exp:
        print(f" {C.RED}[EXPIRED]{C.RESET}")
    elif dl < 90:
        print(f" {C.YELLOW}[{dl} days remaining]{C.RESET}")
    else:
        print(f" {C.GREEN}[{dl} days remaining]{C.RESET}")
    if nb > now:
        print(f"{p} {C.RED}[NOT YET VALID]{C.RESET}")

    print(f"{p}{C.BOLD}SHA1:{C.RESET} {sha1_fingerprint(cert)}")
    print(f"{p}{C.BOLD}Self-signed:{C.RESET} {'Yes' if is_self_signed(cert) else 'No'}")
    print(f"{p}{C.BOLD}CA:{C.RESET} {'Yes' if is_ca(cert) else 'No'}")

    # Optional extensions: only printed when present.
    ku = get_key_usage(cert)
    if ku:
        print(f"{p}{C.BOLD}Key Usage:{C.RESET} {ku}")
    eku = get_eku(cert)
    if eku:
        print(f"{p}{C.BOLD}EKU:{C.RESET} {eku}")
    san = get_san(cert)
    if san:
        print(f"{p}{C.BOLD}SAN:{C.RESET} {san}")

    ski = get_subject_key_id(cert)
    if ski:
        print(f"{p}{C.BOLD}SKI:{C.RESET} {ski}")
    aki = get_authority_key_id(cert)
    if aki:
        print(f"{p}{C.BOLD}AKI:{C.RESET} {aki}")

    # CDP
    cdps = get_cdp_urls(cert)
    if cdps:
        print(f"{p}{C.BOLD}CDP:{C.RESET}")
        for u in cdps:
            print(f"{p} → {u}")
    else:
        print(f"{p}{C.BOLD}CDP:{C.RESET} (none)")

    # AIA
    ca_issuers, ocsp_urls = get_aia_info(cert)
    if ca_issuers or ocsp_urls:
        print(f"{p}{C.BOLD}AIA:{C.RESET}")
        for u in ca_issuers:
            print(f"{p} → CA Issuer: {u}")
        for u in ocsp_urls:
            print(f"{p} → OCSP: {u}")
    else:
        print(f"{p}{C.BOLD}AIA:{C.RESET} (none)")

    # Policies
    policies = get_extension_value(cert, ExtensionOID.CERTIFICATE_POLICIES)
    if policies:
        print(f"{p}{C.BOLD}Policies:{C.RESET}")
        for pol in policies:
            print(f"{p} → {pol.policy_identifier.dotted_string}")
            if pol.policy_qualifiers:
                for q in pol.policy_qualifiers:
                    # Plain-string qualifiers are CPS pointer URIs.
                    if isinstance(q, str):
                        print(f"{p} CPS: {q}")

    print(f"{p}{C.BOLD}Source:{C.RESET} {url}")
|
||||
|
||||
|
||||
def display_crl_inline(result, indent=0):
    """Display CRL validation results inline with its parent certificate."""
    p = " " * indent
    r = result

    if r['error']:
        # Non-HTTP (e.g. LDAP) CDPs are intentionally skipped, not failures.
        if 'Non-HTTP' in r['error']:
            print(f"{p}{C.DIM} ↳ CRL: {r['url']} (LDAP — not checked){C.RESET}")
        else:
            print(f"{p} {C.RED}↳ CRL: {r['url']}{C.RESET}")
            print(f"{p} {C.RED}✘ {r['error']}{C.RESET}")
        return

    crl_type = "Delta" if r['is_delta'] else "Base"

    print(f"{p} ↳ CRL: {r['url']}")
    print(f"{p} Type: {crl_type} | Revoked: {r['revoked_count']}", end="")

    if r['expired']:
        print(f" | {C.RED}EXPIRED (next update: {r['next_update']}){C.RESET}")
    elif r['next_update']:
        rem = (r['next_update'] - datetime.now(timezone.utc)).days
        print(f" | {C.GREEN}Valid ({rem} days){C.RESET}")
    else:
        # No nextUpdate field at all — just terminate the line.
        print()

    print(f"{p} Published: {r['last_update']}")

    # sig_valid is tri-state: True verified, False bad signature, None unverifiable.
    if r['sig_valid'] is True:
        print(f"{p} {C.GREEN}✔ {r['sig_msg']}{C.RESET}")
    elif r['sig_valid'] is False:
        print(f"{p} {C.RED}✘ {r['sig_msg']}{C.RESET}")
    else:
        print(f"{p} {C.YELLOW}⚠ {r['sig_msg']}{C.RESET}")
|
||||
|
||||
|
||||
# ─── Main ────────────────────────────────────────────────────────────────────
|
||||
|
||||
def main():
    """CLI entry point: scrape PKI sites, build chains, validate CRLs, summarize."""
    if len(sys.argv) < 2:
        print(f"Usage: {sys.argv[0]} <url> [<url2> ...]")
        print(f"Example: {sys.argv[0]} http://pki.imy.se/")
        sys.exit(1)

    base_urls = sys.argv[1:]

    # TLS verification is deliberately off: PKI repositories are commonly
    # served over plain HTTP or with certs from the very PKI under test.
    session = requests.Session()
    session.verify = False
    session.headers.update({
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) '
                      'AppleWebKit/537.36 (KHTML, like Gecko) '
                      'Chrome/125.0.0.0 Safari/537.36',
        'Accept': '*/*',
        'Accept-Encoding': 'identity',
    })
    # NOTE(review): requests.packages.urllib3 is a deprecated alias;
    # consider `import urllib3; urllib3.disable_warnings()` instead.
    requests.packages.urllib3.disable_warnings()

    all_cert_urls, all_crl_urls = [], []

    # ── Step 1: Discover files ───────────────────────────────────────────
    for base_url in base_urls:
        print_header(f"Scanning: {base_url}")
        cu, cl = scrape_recursive(base_url, session)
        all_cert_urls.extend(cu)
        all_crl_urls.extend(cl)

    # De-duplicate while preserving discovery order.
    all_cert_urls = list(dict.fromkeys(all_cert_urls))
    all_crl_urls = list(dict.fromkeys(all_crl_urls))

    print_section("Discovery Summary")
    print_ok(f"Certificate files: {len(all_cert_urls)}")
    print_ok(f"CRL files on site: {len(all_crl_urls)}")

    # ── Step 2: Download and parse certificates ──────────────────────────
    print_header("Loading Certificates")

    certs_dict = {}  # fingerprint -> (cert, url)
    for url in all_cert_urls:
        try:
            data = download_file(url, session)
            cert = load_certificate(data, url)
            if cert is not None:
                fp = sha1_fingerprint(cert)
                certs_dict[fp] = (cert, url)
                print_ok(f"{subject_str(cert.subject)}")
            else:
                print_warn(f"Could not parse: {url}")
        except Exception as e:
            print_err(f"Download failed: {url} — {e}")

    # ── Step 3: Fetch additional certs from AIA ──────────────────────────
    print_header("Fetching CA Certs from AIA Extensions")
    aia_urls = set()
    for fp, (cert, url) in certs_dict.items():
        ca_issuers, _ = get_aia_info(cert)
        for u in ca_issuers:
            if u.lower().startswith('http'):
                aia_urls.add(u)

    # Only fetch AIA URLs we have not already downloaded.
    loaded_urls = set(u for _, (c, u) in certs_dict.items())
    new_aia = aia_urls - loaded_urls

    if new_aia:
        for url in new_aia:
            try:
                data = download_file(url, session)
                cert = load_certificate(data, url)
                if cert is not None:
                    fp = sha1_fingerprint(cert)
                    if fp not in certs_dict:
                        certs_dict[fp] = (cert, url)
                        print_ok(f"Fetched: {subject_str(cert.subject)}")
            except Exception as e:
                print_err(f"Failed: {url} — {e}")
    else:
        print_info("No additional certs needed", "All issuers already loaded")

    # ── Step 4: Build chains ─────────────────────────────────────────────
    chains = build_chains(certs_dict)

    # Build issuer lookup: AKI -> issuer cert
    # (newest cert wins when several share one SKI)
    ski_to_cert = {}
    for fp, (cert, url) in certs_dict.items():
        ski = get_subject_key_id(cert)
        if ski:
            if ski not in ski_to_cert:
                ski_to_cert[ski] = cert
            else:
                existing = ski_to_cert[ski]
                if cert.not_valid_after_utc > existing.not_valid_after_utc:
                    ski_to_cert[ski] = cert

    # ── Step 5: Display chains with inline CRL validation ────────────────
    print_header("Certificate Chains & CRL Status")

    # Track all CDP URLs we validate (to find orphans later)
    validated_cdp_urls = set()
    # Track all CRL results for summary
    all_crl_results = []
    # Track revocation status
    revocation_issues = []

    for i, chain in enumerate(chains, 1):
        print_section(f"Chain {i} ({len(chain)} certificate(s))")

        for depth, fp in enumerate(chain):
            cert, url = certs_dict[fp]

            if depth == 0 and is_self_signed(cert):
                role = "Root CA"
            elif is_ca(cert):
                role = "Intermediate CA"
            else:
                role = "End Entity"

            print(f"\n{' '*depth}{C.BOLD}{C.BLUE}[{depth}] {role}{C.RESET}")
            display_cert(cert, url, indent=depth)

            # Fetch and validate each CDP for this certificate
            cdp_urls = get_cdp_urls(cert)
            if cdp_urls:
                # Find issuer cert for signature verification
                aki = get_authority_key_id(cert)
                issuer_cert = ski_to_cert.get(aki) if aki else None

                for cdp_url in cdp_urls:
                    validated_cdp_urls.add(cdp_url)
                    result = fetch_and_validate_crl(cdp_url, issuer_cert, session)
                    all_crl_results.append(result)
                    display_crl_inline(result, indent=depth)

                    # Check if THIS certificate is revoked
                    if result['crl'] is not None:
                        if is_cert_revoked(cert, result['crl']):
                            revocation_issues.append(
                                (cert, url, cdp_url)
                            )
                            print(f"{' '*depth} {C.RED}{C.BOLD}"
                                  f"⚠ THIS CERTIFICATE IS REVOKED!{C.RESET}")
            elif not is_self_signed(cert):
                print(f"{' '*depth} {C.YELLOW}⚠ No CDP — cannot check revocation{C.RESET}")

    # ── Step 6: Find orphaned CRLs on the site ──────────────────────────
    print_header("Orphaned CRL Files (on site but not referenced by any certificate)")

    # Normalize URLs for comparison
    def normalize_url(u):
        return unquote(u).lower().rstrip('/')

    referenced_normalized = set(normalize_url(u) for u in validated_cdp_urls)
    orphans = []

    for crl_url in all_crl_urls:
        if normalize_url(crl_url) not in referenced_normalized:
            orphans.append(crl_url)

    if orphans:
        print_warn(f"Found {len(orphans)} CRL file(s) not referenced by any certificate CDP:")
        for url in orphans:
            # Try to load and show basic info
            try:
                data = download_file(url, session)
                crl = load_crl(data, url)
                if crl is not None:
                    now = datetime.now(timezone.utc)
                    expired = ""
                    if crl.next_update_utc and crl.next_update_utc < now:
                        expired = f" {C.RED}[EXPIRED]{C.RESET}"
                    print(f" 📋 {unquote(url)}")
                    print(f" Issuer: {subject_str(crl.issuer)}")
                    print(f" Next update: {crl.next_update_utc}{expired}")
                    print(f" Revoked: {len(list(crl))}")
                else:
                    print(f" 📋 {unquote(url)} (could not parse)")
            except Exception:
                print(f" 📋 {unquote(url)} (could not download)")
        print()
        print_warn("These may be leftover files that should be reviewed/removed.")
    else:
        print_ok("No orphaned CRL files — all CRLs on site are referenced by certificates")

    # ── Step 7: Summary ──────────────────────────────────────────────────
    print_header("Summary")
    print_info("Certificates parsed", str(len(certs_dict)))
    print_info("Chains found", str(len(chains)))

    # CRL stats
    total_cdps = len(all_crl_results)
    reachable = sum(1 for r in all_crl_results if r['reachable'])
    parsed = sum(1 for r in all_crl_results if r['parseable'])
    expired = sum(1 for r in all_crl_results if r['expired'])
    sig_ok = sum(1 for r in all_crl_results if r['sig_valid'] is True)
    sig_fail = sum(1 for r in all_crl_results if r['sig_valid'] is False)
    unreachable = sum(1 for r in all_crl_results
                      if not r['reachable'] and r['error']
                      and 'Non-HTTP' not in r['error'])
    ldap_skipped = sum(1 for r in all_crl_results
                       if r['error'] and 'Non-HTTP' in r['error'])

    print_info("CDP URLs checked", str(total_cdps))
    if ldap_skipped:
        print_info(" LDAP (skipped)", str(ldap_skipped))
    if reachable:
        print_info(" Reachable", str(reachable))
    if unreachable:
        print_info(" Unreachable", str(unreachable))
    if parsed:
        print_info(" Parsed OK", str(parsed))
    if sig_ok:
        print_info(" Signature valid", str(sig_ok))
    if sig_fail:
        print_info(" Signature INVALID", str(sig_fail))

    # Expiry
    now = datetime.now(timezone.utc)
    exp_certs = [fp for fp, (c, _) in certs_dict.items()
                 if c.not_valid_after_utc < now]
    exp_crls = [r for r in all_crl_results if r['expired']]

    if exp_certs:
        print_err(f"Expired certificates: {len(exp_certs)}")
        for fp in exp_certs:
            c, u = certs_dict[fp]
            print(f" → {subject_str(c.subject)} (expired {c.not_valid_after_utc})")
    else:
        print_ok("No expired certificates")

    if exp_crls:
        print_err(f"Expired CRLs: {len(exp_crls)}")
        for r in exp_crls:
            print(f" → {r['url']} (expired {r['next_update']})")
    else:
        print_ok("No expired CRLs")

    if unreachable:
        print_err(f"Unreachable CDPs: {unreachable}")
        for r in all_crl_results:
            if not r['reachable'] and r['error'] and 'Non-HTTP' not in r['error']:
                print(f" → {r['url']}")
                print(f" {r['error']}")

    if revocation_issues:
        print_err(f"REVOKED CERTIFICATES: {len(revocation_issues)}")
        for cert, url, cdp in revocation_issues:
            print(f" → {subject_str(cert.subject)}")
            print(f" Serial: {format(cert.serial_number, 'x')}")
            print(f" CRL: {cdp}")
    else:
        print_ok("No downloaded certificates are revoked")

    if orphans:
        print_warn(f"Orphaned CRL files: {len(orphans)}")
    else:
        print_ok("No orphaned CRL files")
|
||||
|
||||
|
||||
# Script entry point.
if __name__ == '__main__':
    main()
|
||||
+811
@@ -0,0 +1,811 @@
|
||||
#!/usr/bin/env python3
|
||||
# /// script
|
||||
# requires-python = ">=3.10"
|
||||
# dependencies = [
|
||||
# "requests",
|
||||
# "beautifulsoup4",
|
||||
# "cryptography",
|
||||
# ]
|
||||
# ///
|
||||
"""
|
||||
PKI Site Analyzer v6
|
||||
====================
|
||||
Scrapes IIS sites with directory browsing enabled (recursively),
|
||||
downloads all .crt/.cer/.crl files, parses certificates, builds
|
||||
chain relationships, and validates CRLs.
|
||||
|
||||
Usage:
|
||||
uv run pki_analyzer.py <url> [<url2> ...]
|
||||
|
||||
Examples:
|
||||
uv run pki_analyzer.py http://pki.matas.dk/cdp http://pki.matas.dk/aia
|
||||
uv run pki_analyzer.py http://pki.imy.se/
|
||||
"""
|
||||
|
||||
import sys
|
||||
import traceback
|
||||
from datetime import datetime, timezone
|
||||
from urllib.parse import urljoin, unquote, urlparse
|
||||
from collections import defaultdict
|
||||
|
||||
import requests
|
||||
from bs4 import BeautifulSoup
|
||||
from cryptography import x509
|
||||
from cryptography.x509.oid import ExtensionOID, AuthorityInformationAccessOID
|
||||
from cryptography.hazmat.primitives import hashes
|
||||
from cryptography.hazmat.primitives.asymmetric import padding, rsa, ec
|
||||
from cryptography.exceptions import InvalidSignature
|
||||
|
||||
|
||||
# ─── Configuration ───────────────────────────────────────────────────────────
|
||||
|
||||
CERT_EXTENSIONS = ('.crt', '.cer', '.pem')  # file suffixes treated as certificates
CRL_EXTENSIONS = ('.crl',)  # file suffixes treated as CRLs
TIMEOUT = 30  # per-request HTTP timeout, in seconds
MAX_DEPTH = 5  # maximum recursion depth when walking directory listings
DEBUG_SCRAPE = False  # Set True to see all <a> tags during scraping
|
||||
|
||||
|
||||
class C:
    """ANSI escape sequences used for colored terminal output."""
    HEADER = '\033[95m'
    BLUE = '\033[94m'
    CYAN = '\033[96m'
    GREEN = '\033[92m'
    YELLOW = '\033[93m'
    RED = '\033[91m'
    BOLD = '\033[1m'
    DIM = '\033[2m'
    RESET = '\033[0m'
|
||||
|
||||
|
||||
# ─── Utility ─────────────────────────────────────────────────────────────────
|
||||
|
||||
def print_header(text):
    """Print *text* as a bold banner framed by 80-character '=' rules."""
    rule = f"{C.BOLD}{C.HEADER}{'='*80}{C.RESET}"
    print(f"\n{rule}")
    print(f"{C.BOLD}{C.HEADER} {text}{C.RESET}")
    print(rule)
|
||||
|
||||
|
||||
def print_section(text):
    """Print a cyan section divider containing *text*."""
    filler = '─' * (74 - len(text))
    print(f"\n{C.BOLD}{C.CYAN}── {text} {filler}{C.RESET}")
|
||||
|
||||
|
||||
def print_ok(text):
    """Print *text* behind a green check-mark bullet."""
    tick = f"{C.GREEN}✔{C.RESET}"
    print(f" {tick} {text}")
|
||||
|
||||
|
||||
def print_warn(text):
    """Print *text* behind a yellow warning bullet."""
    mark = f"{C.YELLOW}⚠{C.RESET}"
    print(f" {mark} {text}")
|
||||
|
||||
|
||||
def print_err(text):
    """Print *text* behind a red error bullet."""
    cross = f"{C.RED}✘{C.RESET}"
    print(f" {cross} {text}")
|
||||
|
||||
|
||||
def print_info(label, value):
    """Print an aligned label/value pair; the label is dot-padded to 40 columns."""
    padded = format(label, '.<40')
    print(f" {C.BOLD}{padded}{C.RESET} {value}")
|
||||
|
||||
|
||||
def print_debug(text):
    """Print a dim debug line, but only when DEBUG_SCRAPE is enabled."""
    if not DEBUG_SCRAPE:
        return
    print(f" {C.DIM}DBG: {text}{C.RESET}")
|
||||
|
||||
|
||||
def sha1_fingerprint(cert):
    """Return the certificate's SHA-1 fingerprint as colon-separated hex."""
    digest = cert.fingerprint(hashes.SHA1())
    return digest.hex(':')
|
||||
|
||||
|
||||
def subject_str(name):
    """Render an X.509 Name as comma-joined 'attr=value' pairs.

    Returns the literal string '(empty)' for a name with no attributes.
    NOTE(review): relies on the private ``oid._name`` attribute — confirm
    it is stable across cryptography versions.
    """
    rendered = [f"{attribute.oid._name}={attribute.value}" for attribute in name]
    if not rendered:
        return '(empty)'
    return ', '.join(rendered)
|
||||
|
||||
|
||||
# ─── Recursive Scraping ─────────────────────────────────────────────────────
|
||||
|
||||
def scrape_recursive(base_url, session, origin_host=None, depth=0, visited=None):
    """Recursively walk an IIS-style directory listing collecting PKI files.

    Returns (cert_urls, crl_urls).  Stays on the original host, skips
    parent/self links, and stops recursing beyond MAX_DEPTH levels.
    The shared *visited* set prevents re-scanning and cycles.
    """
    if visited is None:
        visited = set()

    # Normalize so that self-link and visited checks compare consistently.
    if not base_url.endswith('/'):
        base_url += '/'

    if base_url in visited or depth > MAX_DEPTH:
        return [], []

    visited.add(base_url)

    if origin_host is None:
        origin_host = urlparse(base_url).netloc

    indent = " " * depth
    print(f"{indent} 🔍 Scanning: {base_url}")

    try:
        resp = session.get(base_url, timeout=TIMEOUT)
        resp.raise_for_status()
    except Exception as e:
        # Unreachable directory: report and return empty rather than abort.
        print_warn(f"{indent} Could not fetch {base_url}: {e}")
        return [], []

    soup = BeautifulSoup(resp.text, 'html.parser')

    cert_urls = []
    crl_urls = []
    subdirs = []

    all_links = soup.find_all('a', href=True)
    print_debug(f"Found {len(all_links)} <a> tags in {base_url}")

    for a_tag in all_links:
        href = a_tag['href']
        full_url = urljoin(base_url, href)
        # Decoded/lowercased copies for extension matching and display.
        decoded_url = unquote(full_url).lower()
        decoded_href = unquote(href)

        print_debug(f" href='{href}' → '{full_url}'")

        if urlparse(full_url).netloc != origin_host:
            print_debug(f" Skipped: different host")
            continue

        if href in ('../', '..'):
            print_debug(f" Skipped: parent link")
            continue

        if full_url.rstrip('/') == base_url.rstrip('/'):
            print_debug(f" Skipped: self link")
            continue

        # Classify by suffix: certificate, CRL, or subdirectory to recurse into.
        if any(decoded_url.endswith(ext) for ext in CERT_EXTENSIONS):
            cert_urls.append(full_url)
            print(f"{indent} 📜 {decoded_href}")
        elif any(decoded_url.endswith(ext) for ext in CRL_EXTENSIONS):
            crl_urls.append(full_url)
            print(f"{indent} 📋 {decoded_href}")
        elif full_url.endswith('/') and full_url not in visited:
            subdirs.append(full_url)
            print(f"{indent} 📁 {decoded_href}")
        else:
            print_debug(f" Skipped: not a cert/crl/dir")

    # Depth-first descent into each discovered subdirectory.
    for subdir in subdirs:
        sub_certs, sub_crls = scrape_recursive(
            subdir, session, origin_host, depth + 1, visited
        )
        cert_urls.extend(sub_certs)
        crl_urls.extend(sub_crls)

    return cert_urls, crl_urls
|
||||
|
||||
|
||||
def download_file(url, session):
    """GET *url* and return the raw body, logging size, status, and content type.

    Raises requests.HTTPError (via raise_for_status) on non-2xx responses.
    """
    response = session.get(url, timeout=TIMEOUT)
    response.raise_for_status()
    body = response.content
    ctype = response.headers.get('content-type', 'unknown')
    print(f" ↓ {len(body)} bytes | HTTP {response.status_code} | {ctype}")
    return body
|
||||
|
||||
|
||||
# ─── Certificate Parsing ────────────────────────────────────────────────────
|
||||
|
||||
def load_certificate(data, url=""):
    """Parse certificate bytes, trying DER first, then PEM; None on failure.

    Re-tries both formats after stripping a UTF-8 BOM / NUL / whitespace
    prefix, which some misconfigured servers prepend.
    """
    head = data[:50].lstrip().lower()
    if head.startswith((b'<', b'<!', b'<html')):
        print_warn(f" Got HTML instead of certificate: {url}")
        return None

    loaders = (x509.load_der_x509_certificate, x509.load_pem_x509_certificate)
    candidates = [data]
    stripped = data.lstrip(b'\xef\xbb\xbf\x00\r\n ')
    if stripped != data:
        candidates.append(stripped)

    # Same attempt order as before: DER(raw), PEM(raw), DER(stripped), PEM(stripped).
    for blob in candidates:
        for loader in loaders:
            try:
                return loader(blob)
            except Exception:
                continue

    print_warn(f" Could not parse certificate: {url}")
    return None
|
||||
|
||||
|
||||
def load_crl(data, url=""):
    """Parse CRL bytes and return an x509 CRL object, or None on failure.

    Tries DER first (the common distribution format for CRLs), then PEM,
    then both again after stripping a UTF-8 BOM / NUL / whitespace prefix.
    When every attempt fails, prints diagnostics describing the payload.
    """
    if data[:50].lstrip().lower().startswith((b'<', b'<!', b'<html')):
        print_warn(f" Got HTML instead of CRL: {url}")
        return None

    # Try DER first — this is the most common format for CRLs.
    # Catch Exception, NOT BaseException: catching BaseException would also
    # swallow KeyboardInterrupt/SystemExit and make the tool un-interruptible.
    try:
        return x509.load_der_x509_crl(data)
    except Exception as e:
        der_err = f"{type(e).__name__}: {e}"

    # Try PEM
    try:
        return x509.load_pem_x509_crl(data)
    except Exception as e:
        pem_err = f"{type(e).__name__}: {e}"

    # Try stripping BOM/whitespace
    stripped = data.lstrip(b'\xef\xbb\xbf\x00\r\n ')
    if stripped != data:
        try:
            return x509.load_der_x509_crl(stripped)
        except Exception:
            pass
        try:
            return x509.load_pem_x509_crl(stripped)
        except Exception:
            pass

    # All attempts failed — print diagnostics
    print_err(f" Failed to parse CRL: {url}")
    print_warn(f" DER: {der_err}")
    print_warn(f" PEM: {pem_err}")
    print_warn(f" Size: {len(data)} bytes")
    print_warn(f" Hex (first 40): {data[:40].hex(' ')}")
    if len(data) > 0:
        if data[0] == 0x30:
            print_warn(f" Starts with 0x30 (ASN.1 SEQUENCE)")
        elif data[:5] == b'-----':
            print_warn(f" Starts with PEM header")
        else:
            print_warn(f" First byte: 0x{data[0]:02x}")
    return None
|
||||
|
||||
|
||||
# ─── Extension Helpers ───────────────────────────────────────────────────────
|
||||
|
||||
def get_extension_value(obj, oid):
    """Return the value of extension *oid* from *obj*, or None if absent."""
    try:
        extension = obj.extensions.get_extension_for_oid(oid)
    except x509.ExtensionNotFound:
        return None
    return extension.value
|
||||
|
||||
|
||||
def get_cdp_urls(cert):
    """Collect every URI from the CRL Distribution Points extension."""
    cdp = get_extension_value(cert, ExtensionOID.CRL_DISTRIBUTION_POINTS)
    if not cdp:
        return []
    return [
        name.value
        for dp in cdp
        if dp.full_name
        for name in dp.full_name
        if isinstance(name, x509.UniformResourceIdentifier)
    ]
|
||||
|
||||
|
||||
def get_aia_info(cert):
    """Return (ca_issuer_urls, ocsp_urls) from the AIA extension (URI entries only)."""
    aia = get_extension_value(cert, ExtensionOID.AUTHORITY_INFORMATION_ACCESS)
    issuer_urls = []
    ocsp_urls = []
    for descriptor in aia or []:
        location = descriptor.access_location
        if not isinstance(location, x509.UniformResourceIdentifier):
            continue
        if descriptor.access_method == AuthorityInformationAccessOID.OCSP:
            ocsp_urls.append(location.value)
        elif descriptor.access_method == AuthorityInformationAccessOID.CA_ISSUERS:
            issuer_urls.append(location.value)
    return issuer_urls, ocsp_urls
|
||||
|
||||
|
||||
def get_subject_key_id(cert):
    """Return the Subject Key Identifier digest as lowercase hex, or None."""
    ski = get_extension_value(cert, ExtensionOID.SUBJECT_KEY_IDENTIFIER)
    if not ski:
        return None
    return ski.digest.hex()
|
||||
|
||||
|
||||
def get_authority_key_id(obj):
    """Return the Authority Key Identifier key-id as lowercase hex, or None.

    Works for both certificates and CRLs (anything with .extensions).
    """
    aki = get_extension_value(obj, ExtensionOID.AUTHORITY_KEY_IDENTIFIER)
    if aki and aki.key_identifier:
        return aki.key_identifier.hex()
    return None
|
||||
|
||||
|
||||
def is_self_signed(cert):
    """True when the subject and issuer names are identical (root candidate).

    Note this compares names only — it does not verify the signature.
    """
    return cert.subject == cert.issuer
|
||||
|
||||
|
||||
def is_ca(cert):
    """Return the Basic Constraints CA flag, defaulting to False when absent."""
    bc = get_extension_value(cert, ExtensionOID.BASIC_CONSTRAINTS)
    if not bc:
        return False
    return bc.ca
|
||||
|
||||
|
||||
def get_key_usage(cert):
    """Return the set Key Usage flags as a comma-joined string, or None if absent."""
    ku = get_extension_value(cert, ExtensionOID.KEY_USAGE)
    if not ku:
        return None
    flag_names = ('digital_signature', 'key_encipherment', 'key_cert_sign',
                  'crl_sign', 'content_commitment', 'data_encipherment',
                  'key_agreement')
    enabled = []
    for flag in flag_names:
        # Some KeyUsage attributes raise when related flags are unset; skip those.
        try:
            value = getattr(ku, flag)
        except Exception:
            continue
        if value:
            enabled.append(flag)
    return ', '.join(enabled)
|
||||
|
||||
|
||||
def get_eku(cert):
    """Return the Extended Key Usage OIDs as a comma-joined dotted string, or None."""
    eku = get_extension_value(cert, ExtensionOID.EXTENDED_KEY_USAGE)
    return ', '.join(usage.dotted_string for usage in eku) if eku else None
|
||||
|
||||
|
||||
def get_san(cert):
    """Return the Subject Alternative Name entries as a comma-joined string, or None."""
    san = get_extension_value(cert, ExtensionOID.SUBJECT_ALTERNATIVE_NAME)
    return ', '.join(str(entry.value) for entry in san) if san else None
|
||||
|
||||
|
||||
# ─── Chain Building ──────────────────────────────────────────────────────────
|
||||
|
||||
def build_chains(certs_dict):
    """Assemble root-to-leaf chains of fingerprints from the loaded certs.

    When several certificates share one SKI (e.g. renewed CA certs), the
    one expiring last represents that key.  Certificates that cannot be
    linked under any self-signed root come back as one-element chains.
    """
    # SKI -> fingerprint of the longest-lived certificate holding that key.
    best_for_ski = {}
    for fp, (cert, _url) in certs_dict.items():
        ski = get_subject_key_id(cert)
        if not ski:
            continue
        previous = best_for_ski.get(ski)
        if previous is None:
            best_for_ski[ski] = fp
        else:
            prev_cert, _ = certs_dict[previous]
            if cert.not_valid_after_utc > prev_cert.not_valid_after_utc:
                best_for_ski[ski] = fp

    # Child fingerprint -> issuer fingerprint via AKI/SKI matching.
    parent_of = {}
    for fp, (cert, _url) in certs_dict.items():
        if is_self_signed(cert):
            continue
        aki = get_authority_key_id(cert)
        parent_fp = best_for_ski.get(aki) if aki else None
        if parent_fp is not None and parent_fp != fp:
            parent_of[fp] = parent_fp

    children_of = defaultdict(list)
    for child_fp, parent_fp in parent_of.items():
        children_of[parent_fp].append(child_fp)

    chains = []

    def descend(fp, trail):
        # Depth-first traversal; each leaf closes one complete chain.
        trail.append(fp)
        branches = children_of.get(fp, [])
        if branches:
            for branch_fp in branches:
                descend(branch_fp, trail)
        else:
            chains.append(list(trail))
        trail.pop()

    for root_fp in (fp for fp, (cert, _url) in certs_dict.items()
                    if is_self_signed(cert)):
        descend(root_fp, [])

    # Leftover (unlinkable) certificates become singleton chains.
    linked = {fp for chain in chains for fp in chain}
    chains.extend([fp] for fp in certs_dict if fp not in linked)

    return chains
|
||||
|
||||
|
||||
# ─── CRL Validation ─────────────────────────────────────────────────────────
|
||||
|
||||
def verify_crl_signature(crl, issuer_cert):
    """Check the CRL signature against *issuer_cert*'s public key.

    Returns a (status, message) pair: (True, ...) when verified,
    (False, ...) on a signature mismatch, (None, ...) when verification
    could not be attempted (unsupported key, unexpected error).
    """
    try:
        key = issuer_cert.public_key()
        if isinstance(key, rsa.RSAPublicKey):
            # NOTE(review): assumes PKCS#1 v1.5 padding — RSA-PSS CRLs
            # would fall through to the generic error branch; confirm.
            key.verify(
                crl.signature,
                crl.tbs_certlist_bytes,
                padding.PKCS1v15(),
                crl.signature_hash_algorithm,
            )
            return True, "Signature valid"
        if isinstance(key, ec.EllipticCurvePublicKey):
            key.verify(
                crl.signature,
                crl.tbs_certlist_bytes,
                ec.ECDSA(crl.signature_hash_algorithm),
            )
            return True, "Signature valid"
        return None, "Unsupported key type"
    except InvalidSignature:
        return False, "Signature INVALID"
    except Exception as exc:
        return None, f"Verification error: {exc}"
|
||||
|
||||
|
||||
def validate_crl(crl, crl_url, certs_dict):
    """Validate one parsed CRL: freshness, delta flag, revoked serials, signature.

    Attempts to match the CRL to its issuing certificate in *certs_dict*
    (by AKI/SKI first, then by issuer DN) and verifies the signature with
    the first match found.
    """
    now = datetime.now(timezone.utc)
    next_up = crl.next_update_utc
    results = {
        'url': crl_url,
        'issuer': subject_str(crl.issuer),
        'last_update': crl.last_update_utc,
        'next_update': next_up,
        'expired': next_up < now if next_up else None,
        'sig_valid': None,
        'sig_msg': 'No matching issuer found',
        'revoked_count': len(list(crl)),
        'revoked_serials': [format(entry.serial_number, 'x') for entry in crl],
        'is_delta': False,
    }

    try:
        crl.extensions.get_extension_for_oid(ExtensionOID.DELTA_CRL_INDICATOR)
    except x509.ExtensionNotFound:
        pass
    else:
        results['is_delta'] = True

    crl_aki = get_authority_key_id(crl)

    for _fp, (cert, _src) in certs_dict.items():
        ski = get_subject_key_id(cert)
        if (crl_aki and ski == crl_aki) or cert.subject == crl.issuer:
            valid, msg = verify_crl_signature(crl, cert)
            results['sig_valid'] = valid
            results['sig_msg'] = msg
            results['verified_by'] = subject_str(cert.subject)
            break

    return results
|
||||
|
||||
|
||||
# ─── Display Functions ───────────────────────────────────────────────────────
|
||||
|
||||
def display_cert(cert, url, indent=0):
    """Pretty-print one certificate's key fields, indented *indent* levels.

    Shows subject/issuer/serial, validity (with expiry highlighting),
    fingerprint, CA flags, key usages, SKI/AKI, CDP/AIA URLs, and policy
    OIDs. *url* is the source the cert was downloaded from.
    """
    prefix = " " * indent
    now = datetime.now(timezone.utc)
    not_before = cert.not_valid_before_utc
    not_after = cert.not_valid_after_utc
    expired = not_after < now
    days_left = (not_after - now).days

    print(f"{prefix}{C.BOLD}Subject:{C.RESET} {subject_str(cert.subject)}")
    print(f"{prefix}{C.BOLD}Issuer:{C.RESET} {subject_str(cert.issuer)}")
    print(f"{prefix}{C.BOLD}Serial:{C.RESET} {format(cert.serial_number, 'x')}")
    print(f"{prefix}{C.BOLD}Not Before:{C.RESET} {not_before}")
    # No newline: the expiry tag is appended on the same line below.
    print(f"{prefix}{C.BOLD}Not After:{C.RESET} {not_after}", end="")

    # Colour-code remaining lifetime: red = expired, yellow = < 90 days.
    if expired:
        print(f" {C.RED}[EXPIRED]{C.RESET}")
    elif days_left < 90:
        print(f" {C.YELLOW}[{days_left} days remaining]{C.RESET}")
    else:
        print(f" {C.GREEN}[{days_left} days remaining]{C.RESET}")

    if not_before > now:
        print(f"{prefix} {C.RED}[NOT YET VALID]{C.RESET}")

    print(f"{prefix}{C.BOLD}SHA1:{C.RESET} {sha1_fingerprint(cert)}")
    print(f"{prefix}{C.BOLD}Self-signed:{C.RESET} {'Yes' if is_self_signed(cert) else 'No'}")
    print(f"{prefix}{C.BOLD}CA:{C.RESET} {'Yes' if is_ca(cert) else 'No'}")

    # Optional extensions: print only when present.
    ku = get_key_usage(cert)
    if ku:
        print(f"{prefix}{C.BOLD}Key Usage:{C.RESET} {ku}")
    eku = get_eku(cert)
    if eku:
        print(f"{prefix}{C.BOLD}EKU:{C.RESET} {eku}")
    san = get_san(cert)
    if san:
        print(f"{prefix}{C.BOLD}SAN:{C.RESET} {san}")

    ski = get_subject_key_id(cert)
    if ski:
        print(f"{prefix}{C.BOLD}SKI:{C.RESET} {ski}")
    aki = get_authority_key_id(cert)
    if aki:
        print(f"{prefix}{C.BOLD}AKI:{C.RESET} {aki}")

    # CRL distribution points.
    cdp_urls = get_cdp_urls(cert)
    if cdp_urls:
        print(f"{prefix}{C.BOLD}CDP:{C.RESET}")
        for u in cdp_urls:
            print(f"{prefix} → {u}")
    else:
        print(f"{prefix}{C.BOLD}CDP:{C.RESET} (none)")

    # Authority Information Access: issuer-cert and OCSP URLs.
    ca_issuers, ocsp_urls = get_aia_info(cert)
    if ca_issuers or ocsp_urls:
        print(f"{prefix}{C.BOLD}AIA:{C.RESET}")
        for u in ca_issuers:
            print(f"{prefix} → CA Issuer: {u}")
        for u in ocsp_urls:
            print(f"{prefix} → OCSP: {u}")
    else:
        print(f"{prefix}{C.BOLD}AIA:{C.RESET} (none)")

    # Certificate policies: OID plus any CPS pointer qualifiers.
    policies = get_extension_value(cert, ExtensionOID.CERTIFICATE_POLICIES)
    if policies:
        print(f"{prefix}{C.BOLD}Policies:{C.RESET}")
        for pol in policies:
            oid = pol.policy_identifier.dotted_string
            print(f"{prefix} → {oid}")
            if pol.policy_qualifiers:
                for q in pol.policy_qualifiers:
                    # String qualifiers are CPS URIs per RFC 5280.
                    if isinstance(q, str):
                        print(f"{prefix} CPS: {q}")

    print(f"{prefix}{C.BOLD}Source:{C.RESET} {url}")
|
||||
|
||||
|
||||
def display_crl_results(results):
    """Pretty-print one result dict produced by validate_crl().

    Shows CRL type, issuer, update window (with expiry highlighting),
    signature verdict, and up to 10 revoked serials.
    """
    r = results
    crl_type = "Delta CRL" if r['is_delta'] else "Base CRL"

    print(f"\n {C.BOLD}CRL:{C.RESET} {r['url']}")
    print(f" {C.BOLD}Type:{C.RESET} {crl_type}")
    print(f" {C.BOLD}Issuer:{C.RESET} {r['issuer']}")
    print(f" {C.BOLD}Last Update:{C.RESET} {r['last_update']}")
    print(f" {C.BOLD}Next Update:{C.RESET} {r['next_update']}", end="")

    if r['expired']:
        print(f" {C.RED}[EXPIRED]{C.RESET}")
    elif r['next_update'] is None:
        # BUG FIX: some CRLs omit nextUpdate; the old code fell into the
        # else-branch ('expired' is None, which is falsy) and crashed
        # subtracting a datetime from None.
        print(f" {C.YELLOW}[no next update]{C.RESET}")
    else:
        remaining = (r['next_update'] - datetime.now(timezone.utc)).days
        print(f" {C.GREEN}[valid, {remaining} days remaining]{C.RESET}")

    # Tri-state signature verdict: True / False / None (no issuer found).
    if r['sig_valid'] is True:
        print_ok(f"Signature: {r['sig_msg']} (by {r.get('verified_by', 'unknown')})")
    elif r['sig_valid'] is False:
        print_err(f"Signature: {r['sig_msg']}")
    else:
        print_warn(f"Signature: {r['sig_msg']}")

    count = r['revoked_count']
    print_info("Revoked certificates", str(count))
    if count > 0:
        # Cap the listing at 10 serials to keep the output readable.
        for serial in r['revoked_serials'][:10]:
            print(f" Serial: {serial}")
        if count > 10:
            print(f" ... and {count - 10} more")
|
||||
|
||||
|
||||
# ─── Main ────────────────────────────────────────────────────────────────────
|
||||
|
||||
def main():
    """Entry point for the PKI repository scanner.

    Pipeline: scrape the given URLs for cert/CRL files, parse everything,
    chase CRLs referenced by CDP extensions and CA certs referenced by AIA
    extensions, build and display certificate chains, validate each CRL,
    cross-check downloaded certs against revoked serials, and summarize.
    """
    if len(sys.argv) < 2:
        print(f"Usage: {sys.argv[0]} <url> [<url2> ...]")
        print(f"Example: {sys.argv[0]} http://pki.matas.dk/cdp http://pki.matas.dk/aia")
        print(f" {sys.argv[0]} http://pki.imy.se/")
        print(f"\nRecursively scans directories up to {MAX_DEPTH} levels deep.")
        sys.exit(1)

    base_urls = sys.argv[1:]

    # TLS verification is deliberately off: repository hosts often present
    # certs we cannot validate before this tool has run.
    session = requests.Session()
    session.verify = False
    session.headers.update({
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) '
                      'AppleWebKit/537.36 (KHTML, like Gecko) '
                      'Chrome/125.0.0.0 Safari/537.36',
        'Accept': '*/*',
        'Accept-Encoding': 'identity',
    })
    requests.packages.urllib3.disable_warnings()

    all_cert_urls = []
    all_crl_urls = []

    # ── Step 1: Recursively scrape all provided URLs ─────────────────────
    for base_url in base_urls:
        print_header(f"Scanning: {base_url}")
        cert_urls, crl_urls = scrape_recursive(base_url, session)
        all_cert_urls.extend(cert_urls)
        all_crl_urls.extend(crl_urls)

    # Deduplicate (dict.fromkeys keeps first-seen order)
    all_cert_urls = list(dict.fromkeys(all_cert_urls))
    all_crl_urls = list(dict.fromkeys(all_crl_urls))

    print_section("Discovery Summary")
    print_ok(f"Total certificate files: {len(all_cert_urls)}")
    print_ok(f"Total CRL files: {len(all_crl_urls)}")

    # ── Step 2: Download and parse certificates ──────────────────────────
    print_header("Parsing Certificates")

    certs_dict = {}  # SHA-1 fingerprint -> (cert, source URL)
    for url in all_cert_urls:
        try:
            data = download_file(url, session)
            cert = load_certificate(data, url)
            if cert:
                fp = sha1_fingerprint(cert)
                certs_dict[fp] = (cert, url)
                print_ok(f"Loaded: {subject_str(cert.subject)}")
            else:
                print_warn(f"Could not parse certificate: {url}")
        except Exception as e:
            print_err(f"Failed to download {url}: {e}")

    # ── Step 3: Download and parse CRLs ──────────────────────────────────
    print_header("Parsing CRLs")

    crls_list = []  # list of (crl, source URL)
    for url in all_crl_urls:
        try:
            data = download_file(url, session)
            crl = load_crl(data, url)
            if crl:
                crls_list.append((crl, url))
                print_ok(f"Loaded CRL: {subject_str(crl.issuer)}")
            else:
                print_warn(f"Could not parse CRL: {url}")
        except Exception as e:
            print_err(f"Failed to download CRL {url}: {e}")
            traceback.print_exc()

    # ── Step 4: Fetch CRLs from CDP extensions ───────────────────────────
    print_header("Fetching CRLs from CDP Extensions")

    # Only HTTP CDP URLs are fetchable here (LDAP ones are skipped).
    referenced_crl_urls = set()
    for fp, (cert, url) in certs_dict.items():
        for cdp_url in get_cdp_urls(cert):
            if cdp_url.lower().startswith('http'):
                referenced_crl_urls.add(cdp_url)

    already_fetched = set(u for _, u in crls_list)
    new_crl_urls = referenced_crl_urls - already_fetched

    if new_crl_urls:
        for url in new_crl_urls:
            try:
                data = download_file(url, session)
                crl = load_crl(data, url)
                if crl:
                    crls_list.append((crl, url))
                    print_ok(f"Fetched CDP CRL: {subject_str(crl.issuer)}")
                else:
                    print_warn(f"Could not parse CDP CRL: {url}")
            except Exception as e:
                print_err(f"Failed to fetch CDP CRL {url}: {e}")
    else:
        print_info("No additional CRLs", "All CDP CRLs already downloaded")

    # ── Step 5: Fetch CA certs from AIA extensions ───────────────────────
    print_header("Fetching CA Certs from AIA Extensions")

    referenced_aia_urls = set()
    for fp, (cert, url) in certs_dict.items():
        ca_issuers, _ = get_aia_info(cert)
        for aia_url in ca_issuers:
            if aia_url.lower().startswith('http'):
                referenced_aia_urls.add(aia_url)

    already_loaded_urls = set(u for _, (c, u) in certs_dict.items())
    new_aia_urls = referenced_aia_urls - already_loaded_urls

    if new_aia_urls:
        for url in new_aia_urls:
            try:
                data = download_file(url, session)
                cert = load_certificate(data, url)
                if cert:
                    fp = sha1_fingerprint(cert)
                    # Dedup by fingerprint: the same cert may be published
                    # at several AIA URLs.
                    if fp not in certs_dict:
                        certs_dict[fp] = (cert, url)
                        print_ok(f"Fetched AIA cert: {subject_str(cert.subject)}")
                    else:
                        print_info("Already loaded", subject_str(cert.subject))
                else:
                    print_warn(f"Could not parse AIA cert: {url}")
            except Exception as e:
                print_err(f"Failed to fetch AIA cert {url}: {e}")
    else:
        print_info("No additional certs", "All AIA certs already loaded")

    # ── Step 6: Build and display chains ─────────────────────────────────
    print_header("Certificate Chains")

    chains = build_chains(certs_dict)

    if not chains:
        print_warn("No certificate chains could be built (no certificates loaded)")
    else:
        for i, chain in enumerate(chains, 1):
            print_section(f"Chain {i} ({len(chain)} certificate(s))")
            for depth, fp in enumerate(chain):
                cert, url = certs_dict[fp]
                # Role by position/extensions: depth-0 self-signed is the
                # root; other CA certs are intermediates.
                if depth == 0 and is_self_signed(cert):
                    role = "Root CA"
                elif is_ca(cert):
                    role = "Intermediate CA"
                else:
                    role = "End Entity"

                print(f"\n{' ' * depth}{C.BOLD}{C.BLUE}[{depth}] {role}{C.RESET}")
                display_cert(cert, url, indent=depth)

    # ── Step 7: Validate CRLs ───────────────────────────────────────────
    print_header("CRL Validation")

    if not crls_list:
        print_warn("No CRLs to validate")
    else:
        for crl, crl_url in crls_list:
            results = validate_crl(crl, crl_url, certs_dict)
            display_crl_results(results)

    # ── Step 8: Revocation cross-check ───────────────────────────────────
    print_header("Revocation Check — Are Any Downloaded Certs Revoked?")

    # Group revoked serials by the CRL's AKI (fallback: issuer DN) so a
    # cert is only compared against CRLs issued under its own issuer key.
    revoked_serials_by_aki = defaultdict(set)
    for crl, crl_url in crls_list:
        aki = get_authority_key_id(crl)
        for revoked in crl:
            key = aki or subject_str(crl.issuer)
            revoked_serials_by_aki[key].add(revoked.serial_number)

    found_revoked = False
    for fp, (cert, url) in certs_dict.items():
        aki = get_authority_key_id(cert)
        serial = cert.serial_number
        if aki and aki in revoked_serials_by_aki:
            if serial in revoked_serials_by_aki[aki]:
                print_err(
                    f"REVOKED: {subject_str(cert.subject)} "
                    f"(serial {format(serial, 'x')})"
                )
                found_revoked = True

    if not found_revoked:
        print_ok("None of the downloaded certificates appear on the downloaded CRLs")

    # ── Summary ──────────────────────────────────────────────────────────
    print_header("Summary")
    print_info("Certificates parsed", str(len(certs_dict)))
    print_info("CRLs parsed", str(len(crls_list)))
    print_info("Chains found", str(len(chains)))

    now = datetime.now(timezone.utc)
    expired_certs = [fp for fp, (c, u) in certs_dict.items()
                     if c.not_valid_after_utc < now]
    # CRLs without nextUpdate are never counted as expired.
    expired_crls = [(crl, u) for crl, u in crls_list
                    if crl.next_update_utc and crl.next_update_utc < now]

    if expired_certs:
        print_err(f"Expired certificates: {len(expired_certs)}")
        for fp in expired_certs:
            cert, url = certs_dict[fp]
            print(f" → {subject_str(cert.subject)} (expired {cert.not_valid_after_utc})")
    else:
        print_ok("No expired certificates")

    if expired_crls:
        print_err(f"Expired CRLs: {len(expired_crls)}")
        for crl, url in expired_crls:
            print(f" → {url} (expired {crl.next_update_utc})")
    else:
        print_ok("No expired CRLs")
|
||||
|
||||
|
||||
# Script entry point.
if __name__ == '__main__':
    main()
|
||||
+881
@@ -0,0 +1,881 @@
|
||||
#!/usr/bin/env python3
|
||||
# /// script
|
||||
# requires-python = ">=3.10"
|
||||
# dependencies = [
|
||||
# "requests",
|
||||
# "beautifulsoup4",
|
||||
# "cryptography",
|
||||
# "pyopenssl",
|
||||
# ]
|
||||
# ///
|
||||
"""
|
||||
PKI Health Checker
|
||||
==================
|
||||
Discovers CA certificates from IIS directory-browsing sites, direct cert URLs,
|
||||
or live TLS servers. Builds the CA hierarchy by following AIA chains upward,
|
||||
validates CDP/AIA for each CA, and checks issuing CA CRLs.
|
||||
|
||||
Usage:
|
||||
uv run pki_health.py <url> [<url2> ...]
|
||||
|
||||
Input types:
|
||||
Directory: http://pki.kinda.se/
|
||||
Cert URL: https://r10.i.lencr.org/
|
||||
TLS server: https://letsencrypt.org
|
||||
|
||||
Examples:
|
||||
uv run pki_health.py http://pki.imy.se/
|
||||
uv run pki_health.py https://r10.i.lencr.org/
|
||||
uv run pki_health.py https://www.google.com
|
||||
"""
|
||||
|
||||
import ssl
|
||||
import socket
|
||||
import sys
|
||||
from datetime import datetime, timezone
|
||||
from urllib.parse import urljoin, unquote, urlparse
|
||||
|
||||
import requests
|
||||
from bs4 import BeautifulSoup
|
||||
from cryptography import x509
|
||||
from cryptography.x509.oid import ExtensionOID, AuthorityInformationAccessOID
|
||||
from cryptography.hazmat.primitives import hashes
|
||||
from cryptography.hazmat.primitives.asymmetric import padding, rsa, ec
|
||||
from cryptography.hazmat.primitives.serialization import Encoding
|
||||
from cryptography.exceptions import InvalidSignature
|
||||
from OpenSSL import SSL, crypto
|
||||
|
||||
|
||||
# File extensions treated as certificates when scanning directory listings.
CERT_EXTENSIONS = ('.crt', '.cer', '.pem')
# File extensions treated as CRLs.
CRL_EXTENSIONS = ('.crl',)
# Per-request network timeout, in seconds.
TIMEOUT = 30
# Maximum recursion depth when walking directory listings.
MAX_DEPTH = 5
|
||||
|
||||
|
||||
# ─── Terminal ────────────────────────────────────────────────────────────────
|
||||
|
||||
class C:
    """ANSI escape codes used for terminal colouring."""
    H = '\033[95m'   # header (magenta)
    B = '\033[94m'   # blue
    CN = '\033[96m'  # cyan
    G = '\033[92m'   # green (ok)
    Y = '\033[93m'   # yellow (warning)
    R = '\033[91m'   # red (error)
    BD = '\033[1m'   # bold
    DM = '\033[2m'   # dim
    RS = '\033[0m'   # reset
|
||||
|
||||
def header(t):
    """Print a prominent three-line banner containing the title *t*."""
    bar = f"{C.BD}{C.H}{'='*78}{C.RS}"
    print(f"\n{bar}")
    print(f"{C.BD}{C.H} {t}{C.RS}")
    print(bar)
|
||||
|
||||
def section(t):
    """Print a secondary section divider labelled *t*."""
    fill = '─' * (72 - len(t))
    print(f"\n{C.BD}{C.CN}── {t} {fill}{C.RS}")
|
||||
|
||||
|
||||
# ─── Helpers ─────────────────────────────────────────────────────────────────
|
||||
|
||||
def sha1(cert):
    """Return the SHA-1 fingerprint of *cert* as colon-separated hex."""
    digest = cert.fingerprint(hashes.SHA1())
    return digest.hex(':')
|
||||
|
||||
def get_cn(name):
    """Best-effort short display string for an X.509 Name.

    Prefers the last CN attribute; otherwise falls back to the first
    attribute rendered as "oid=value"; returns "(unknown)" for an empty
    name.
    """
    attrs = list(name)
    for attr in reversed(attrs):
        if attr.oid == x509.oid.NameOID.COMMON_NAME:
            return attr.value
    if attrs:
        first = attrs[0]
        return f"{first.oid._name}={first.value}"
    return "(unknown)"
|
||||
|
||||
def ski_hex(obj):
    """Hex Subject Key Identifier of a cert/CRL, or None if absent."""
    try:
        ext = obj.extensions.get_extension_for_oid(
            ExtensionOID.SUBJECT_KEY_IDENTIFIER)
    except x509.ExtensionNotFound:
        return None
    return ext.value.digest.hex()
|
||||
|
||||
def aki_hex(obj):
    """Hex Authority Key Identifier of a cert/CRL, or None if absent
    (or if the extension carries no keyIdentifier field)."""
    try:
        ext = obj.extensions.get_extension_for_oid(
            ExtensionOID.AUTHORITY_KEY_IDENTIFIER)
    except x509.ExtensionNotFound:
        return None
    key_id = ext.value.key_identifier
    return key_id.hex() if key_id else None
|
||||
|
||||
def cdp_urls(cert):
    """URI distribution points from the cert's CDP extension ([] if absent)."""
    try:
        ext = cert.extensions.get_extension_for_oid(
            ExtensionOID.CRL_DISTRIBUTION_POINTS)
    except x509.ExtensionNotFound:
        return []
    return [
        name.value
        for dp in ext.value
        if dp.full_name
        for name in dp.full_name
        if isinstance(name, x509.UniformResourceIdentifier)
    ]
|
||||
|
||||
def aia_ca_issuer_urls(cert):
    """CA-Issuers URIs from the cert's AIA extension ([] if absent)."""
    try:
        ext = cert.extensions.get_extension_for_oid(
            ExtensionOID.AUTHORITY_INFORMATION_ACCESS)
    except x509.ExtensionNotFound:
        return []
    return [
        desc.access_location.value
        for desc in ext.value
        if isinstance(desc.access_location, x509.UniformResourceIdentifier)
        and desc.access_method == AuthorityInformationAccessOID.CA_ISSUERS
    ]
|
||||
|
||||
def aia_ocsp_urls(cert):
    """OCSP responder URIs from the cert's AIA extension ([] if absent)."""
    try:
        ext = cert.extensions.get_extension_for_oid(
            ExtensionOID.AUTHORITY_INFORMATION_ACCESS)
    except x509.ExtensionNotFound:
        return []
    return [
        desc.access_location.value
        for desc in ext.value
        if isinstance(desc.access_location, x509.UniformResourceIdentifier)
        and desc.access_method == AuthorityInformationAccessOID.OCSP
    ]
|
||||
|
||||
def is_ca(cert):
    """True if the Basic Constraints extension marks *cert* as a CA.

    Missing extension is treated as not-a-CA.
    """
    try:
        bc = cert.extensions.get_extension_for_oid(
            ExtensionOID.BASIC_CONSTRAINTS)
    except x509.ExtensionNotFound:
        return False
    return bc.value.ca
|
||||
|
||||
def is_self_signed(cert):
    """True when subject DN equals issuer DN.

    NOTE(review): DN comparison only — this does not cryptographically
    verify that the certificate is signed by its own key.
    """
    return cert.issuer == cert.subject
|
||||
|
||||
def short_ski(h):
    """Abbreviate a key-identifier hex string for display.

    None/empty becomes "(none)"; strings longer than 16 chars are
    truncated with an ellipsis.
    """
    if not h:
        return '(none)'
    if len(h) > 16:
        return h[:16] + '...'
    return h
|
||||
|
||||
|
||||
# ─── Network ────────────────────────────────────────────────────────────────
|
||||
|
||||
def make_session():
    """Create a requests session tuned for PKI repository endpoints.

    TLS verification is intentionally disabled: CDP/AIA hosts frequently
    serve from certificates this tool has not validated yet, and the
    downloaded artifacts are signature-checked separately.
    """
    user_agent = ('Mozilla/5.0 (Windows NT 10.0; Win64; x64) '
                  'AppleWebKit/537.36 Chrome/125.0.0.0 Safari/537.36')
    sess = requests.Session()
    sess.verify = False
    sess.headers.update({
        'User-Agent': user_agent,
        'Accept': '*/*',
        'Accept-Encoding': 'identity',
    })
    requests.packages.urllib3.disable_warnings()
    return sess
|
||||
|
||||
def fetch(url, session):
    """Download *url* and return the raw bytes; raises on HTTP errors."""
    response = session.get(url, timeout=TIMEOUT)
    response.raise_for_status()
    return response.content
|
||||
|
||||
def load_cert(data):
    """Parse *data* as an X.509 certificate (DER first, then PEM).

    Returns None when neither encoding parses.
    """
    try:
        return x509.load_der_x509_certificate(data)
    except Exception:
        pass
    try:
        return x509.load_pem_x509_certificate(data)
    except Exception:
        return None
|
||||
|
||||
def load_crl(data):
    """Parse *data* as a CRL (DER first, then PEM).

    Returns None when neither encoding parses.
    """
    try:
        return x509.load_der_x509_crl(data)
    except Exception:
        pass
    try:
        return x509.load_pem_x509_crl(data)
    except Exception:
        return None
|
||||
|
||||
|
||||
# ─── Input Detection & Loading ───────────────────────────────────────────────
|
||||
|
||||
def scrape_directory(base_url, session, origin=None, depth=0, visited=None):
    """Recursively scrape IIS directory listing.

    Returns (cert_urls, crl_urls). Recursion stays on the starting host
    (*origin*) and stops at MAX_DEPTH. *visited* is a single set shared by
    the whole recursion to break link cycles.
    """
    if visited is None:
        visited = set()
    if not base_url.endswith('/'):
        base_url += '/'
    if base_url in visited or depth > MAX_DEPTH:
        return [], []
    visited.add(base_url)
    if origin is None:
        origin = urlparse(base_url).netloc
    try:
        resp = session.get(base_url, timeout=TIMEOUT)
        resp.raise_for_status()
    except Exception:
        # Unreachable directory: report nothing rather than abort the scan.
        return [], []
    soup = BeautifulSoup(resp.text, 'html.parser')
    certs, crls, dirs = [], [], []
    for a in soup.find_all('a', href=True):
        href = a['href']
        full = urljoin(base_url, href)
        low = unquote(full).lower()
        if urlparse(full).netloc != origin: continue  # stay on-host
        if href in ('../', '..'): continue  # parent-directory links
        if full.rstrip('/') == base_url.rstrip('/'): continue  # self link
        if any(low.endswith(e) for e in CERT_EXTENSIONS): certs.append(full)
        elif any(low.endswith(e) for e in CRL_EXTENSIONS): crls.append(full)
        elif full.endswith('/') and full not in visited: dirs.append(full)
    # Depth-first descent into subdirectories found above.
    for d in dirs:
        sc, sl = scrape_directory(d, session, origin, depth+1, visited)
        certs.extend(sc); crls.extend(sl)
    return certs, crls
|
||||
|
||||
|
||||
def get_tls_chain(hostname, port=443):
    """Connect to a TLS server and return the certificate chain as
    `cryptography` objects.

    The handshake runs with verification disabled — we only want the
    chain the server presents. Raises on connection failure or when the
    server sends no certificates.
    """
    import select

    ctx = SSL.Context(SSL.TLS_CLIENT_METHOD)
    ctx.set_verify(SSL.VERIFY_NONE, lambda *a: True)

    sock = socket.create_connection((hostname, port), timeout=TIMEOUT)
    conn = SSL.Connection(ctx, sock)
    try:
        conn.set_tlsext_host_name(hostname.encode())  # SNI
        conn.set_connect_state()

        # Retry handshake — needed when the socket has a timeout set:
        # pyOpenSSL raises WantRead/WantWrite instead of blocking.
        while True:
            try:
                conn.do_handshake()
                break
            except SSL.WantReadError:
                select.select([sock], [], [], 5)
            except SSL.WantWriteError:
                select.select([], [sock], [], 5)

        certs = []
        chain = conn.get_peer_cert_chain()
        if chain:
            for pyopenssl_cert in chain:
                der = crypto.dump_certificate(crypto.FILETYPE_ASN1, pyopenssl_cert)
                certs.append(x509.load_der_x509_certificate(der))
        else:
            # Some stacks only expose the leaf certificate.
            peer = conn.get_peer_certificate()
            if peer:
                der = crypto.dump_certificate(crypto.FILETYPE_ASN1, peer)
                certs.append(x509.load_der_x509_certificate(der))
    finally:
        # BUG FIX: the original leaked the connection and socket whenever
        # the handshake or chain extraction raised. Always release both;
        # shutdown itself may legitimately fail mid-handshake.
        try:
            conn.shutdown()
        except Exception:
            pass
        conn.close()
        sock.close()

    if not certs:
        raise Exception("No certificates received from server")

    return certs
|
||||
|
||||
def walk_aia_chain(start_cert, session):
    """Follow AIA CA Issuer URLs upward to collect the full chain.

    Returns {sha1_thumbprint: cert}, always including *start_cert*.
    Walking stops at a self-signed cert, at a cert with no HTTP AIA URLs
    (e.g. LDAP-only), or when every fetched parent was already seen
    (cycle guard).
    """
    collected = {}  # thumbprint -> cert
    tp = sha1(start_cert)
    collected[tp] = start_cert

    current = start_cert
    seen = {tp}
    while True:
        if is_self_signed(current):
            break  # reached a (presumed) root
        issuer_urls = aia_ca_issuer_urls(current)
        http_urls = [u for u in issuer_urls if u.lower().startswith('http')]
        if not http_urls:
            break  # nothing fetchable to follow

        # Try each AIA URL until one yields a previously unseen parent.
        found_parent = False
        for url in http_urls:
            try:
                data = fetch(url, session)
                parent = load_cert(data)
                if parent is not None:
                    ptp = sha1(parent)
                    if ptp not in seen:
                        seen.add(ptp)
                        collected[ptp] = parent
                        current = parent
                        found_parent = True
                        break
            except Exception:
                continue  # unreachable/unparsable: try the next URL

        if not found_parent:
            break

    return collected
|
||||
|
||||
|
||||
def detect_and_load(url, session):
    """
    Detect input type and return (certs_dict, crl_urls, input_type).
    certs_dict: thumbprint -> (cert, source_url_or_description)

    input_type is one of "tls", "cert", "directory", "unknown", "error".
    Detection order: bare https host -> TLS handshake; otherwise fetch the
    URL and try to parse it as a certificate; HTML responses are treated
    as directory listings.
    """
    parsed = urlparse(url)
    certs_dict = {}
    crl_urls = []

    # ── Try 1: TLS server (https:// without obvious file extension) ──────
    if parsed.scheme == 'https':
        low_path = parsed.path.lower().rstrip('/')
        is_file = any(low_path.endswith(e) for e in CERT_EXTENSIONS + CRL_EXTENSIONS)

        if not is_file and (not low_path or low_path == '/'):
            # Looks like a server, try TLS handshake
            hostname = parsed.hostname
            port = parsed.port or 443
            try:
                print(f" 🔌 TLS connecting to {hostname}:{port}...")
                chain = get_tls_chain(hostname, port)
                if chain:
                    print(f" ✔ Got {len(chain)} cert(s) from TLS handshake")
                    for cert in chain:
                        tp = sha1(cert)
                        source = f"TLS:{hostname}:{port}"
                        certs_dict[tp] = (cert, source)
                    # Walk AIA from the deepest cert we got
                    for cert in chain:
                        aia_certs = walk_aia_chain(cert, session)
                        for tp2, cert2 in aia_certs.items():
                            if tp2 not in certs_dict:
                                aia_url = aia_ca_issuer_urls(cert2)
                                src = aia_url[0] if aia_url else "AIA chain"
                                certs_dict[tp2] = (cert2, src)
                    return certs_dict, crl_urls, "tls"
            except Exception as e:
                # Fall through: the URL may still serve a file or listing.
                print(f" {C.Y}⚠ TLS failed ({e}), trying as URL...{C.RS}")

    # ── Try 2: Direct certificate URL ────────────────────────────────────
    try:
        data = fetch(url, session)
    except Exception as e:
        print(f" {C.R}✘ Failed to fetch {url}: {e}{C.RS}")
        return certs_dict, crl_urls, "error"

    # Check if it's a certificate
    cert = load_cert(data)
    if cert is not None:
        print(f" 📜 Direct certificate: {get_cn(cert.subject)}")
        tp = sha1(cert)
        certs_dict[tp] = (cert, url)

        # Walk AIA upward
        print(f" 🔗 Walking AIA chain...")
        aia_certs = walk_aia_chain(cert, session)
        for tp2, cert2 in aia_certs.items():
            if tp2 not in certs_dict:
                urls = aia_ca_issuer_urls(cert2)
                src = urls[0] if urls else "AIA"
                certs_dict[tp2] = (cert2, src)
                print(f" ↑ {get_cn(cert2.subject)}")

        return certs_dict, crl_urls, "cert"

    # ── Try 3: Directory listing ─────────────────────────────────────────
    # If we got HTML, assume it's a directory
    if data[:50].lstrip().lower().startswith((b'<', b'<!', b'<html')):
        print(f" 📁 Directory listing detected")
        cert_urls, crl_found = scrape_directory(url, session)
        for u in cert_urls:
            print(f" 📜 {unquote(u.split('/')[-1])}")
        for u in crl_found:
            print(f" 📋 {unquote(u.split('/')[-1])}")
        crl_urls.extend(crl_found)

        # Download and dedupe (by thumbprint) every discovered cert.
        for cert_url in cert_urls:
            try:
                d = fetch(cert_url, session)
                c = load_cert(d)
                if c is not None:
                    t = sha1(c)
                    if t not in certs_dict:
                        certs_dict[t] = (c, cert_url)
            except Exception:
                pass

        # Follow AIA for any certs we loaded
        # (list() snapshot: certs_dict is mutated inside the loop).
        for tp, (cert, src) in list(certs_dict.items()):
            aia = walk_aia_chain(cert, session)
            for tp2, cert2 in aia.items():
                if tp2 not in certs_dict:
                    urls = aia_ca_issuer_urls(cert2)
                    s = urls[0] if urls else "AIA"
                    certs_dict[tp2] = (cert2, s)

        return certs_dict, crl_urls, "directory"

    print(f" {C.Y}⚠ Could not determine input type for {url}{C.RS}")
    return certs_dict, crl_urls, "unknown"
|
||||
|
||||
|
||||
# ─── CRL Signature ──────────────────────────────────────────────────────────
|
||||
|
||||
def verify_crl_sig(crl, issuer_cert):
    """Verify *crl*'s signature against *issuer_cert*'s public key.

    Returns a (verdict, message) pair:
      (True, ...)  — signature verified,
      (False, ...) — signature cryptographically invalid,
      (None, ...)  — unsupported key type or verification error.
    """
    try:
        pub = issuer_cert.public_key()
        if isinstance(pub, rsa.RSAPublicKey):
            pub.verify(crl.signature, crl.tbs_certlist_bytes,
                       padding.PKCS1v15(), crl.signature_hash_algorithm)
        elif isinstance(pub, ec.EllipticCurvePublicKey):
            # `ec` is already imported at module level; the original
            # re-imported it locally as `ecm` for no reason.
            pub.verify(crl.signature, crl.tbs_certlist_bytes,
                       ec.ECDSA(crl.signature_hash_algorithm))
        else:
            return None, "Unsupported key type"
        return True, "Sig OK"
    except InvalidSignature:
        return False, "Sig INVALID"
    except Exception as e:
        return None, f"Verify error: {e}"
|
||||
|
||||
|
||||
# ─── Tree Builder ────────────────────────────────────────────────────────────
|
||||
|
||||
class CANode:
    """One node of the CA hierarchy: a CA key with its newest certificate.

    Renewed certificates that share the same key (SKI) are tracked in
    `alternates`; issued sub-CAs are linked via `children`.
    """

    def __init__(self, cert, url):
        self.cert = cert                    # primary certificate object
        self.url = url                      # where the cert was obtained
        self.tp = sha1(cert)                # SHA-1 thumbprint
        self.name = get_cn(cert.subject)    # display name (CN)
        self.ski = ski_hex(cert)            # Subject Key Identifier (hex)
        self.aki = aki_hex(cert)            # Authority Key Identifier (hex)
        self.root = is_self_signed(cert)    # subject == issuer heuristic
        self.children = []                  # child CANodes issued by this CA
        self.alternates = []                # (cert, url, thumbprint) renewals
|
||||
|
||||
|
||||
def build_tree(certs_dict):
    """Build the CA hierarchy from {thumbprint: (cert, url)}.

    Groups CA certificates by SKI (renewed certs share a key), keeps the
    longest-lived cert of each group as the node's primary, and links
    nodes child->parent where a node's AKI equals another node's SKI.

    Returns (roots, nodes, ski_primary).

    NOTE(review): CA certs WITHOUT an SKI extension never enter `by_ski`
    and are silently dropped from the tree — confirm this is intended.
    """
    # Only CA certs
    ca_certs = {tp: (c, u) for tp, (c, u) in certs_dict.items() if is_ca(c)}

    by_ski = {}  # ski -> [(thumbprint, cert, url), ...]
    for tp, (cert, url) in ca_certs.items():
        ski = ski_hex(cert)
        if ski:
            by_ski.setdefault(ski, []).append((tp, cert, url))

    nodes = {}        # primary thumbprint -> CANode
    ski_primary = {}  # ski -> primary thumbprint

    for ski, group in by_ski.items():
        # The newest cert (latest expiry) becomes the primary; the rest
        # are recorded as renewed alternates on that node.
        group.sort(key=lambda x: x[1].not_valid_after_utc, reverse=True)
        tp, cert, url = group[0]
        node = CANode(cert, url)
        for atp, acert, aurl in group[1:]:
            node.alternates.append((acert, aurl, sha1(acert)))
        nodes[tp] = node
        ski_primary[ski] = tp

    roots = []
    for tp, node in nodes.items():
        if node.root:
            roots.append(node)
        elif node.aki and node.aki in ski_primary:
            ptk = ski_primary[node.aki]
            if ptk in nodes and ptk != tp:
                nodes[ptk].children.append(node)
            else:
                roots.append(node)
        else:
            # Orphan: issuer key not among the downloaded certificates.
            roots.append(node)

    return roots, nodes, ski_primary
|
||||
|
||||
|
||||
# ─── CDP Check ───────────────────────────────────────────────────────────────
|
||||
|
||||
def check_cdp(url, issuer_ski, issuer_cert, session, p):
    """Download and evaluate one CRL distribution point URL.

    Prints a status line (freshness, AKI linkage, signature, delta flag,
    revoked count) prefixed with *p*, and returns True when any problem
    was found. Non-HTTP (e.g. LDAP) URLs are skipped without a verdict.
    """
    if not url.lower().startswith('http'):
        print(f"{p} {C.DM}LDAP — not checked{C.RS}")
        return False

    try:
        data = fetch(url, session)
    except Exception:
        print(f"{p} {C.R}✘ Unreachable{C.RS}")
        return True

    crl = load_crl(data)
    if crl is None:
        print(f"{p} {C.R}✘ Failed to parse ({len(data)} bytes){C.RS}")
        return True

    parts = []
    has_issue = False
    now = datetime.now(timezone.utc)

    # Freshness: compare nextUpdate to now; warn when ≤ 7 days remain.
    if crl.next_update_utc:
        if crl.next_update_utc < now:
            days = (now - crl.next_update_utc).days
            parts.append(f"{C.R}EXPIRED {days}d ago{C.RS}")
            has_issue = True
        else:
            days = (crl.next_update_utc - now).days
            color = C.G if days > 7 else C.Y
            parts.append(f"{color}Valid ({days}d){C.RS}")

    # Key linkage: the CRL's AKI should equal the issuing CA's SKI.
    crl_aki = aki_hex(crl)
    if crl_aki and issuer_ski:
        if crl_aki == issuer_ski:
            parts.append(f"{C.G}AKI match{C.RS}")
        else:
            parts.append(f"{C.R}AKI MISMATCH{C.RS}")
            has_issue = True

    # Cryptographic verification against the issuer cert, when known.
    if issuer_cert is not None:
        valid, msg = verify_crl_sig(crl, issuer_cert)
        if valid is True:
            parts.append(f"{C.G}{msg}{C.RS}")
        elif valid is False:
            parts.append(f"{C.R}{msg}{C.RS}")
            has_issue = True
        else:
            parts.append(f"{C.Y}{msg}{C.RS}")

    # Flag delta CRLs (Delta CRL Indicator extension present).
    try:
        crl.extensions.get_extension_for_oid(ExtensionOID.DELTA_CRL_INDICATOR)
        parts.append("Delta")
    except x509.ExtensionNotFound:
        pass

    parts.append(f"{sum(1 for _ in crl)} revoked")
    print(f"{p} {' | '.join(parts)}")

    # On AKI mismatch, also print both identifiers for diagnosis.
    if crl_aki and issuer_ski and crl_aki != issuer_ski:
        print(f"{p} {C.R}CRL AKI: {crl_aki}{C.RS}")
        print(f"{p} {C.R}Issuer SKI: {issuer_ski}{C.RS}")

    return has_issue
|
||||
|
||||
|
||||
# ─── AIA Check ───────────────────────────────────────────────────────────────
|
||||
|
||||
def check_aia(url, expected_tp, session, p):
    """Check one AIA CA-Issuers URL.

    Downloads the certificate and compares its SHA-1 thumbprint against
    *expected_tp* (the issuer we built the chain with). Returns True when
    an issue was found (unreachable, unparsable, or thumbprint mismatch),
    False otherwise. *p* is the display-line prefix. Non-HTTP (LDAP)
    URLs are skipped without a verdict.
    """
    if not url.lower().startswith('http'):
        print(f"{p} {C.DM}LDAP — not checked{C.RS}")
        return False

    try:
        data = fetch(url, session)
    except Exception:
        print(f"{p} {C.R}✘ Unreachable{C.RS}")
        return True

    cert = load_cert(data)
    if cert is None:
        print(f"{p} {C.R}✘ Failed to parse{C.RS}")
        return True

    got_tp = sha1(cert)
    if not expected_tp:
        # No expectation available — just report what the URL served.
        print(f"{p} Downloaded: {get_cn(cert.subject)} ({got_tp[:23]}...)")
        return False

    if got_tp == expected_tp:
        print(f"{p} {C.G}✔ Matches issuer{C.RS} ({got_tp[:23]}...)")
        return False

    # Different thumbprint — likely a renewed issuer cert published at the
    # AIA URL; surface both for the operator. (Removed a dead `got_ski`
    # local that was assigned but never used.)
    print(f"{p} {C.Y}⚠ Different thumbprint{C.RS}")
    print(f"{p} Got: {get_cn(cert.subject)} ({got_tp[:23]}...)")
    print(f"{p} Expected: {expected_tp[:23]}...")
    return True
|
||||
|
||||
|
||||
# ─── Display ─────────────────────────────────────────────────────────────────
|
||||
|
||||
def show_cert(node, parent, session, p, issues):
    """Print the full health report for one CA certificate node.

    Shows thumbprint, SKI, validity window, renewed same-key alternates,
    then fetches and checks every CDP and AIA URL and lists OCSP URLs.
    Every problem found is printed AND appended to *issues*.

    node:    tree node (has .cert, .tp, .ski, .aki, .name, .root,
             .alternates, .children — see tree builder)
    parent:  issuing node, or None for a root
    session: requests session used for downloads
    p:       indentation prefix for all output lines
    issues:  running list of human-readable issue strings (mutated)
    """
    cert = node.cert
    now = datetime.now(timezone.utc)
    nb = cert.not_valid_before_utc
    na = cert.not_valid_after_utc
    days = (na - now).days
    expired = na < now

    # Colour for the validity label: red = expired, yellow = <90 days left.
    vc = C.R if expired else (C.Y if days < 90 else C.G)
    vl = "EXPIRED" if expired else f"{days} days"

    print(f"{p}Thumbprint: {node.tp}")
    print(f"{p}SKI: {short_ski(node.ski)}")
    # A child's AKI must equal its parent's SKI; a mismatch means the chain
    # as displayed was matched by name only and will not verify.
    if node.aki and parent and parent.ski and node.aki != parent.ski:
        print(f"{p}AKI: {short_ski(node.aki)} "
              f"{C.R}✘ MISMATCH with parent SKI ({short_ski(parent.ski)}){C.RS}")
        issues.append(f"AKI mismatch: {node.name}")
    print(f"{p}Valid: {nb.strftime('%Y-%m-%d')} → {na.strftime('%Y-%m-%d')} "
          f"{vc}[{vl}]{C.RS}")
    if expired:
        issues.append(f"EXPIRED: {node.name}")

    # Renewed certificates that share this node's key (same SKI, new lifetime).
    if node.alternates:
        print(f"{p}{C.Y}Also (renewed, same key):{C.RS}")
        for ac, au, atp in node.alternates:
            ana = ac.not_valid_after_utc
            ad = (ana - now).days
            ae = ana < now
            ac2 = C.R if ae else (C.Y if ad < 90 else C.G)
            al = "EXPIRED" if ae else f"{ad} days"
            print(f"{p}  {atp} {ac2}[{al}]{C.RS}")

    # Issuer details used by the CDP/AIA checks below (None for a root).
    issuer_cert = parent.cert if parent else None
    issuer_ski = parent.ski if parent else None
    issuer_tp = parent.tp if parent else None

    # CDP: each CRL distribution point is downloaded and validated.
    cdps = cdp_urls(cert)
    if cdps:
        for url in cdps:
            print(f"{p}CDP:")
            print(f"{p}  {url}")
            has = check_cdp(url, issuer_ski, issuer_cert, session, p)
            if has:
                issues.append(f"CDP issue: {node.name}")
    elif node.root:
        # Roots legitimately have no CDP — nothing above them can revoke them.
        print(f"{p}CDP: (none — root)")
    else:
        print(f"{p}{C.Y}CDP: (none — missing!){C.RS}")
        issues.append(f"No CDP: {node.name}")

    # AIA: each caIssuers URL is downloaded and compared to the known issuer.
    ca_urls = aia_ca_issuer_urls(cert)
    if ca_urls:
        for url in ca_urls:
            print(f"{p}AIA:")
            print(f"{p}  {url}")
            has = check_aia(url, issuer_tp, session, p)
            if has:
                issues.append(f"AIA issue: {node.name}")
    elif node.root:
        print(f"{p}AIA: (none — root)")
    else:
        print(f"{p}{C.Y}AIA: (none — missing!){C.RS}")
        issues.append(f"No AIA: {node.name}")

    # OCSP responders are listed only — no OCSP query is sent.
    ocsp = aia_ocsp_urls(cert)
    for url in ocsp:
        print(f"{p}OCSP: {url}")
|
||||
|
||||
|
||||
def display_chain(root, num, session, issues):
    """Print chain *num* headed by *root* and run checks on every node.

    The original implementation hard-coded three levels (root, child,
    grandchild) and silently dropped anything deeper; this walks the tree
    recursively to arbitrary depth while keeping the exact output format:
    each level adds two spaces of indent and one more '→ ' marker.
    """
    print(f"\n{C.BD}{C.CN}Chain {num}{C.RS}")
    print(f"{C.BD}{C.CN}{'─'*78}{C.RS}")

    def walk(node, parent, depth):
        indent = "  " * depth
        arrows = "→ " * depth
        print(f"\n{indent}{C.BD}{C.B}{arrows}{node.name}{C.RS}")
        # show_cert's prefix is one level deeper than the header line.
        show_cert(node, parent, session, "  " * (depth + 1), issues)
        for child in node.children:
            walk(child, node, depth + 1)

    walk(root, None, 0)
|
||||
|
||||
|
||||
# ─── Issuing CA CRLs ────────────────────────────────────────────────────────
|
||||
|
||||
def check_issuing_ca_crls(roots, all_crl_urls, session, issues):
    """Check CRLs on the site that are published by CAs in our tree (for leaf validation).

    CRLs already referenced by a certificate's CDP were checked inline; this
    covers the remaining site CRLs: those issued by a known CA are validated
    (freshness, AKI binding, signature, delta flag, revocation count), the
    rest are reported as orphans.

    roots:        list of root tree nodes
    all_crl_urls: every CRL URL discovered on the site
    session:      requests session for downloads
    issues:       running list of issue strings (mutated)

    Returns (issuing_crls, orphan_crls) where issuing_crls is a list of
    (url, crl, node) and orphan_crls a list of (url, crl_or_None, reason).
    """
    # Collect all CA nodes, keyed by SKI.
    ca_nodes = {}

    def collect(node):
        if node.ski:
            ca_nodes[node.ski] = node
        for ch in node.children:
            collect(ch)

    for r in roots:
        collect(r)

    # CDP URLs referenced by certs in the tree (normalized for comparison).
    referenced_cdps = set()

    def collect_cdps(node):
        for u in cdp_urls(node.cert):
            referenced_cdps.add(unquote(u).lower())
        for ch in node.children:
            collect_cdps(ch)

    for r in roots:
        collect_cdps(r)

    # Classify site CRLs that are NOT referenced as a CDP by any cert.
    issuing_crls = []
    orphan_crls = []

    for url in all_crl_urls:
        if unquote(url).lower() in referenced_cdps:
            continue  # Already checked inline with the cert

        # Narrow try: only the download can legitimately be "Could not fetch";
        # the original wrapped classification too, mislabeling parse errors.
        try:
            data = fetch(url, session)
        except Exception:
            orphan_crls.append((url, None, "Could not fetch"))
            continue

        crl = load_crl(data)
        if crl is None:
            orphan_crls.append((url, None, "Could not parse"))
            continue

        node = _match_crl_to_ca(crl, ca_nodes)
        if node is not None:
            issuing_crls.append((url, crl, node))
        else:
            orphan_crls.append((url, crl, None))

    # Display issuing CA CRLs.
    if issuing_crls:
        header("Issuing CA CRLs (for leaf certificate validation)")
        for url, crl, node in issuing_crls:
            print(f"\n  {C.BD}{node.name}{C.RS}")
            print(f"  {url}")
            parts, has_issue = _crl_status_parts(crl, node)
            print(f"  {' | '.join(parts)}")
            if has_issue:
                issues.append(f"Issuing CA CRL issue: {node.name}")

    # Display orphans.
    if orphan_crls:
        header("Orphaned Files")
        print(f"\n  {C.Y}⚠ {len(orphan_crls)} CRL(s) not matched to any CA:{C.RS}")
        for url, crl, reason in orphan_crls:
            name = unquote(url.split('/')[-1])
            if crl is not None:
                now = datetime.now(timezone.utc)
                nu = crl.next_update_utc
                if nu and nu < now:
                    exp = f"{C.R}EXPIRED{C.RS}"
                elif nu:
                    exp = f"{C.G}{(nu-now).days}d{C.RS}"
                else:
                    exp = "?"
                print(f"    📋 {name} ({get_cn(crl.issuer)}, {exp})")
            else:
                print(f"    📋 {name} ({reason})")
    elif all_crl_urls and not issuing_crls:
        header("Orphaned Files")
        print(f"\n  {C.G}✔{C.RS} No orphaned files")

    return issuing_crls, orphan_crls


def _match_crl_to_ca(crl, ca_nodes):
    """Return the CA node that issued *crl*: AKI match first, then issuer DN.

    Returns None when no CA in *ca_nodes* matches.
    """
    crl_aki = aki_hex(crl)
    if crl_aki and crl_aki in ca_nodes:
        return ca_nodes[crl_aki]
    for node in ca_nodes.values():  # .values(): the SKI key was unused here
        if crl.issuer == node.cert.subject:
            return node
    return None


def _crl_status_parts(crl, node):
    """Build the coloured status fragments for *crl* against issuing *node*.

    Returns (parts, has_issue): parts is the list of display strings joined
    by ' | ' by the caller; has_issue is True when any check failed.
    """
    parts = []
    has_issue = False
    now = datetime.now(timezone.utc)

    # Freshness: a past nextUpdate means clients may reject the CRL.
    if crl.next_update_utc:
        if crl.next_update_utc < now:
            days = (now - crl.next_update_utc).days
            parts.append(f"{C.R}EXPIRED {days}d ago{C.RS}")
            has_issue = True
        else:
            days = (crl.next_update_utc - now).days
            color = C.G if days > 7 else C.Y
            parts.append(f"{color}Valid ({days}d){C.RS}")

    # Key binding: the CRL's AKI should name the issuing CA's SKI.
    crl_aki = aki_hex(crl)
    if crl_aki and node.ski:
        if crl_aki == node.ski:
            parts.append(f"{C.G}AKI match{C.RS}")
        else:
            parts.append(f"{C.R}AKI MISMATCH{C.RS}")
            has_issue = True

    # Signature check against the CA certificate; None means "can't verify".
    valid, msg = verify_crl_sig(crl, node.cert)
    if valid is True:
        parts.append(f"{C.G}{msg}{C.RS}")
    elif valid is False:
        parts.append(f"{C.R}{msg}{C.RS}")
        has_issue = True
    else:
        parts.append(f"{C.Y}{msg}{C.RS}")

    # Flag delta CRLs — they only make sense alongside a base CRL.
    try:
        crl.extensions.get_extension_for_oid(ExtensionOID.DELTA_CRL_INDICATOR)
        parts.append("Delta")
    except x509.ExtensionNotFound:
        pass

    parts.append(f"{sum(1 for _ in crl)} revoked")
    return parts, has_issue
|
||||
|
||||
|
||||
# ─── Main ────────────────────────────────────────────────────────────────────
|
||||
|
||||
def main():
    """Entry point: load certs and CRLs from each URL argument, build the CA
    hierarchy, run health checks on every chain, and print a summary.

    Exits with status 1 on missing arguments or when no hierarchy could be
    built.
    """
    if len(sys.argv) < 2:
        print(f"Usage: {sys.argv[0]} <url> [<url2> ...]")
        # No placeholders below — plain strings instead of f-strings.
        print("\nInput types:")
        print("  Directory: http://pki.kinda.se/")
        print("  Cert URL: https://r10.i.lencr.org/")
        print("  TLS server: https://www.google.com")
        sys.exit(1)

    session = make_session()
    all_certs = {}      # thumbprint -> (cert, source)
    all_crl_urls = []

    # ── Load from all inputs ─────────────────────────────────────────────
    for url in sys.argv[1:]:
        header(f"Input: {url}")
        certs, crls, _input_type = detect_and_load(url, session)
        for tp, entry in certs.items():
            # First source wins when a cert appears in several inputs.
            all_certs.setdefault(tp, entry)
        all_crl_urls.extend(crls)

    # De-duplicate while preserving discovery order.
    all_crl_urls = list(dict.fromkeys(all_crl_urls))

    # Count CA certs
    ca_count = sum(1 for c, _ in all_certs.values() if is_ca(c))
    total = len(all_certs)
    non_ca = total - ca_count
    print(f"\n  Loaded: {ca_count} CA cert(s)", end="")
    if non_ca:
        print(f", {non_ca} non-CA cert(s) (skipped)")
    else:
        print()

    # ── Build tree ───────────────────────────────────────────────────────
    roots, _nodes, _ski_primary = build_tree(all_certs)

    if not roots:
        print(f"\n  {C.R}✘ Could not build CA hierarchy{C.RS}")
        sys.exit(1)

    # ── Display chains ───────────────────────────────────────────────────
    header("PKI Health Check")
    all_issues = []
    for i, root in enumerate(roots, 1):
        display_chain(root, i, session, all_issues)

    # ── Issuing CA CRLs & Orphans ────────────────────────────────────────
    if all_crl_urls:
        check_issuing_ca_crls(roots, all_crl_urls, session, all_issues)

    # ── Summary ──────────────────────────────────────────────────────────
    header("Summary")
    print(f"  Chains: {len(roots)} | CA certs: {ca_count} | "
          f"Site CRLs: {len(all_crl_urls)}")

    if not all_issues:
        print(f"\n  {C.G}{C.BD}✔ All checks passed{C.RS}")
    else:
        print(f"\n  {C.R}{C.BD}✘ {len(all_issues)} issue(s):{C.RS}")
        # Report each distinct issue once, in first-seen order.
        for issue in dict.fromkeys(all_issues):
            print(f"    {C.R}• {issue}{C.RS}")
|
||||
|
||||
|
||||
# Run the health check only when executed as a script (not on import).
if __name__ == '__main__':
    main()
|
||||
@@ -0,0 +1,11 @@
|
||||
[project]
|
||||
name = "pki-analyzer"
|
||||
version = "0.1.0"
|
||||
description = "PKI certificate chain and CRL health checker"
|
||||
readme = "README.md"
|
||||
requires-python = ">=3.12"
|
||||
dependencies = [
|
||||
"beautifulsoup4>=4.14.3",
|
||||
"cryptography>=48.0.0",
|
||||
"requests>=2.34.0",
|
||||
]
|
||||
+39
@@ -0,0 +1,39 @@
|
||||
#!/usr/bin/env python3
# /// script
# requires-python = ">=3.10"
# dependencies = ["pyopenssl"]
# ///
"""Debug helper: print the raw TLS certificate chain served by gp.se:443.

Uses pyOpenSSL directly because Connection.get_peer_cert_chain exposes
every certificate the server sent, not just the negotiated leaf.
"""
from OpenSSL import SSL, crypto  # NOTE(review): crypto appears unused here — confirm before removing
import socket
import select

# No verification — we only want to inspect what the server actually sends.
ctx = SSL.Context(SSL.TLS_CLIENT_METHOD)
ctx.set_verify(SSL.VERIFY_NONE, lambda *a: True)

sock = socket.create_connection(('gp.se', 443), timeout=10)
conn = SSL.Connection(ctx, sock)
conn.set_tlsext_host_name(b'gp.se')  # SNI, so the server picks the right cert
conn.set_connect_state()

# Retry handshake — needed when socket has timeout set
# (do_handshake raises WantRead/WantWrite; wait for socket readiness, retry).
while True:
    try:
        conn.do_handshake()
        break
    except SSL.WantReadError:
        select.select([sock], [], [], 5)
    except SSL.WantWriteError:
        select.select([], [sock], [], 5)

chain = conn.get_peer_cert_chain()
print(f'Chain: {len(chain) if chain else 0} certs')

if chain:
    for i, c in enumerate(chain):
        print(f'  [{i}] {c.get_subject().CN}')

peer = conn.get_peer_certificate()
print(f'Peer: {peer.get_subject().CN if peer else None}')

conn.shutdown()
sock.close()
|
||||
@@ -0,0 +1,285 @@
|
||||
version = 1
|
||||
revision = 3
|
||||
requires-python = ">=3.12"
|
||||
|
||||
[[package]]
|
||||
name = "beautifulsoup4"
|
||||
version = "4.14.3"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "soupsieve" },
|
||||
{ name = "typing-extensions" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/c3/b0/1c6a16426d389813b48d95e26898aff79abbde42ad353958ad95cc8c9b21/beautifulsoup4-4.14.3.tar.gz", hash = "sha256:6292b1c5186d356bba669ef9f7f051757099565ad9ada5dd630bd9de5fa7fb86", size = 627737, upload-time = "2025-11-30T15:08:26.084Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/1a/39/47f9197bdd44df24d67ac8893641e16f386c984a0619ef2ee4c51fbbc019/beautifulsoup4-4.14.3-py3-none-any.whl", hash = "sha256:0918bfe44902e6ad8d57732ba310582e98da931428d231a5ecb9e7c703a735bb", size = 107721, upload-time = "2025-11-30T15:08:24.087Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "certifi"
|
||||
version = "2026.4.22"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/25/ee/6caf7a40c36a1220410afe15a1cc64993a1f864871f698c0f93acb72842a/certifi-2026.4.22.tar.gz", hash = "sha256:8d455352a37b71bf76a79caa83a3d6c25afee4a385d632127b6afb3963f1c580", size = 137077, upload-time = "2026-04-22T11:26:11.191Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/22/30/7cd8fdcdfbc5b869528b079bfb76dcdf6056b1a2097a662e5e8c04f42965/certifi-2026.4.22-py3-none-any.whl", hash = "sha256:3cb2210c8f88ba2318d29b0388d1023c8492ff72ecdde4ebdaddbb13a31b1c4a", size = 135707, upload-time = "2026-04-22T11:26:09.372Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "cffi"
|
||||
version = "2.0.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "pycparser", marker = "implementation_name != 'PyPy'" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/eb/56/b1ba7935a17738ae8453301356628e8147c79dbb825bcbc73dc7401f9846/cffi-2.0.0.tar.gz", hash = "sha256:44d1b5909021139fe36001ae048dbdde8214afa20200eda0f64c068cac5d5529", size = 523588, upload-time = "2025-09-08T23:24:04.541Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/ea/47/4f61023ea636104d4f16ab488e268b93008c3d0bb76893b1b31db1f96802/cffi-2.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6d02d6655b0e54f54c4ef0b94eb6be0607b70853c45ce98bd278dc7de718be5d", size = 185271, upload-time = "2025-09-08T23:22:44.795Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/df/a2/781b623f57358e360d62cdd7a8c681f074a71d445418a776eef0aadb4ab4/cffi-2.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8eca2a813c1cb7ad4fb74d368c2ffbbb4789d377ee5bb8df98373c2cc0dee76c", size = 181048, upload-time = "2025-09-08T23:22:45.938Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ff/df/a4f0fbd47331ceeba3d37c2e51e9dfc9722498becbeec2bd8bc856c9538a/cffi-2.0.0-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:21d1152871b019407d8ac3985f6775c079416c282e431a4da6afe7aefd2bccbe", size = 212529, upload-time = "2025-09-08T23:22:47.349Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d5/72/12b5f8d3865bf0f87cf1404d8c374e7487dcf097a1c91c436e72e6badd83/cffi-2.0.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b21e08af67b8a103c71a250401c78d5e0893beff75e28c53c98f4de42f774062", size = 220097, upload-time = "2025-09-08T23:22:48.677Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c2/95/7a135d52a50dfa7c882ab0ac17e8dc11cec9d55d2c18dda414c051c5e69e/cffi-2.0.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:1e3a615586f05fc4065a8b22b8152f0c1b00cdbc60596d187c2a74f9e3036e4e", size = 207983, upload-time = "2025-09-08T23:22:50.06Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/3a/c8/15cb9ada8895957ea171c62dc78ff3e99159ee7adb13c0123c001a2546c1/cffi-2.0.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:81afed14892743bbe14dacb9e36d9e0e504cd204e0b165062c488942b9718037", size = 206519, upload-time = "2025-09-08T23:22:51.364Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/78/2d/7fa73dfa841b5ac06c7b8855cfc18622132e365f5b81d02230333ff26e9e/cffi-2.0.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3e17ed538242334bf70832644a32a7aae3d83b57567f9fd60a26257e992b79ba", size = 219572, upload-time = "2025-09-08T23:22:52.902Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/07/e0/267e57e387b4ca276b90f0434ff88b2c2241ad72b16d31836adddfd6031b/cffi-2.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3925dd22fa2b7699ed2617149842d2e6adde22b262fcbfada50e3d195e4b3a94", size = 222963, upload-time = "2025-09-08T23:22:54.518Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b6/75/1f2747525e06f53efbd878f4d03bac5b859cbc11c633d0fb81432d98a795/cffi-2.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2c8f814d84194c9ea681642fd164267891702542f028a15fc97d4674b6206187", size = 221361, upload-time = "2025-09-08T23:22:55.867Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/7b/2b/2b6435f76bfeb6bbf055596976da087377ede68df465419d192acf00c437/cffi-2.0.0-cp312-cp312-win32.whl", hash = "sha256:da902562c3e9c550df360bfa53c035b2f241fed6d9aef119048073680ace4a18", size = 172932, upload-time = "2025-09-08T23:22:57.188Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f8/ed/13bd4418627013bec4ed6e54283b1959cf6db888048c7cf4b4c3b5b36002/cffi-2.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:da68248800ad6320861f129cd9c1bf96ca849a2771a59e0344e88681905916f5", size = 183557, upload-time = "2025-09-08T23:22:58.351Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/95/31/9f7f93ad2f8eff1dbc1c3656d7ca5bfd8fb52c9d786b4dcf19b2d02217fa/cffi-2.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:4671d9dd5ec934cb9a73e7ee9676f9362aba54f7f34910956b84d727b0d73fb6", size = 177762, upload-time = "2025-09-08T23:22:59.668Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/4b/8d/a0a47a0c9e413a658623d014e91e74a50cdd2c423f7ccfd44086ef767f90/cffi-2.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:00bdf7acc5f795150faa6957054fbbca2439db2f775ce831222b66f192f03beb", size = 185230, upload-time = "2025-09-08T23:23:00.879Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/4a/d2/a6c0296814556c68ee32009d9c2ad4f85f2707cdecfd7727951ec228005d/cffi-2.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:45d5e886156860dc35862657e1494b9bae8dfa63bf56796f2fb56e1679fc0bca", size = 181043, upload-time = "2025-09-08T23:23:02.231Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b0/1e/d22cc63332bd59b06481ceaac49d6c507598642e2230f201649058a7e704/cffi-2.0.0-cp313-cp313-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:07b271772c100085dd28b74fa0cd81c8fb1a3ba18b21e03d7c27f3436a10606b", size = 212446, upload-time = "2025-09-08T23:23:03.472Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a9/f5/a2c23eb03b61a0b8747f211eb716446c826ad66818ddc7810cc2cc19b3f2/cffi-2.0.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d48a880098c96020b02d5a1f7d9251308510ce8858940e6fa99ece33f610838b", size = 220101, upload-time = "2025-09-08T23:23:04.792Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f2/7f/e6647792fc5850d634695bc0e6ab4111ae88e89981d35ac269956605feba/cffi-2.0.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:f93fd8e5c8c0a4aa1f424d6173f14a892044054871c771f8566e4008eaa359d2", size = 207948, upload-time = "2025-09-08T23:23:06.127Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/cb/1e/a5a1bd6f1fb30f22573f76533de12a00bf274abcdc55c8edab639078abb6/cffi-2.0.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:dd4f05f54a52fb558f1ba9f528228066954fee3ebe629fc1660d874d040ae5a3", size = 206422, upload-time = "2025-09-08T23:23:07.753Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/98/df/0a1755e750013a2081e863e7cd37e0cdd02664372c754e5560099eb7aa44/cffi-2.0.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c8d3b5532fc71b7a77c09192b4a5a200ea992702734a2e9279a37f2478236f26", size = 219499, upload-time = "2025-09-08T23:23:09.648Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/50/e1/a969e687fcf9ea58e6e2a928ad5e2dd88cc12f6f0ab477e9971f2309b57c/cffi-2.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:d9b29c1f0ae438d5ee9acb31cadee00a58c46cc9c0b2f9038c6b0b3470877a8c", size = 222928, upload-time = "2025-09-08T23:23:10.928Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/36/54/0362578dd2c9e557a28ac77698ed67323ed5b9775ca9d3fe73fe191bb5d8/cffi-2.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6d50360be4546678fc1b79ffe7a66265e28667840010348dd69a314145807a1b", size = 221302, upload-time = "2025-09-08T23:23:12.42Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/eb/6d/bf9bda840d5f1dfdbf0feca87fbdb64a918a69bca42cfa0ba7b137c48cb8/cffi-2.0.0-cp313-cp313-win32.whl", hash = "sha256:74a03b9698e198d47562765773b4a8309919089150a0bb17d829ad7b44b60d27", size = 172909, upload-time = "2025-09-08T23:23:14.32Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/37/18/6519e1ee6f5a1e579e04b9ddb6f1676c17368a7aba48299c3759bbc3c8b3/cffi-2.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:19f705ada2530c1167abacb171925dd886168931e0a7b78f5bffcae5c6b5be75", size = 183402, upload-time = "2025-09-08T23:23:15.535Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/cb/0e/02ceeec9a7d6ee63bb596121c2c8e9b3a9e150936f4fbef6ca1943e6137c/cffi-2.0.0-cp313-cp313-win_arm64.whl", hash = "sha256:256f80b80ca3853f90c21b23ee78cd008713787b1b1e93eae9f3d6a7134abd91", size = 177780, upload-time = "2025-09-08T23:23:16.761Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/92/c4/3ce07396253a83250ee98564f8d7e9789fab8e58858f35d07a9a2c78de9f/cffi-2.0.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:fc33c5141b55ed366cfaad382df24fe7dcbc686de5be719b207bb248e3053dc5", size = 185320, upload-time = "2025-09-08T23:23:18.087Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/59/dd/27e9fa567a23931c838c6b02d0764611c62290062a6d4e8ff7863daf9730/cffi-2.0.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c654de545946e0db659b3400168c9ad31b5d29593291482c43e3564effbcee13", size = 181487, upload-time = "2025-09-08T23:23:19.622Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d6/43/0e822876f87ea8a4ef95442c3d766a06a51fc5298823f884ef87aaad168c/cffi-2.0.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:24b6f81f1983e6df8db3adc38562c83f7d4a0c36162885ec7f7b77c7dcbec97b", size = 220049, upload-time = "2025-09-08T23:23:20.853Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b4/89/76799151d9c2d2d1ead63c2429da9ea9d7aac304603de0c6e8764e6e8e70/cffi-2.0.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:12873ca6cb9b0f0d3a0da705d6086fe911591737a59f28b7936bdfed27c0d47c", size = 207793, upload-time = "2025-09-08T23:23:22.08Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/bb/dd/3465b14bb9e24ee24cb88c9e3730f6de63111fffe513492bf8c808a3547e/cffi-2.0.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:d9b97165e8aed9272a6bb17c01e3cc5871a594a446ebedc996e2397a1c1ea8ef", size = 206300, upload-time = "2025-09-08T23:23:23.314Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/47/d9/d83e293854571c877a92da46fdec39158f8d7e68da75bf73581225d28e90/cffi-2.0.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:afb8db5439b81cf9c9d0c80404b60c3cc9c3add93e114dcae767f1477cb53775", size = 219244, upload-time = "2025-09-08T23:23:24.541Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/2b/0f/1f177e3683aead2bb00f7679a16451d302c436b5cbf2505f0ea8146ef59e/cffi-2.0.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:737fe7d37e1a1bffe70bd5754ea763a62a066dc5913ca57e957824b72a85e205", size = 222828, upload-time = "2025-09-08T23:23:26.143Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c6/0f/cafacebd4b040e3119dcb32fed8bdef8dfe94da653155f9d0b9dc660166e/cffi-2.0.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:38100abb9d1b1435bc4cc340bb4489635dc2f0da7456590877030c9b3d40b0c1", size = 220926, upload-time = "2025-09-08T23:23:27.873Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/3e/aa/df335faa45b395396fcbc03de2dfcab242cd61a9900e914fe682a59170b1/cffi-2.0.0-cp314-cp314-win32.whl", hash = "sha256:087067fa8953339c723661eda6b54bc98c5625757ea62e95eb4898ad5e776e9f", size = 175328, upload-time = "2025-09-08T23:23:44.61Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/bb/92/882c2d30831744296ce713f0feb4c1cd30f346ef747b530b5318715cc367/cffi-2.0.0-cp314-cp314-win_amd64.whl", hash = "sha256:203a48d1fb583fc7d78a4c6655692963b860a417c0528492a6bc21f1aaefab25", size = 185650, upload-time = "2025-09-08T23:23:45.848Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/9f/2c/98ece204b9d35a7366b5b2c6539c350313ca13932143e79dc133ba757104/cffi-2.0.0-cp314-cp314-win_arm64.whl", hash = "sha256:dbd5c7a25a7cb98f5ca55d258b103a2054f859a46ae11aaf23134f9cc0d356ad", size = 180687, upload-time = "2025-09-08T23:23:47.105Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/3e/61/c768e4d548bfa607abcda77423448df8c471f25dbe64fb2ef6d555eae006/cffi-2.0.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:9a67fc9e8eb39039280526379fb3a70023d77caec1852002b4da7e8b270c4dd9", size = 188773, upload-time = "2025-09-08T23:23:29.347Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/2c/ea/5f76bce7cf6fcd0ab1a1058b5af899bfbef198bea4d5686da88471ea0336/cffi-2.0.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:7a66c7204d8869299919db4d5069a82f1561581af12b11b3c9f48c584eb8743d", size = 185013, upload-time = "2025-09-08T23:23:30.63Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/be/b4/c56878d0d1755cf9caa54ba71e5d049479c52f9e4afc230f06822162ab2f/cffi-2.0.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7cc09976e8b56f8cebd752f7113ad07752461f48a58cbba644139015ac24954c", size = 221593, upload-time = "2025-09-08T23:23:31.91Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e0/0d/eb704606dfe8033e7128df5e90fee946bbcb64a04fcdaa97321309004000/cffi-2.0.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:92b68146a71df78564e4ef48af17551a5ddd142e5190cdf2c5624d0c3ff5b2e8", size = 209354, upload-time = "2025-09-08T23:23:33.214Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d8/19/3c435d727b368ca475fb8742ab97c9cb13a0de600ce86f62eab7fa3eea60/cffi-2.0.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:b1e74d11748e7e98e2f426ab176d4ed720a64412b6a15054378afdb71e0f37dc", size = 208480, upload-time = "2025-09-08T23:23:34.495Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d0/44/681604464ed9541673e486521497406fadcc15b5217c3e326b061696899a/cffi-2.0.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:28a3a209b96630bca57cce802da70c266eb08c6e97e5afd61a75611ee6c64592", size = 221584, upload-time = "2025-09-08T23:23:36.096Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/25/8e/342a504ff018a2825d395d44d63a767dd8ebc927ebda557fecdaca3ac33a/cffi-2.0.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:7553fb2090d71822f02c629afe6042c299edf91ba1bf94951165613553984512", size = 224443, upload-time = "2025-09-08T23:23:37.328Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e1/5e/b666bacbbc60fbf415ba9988324a132c9a7a0448a9a8f125074671c0f2c3/cffi-2.0.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:6c6c373cfc5c83a975506110d17457138c8c63016b563cc9ed6e056a82f13ce4", size = 223437, upload-time = "2025-09-08T23:23:38.945Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a0/1d/ec1a60bd1a10daa292d3cd6bb0b359a81607154fb8165f3ec95fe003b85c/cffi-2.0.0-cp314-cp314t-win32.whl", hash = "sha256:1fc9ea04857caf665289b7a75923f2c6ed559b8298a1b8c49e59f7dd95c8481e", size = 180487, upload-time = "2025-09-08T23:23:40.423Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/bf/41/4c1168c74fac325c0c8156f04b6749c8b6a8f405bbf91413ba088359f60d/cffi-2.0.0-cp314-cp314t-win_amd64.whl", hash = "sha256:d68b6cef7827e8641e8ef16f4494edda8b36104d79773a334beaa1e3521430f6", size = 191726, upload-time = "2025-09-08T23:23:41.742Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ae/3a/dbeec9d1ee0844c679f6bb5d6ad4e9f198b1224f4e7a32825f47f6192b0c/cffi-2.0.0-cp314-cp314t-win_arm64.whl", hash = "sha256:0a1527a803f0a659de1af2e1fd700213caba79377e27e4693648c2923da066f9", size = 184195, upload-time = "2025-09-08T23:23:43.004Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "charset-normalizer"
|
||||
version = "3.4.7"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/e7/a1/67fe25fac3c7642725500a3f6cfe5821ad557c3abb11c9d20d12c7008d3e/charset_normalizer-3.4.7.tar.gz", hash = "sha256:ae89db9e5f98a11a4bf50407d4363e7b09b31e55bc117b4f7d80aab97ba009e5", size = 144271, upload-time = "2026-04-02T09:28:39.342Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/0c/eb/4fc8d0a7110eb5fc9cc161723a34a8a6c200ce3b4fbf681bc86feee22308/charset_normalizer-3.4.7-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:eca9705049ad3c7345d574e3510665cb2cf844c2f2dcfe675332677f081cbd46", size = 311328, upload-time = "2026-04-02T09:26:24.331Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f8/e3/0fadc706008ac9d7b9b5be6dc767c05f9d3e5df51744ce4cc9605de7b9f4/charset_normalizer-3.4.7-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6178f72c5508bfc5fd446a5905e698c6212932f25bcdd4b47a757a50605a90e2", size = 208061, upload-time = "2026-04-02T09:26:25.568Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/42/f0/3dd1045c47f4a4604df85ec18ad093912ae1344ac706993aff91d38773a2/charset_normalizer-3.4.7-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:e1421b502d83040e6d7fb2fb18dff63957f720da3d77b2fbd3187ceb63755d7b", size = 229031, upload-time = "2026-04-02T09:26:26.865Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/dc/67/675a46eb016118a2fbde5a277a5d15f4f69d5f3f5f338e5ee2f8948fcf43/charset_normalizer-3.4.7-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:edac0f1ab77644605be2cbba52e6b7f630731fc42b34cb0f634be1a6eface56a", size = 225239, upload-time = "2026-04-02T09:26:28.044Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/4b/f8/d0118a2f5f23b02cd166fa385c60f9b0d4f9194f574e2b31cef350ad7223/charset_normalizer-3.4.7-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5649fd1c7bade02f320a462fdefd0b4bd3ce036065836d4f42e0de958038e116", size = 216589, upload-time = "2026-04-02T09:26:29.239Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b1/f1/6d2b0b261b6c4ceef0fcb0d17a01cc5bc53586c2d4796fa04b5c540bc13d/charset_normalizer-3.4.7-cp312-cp312-manylinux_2_31_armv7l.whl", hash = "sha256:203104ed3e428044fd943bc4bf45fa73c0730391f9621e37fe39ecf477b128cb", size = 202733, upload-time = "2026-04-02T09:26:30.5Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6f/c0/7b1f943f7e87cc3db9626ba17807d042c38645f0a1d4415c7a14afb5591f/charset_normalizer-3.4.7-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:298930cec56029e05497a76988377cbd7457ba864beeea92ad7e844fe74cd1f1", size = 212652, upload-time = "2026-04-02T09:26:31.709Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/38/dd/5a9ab159fe45c6e72079398f277b7d2b523e7f716acc489726115a910097/charset_normalizer-3.4.7-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:708838739abf24b2ceb208d0e22403dd018faeef86ddac04319a62ae884c4f15", size = 211229, upload-time = "2026-04-02T09:26:33.282Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d5/ff/531a1cad5ca855d1c1a8b69cb71abfd6d85c0291580146fda7c82857caa1/charset_normalizer-3.4.7-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:0f7eb884681e3938906ed0434f20c63046eacd0111c4ba96f27b76084cd679f5", size = 203552, upload-time = "2026-04-02T09:26:34.845Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c1/4c/a5fb52d528a8ca41f7598cb619409ece30a169fbdf9cdce592e53b46c3a6/charset_normalizer-3.4.7-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:4dc1e73c36828f982bfe79fadf5919923f8a6f4df2860804db9a98c48824ce8d", size = 230806, upload-time = "2026-04-02T09:26:36.152Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/59/7a/071feed8124111a32b316b33ae4de83d36923039ef8cf48120266844285b/charset_normalizer-3.4.7-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:aed52fea0513bac0ccde438c188c8a471c4e0f457c2dd20cdbf6ea7a450046c7", size = 212316, upload-time = "2026-04-02T09:26:37.672Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/fd/35/f7dba3994312d7ba508e041eaac39a36b120f32d4c8662b8814dab876431/charset_normalizer-3.4.7-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:fea24543955a6a729c45a73fe90e08c743f0b3334bbf3201e6c4bc1b0c7fa464", size = 227274, upload-time = "2026-04-02T09:26:38.93Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/8a/2d/a572df5c9204ab7688ec1edc895a73ebded3b023bb07364710b05dd1c9be/charset_normalizer-3.4.7-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:bb6d88045545b26da47aa879dd4a89a71d1dce0f0e549b1abcb31dfe4a8eac49", size = 218468, upload-time = "2026-04-02T09:26:40.17Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/86/eb/890922a8b03a568ca2f336c36585a4713c55d4d67bf0f0c78924be6315ca/charset_normalizer-3.4.7-cp312-cp312-win32.whl", hash = "sha256:2257141f39fe65a3fdf38aeccae4b953e5f3b3324f4ff0daf9f15b8518666a2c", size = 148460, upload-time = "2026-04-02T09:26:41.416Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/35/d9/0e7dffa06c5ab081f75b1b786f0aefc88365825dfcd0ac544bdb7b2b6853/charset_normalizer-3.4.7-cp312-cp312-win_amd64.whl", hash = "sha256:5ed6ab538499c8644b8a3e18debabcd7ce684f3fa91cf867521a7a0279cab2d6", size = 159330, upload-time = "2026-04-02T09:26:42.554Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/9e/5d/481bcc2a7c88ea6b0878c299547843b2521ccbc40980cb406267088bc701/charset_normalizer-3.4.7-cp312-cp312-win_arm64.whl", hash = "sha256:56be790f86bfb2c98fb742ce566dfb4816e5a83384616ab59c49e0604d49c51d", size = 147828, upload-time = "2026-04-02T09:26:44.075Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c1/3b/66777e39d3ae1ddc77ee606be4ec6d8cbd4c801f65e5a1b6f2b11b8346dd/charset_normalizer-3.4.7-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:f496c9c3cc02230093d8330875c4c3cdfc3b73612a5fd921c65d39cbcef08063", size = 309627, upload-time = "2026-04-02T09:26:45.198Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/2e/4e/b7f84e617b4854ade48a1b7915c8ccfadeba444d2a18c291f696e37f0d3b/charset_normalizer-3.4.7-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0ea948db76d31190bf08bd371623927ee1339d5f2a0b4b1b4a4439a65298703c", size = 207008, upload-time = "2026-04-02T09:26:46.824Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c4/bb/ec73c0257c9e11b268f018f068f5d00aa0ef8c8b09f7753ebd5f2880e248/charset_normalizer-3.4.7-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a277ab8928b9f299723bc1a2dabb1265911b1a76341f90a510368ca44ad9ab66", size = 228303, upload-time = "2026-04-02T09:26:48.397Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/85/fb/32d1f5033484494619f701e719429c69b766bfc4dbc61aa9e9c8c166528b/charset_normalizer-3.4.7-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:3bec022aec2c514d9cf199522a802bd007cd588ab17ab2525f20f9c34d067c18", size = 224282, upload-time = "2026-04-02T09:26:49.684Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/fa/07/330e3a0dda4c404d6da83b327270906e9654a24f6c546dc886a0eb0ffb23/charset_normalizer-3.4.7-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e044c39e41b92c845bc815e5ae4230804e8e7bc29e399b0437d64222d92809dd", size = 215595, upload-time = "2026-04-02T09:26:50.915Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e3/7c/fc890655786e423f02556e0216d4b8c6bcb6bdfa890160dc66bf52dee468/charset_normalizer-3.4.7-cp313-cp313-manylinux_2_31_armv7l.whl", hash = "sha256:f495a1652cf3fbab2eb0639776dad966c2fb874d79d87ca07f9d5f059b8bd215", size = 201986, upload-time = "2026-04-02T09:26:52.197Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d8/97/bfb18b3db2aed3b90cf54dc292ad79fdd5ad65c4eae454099475cbeadd0d/charset_normalizer-3.4.7-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e712b419df8ba5e42b226c510472b37bd57b38e897d3eca5e8cfd410a29fa859", size = 211711, upload-time = "2026-04-02T09:26:53.49Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6f/a5/a581c13798546a7fd557c82614a5c65a13df2157e9ad6373166d2a3e645d/charset_normalizer-3.4.7-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:7804338df6fcc08105c7745f1502ba68d900f45fd770d5bdd5288ddccb8a42d8", size = 210036, upload-time = "2026-04-02T09:26:54.975Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/8c/bf/b3ab5bcb478e4193d517644b0fb2bf5497fbceeaa7a1bc0f4d5b50953861/charset_normalizer-3.4.7-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:481551899c856c704d58119b5025793fa6730adda3571971af568f66d2424bb5", size = 202998, upload-time = "2026-04-02T09:26:56.303Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e7/4e/23efd79b65d314fa320ec6017b4b5834d5c12a58ba4610aa353af2e2f577/charset_normalizer-3.4.7-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:f59099f9b66f0d7145115e6f80dd8b1d847176df89b234a5a6b3f00437aa0832", size = 230056, upload-time = "2026-04-02T09:26:57.554Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b9/9f/1e1941bc3f0e01df116e68dc37a55c4d249df5e6fa77f008841aef68264f/charset_normalizer-3.4.7-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:f59ad4c0e8f6bba240a9bb85504faa1ab438237199d4cce5f622761507b8f6a6", size = 211537, upload-time = "2026-04-02T09:26:58.843Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/80/0f/088cbb3020d44428964a6c97fe1edfb1b9550396bf6d278330281e8b709c/charset_normalizer-3.4.7-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:3dedcc22d73ec993f42055eff4fcfed9318d1eeb9a6606c55892a26964964e48", size = 226176, upload-time = "2026-04-02T09:27:00.437Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6a/9f/130394f9bbe06f4f63e22641d32fc9b202b7e251c9aef4db044324dac493/charset_normalizer-3.4.7-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:64f02c6841d7d83f832cd97ccf8eb8a906d06eb95d5276069175c696b024b60a", size = 217723, upload-time = "2026-04-02T09:27:02.021Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/73/55/c469897448a06e49f8fa03f6caae97074fde823f432a98f979cc42b90e69/charset_normalizer-3.4.7-cp313-cp313-win32.whl", hash = "sha256:4042d5c8f957e15221d423ba781e85d553722fc4113f523f2feb7b188cc34c5e", size = 148085, upload-time = "2026-04-02T09:27:03.192Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/5d/78/1b74c5bbb3f99b77a1715c91b3e0b5bdb6fe302d95ace4f5b1bec37b0167/charset_normalizer-3.4.7-cp313-cp313-win_amd64.whl", hash = "sha256:3946fa46a0cf3e4c8cb1cc52f56bb536310d34f25f01ca9b6c16afa767dab110", size = 158819, upload-time = "2026-04-02T09:27:04.454Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/68/86/46bd42279d323deb8687c4a5a811fd548cb7d1de10cf6535d099877a9a9f/charset_normalizer-3.4.7-cp313-cp313-win_arm64.whl", hash = "sha256:80d04837f55fc81da168b98de4f4b797ef007fc8a79ab71c6ec9bc4dd662b15b", size = 147915, upload-time = "2026-04-02T09:27:05.971Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/97/c8/c67cb8c70e19ef1960b97b22ed2a1567711de46c4ddf19799923adc836c2/charset_normalizer-3.4.7-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:c36c333c39be2dbca264d7803333c896ab8fa7d4d6f0ab7edb7dfd7aea6e98c0", size = 309234, upload-time = "2026-04-02T09:27:07.194Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/99/85/c091fdee33f20de70d6c8b522743b6f831a2f1cd3ff86de4c6a827c48a76/charset_normalizer-3.4.7-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1c2aed2e5e41f24ea8ef1590b8e848a79b56f3a5564a65ceec43c9d692dc7d8a", size = 208042, upload-time = "2026-04-02T09:27:08.749Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/87/1c/ab2ce611b984d2fd5d86a5a8a19c1ae26acac6bad967da4967562c75114d/charset_normalizer-3.4.7-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:54523e136b8948060c0fa0bc7b1b50c32c186f2fceee897a495406bb6e311d2b", size = 228706, upload-time = "2026-04-02T09:27:09.951Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a8/29/2b1d2cb00bf085f59d29eb773ce58ec2d325430f8c216804a0a5cd83cbca/charset_normalizer-3.4.7-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:715479b9a2802ecac752a3b0efa2b0b60285cf962ee38414211abdfccc233b41", size = 224727, upload-time = "2026-04-02T09:27:11.175Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/47/5c/032c2d5a07fe4d4855fea851209cca2b6f03ebeb6d4e3afdb3358386a684/charset_normalizer-3.4.7-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bd6c2a1c7573c64738d716488d2cdd3c00e340e4835707d8fdb8dc1a66ef164e", size = 215882, upload-time = "2026-04-02T09:27:12.446Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/2c/c2/356065d5a8b78ed04499cae5f339f091946a6a74f91e03476c33f0ab7100/charset_normalizer-3.4.7-cp314-cp314-manylinux_2_31_armv7l.whl", hash = "sha256:c45e9440fb78f8ddabcf714b68f936737a121355bf59f3907f4e17721b9d1aae", size = 200860, upload-time = "2026-04-02T09:27:13.721Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/0c/cd/a32a84217ced5039f53b29f460962abb2d4420def55afabe45b1c3c7483d/charset_normalizer-3.4.7-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:3534e7dcbdcf757da6b85a0bbf5b6868786d5982dd959b065e65481644817a18", size = 211564, upload-time = "2026-04-02T09:27:15.272Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/44/86/58e6f13ce26cc3b8f4a36b94a0f22ae2f00a72534520f4ae6857c4b81f89/charset_normalizer-3.4.7-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:e8ac484bf18ce6975760921bb6148041faa8fef0547200386ea0b52b5d27bf7b", size = 211276, upload-time = "2026-04-02T09:27:16.834Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/8f/fe/d17c32dc72e17e155e06883efa84514ca375f8a528ba2546bee73fc4df81/charset_normalizer-3.4.7-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:a5fe03b42827c13cdccd08e6c0247b6a6d4b5e3cdc53fd1749f5896adcdc2356", size = 201238, upload-time = "2026-04-02T09:27:18.229Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6a/29/f33daa50b06525a237451cdb6c69da366c381a3dadcd833fa5676bc468b3/charset_normalizer-3.4.7-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:2d6eb928e13016cea4f1f21d1e10c1cebd5a421bc57ddf5b1142ae3f86824fab", size = 230189, upload-time = "2026-04-02T09:27:19.445Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b6/6e/52c84015394a6a0bdcd435210a7e944c5f94ea1055f5cc5d56c5fe368e7b/charset_normalizer-3.4.7-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:e74327fb75de8986940def6e8dee4f127cc9752bee7355bb323cc5b2659b6d46", size = 211352, upload-time = "2026-04-02T09:27:20.79Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/8c/d7/4353be581b373033fb9198bf1da3cf8f09c1082561e8e922aa7b39bf9fe8/charset_normalizer-3.4.7-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:d6038d37043bced98a66e68d3aa2b6a35505dc01328cd65217cefe82f25def44", size = 227024, upload-time = "2026-04-02T09:27:22.063Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/30/45/99d18aa925bd1740098ccd3060e238e21115fffbfdcb8f3ece837d0ace6c/charset_normalizer-3.4.7-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:7579e913a5339fb8fa133f6bbcfd8e6749696206cf05acdbdca71a1b436d8e72", size = 217869, upload-time = "2026-04-02T09:27:23.486Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/5c/05/5ee478aa53f4bb7996482153d4bfe1b89e0f087f0ab6b294fcf92d595873/charset_normalizer-3.4.7-cp314-cp314-win32.whl", hash = "sha256:5b77459df20e08151cd6f8b9ef8ef1f961ef73d85c21a555c7eed5b79410ec10", size = 148541, upload-time = "2026-04-02T09:27:25.146Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/48/77/72dcb0921b2ce86420b2d79d454c7022bf5be40202a2a07906b9f2a35c97/charset_normalizer-3.4.7-cp314-cp314-win_amd64.whl", hash = "sha256:92a0a01ead5e668468e952e4238cccd7c537364eb7d851ab144ab6627dbbe12f", size = 159634, upload-time = "2026-04-02T09:27:26.642Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c6/a3/c2369911cd72f02386e4e340770f6e158c7980267da16af8f668217abaa0/charset_normalizer-3.4.7-cp314-cp314-win_arm64.whl", hash = "sha256:67f6279d125ca0046a7fd386d01b311c6363844deac3e5b069b514ba3e63c246", size = 148384, upload-time = "2026-04-02T09:27:28.271Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/94/09/7e8a7f73d24dba1f0035fbbf014d2c36828fc1bf9c88f84093e57d315935/charset_normalizer-3.4.7-cp314-cp314t-macosx_10_15_universal2.whl", hash = "sha256:effc3f449787117233702311a1b7d8f59cba9ced946ba727bdc329ec69028e24", size = 330133, upload-time = "2026-04-02T09:27:29.474Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/8d/da/96975ddb11f8e977f706f45cddd8540fd8242f71ecdb5d18a80723dcf62c/charset_normalizer-3.4.7-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:fbccdc05410c9ee21bbf16a35f4c1d16123dcdeb8a1d38f33654fa21d0234f79", size = 216257, upload-time = "2026-04-02T09:27:30.793Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e5/e8/1d63bf8ef2d388e95c64b2098f45f84758f6d102a087552da1485912637b/charset_normalizer-3.4.7-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:733784b6d6def852c814bce5f318d25da2ee65dd4839a0718641c696e09a2960", size = 234851, upload-time = "2026-04-02T09:27:32.44Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/9b/40/e5ff04233e70da2681fa43969ad6f66ca5611d7e669be0246c4c7aaf6dc8/charset_normalizer-3.4.7-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a89c23ef8d2c6b27fd200a42aa4ac72786e7c60d40efdc76e6011260b6e949c4", size = 233393, upload-time = "2026-04-02T09:27:34.03Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/be/c1/06c6c49d5a5450f76899992f1ee40b41d076aee9279b49cf9974d2f313d5/charset_normalizer-3.4.7-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6c114670c45346afedc0d947faf3c7f701051d2518b943679c8ff88befe14f8e", size = 223251, upload-time = "2026-04-02T09:27:35.369Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/2b/9f/f2ff16fb050946169e3e1f82134d107e5d4ae72647ec8a1b1446c148480f/charset_normalizer-3.4.7-cp314-cp314t-manylinux_2_31_armv7l.whl", hash = "sha256:a180c5e59792af262bf263b21a3c49353f25945d8d9f70628e73de370d55e1e1", size = 206609, upload-time = "2026-04-02T09:27:36.661Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/69/d5/a527c0cd8d64d2eab7459784fb4169a0ac76e5a6fc5237337982fd61347e/charset_normalizer-3.4.7-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:3c9a494bc5ec77d43cea229c4f6db1e4d8fe7e1bbffa8b6f0f0032430ff8ab44", size = 220014, upload-time = "2026-04-02T09:27:38.019Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/7e/80/8a7b8104a3e203074dc9aa2c613d4b726c0e136bad1cc734594b02867972/charset_normalizer-3.4.7-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:8d828b6667a32a728a1ad1d93957cdf37489c57b97ae6c4de2860fa749b8fc1e", size = 218979, upload-time = "2026-04-02T09:27:39.37Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/02/9a/b759b503d507f375b2b5c153e4d2ee0a75aa215b7f2489cf314f4541f2c0/charset_normalizer-3.4.7-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:cf1493cd8607bec4d8a7b9b004e699fcf8f9103a9284cc94962cb73d20f9d4a3", size = 209238, upload-time = "2026-04-02T09:27:40.722Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c2/4e/0f3f5d47b86bdb79256e7290b26ac847a2832d9a4033f7eb2cd4bcf4bb5b/charset_normalizer-3.4.7-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:0c96c3b819b5c3e9e165495db84d41914d6894d55181d2d108cc1a69bfc9cce0", size = 236110, upload-time = "2026-04-02T09:27:42.33Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/96/23/bce28734eb3ed2c91dcf93abeb8a5cf393a7b2749725030bb630e554fdd8/charset_normalizer-3.4.7-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:752a45dc4a6934060b3b0dab47e04edc3326575f82be64bc4fc293914566503e", size = 219824, upload-time = "2026-04-02T09:27:43.924Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/2c/6f/6e897c6984cc4d41af319b077f2f600fc8214eb2fe2d6bcb79141b882400/charset_normalizer-3.4.7-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:8778f0c7a52e56f75d12dae53ae320fae900a8b9b4164b981b9c5ce059cd1fcb", size = 233103, upload-time = "2026-04-02T09:27:45.348Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/76/22/ef7bd0fe480a0ae9b656189ec00744b60933f68b4f42a7bb06589f6f576a/charset_normalizer-3.4.7-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:ce3412fbe1e31eb81ea42f4169ed94861c56e643189e1e75f0041f3fe7020abe", size = 225194, upload-time = "2026-04-02T09:27:46.706Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c5/a7/0e0ab3e0b5bc1219bd80a6a0d4d72ca74d9250cb2382b7c699c147e06017/charset_normalizer-3.4.7-cp314-cp314t-win32.whl", hash = "sha256:c03a41a8784091e67a39648f70c5f97b5b6a37f216896d44d2cdcb82615339a0", size = 159827, upload-time = "2026-04-02T09:27:48.053Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/7a/1d/29d32e0fb40864b1f878c7f5a0b343ae676c6e2b271a2d55cc3a152391da/charset_normalizer-3.4.7-cp314-cp314t-win_amd64.whl", hash = "sha256:03853ed82eeebbce3c2abfdbc98c96dc205f32a79627688ac9a27370ea61a49c", size = 174168, upload-time = "2026-04-02T09:27:49.795Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/de/32/d92444ad05c7a6e41fb2036749777c163baf7a0301a040cb672d6b2b1ae9/charset_normalizer-3.4.7-cp314-cp314t-win_arm64.whl", hash = "sha256:c35abb8bfff0185efac5878da64c45dafd2b37fb0383add1be155a763c1f083d", size = 153018, upload-time = "2026-04-02T09:27:51.116Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/db/8f/61959034484a4a7c527811f4721e75d02d653a35afb0b6054474d8185d4c/charset_normalizer-3.4.7-py3-none-any.whl", hash = "sha256:3dce51d0f5e7951f8bb4900c257dad282f49190fdbebecd4ba99bcc41fef404d", size = 61958, upload-time = "2026-04-02T09:28:37.794Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "cryptography"
|
||||
version = "48.0.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "cffi", marker = "platform_python_implementation != 'PyPy'" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/9f/a9/db8f313fdcd85d767d4973515e1db101f9c71f95fced83233de224673757/cryptography-48.0.0.tar.gz", hash = "sha256:5c3932f4436d1cccb036cb0eaef46e6e2db91035166f1ad6505c3c9d5a635920", size = 832984, upload-time = "2026-05-04T22:59:38.133Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/df/3d/01f6dd9190170a5a241e0e98c2d04be3664a9e6f5b9b872cde63aff1c3dd/cryptography-48.0.0-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:0c558d2cdffd8f4bbb30fc7134c74d2ca9a476f830bb053074498fbc86f41ed6", size = 8001587, upload-time = "2026-05-04T22:57:36.803Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b2/6e/e90527eef33f309beb811cf7c982c3aeffcce8e3edb178baa4ca3ae4a6fa/cryptography-48.0.0-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:f5333311663ea94f75dd408665686aaf426563556bb5283554a3539177e03b8c", size = 4690433, upload-time = "2026-05-04T22:57:40.373Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/90/04/673510ed51ddff56575f306cf1617d80411ee76831ccd3097599140efdfe/cryptography-48.0.0-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:7995ef305d7165c3f11ae07f2517e5a4f1d5c18da1376a0a9ed496336b69e5f3", size = 4710620, upload-time = "2026-05-04T22:57:42.935Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/14/d5/e9c4ef932c8d800490c34d8bd589d64a31d5890e27ec9e9ad532be893294/cryptography-48.0.0-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:40ba1f85eaa6959837b1d51c9767e230e14612eea4ef110ee8854ada22da1bf5", size = 4696283, upload-time = "2026-05-04T22:57:45.294Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/0c/29/174b9dfb60b12d59ecfc6cfa04bc88c21b42a54f01b8aae09bb6e51e4c7f/cryptography-48.0.0-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:369a6348999f94bbd53435c894377b20ab95f25a9065c283570e70150d8abc3c", size = 5296573, upload-time = "2026-05-04T22:57:47.933Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/95/38/0d29a6fd7d0d1373f0c0c88a04ba20e359b257753ac497564cd660fc1d55/cryptography-48.0.0-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:a0e692c683f4df67815a2d258b324e66f4738bd7a96a218c826dce4f4bd05d8f", size = 4743677, upload-time = "2026-05-04T22:57:50.067Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/30/be/eef653013d5c63b6a490529e0316f9ac14a37602965d4903efed1399f32b/cryptography-48.0.0-cp311-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:18349bbc56f4743c8b12dc32e2bccb2cf83ee8b69a3bba74ef8ae857e26b3d25", size = 4330808, upload-time = "2026-05-04T22:57:52.301Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/84/9e/500463e87abb7a0a0f9f256ec21123ecde0a7b5541a15e840ea54551fd81/cryptography-48.0.0-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:7e8eac43dfca5c4cccc6dad9a80504436fca53bb9bc3100a2386d730fbe6b602", size = 4695941, upload-time = "2026-05-04T22:57:54.603Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e3/dc/7303087450c2ec9e7fbb750e17c2abfbc658f23cbd0e54009509b7cc4091/cryptography-48.0.0-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:9ccdac7d40688ecb5a3b4a604b8a88c8002e3442d6c60aead1db2a89a041560c", size = 5252579, upload-time = "2026-05-04T22:57:57.207Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d0/c0/7101d3b7215edcdc90c45da544961fd8ed2d6448f77577460fa75a8443f7/cryptography-48.0.0-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:bd72e68b06bb1e96913f97dd4901119bc17f39d4586a5adf2d3e47bc2b9d58b5", size = 4743326, upload-time = "2026-05-04T22:57:59.535Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ac/d8/5b833bad13016f562ab9d063d68199a4bd121d18458e439515601d3357ec/cryptography-48.0.0-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:59baa2cb386c4f0b9905bd6eb4c2a79a69a128408fd31d32ca4d7102d4156321", size = 4826672, upload-time = "2026-05-04T22:58:01.996Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/98/e1/7074eb8bf3c135558c73fc2bcf0f5633f912e6fb87e868a55c454080ef09/cryptography-48.0.0-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:9249e3cd978541d665967ac2cb2787fd6a62bddf1e75b3e347a594d7dacf4f74", size = 4972574, upload-time = "2026-05-04T22:58:03.968Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/04/70/e5a1b41d325f797f39427aa44ef8baf0be500065ab6d8e10369d850d4a4f/cryptography-48.0.0-cp311-abi3-win32.whl", hash = "sha256:9c459db21422be75e2809370b829a87eb37f74cd785fc4aa9ea1e5f43b47cda4", size = 3294868, upload-time = "2026-05-04T22:58:06.467Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f4/ac/8ac51b4a5fc5932eb7ee5c517ba7dc8cd834f0048962b6b352f00f41ebf9/cryptography-48.0.0-cp311-abi3-win_amd64.whl", hash = "sha256:5b012212e08b8dd5edc78ef54da83dd9892fd9105323b3993eff6bea65dc21d7", size = 3817107, upload-time = "2026-05-04T22:58:08.845Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6b/84/70e3feea9feea87fd7cbe77efb2712ae1e3e6edf10749dc6e95f4e60e455/cryptography-48.0.0-cp314-cp314t-macosx_10_9_universal2.whl", hash = "sha256:3cb07a3ed6431663cd321ea8a000a1314c74211f823e4177fefa2255e057d1ec", size = 7986556, upload-time = "2026-05-04T22:58:11.172Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/89/6e/18e07a618bb5442ba10cf4df16e99c071365528aa570dfcb8c02e25a303b/cryptography-48.0.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8c7378637d7d88016fa6791c159f698b3d3eed28ebf844ac36b9dc04a14dae18", size = 4684776, upload-time = "2026-05-04T22:58:13.712Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/be/6a/4ea3b4c6c6759794d5ee2103c304a5076dc4b19ae1f9fe47dba439e159e9/cryptography-48.0.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:cc90c0b39b2e3c65ef52c804b72e3c58f8a04ab2a1871272798e5f9572c17d20", size = 4698121, upload-time = "2026-05-04T22:58:16.448Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/2f/59/6ff6ad6cae03bb887da2a5860b2c9805f8dac969ef01ce563336c49bd1d1/cryptography-48.0.0-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:76341972e1eff8b4bea859f09c0d3e64b96ce931b084f9b9b7db8ef364c30eff", size = 4690042, upload-time = "2026-05-04T22:58:18.544Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ca/b4/fc334ed8cfd705aca282fe4d8f5ae64a8e0f74932e9feecb344610cf6e4d/cryptography-48.0.0-cp314-cp314t-manylinux_2_28_ppc64le.whl", hash = "sha256:55b7718303bf06a5753dcdccf2f3945cf18ad7bffde41b61226e4db31ab89a9c", size = 5282526, upload-time = "2026-05-04T22:58:20.75Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/11/08/9f8c5386cc4cd90d8255c7cdd0f5baf459a08502a09de30dc51f553d38dc/cryptography-48.0.0-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:a64697c641c7b1b2178e573cbc31c7c6684cd56883a478d75143dbb7118036db", size = 4733116, upload-time = "2026-05-04T22:58:23.627Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b8/77/99307d7574045699f8805aa500fa0fb83422d115b5400a064ddd306d7750/cryptography-48.0.0-cp314-cp314t-manylinux_2_31_armv7l.whl", hash = "sha256:561215ea3879cb1cbbf272867e2efda62476f240fb58c64de6b393ae19246741", size = 4316030, upload-time = "2026-05-04T22:58:25.581Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/fd/36/a608b98337af3cb2aff4818e406649d30572b7031918b04c87d979495348/cryptography-48.0.0-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:ad64688338ed4bc1a6618076ba75fd7194a5f1797ac60b47afe926285adb3166", size = 4689640, upload-time = "2026-05-04T22:58:27.747Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/dd/a6/825010a291b4438aecc1f568bc428189fc1175515223632477c07dc0a6df/cryptography-48.0.0-cp314-cp314t-manylinux_2_34_ppc64le.whl", hash = "sha256:906cbf0670286c6e0044156bc7d4af9cbb0ef6db9f73e52c3ec56ba6bdde5336", size = 5237657, upload-time = "2026-05-04T22:58:29.848Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b9/09/4e76a09b4caa29aad535ddc806f5d4c5d01885bd978bd984fbc6ca032cae/cryptography-48.0.0-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:ea8990436d914540a40ab24b6a77c0969695ed52f4a4874c5137ccf7045a7057", size = 4732362, upload-time = "2026-05-04T22:58:32.009Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/18/78/444fa04a77d0cb95f417dda20d450e13c56ba8e5220fc892a1658f44f882/cryptography-48.0.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:c18684a7f0cc9a3cb60328f496b8e3372def7c5d2df39ac267878b05565aaaae", size = 4819580, upload-time = "2026-05-04T22:58:34.254Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/38/85/ea67067c70a1fd4be2c63d35eeed82658023021affccc7b17705f8527dd2/cryptography-48.0.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:9be5aafa5736574f8f15f262adc81b2a9869e2cfe9014d52a44633905b40d52c", size = 4963283, upload-time = "2026-05-04T22:58:36.376Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/75/54/cc6d0f3deac3e81c7f847e8a189a12b6cdd65059b43dad25d4316abd849a/cryptography-48.0.0-cp314-cp314t-win32.whl", hash = "sha256:c17dfe85494deaeddc5ce251aebd1d60bbe6afc8b62071bb0b469431a000124f", size = 3270954, upload-time = "2026-05-04T22:58:38.791Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/49/67/cc947e288c0758a4e5473d1dcb743037ab7785541265a969240b8885441a/cryptography-48.0.0-cp314-cp314t-win_amd64.whl", hash = "sha256:27241b1dc9962e056062a8eef1991d02c3a24569c95975bd2322a8a52c6e5e12", size = 3797313, upload-time = "2026-05-04T22:58:40.746Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f2/63/61d4a4e1c6b6bab6ce1e213cd36a24c415d90e76d78c5eb8577c5541d2e8/cryptography-48.0.0-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:58d00498e8933e4a194f3076aee1b4a97dfec1a6da444535755822fe5d8b0b86", size = 7983482, upload-time = "2026-05-04T22:58:43.769Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d5/ac/f5b5995b87770c693e2596559ffafe195b4033a57f14a82268a2842953f3/cryptography-48.0.0-cp39-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:614d0949f4790582d2cc25553abd09dd723025f0c0e7c67376a1d77196743d6e", size = 4683266, upload-time = "2026-05-04T22:58:46.064Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ec/c6/8b14f67e18338fbc4adb76f66c001f5c3610b3e2d1837f268f47a347dbbb/cryptography-48.0.0-cp39-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:7ce4bfae76319a532a2dc68f82cc32f5676ee792a983187dac07183690e5c66f", size = 4696228, upload-time = "2026-05-04T22:58:48.22Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ea/73/f808fbae9514bd91b47875b003f13e284c8c6bdfd904b7944e803937eec1/cryptography-48.0.0-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:2eb992bbd4661238c5a397594c83f5b4dc2bc5b848c365c8f991b6780efcc5c7", size = 4689097, upload-time = "2026-05-04T22:58:50.9Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/93/01/d86632d7d28db8ae83221995752eeb6639ffb374c2d22955648cf8d52797/cryptography-48.0.0-cp39-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:22a5cb272895dce158b2cacdfdc3debd299019659f42947dbdac6f32d68fe832", size = 5283582, upload-time = "2026-05-04T22:58:53.017Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/02/e1/50edc7a50334807cc4791fc4a0ce7468b4a1416d9138eab358bfc9a3d70b/cryptography-48.0.0-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:2b4d59804e8408e2fea7d1fbaf218e5ec984325221db76e6a241a9abd6cdd95c", size = 4730479, upload-time = "2026-05-04T22:58:55.611Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6f/af/99a582b1b1641ff5911ac559beb45097cf79efd4ead4657f578ef1af2d47/cryptography-48.0.0-cp39-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:984a20b0f62a26f48a3396c72e4bc34c66e356d356bf370053066b3b6d54634a", size = 4326481, upload-time = "2026-05-04T22:58:57.607Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/90/ee/89aa26a06ef0a7d7611788ffd571a7c50e368cc6a4d5eef8b4884e866edb/cryptography-48.0.0-cp39-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:5a5ed8fde7a1d09376ca0b40e68cd59c69fe23b1f9768bd5824f54681626032a", size = 4688713, upload-time = "2026-05-04T22:59:00.077Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/70/ba/bcb1b0bb7a33d4c7c0c4d4c7874b4a62ae4f56113a5f4baefa362dfb1f0f/cryptography-48.0.0-cp39-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:8cd666227ef7af430aa5914a9910e0ddd703e75f039cef0825cd0da71b6b711a", size = 5238165, upload-time = "2026-05-04T22:59:02.317Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c9/70/ca4003b1ce5ca3dc3186ada51908c8a9b9ff7d5cab83cc0d43ee14ec144f/cryptography-48.0.0-cp39-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:9071196d81abc88b3516ac8cdfad32e2b66dd4a5393a8e68a961e9161ddc6239", size = 4729947, upload-time = "2026-05-04T22:59:05.255Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/44/a0/4ec7cf774207905aef1a8d11c3750d5a1db805eb380ee4e16df317870128/cryptography-48.0.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:1e2d54c8be6152856a36f0882ab231e70f8ec7f14e93cf87db8a2ed056bf160c", size = 4822059, upload-time = "2026-05-04T22:59:07.802Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/1e/75/a2e55f99c16fcac7b5d6c1eb19ad8e00799854d6be5ca845f9259eae1681/cryptography-48.0.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a5da777e32ffed6f85a7b2b3f7c5cbc88c146bfcd0a1d7baf5fcc6c52ee35dd4", size = 4960575, upload-time = "2026-05-04T22:59:09.851Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b8/23/6e6f32143ab5d8b36ca848a502c4bcd477ae75b9e1677e3530d669062578/cryptography-48.0.0-cp39-abi3-win32.whl", hash = "sha256:77a2ccbbe917f6710e05ba9adaa25fb5075620bf3ea6fb751997875aff4ae4bd", size = 3279117, upload-time = "2026-05-04T22:59:12.019Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/9d/9a/0fea98a70cf1749d41d738836f6349d97945f7c89433a259a6c2642eefeb/cryptography-48.0.0-cp39-abi3-win_amd64.whl", hash = "sha256:16cd65b9330583e4619939b3a3843eec1e6e789744bb01e7c7e2e62e33c239c8", size = 3792100, upload-time = "2026-05-04T22:59:14.884Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "idna"
|
||||
version = "3.14"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/05/b1/efac073e0c297ecf2fb33c346989a529d4e19164f1759102dee5953ee17e/idna-3.14.tar.gz", hash = "sha256:466d810d7a2cc1022bea9b037c39728d51ae7dad40d480fc9b7d7ecf98ba8ee3", size = 198272, upload-time = "2026-05-10T20:32:15.935Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/6c/3c/3f62dee257eb3d6b2c1ef2a09d36d9793c7111156a73b5654d2c2305e5ce/idna-3.14-py3-none-any.whl", hash = "sha256:e677eaf072e290f7b725f9acf0b3a2bd55f9fd6f7c70abe5f0e34823d0accf69", size = 72184, upload-time = "2026-05-10T20:32:14.295Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pki-analyzer"
|
||||
version = "0.1.0"
|
||||
source = { virtual = "." }
|
||||
dependencies = [
|
||||
{ name = "beautifulsoup4" },
|
||||
{ name = "cryptography" },
|
||||
{ name = "requests" },
|
||||
]
|
||||
|
||||
[package.metadata]
|
||||
requires-dist = [
|
||||
{ name = "beautifulsoup4", specifier = ">=4.14.3" },
|
||||
{ name = "cryptography", specifier = ">=48.0.0" },
|
||||
{ name = "requests", specifier = ">=2.34.0" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pycparser"
|
||||
version = "3.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/1b/7d/92392ff7815c21062bea51aa7b87d45576f649f16458d78b7cf94b9ab2e6/pycparser-3.0.tar.gz", hash = "sha256:600f49d217304a5902ac3c37e1281c9fe94e4d0489de643a9504c5cdfdfc6b29", size = 103492, upload-time = "2026-01-21T14:26:51.89Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/0c/c3/44f3fbbfa403ea2a7c779186dc20772604442dde72947e7d01069cbe98e3/pycparser-3.0-py3-none-any.whl", hash = "sha256:b727414169a36b7d524c1c3e31839a521725078d7b2ff038656844266160a992", size = 48172, upload-time = "2026-01-21T14:26:50.693Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "requests"
|
||||
version = "2.34.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "certifi" },
|
||||
{ name = "charset-normalizer" },
|
||||
{ name = "idna" },
|
||||
{ name = "urllib3" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/43/b8/7a707d60fea4c49094e40262cc0e2ca6c768cca21587e34d3f705afec47e/requests-2.34.0.tar.gz", hash = "sha256:7d62fe92f50eb82c529b0916bb445afa1531a566fc8f35ffdc64446e771b856a", size = 142436, upload-time = "2026-05-11T19:29:51.717Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/ef/e6/e300fce5fe83c30520607a015dabd985df3251e188d234bfe9492e17a389/requests-2.34.0-py3-none-any.whl", hash = "sha256:917520a21b767485ce7c588f4ebb917c436b24a31231b44228715eaeb5a52c60", size = 73021, upload-time = "2026-05-11T19:29:49.923Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "soupsieve"
|
||||
version = "2.8.3"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/7b/ae/2d9c981590ed9999a0d91755b47fc74f74de286b0f5cee14c9269041e6c4/soupsieve-2.8.3.tar.gz", hash = "sha256:3267f1eeea4251fb42728b6dfb746edc9acaffc4a45b27e19450b676586e8349", size = 118627, upload-time = "2026-01-20T04:27:02.457Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/46/2c/1462b1d0a634697ae9e55b3cecdcb64788e8b7d63f54d923fcd0bb140aed/soupsieve-2.8.3-py3-none-any.whl", hash = "sha256:ed64f2ba4eebeab06cc4962affce381647455978ffc1e36bb79a545b91f45a95", size = 37016, upload-time = "2026-01-20T04:27:01.012Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "typing-extensions"
|
||||
version = "4.15.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466", size = 109391, upload-time = "2025-08-25T13:49:26.313Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614, upload-time = "2025-08-25T13:49:24.86Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "urllib3"
|
||||
version = "2.7.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/53/0c/06f8b233b8fd13b9e5ee11424ef85419ba0d8ba0b3138bf360be2ff56953/urllib3-2.7.0.tar.gz", hash = "sha256:231e0ec3b63ceb14667c67be60f2f2c40a518cb38b03af60abc813da26505f4c", size = 433602, upload-time = "2026-05-07T16:13:18.596Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/7f/3e/5db95bcf282c52709639744ca2a8b149baccf648e39c8cc87553df9eae0c/urllib3-2.7.0-py3-none-any.whl", hash = "sha256:9fb4c81ebbb1ce9531cce37674bbc6f1360472bc18ca9a553ede278ef7276897", size = 131087, upload-time = "2026-05-07T16:13:17.151Z" },
|
||||
]
|
||||
Reference in New Issue
Block a user