aboutsummaryrefslogtreecommitdiff
path: root/scripts
diff options
context:
space:
mode:
authorRob Austein <sra@hactrn.net>2013-08-14 01:27:16 +0000
committerRob Austein <sra@hactrn.net>2013-08-14 01:27:16 +0000
commit9bb32829ff2f855bc2440250c2f6ae32898c39b7 (patch)
tree0f1721abedd0f2d70d390d1bb104ab6286a72c86 /scripts
parent302d3784422128c54fe38e13e7c7bc9d67b24e17 (diff)
Checkpoint. Nowhere near usable yet.
svn path=/trunk/; revision=5454
Diffstat (limited to 'scripts')
-rwxr-xr-xscripts/rcynic-lta544
1 files changed, 544 insertions, 0 deletions
diff --git a/scripts/rcynic-lta b/scripts/rcynic-lta
new file mode 100755
index 00000000..c5bf91e2
--- /dev/null
+++ b/scripts/rcynic-lta
@@ -0,0 +1,544 @@
+#!/usr/local/bin/python
+
+# $Id$
+
+# Copyright (C) 2013 Dragon Research Labs ("DRL")
+#
+# Permission to use, copy, modify, and distribute this software for any
+# purpose with or without fee is hereby granted, provided that the above
+# copyright notice and this permission notice appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND DRL DISCLAIMS ALL WARRANTIES WITH
+# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
+# AND FITNESS. IN NO EVENT SHALL DRL BE LIABLE FOR ANY SPECIAL, DIRECT,
+# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
+# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+# PERFORMANCE OF THIS SOFTWARE.
+
+# Preliminary script to work out what's involved in building an
+# SQLite3 database of RP objects. We haven't bothered with this until
+# now in rcynic, because we mostly just walk the filesystem tree, but
+# LTA and some of the ideas Tim is playing with require a lot of
+# lookups based on things that are not the URIs we use as filenames,
+# so some kind of indexing may become necessary. Given the complexity
+# of building any kind of real index over RFC 3779 resources,
+# otherwise fine lightweight tools like the Python shelve library
+# probably won't cut it here, and I don't want to add a dependency on
+# MySQL on the RP side (yet?), so let's see what we can do with SQLite3.
+
+import os
+import sys
+import yaml
+import glob
+import base64
+import sqlite3
+import weakref
+import rpki.POW
+import rpki.x509
+import rpki.resource_set
+
+sqlite3.register_adapter(rpki.POW.IPAddress,
+ lambda x: buffer("_" + x.toBytes()))
+
+sqlite3.register_converter("RangeVal",
+ lambda s: long(s) if s.isdigit() else rpki.POW.IPAddress.fromBytes(s[1:]))
+
+sqlite3.register_adapter(rpki.x509.X501DN,
+ str)
+
def main():
    """
    Program entry point: build the relying-party database, populate it
    from the rcynic tree, run the demo queries, then shut down cleanly.
    """
    db = RPDB()
    db.load()
    test(db)
    db.close()
+
def test(rpdb):
    """
    Smoke test / demo: exercise the YAML parser, the chain builder, and
    the various find_by_*() query methods against a loaded RPDB,
    printing whatever we find.
    """

    # Query once with no type restriction (None), then once per known
    # object type ("cer", "crl", "mft", "roa", "gbr").
    fn2s = [None] + rpdb.fn2map.keys()

    print
    print "Testing YAML parsing"
    parse_yaml(rpdb)

    build_chains(rpdb)

    print
    print "Looking for certificates without AKI"
    for r in rpdb.find_by_aki(None, "cer"):
        print r, r.uris
    print
    print "Testing range functions"
    for fn2 in fn2s:
        if fn2 is not None:
            print
            print "Restricting search to type", fn2
        print
        print "Looking for range that should include adrilankha and psg again"
        for r in rpdb.find_by_range("147.28.0.19", "147.28.0.62", fn2):
            print r, r.uris
        print
        print "Looking for range that should include adrilankha"
        for r in rpdb.find_by_range("147.28.0.19", "147.28.0.19", fn2):
            print r, r.uris
        print
        print "Looking for range that should include ASN 3130"
        for r in rpdb.find_by_range(3130, 3130, fn2):
            print r, r.uris
    print
    print "Moving on to resource sets"
    for fn2 in fn2s:
        if fn2 is not None:
            print
            print "Restricting search to type", fn2
        # Mixed ASN / IPv4 / IPv6 resource expressions of increasing
        # complexity.
        for expr in ("147.28.0.19-147.28.0.62",
                     "3130",
                     "2001:418:1::19/128",
                     "147.28.0.19-147.28.0.62,198.180.150.50/32",
                     "3130,147.28.0.19-147.28.0.62,198.180.150.50/32",
                     "2001:418:1::62/128,198.180.150.50/32,2001:418:8006::50/128",
                     "147.28.0.19-147.28.0.62,2001:418:1::19/128,2001:418:1::62/128,198.180.150.50/32,2001:418:8006::50/128"):
            print
            print "Trying", expr
            for r in rpdb.find_by_resource_bag(rpki.resource_set.resource_bag.from_str(expr), fn2):
                print r, r.uris
+
+
def parse_xki(s):
    """
    Parse text form of an SKI or AKI.  We accept two encodings:
    colon-delimited hexadecimal, and URL-safe Base64.  The former is
    what OpenSSL prints in its text representation of SKI and AKI
    extensions; the latter is the g(SKI) value that some RPKI CA engines
    (including rpkid) use when constructing filenames.

    In either case, we check that the decoded result contains the right
    number of octets to be a SHA-1 hash.
    """

    if ":" in s:
        b = "".join(chr(int(c, 16)) for c in s.split(":"))
    else:
        # Restore stripped Base64 padding.  (-len(s) % 4) adds exactly
        # the missing "=" characters and, unlike (4 - len(s) % 4), adds
        # none at all when the length is already a multiple of four.
        b = base64.urlsafe_b64decode(s + ("=" * (-len(s) % 4)))
    if len(b) != 20:
        raise RuntimeError("Bad length for SHA1 xKI value: %r" % s)
    return b
+
+
# Trust anchor material harvested from TAL files: maps rsync URI to the
# corresponding RSA public key.
tals = {}

def parse_tals(tal_directory):
    """
    Scan tal_directory for *.tal files and record each one's anchor:
    the first line is the rsync URI, the rest of the file is the
    Base64-encoded RSA public key.  Results accumulate in the
    module-level "tals" dictionary.
    """
    global tals
    for filename in glob.iglob(os.path.join(tal_directory, "*.tal")):
        with open(filename, "r") as f:
            uri = f.readline().strip()
            tals[uri] = rpki.x509.RSApublic(Base64 = f.read())
+
+
def build_chains(rpdb):
    """
    Mark every object that chains up to a configured trust anchor.

    First clear the nochain flag on certificates whose URI matches a
    TAL and whose public key matches the TAL's key; then repeatedly
    propagate "chained" status from issuers to their products in SQL
    until a pass makes no further progress.
    """
    for uri, key in tals.iteritems():
        # Assumes the TAL URI matches exactly one stored object --
        # [0] raises IndexError if the TA wasn't fetched at all.
        cer = rpdb.find_by_uri(uri)[0]
        if cer.getPublicKey() == key:
            cer.nochain = False
        else:
            print "TAL public key mismatch for %s, skipping: %s %s" % (uri, key.hSKI(), cer.hSKI())
    # Iterate to a fixed point: each UPDATE clears nochain on objects
    # whose (aki, issuer) pair matches the (ski, subject) of some
    # already-chained certificate.  Stop when SUM(nochain) -- the count
    # of still-unchained objects -- stops changing.
    before = after = None
    while before is None or before != after:
        before = after
        rpdb.cur.execute(
            """
            UPDATE object SET nochain = 0
            WHERE aki || issuer IN (SELECT ski || subject FROM object WHERE fn2 = 'cer' AND nochain = 0)
            """)
        rpdb.cur.execute("SELECT SUM(nochain) FROM object")
        after = rpdb.cur.fetchone()[0]
    rpdb.db.commit()
+
+
+def parse_yaml(rpdb, fn = "rcynic-lta.yaml"):
+ yy = yaml.safe_load(open(fn, "r"))
+
+ parse_tals(yy["tal-directory"])
+
+ for y in yy["constraints"]:
+
+ ski = None
+ uri = None
+ obj = set()
+
+ print
+
+ if "ski" in y:
+ ski = parse_xki(y["ski"])
+ obj.update(rpdb.find_by_ski(ski))
+ if "uri" in y:
+ uri = y["uri"]
+ obj.update(rpdb.find_by_uri(uri))
+ if len(obj) == 1:
+ obj = obj.pop()
+ else:
+ raise RuntimeError("Constraint entry must name a unique object using SKI, URI, or both (%r, %r, %r)" % (
+ ski, uri, obj))
+
+ print "URI:", uri
+ print "SKI:", " ".join("%02X" % ord(c) for c in ski), "(" + y["ski"] + ")"
+
+ new_resources = old_resources = obj.get_3779resources()
+
+ if "set" in y:
+ new_resources = rpki.resource_set.resource_bag.from_str(y["set"])
+
+ if "add" in y:
+ new_resources = new_resources | rpki.resource_set.resource_bag.from_str(y["add"])
+
+ if "sub" in y:
+ new_resources = new_resources - rpki.resource_set.resource_bag.from_str(y["sub"])
+
+ if new_resources == old_resources:
+ print "No resource change, skipping"
+ continue
+
+ print "Old:", old_resources
+ print "New:", new_resources
+ print "Add:", new_resources - old_resources
+ print "Sub:", old_resources - new_resources
+
+ # See draft-ietf-sidr-ltamgmt-08.txt for real processing details, but overview:
+ #
+ # - Process constraints file as above to determine list of target
+ # certificates (2.1). May need to add more fields to YAML hash
+ # for things like CP, CRLDP, etc, although I'm not entirely sure
+ # yet which of those it really makes sense to tweak via
+ # constraints.
+ #
+ # - Use resources from selected target certificates to determine
+ # which additional certificates we need to reissue to remove those
+ # resources (2.2, "perforation"). In theory we already have SQL
+ # that will just locate all of these for us.
+ #
+ # - Figure out which trust anchors to process (2.3, TA
+ # re-parenting); we can look in SQL for NULL AKI, but that's just
+ # a hint, we either have to verify that rcynic accepted those TAs
+ # or we have to look at the TALs. Looking at TALs is probably
+ # easier.
+ #
+ # At some point we probably need to parse the constraints file into
+ # Constraints objects or something like that, except that we may
+ # really need something more general that will accomodate
+ # perforation and TA reparenting as well. Figure out and refactor
+ # as we go along, most likely.
+
+
class DER_object_mixin(object):
    """
    Mixin adding SQL-backed status flags to classes derived from
    rpki.x509.DER_object.

    Each boolean property (nochain, original, para, target) writes
    through to the "object" table row identified by self._rowid and
    commits immediately; the underscore attributes mirror the stored
    values.
    """

    # Defaults used until _find_results() binds an instance to a row.
    _rpdb = None
    _rowid = None
    _nochain = True
    _original = False
    _para = False
    _target = False

    @property
    def rowid(self):
        """SQL row id of this object, None if not yet bound."""
        return self._rowid

    def _update_bool(self, name, value):
        """Write flag "name" through to SQL, mirror it locally, commit."""
        assert self._rpdb is not None and self._rowid is not None and isinstance(value, bool)
        self._rpdb.cur.execute("UPDATE object SET %s = ? WHERE id = ?" % name, (value, self._rowid))
        setattr(self, "_" + name, value)
        self._rpdb.db.commit()

    @property
    def nochain(self):
        """True until this object has been chained to a trust anchor."""
        return self._nochain

    @nochain.setter
    def nochain(self, value):
        self._update_bool("nochain", value)

    @property
    def original(self):
        """True for original (pre-LTA-processing) objects."""
        return self._original

    @original.setter
    def original(self, value):
        self._update_bool("original", value)

    @property
    def para(self):
        """True for para-objects (LTA-modified reissues)."""
        return self._para

    @para.setter
    def para(self, value):
        self._update_bool("para", value)

    @property
    def target(self):
        """True for objects targeted by a constraint entry."""
        return self._target

    @target.setter
    def target(self, value):
        self._update_bool("target", value)
+
# Concrete RP object classes: each pairs an rpki.x509 DER class with
# the SQL status-flag mixin.  These are the values of RPDB.fn2map.
class X509 (rpki.x509.X509, DER_object_mixin): pass
class CRL (rpki.x509.CRL, DER_object_mixin): pass
class SignedManifest (rpki.x509.SignedManifest, DER_object_mixin): pass
class ROA (rpki.x509.ROA, DER_object_mixin): pass
class Ghostbuster (rpki.x509.Ghostbuster, DER_object_mixin): pass
+
class RPDB(object):
    """
    Relying party database.

    For now just wire in the database name and rcynic root, fix this
    later if overall approach seems usable.  Might even end up just
    being an in-memory SQL database, who knows?
    """

    # Map filename extension ("fn2") to the class used to parse objects
    # of that type.
    fn2map = dict(cer = X509,
                  crl = CRL,
                  mft = SignedManifest,
                  roa = ROA,
                  gbr = Ghostbuster)

    def __init__(self, db_name = "rcynic-lta.db", delete_old_db = True):
        """
        Open the SQLite3 database, creating and initializing the schema
        if the database file does not already exist.  By default any
        old database file is deleted first so each run starts clean.
        """

        if delete_old_db:
            try:
                os.unlink(db_name)
            except OSError:
                # No old database to delete, which is fine.
                pass

        exists = os.path.exists(db_name)

        self.db = sqlite3.connect(db_name, detect_types = sqlite3.PARSE_DECLTYPES)
        self.db.text_factory = str
        self.cur = self.db.cursor()

        # Weak cache of already-constructed objects keyed by SQL row
        # id, so repeated queries return the same live object while
        # still letting unreferenced objects be garbage collected.
        self.cache = weakref.WeakValueDictionary()

        if exists:
            return

        self.cur.executescript('''
            PRAGMA foreign_keys = on;

            CREATE TABLE object (
                id        INTEGER PRIMARY KEY NOT NULL,
                der       BLOB NOT NULL,
                fn2       TEXT NOT NULL,
                ski       BLOB,
                aki       BLOB,
                issuer    TEXT,
                subject   TEXT,
                inherits  BOOLEAN NOT NULL,
                nochain   BOOLEAN NOT NULL DEFAULT 1,
                original  BOOLEAN NOT NULL DEFAULT 0,
                para      BOOLEAN NOT NULL DEFAULT 0,
                target    BOOLEAN NOT NULL DEFAULT 0,
                UNIQUE    (der));

            CREATE TABLE uri (
                id        INTEGER NOT NULL,
                uri       TEXT NOT NULL,
                UNIQUE    (uri),
                FOREIGN KEY (id) REFERENCES object(id)
                    ON DELETE CASCADE
                    ON UPDATE CASCADE);

            CREATE INDEX uri_index ON uri(id);

            CREATE TABLE range (
                id        INTEGER NOT NULL,
                min       RangeVal NOT NULL,
                max       RangeVal NOT NULL,
                UNIQUE    (id, min, max),
                FOREIGN KEY (id) REFERENCES object(id)
                    ON DELETE CASCADE
                    ON UPDATE CASCADE);

            CREATE INDEX range_index ON range(min, max);
            ''')

    def load(self,
             rcynic_root = os.path.expanduser("~/rpki/subvert-rpki.hactrn.net/trunk/"
                                              "rcynic/rcynic-data/unauthenticated"),
             spinner = 100):
        """
        Walk the rcynic unauthenticated tree, parse every object we
        recognize, and insert each one -- together with its rsync URI
        and its RFC 3779 resource ranges -- into the database.

        spinner controls how often the progress spinner on stderr is
        updated; zero or None disables it.
        """

        nobj = 0

        for root, dirs, files in os.walk(rcynic_root):
            for fn in files:
                fn = os.path.join(root, fn)
                fn2 = os.path.splitext(fn)[1][1:]

                try:
                    obj = self.fn2map[fn2](DER_file = fn)
                except Exception:
                    # Unknown extension or unparsable file: skip it.
                    # (Narrowed from a bare except so ^C still works.)
                    continue

                if spinner and nobj % spinner == 0:
                    # // keeps this integer division explicit.
                    sys.stderr.write("\r%s %d..." % ("|\\-/"[(nobj // spinner) & 3], nobj))

                nobj += 1

                if fn2 == "crl":
                    # CRLs carry no SKI or RFC 3779 resources of their own.
                    ski = None
                    aki = buffer(obj.get_AKI())
                    cer = None
                    bag = None
                    issuer = obj.getIssuer()
                    subject = None

                else:
                    # For CMS-signed objects (mft/roa/gbr), index by the
                    # embedded EE certificate.
                    if fn2 == "cer":
                        cer = obj
                    else:
                        cer = rpki.x509.X509(POW = obj.get_POW().certs()[0])
                    ski = buffer(cer.get_SKI())
                    try:
                        aki = buffer(cer.get_AKI())
                    except Exception:
                        # Self-signed trust anchors may lack an AKI.
                        aki = None
                    bag = cer.get_3779resources()
                    issuer = cer.getIssuer()
                    subject = cer.getSubject()

                inherits = bag is not None and (bag.asn.inherit or bag.v4.inherit or bag.v6.inherit)

                der = buffer(obj.get_DER())
                uri = "rsync://" + fn[len(rcynic_root) + 1:]

                try:
                    self.cur.execute("INSERT INTO object (der, fn2, ski, aki, inherits, issuer, subject) VALUES (?, ?, ?, ?, ?, ?, ?)",
                                     (der, fn2, ski, aki, inherits, issuer, subject))
                    rowid = self.cur.lastrowid

                except sqlite3.IntegrityError:
                    # Duplicate DER: reuse the existing row.  Sanity
                    # check before indexing, not after.
                    self.cur.execute("SELECT id FROM object WHERE der = ? AND fn2 = ?", (der, fn2))
                    rows = self.cur.fetchall()
                    assert len(rows) == 1
                    rowid = rows[0][0]

                else:
                    # Fresh object: record its resource ranges too.
                    if bag is not None:
                        for rset in (bag.asn, bag.v4, bag.v6):
                            if rset is not None:
                                self.cur.executemany("REPLACE INTO range (id, min, max) VALUES (?, ?, ?)",
                                                     ((rowid, i.min, i.max) for i in rset))

                self.cur.execute("INSERT INTO uri (id, uri) VALUES (?, ?)",
                                 (rowid, uri))

        if spinner:
            sys.stderr.write("\r= %d objects, committing..." % nobj)

        self.db.commit()

        if spinner:
            sys.stderr.write("done.\n")

    # Columns fetched by every find_* query, in the order that
    # _find_results() unpacks them.  Leading/trailing spaces matter:
    # this is concatenated directly between SELECT and FROM.
    object_fields = " object.id, fn2, der, nochain, original, para, target "

    def find_by_ski(self, ski, fn2 = None):
        """
        Find objects by SKI; ski = None finds objects with no SKI at all.
        """
        if ski is None:
            return self._find_results(fn2, "SELECT" + self.object_fields + "FROM object WHERE ski IS NULL")
        else:
            return self._find_results(fn2, "SELECT" + self.object_fields + "FROM object WHERE ski = ?", [buffer(ski)])

    def find_by_aki(self, aki, fn2 = None):
        """
        Find objects by AKI; aki = None finds objects with no AKI
        (candidate trust anchors).
        """
        if aki is None:
            return self._find_results(fn2, "SELECT" + self.object_fields + "FROM object WHERE aki IS NULL")
        else:
            return self._find_results(fn2, "SELECT" + self.object_fields + "FROM object WHERE aki = ?", [buffer(aki)])

    def find_products(self, aki, issuer, fn2 = None):
        """
        Find products of a CA: objects whose AKI and issuer both match.
        """
        return self._find_results(fn2, "SELECT" + self.object_fields + "FROM object WHERE aki = ? AND issuer = ?", [buffer(aki), issuer])

    def find_by_uri(self, uri):
        """
        Find objects stored under the given rsync URI.
        """
        return self._find_results(None, "SELECT" + self.object_fields + "FROM object, uri WHERE uri.uri = ? AND object.id = uri.id", [uri])

    # It's easiest to understand overlap conditions by understanding
    # non-overlap then inverting and applying De Morgan's law.  Ranges
    # A and B do not overlap if either A.min > B.max or A.max < B.min;
    # therefore they do overlap if A.min <= B.max and A.max >= B.min.

    def find_by_range(self, range_min, range_max = None, fn2 = None):
        """
        Find objects whose recorded resource ranges overlap
        [range_min, range_max].  Accepts ints/longs (ASNs), string
        representations thereof, or IP address texts; range_max
        defaults to range_min for a single-value search.
        """
        if range_max is None:
            range_max = range_min
        if isinstance(range_min, (str, unicode)):
            range_min = long(range_min) if range_min.isdigit() else rpki.POW.IPAddress(range_min)
        if isinstance(range_max, (str, unicode)):
            range_max = long(range_max) if range_max.isdigit() else rpki.POW.IPAddress(range_max)
        assert isinstance(range_min, (int, long, rpki.POW.IPAddress))
        assert isinstance(range_max, (int, long, rpki.POW.IPAddress))
        return self._find_results(
            fn2,
            """
            SELECT %s
            FROM object, range
            WHERE ? <= max AND ? >= min AND object.id = range.id
            """ % self.object_fields,
            [range_min, range_max])

    def find_by_resource_bag(self, bag, fn2 = None):
        """
        Find objects overlapping any range in an rpki.resource_set
        resource_bag (ASN, IPv4, and IPv6 sets combined with OR).
        """
        assert bag.asn or bag.v4 or bag.v6
        qset = []
        aset = []
        for rset in (bag.asn, bag.v4, bag.v6):
            if rset:
                for r in rset:
                    qset.append("(? <= max AND ? >= min)")
                    aset.append(r.min)
                    aset.append(r.max)
        return self._find_results(
            fn2,
            """
            SELECT %s
            FROM object, range
            WHERE object.id = range.id AND (%s)
            """ % (self.object_fields, " OR ".join(qset)),
            aset)

    def _find_results(self, fn2, query, args = None):
        """
        Shared back end for all the find_*() methods: optionally narrow
        the query to one object type, execute it, and materialize each
        result row as an object of the appropriate class, using (and
        populating) the weak row-id cache.
        """
        if args is None:
            args = []
        if fn2 is not None:
            assert fn2 in self.fn2map
            query += " AND fn2 = ?"
            args.append(fn2)
        # GROUP BY collapses duplicate rows from the range join.
        query += " GROUP BY object.id"
        results = []
        self.cur.execute(query, args)
        selections = self.cur.fetchall()
        for rowid, fn2, der, nochain, original, para, target in selections:
            if rowid in self.cache:
                obj = self.cache[rowid]
                # Cached flags must agree with what SQL just told us.
                assert obj.rowid == rowid
                assert obj._nochain == nochain
                assert obj._original == original
                assert obj._para == para
                assert obj._target == target
            else:
                obj = self.fn2map[fn2](DER = der)
                self.cur.execute("SELECT uri FROM uri WHERE id = ?", (rowid,))
                obj.uris = [u[0] for u in self.cur.fetchall()]
                obj.uri = obj.uris[0] if len(obj.uris) == 1 else None
                obj._rpdb = self
                obj._rowid = rowid
                obj._nochain = nochain
                obj._original = original
                obj._para = para
                obj._target = target
                self.cache[rowid] = obj
            results.append(obj)
        return results

    def close(self):
        """
        Close the cursor and the underlying database connection.
        """
        self.cur.close()
        self.db.close()
+
+
+if __name__ == "__main__":
+ main()